From 8f5698dbce3325461d572c4029ef2dbc364e819b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jan=20Cie=C5=9Blak?= Date: Wed, 11 Dec 2024 11:32:19 +0100 Subject: [PATCH 1/2] feat: Account v1 readiness resource (#3252) Introduces a refactor of the already existing `snowflake_account` resource. ### Changes - Refactored existing account resource - Completely rewritten acceptance tests - Introduced a return type from Create command that is used to parse org_name + acc_name that is later used in creating a new resource id. The output is always returned. Adding other SQL calls to get this information may be an issue. For example, when the account was successfully created, but e.g. current organization_name couldn't be fetched; this situation will end up with a created account, but with no connection to the terraform config of this account. This could be fixed by manually importing the resource. We can discuss this on Monday. ### Next pr - data source - next account resource --- MIGRATION_GUIDE.md | 13 +- docs/resources/account.md | 121 ++- .../resources/snowflake_account/import.sh | 1 + .../resources/snowflake_account/resource.tf | 50 +- .../objectassert/account_snowflake_gen.go | 6 +- .../resourceassert/account_resource_ext.go | 11 + .../resourceassert/account_resource_gen.go | 10 + .../account_show_output_ext.go | 80 ++ .../config/model/account_model_ext.go | 11 + .../config/model/account_model_gen.go | 23 +- pkg/acceptance/helpers/account_client.go | 4 +- pkg/acceptance/helpers/random/certs.go | 9 - .../helpers/random/random_helpers.go | 4 +- pkg/datasources/accounts.go | 2 +- pkg/resources/account.go | 623 +++++++++------- pkg/resources/account_acceptance_test.go | 688 ++++++++++++++++-- pkg/resources/account_state_upgraders.go | 28 + pkg/resources/common.go | 11 +- pkg/schemas/account_gen.go | 142 +++- pkg/schemas/account_parameters.go | 71 ++ pkg/sdk/accounts.go | 78 +- pkg/sdk/accounts_test.go | 155 ++++ pkg/sdk/identifier_helpers.go | 4 + pkg/sdk/testint/accounts_integration_test.go | 
30 +- templates/resources/account.md.tmpl | 15 +- 25 files changed, 1769 insertions(+), 421 deletions(-) create mode 100644 examples/resources/snowflake_account/import.sh create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_ext.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/account_show_output_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/account_model_ext.go create mode 100644 pkg/resources/account_state_upgraders.go create mode 100644 pkg/schemas/account_parameters.go diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 2d14a2d000..03571a8037 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -9,9 +9,20 @@ across different versions. ## v0.99.0 ➞ v0.100.0 +### snowflake_account resource changes + +Changes: +- `admin_user_type` is now supported. No action required during the migration. +- `grace_period_in_days` is now required. The field should be explicitly set in the following versions. +- Account renaming is now supported. +- `is_org_admin` is a settable field (previously it was a read-only field). Changing its value is also supported. +- `must_change_password` and `is_org_admin` types were changed from `bool` to bool-string (more on that [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/v1-preparations/CHANGES_BEFORE_V1.md#empty-values)). No action required during the migration. +- The underlying resource identifier was changed from `<account_locator>` to `<organization_name>.<account_name>`. Migration will be done automatically. Notice this introduces changes in how the `snowflake_account` resource is imported. +- New `show_output` field was added (see [raw Snowflake output](./v1-preparations/CHANGES_BEFORE_V1.md#raw-snowflake-output)). 
+ ### snowflake_tag_association resource changes #### *(behavior change)* new id format -In order to provide more functionality for tagging objects, we have changed the resource id from `"TAG_DATABASE"."TAG_SCHEMA"."TAG_NAME"` to `"TAG_DATABASE"."TAG_SCHEMA"."TAG_NAME"|TAG_VALUE|OBJECT_TYPE`. This allows to group tags associations per tag ID, tag value and object type in one resource. +To provide more functionality for tagging objects, we have changed the resource id from `"TAG_DATABASE"."TAG_SCHEMA"."TAG_NAME"` to `"TAG_DATABASE"."TAG_SCHEMA"."TAG_NAME"|TAG_VALUE|OBJECT_TYPE`. This allows to group tags associations per tag ID, tag value and object type in one resource. ``` resource "snowflake_tag_association" "gold_warehouses" { object_identifiers = [snowflake_warehouse.w1.fully_qualified_name, snowflake_warehouse.w2.fully_qualified_name] diff --git a/docs/resources/account.md b/docs/resources/account.md index 4d3a8fea48..6597e1e855 100644 --- a/docs/resources/account.md +++ b/docs/resources/account.md @@ -5,34 +5,58 @@ description: |- The account resource allows you to create and manage Snowflake accounts. --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000) to use it. + # snowflake_account (Resource) The account resource allows you to create and manage Snowflake accounts. -!> **Warning** This resource cannot be destroyed!!! 
The only way to delete accounts is to go through [Snowflake Support](https://docs.snowflake.com/en/user-guide/organizations-manage-accounts.html#deleting-an-account) - -~> **Note** ORGADMIN priviliges are required for this resource +~> **Note** To use this resource you have to use an account with a privilege to use the ORGADMIN role. ## Example Usage ```terraform -provider "snowflake" { - role = "ORGADMIN" - alias = "orgadmin" +## Minimal +resource "snowflake_account" "minimal" { + name = "ACCOUNT_NAME" + admin_name = "ADMIN_NAME" + admin_password = "ADMIN_PASSWORD" + email = "admin@email.com" + edition = "STANDARD" + grace_period_in_days = 3 +} + +## Complete (with SERVICE user type) +resource "snowflake_account" "complete" { + name = "ACCOUNT_NAME" + admin_name = "ADMIN_NAME" + admin_rsa_public_key = "" + admin_user_type = "SERVICE" + email = "admin@email.com" + edition = "STANDARD" + region_group = "PUBLIC" + region = "AWS_US_WEST_2" + comment = "some comment" + is_org_admin = "true" + grace_period_in_days = 3 } -resource "snowflake_account" "ac1" { - provider = snowflake.orgadmin - name = "SNOWFLAKE_TEST_ACCOUNT" - admin_name = "John Doe" - admin_password = "Abcd1234!" 
- email = "john.doe@snowflake.com" - first_name = "John" - last_name = "Doe" - must_change_password = true +## Complete (with PERSON user type) +resource "snowflake_account" "complete" { + name = "ACCOUNT_NAME" + admin_name = "ADMIN_NAME" + admin_password = "ADMIN_PASSWORD" + admin_user_type = "PERSON" + first_name = "first_name" + last_name = "last_name" + email = "admin@email.com" + must_change_password = "false" edition = "STANDARD" - comment = "Snowflake Test Account" + region_group = "PUBLIC" region = "AWS_US_WEST_2" + comment = "some comment" + is_org_admin = "true" + grace_period_in_days = 3 } ``` -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). @@ -43,33 +67,70 @@ resource "snowflake_account" "ac1" { ### Required -- `admin_name` (String) Login name of the initial administrative user of the account. A new user is created in the new account with this name and password and granted the ACCOUNTADMIN role in the account. A login name can be any string consisting of letters, numbers, and underscores. Login names are always case-insensitive. -- `edition` (String) [Snowflake Edition](https://docs.snowflake.com/en/user-guide/intro-editions.html) of the account. Valid values are: STANDARD | ENTERPRISE | BUSINESS_CRITICAL -- `email` (String, Sensitive) Email address of the initial administrative user of the account. This email address is used to send any notifications about the account. -- `name` (String) Specifies the identifier (i.e. name) for the account; must be unique within an organization, regardless of which Snowflake Region the account is in. In addition, the identifier must start with an alphabetic character and cannot contain spaces or special characters except for underscores (_). 
Note that if the account name includes underscores, features that do not accept account names with underscores (e.g. Okta SSO or SCIM) can reference a version of the account name that substitutes hyphens (-) for the underscores. +- `admin_name` (String, Sensitive) Login name of the initial administrative user of the account. A new user is created in the new account with this name and password and granted the ACCOUNTADMIN role in the account. A login name can be any string consisting of letters, numbers, and underscores. Login names are always case-insensitive. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". +- `edition` (String) Snowflake Edition of the account. See more about Snowflake Editions in the [official documentation](https://docs.snowflake.com/en/user-guide/intro-editions). Valid options are: `STANDARD` | `ENTERPRISE` | `BUSINESS_CRITICAL` +- `email` (String, Sensitive) Email address of the initial administrative user of the account. This email address is used to send any notifications about the account. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". +- `grace_period_in_days` (Number) Specifies the number of days during which the account can be restored (“undropped”). The minimum is 3 days and the maximum is 90 days. +- `name` (String) Specifies the identifier (i.e. name) for the account. It must be unique within an organization, regardless of which Snowflake Region the account is in and must start with an alphabetic character and cannot contain spaces or special characters except for underscores (_). Note that if the account name includes underscores, features that do not accept account names with underscores (e.g. Okta SSO or SCIM) can reference a version of the account name that substitutes hyphens (-) for the underscores. 
### Optional -- `admin_password` (String, Sensitive) Password for the initial administrative user of the account. Optional if the `ADMIN_RSA_PUBLIC_KEY` parameter is specified. For more information about passwords in Snowflake, see [Snowflake-provided Password Policy](https://docs.snowflake.com/en/sql-reference/sql/create-account.html#:~:text=Snowflake%2Dprovided%20Password%20Policy). -- `admin_rsa_public_key` (String, Sensitive) Assigns a public key to the initial administrative user of the account in order to implement [key pair authentication](https://docs.snowflake.com/en/sql-reference/sql/create-account.html#:~:text=key%20pair%20authentication) for the user. Optional if the `ADMIN_PASSWORD` parameter is specified. +- `admin_password` (String, Sensitive) Password for the initial administrative user of the account. Either admin_password or admin_rsa_public_key has to be specified. This field cannot be used whenever admin_user_type is set to SERVICE. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". +- `admin_rsa_public_key` (String) Assigns a public key to the initial administrative user of the account. Either admin_password or admin_rsa_public_key has to be specified. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". +- `admin_user_type` (String) Used for setting the type of the first user that is assigned the ACCOUNTADMIN role during account creation. Valid options are: `PERSON` | `SERVICE` | `LEGACY_SERVICE` External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". - `comment` (String) Specifies a comment for the account. 
-- `first_name` (String, Sensitive) First name of the initial administrative user of the account -- `grace_period_in_days` (Number) Specifies the number of days to wait before dropping the account. The default is 3 days. -- `last_name` (String, Sensitive) Last name of the initial administrative user of the account -- `must_change_password` (Boolean) Specifies whether the new user created to administer the account is forced to change their password upon first login into the account. -- `region` (String) ID of the Snowflake Region where the account is created. If no value is provided, Snowflake creates the account in the same Snowflake Region as the current account (i.e. the account in which the CREATE ACCOUNT statement is executed.) -- `region_group` (String) ID of the Snowflake Region where the account is created. If no value is provided, Snowflake creates the account in the same Snowflake Region as the current account (i.e. the account in which the CREATE ACCOUNT statement is executed.) +- `first_name` (String, Sensitive) First name of the initial administrative user of the account. This field cannot be used whenever admin_user_type is set to SERVICE. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". +- `is_org_admin` (String) Sets an account property that determines whether the ORGADMIN role is enabled in the account. Only an organization administrator (i.e. user with the ORGADMIN role) can set the property. +- `last_name` (String, Sensitive) Last name of the initial administrative user of the account. This field cannot be used whenever admin_user_type is set to SERVICE. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". 
+- `must_change_password` (String) Specifies whether the new user created to administer the account is forced to change their password upon first login into the account. This field cannot be used whenever admin_user_type is set to SERVICE. External changes for this field won't be detected. In case you want to apply external changes, you can re-create the resource manually using "terraform taint". +- `region` (String) [Snowflake Region ID](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#label-snowflake-region-ids) of the region where the account is created. If no value is provided, Snowflake creates the account in the same Snowflake Region as the current account (i.e. the account in which the CREATE ACCOUNT statement is executed.) +- `region_group` (String) ID of the region group where the account is created. To retrieve the region group ID for existing accounts in your organization, execute the [SHOW REGIONS](https://docs.snowflake.com/en/sql-reference/sql/show-regions) command. For information about when you might need to specify region group, see [Region groups](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#label-region-groups). ### Read-Only - `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). - `id` (String) The ID of this resource. -- `is_org_admin` (Boolean) Indicates whether the ORGADMIN role is enabled in an account. If TRUE, the role is enabled. +- `show_output` (List of Object) Outputs the result of `SHOW ACCOUNTS` for the given account. 
(see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `show_output` + +Read-Only: + +- `account_locator` (String) +- `account_locator_url` (String) +- `account_name` (String) +- `account_old_url_last_used` (String) +- `account_old_url_saved_on` (String) +- `account_url` (String) +- `comment` (String) +- `consumption_billing_entity_name` (String) +- `created_on` (String) +- `dropped_on` (String) +- `edition` (String) +- `is_events_account` (Boolean) +- `is_org_admin` (Boolean) +- `is_organization_account` (Boolean) +- `managed_accounts` (Number) +- `marketplace_consumer_billing_entity_name` (String) +- `marketplace_provider_billing_entity_name` (String) +- `moved_on` (String) +- `moved_to_organization` (String) +- `old_account_url` (String) +- `organization_name` (String) +- `organization_old_url` (String) +- `organization_old_url_last_used` (String) +- `organization_old_url_saved_on` (String) +- `organization_url_expiration_on` (String) +- `region_group` (String) +- `restored_on` (String) +- `scheduled_deletion_time` (String) +- `snowflake_region` (String) ## Import Import is supported using the following syntax: ```shell -terraform import snowflake_account.account +terraform import snowflake_account.example '"".""' ``` diff --git a/examples/resources/snowflake_account/import.sh b/examples/resources/snowflake_account/import.sh new file mode 100644 index 0000000000..8076279421 --- /dev/null +++ b/examples/resources/snowflake_account/import.sh @@ -0,0 +1 @@ +terraform import snowflake_account.example '"".""' diff --git a/examples/resources/snowflake_account/resource.tf b/examples/resources/snowflake_account/resource.tf index 3de2897d40..a9e61e2d3f 100644 --- a/examples/resources/snowflake_account/resource.tf +++ b/examples/resources/snowflake_account/resource.tf @@ -1,18 +1,42 @@ -provider "snowflake" { - role = "ORGADMIN" - alias = "orgadmin" +## Minimal +resource "snowflake_account" "minimal" { + name = "ACCOUNT_NAME" + admin_name = 
"ADMIN_NAME" + admin_password = "ADMIN_PASSWORD" + email = "admin@email.com" + edition = "STANDARD" + grace_period_in_days = 3 +} + +## Complete (with SERVICE user type) +resource "snowflake_account" "complete" { + name = "ACCOUNT_NAME" + admin_name = "ADMIN_NAME" + admin_rsa_public_key = "" + admin_user_type = "SERVICE" + email = "admin@email.com" + edition = "STANDARD" + region_group = "PUBLIC" + region = "AWS_US_WEST_2" + comment = "some comment" + is_org_admin = "true" + grace_period_in_days = 3 } -resource "snowflake_account" "ac1" { - provider = snowflake.orgadmin - name = "SNOWFLAKE_TEST_ACCOUNT" - admin_name = "John Doe" - admin_password = "Abcd1234!" - email = "john.doe@snowflake.com" - first_name = "John" - last_name = "Doe" - must_change_password = true +## Complete (with PERSON user type) +resource "snowflake_account" "complete" { + name = "ACCOUNT_NAME" + admin_name = "ADMIN_NAME" + admin_password = "ADMIN_PASSWORD" + admin_user_type = "PERSON" + first_name = "first_name" + last_name = "last_name" + email = "admin@email.com" + must_change_password = "false" edition = "STANDARD" - comment = "Snowflake Test Account" + region_group = "PUBLIC" region = "AWS_US_WEST_2" + comment = "some comment" + is_org_admin = "true" + grace_period_in_days = 3 } diff --git a/pkg/acceptance/bettertestspoc/assert/objectassert/account_snowflake_gen.go b/pkg/acceptance/bettertestspoc/assert/objectassert/account_snowflake_gen.go index d394c1ae39..6ba8eceb11 100644 --- a/pkg/acceptance/bettertestspoc/assert/objectassert/account_snowflake_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/objectassert/account_snowflake_gen.go @@ -148,11 +148,11 @@ func (a *AccountAssert) HasAccountLocator(expected string) *AccountAssert { func (a *AccountAssert) HasAccountLocatorURL(expected string) *AccountAssert { a.AddAssertion(func(t *testing.T, o *sdk.Account) error { t.Helper() - if o.AccountLocatorURL == nil { + if o.AccountLocatorUrl == nil { return fmt.Errorf("expected account locator 
url to have value; got: nil") } - if *o.AccountLocatorURL != expected { - return fmt.Errorf("expected account locator url: %v; got: %v", expected, *o.AccountLocatorURL) + if *o.AccountLocatorUrl != expected { + return fmt.Errorf("expected account locator url: %v; got: %v", expected, *o.AccountLocatorUrl) } return nil }) diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_ext.go new file mode 100644 index 0000000000..daf6dd018a --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_ext.go @@ -0,0 +1,11 @@ +package resourceassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func (a *AccountResourceAssert) HasAdminUserType(expected sdk.UserType) *AccountResourceAssert { + a.AddAssertion(assert.ValueSet("admin_user_type", string(expected))) + return a +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_gen.go index c68f6424c1..5d6f9d2d0a 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_gen.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/account_resource_gen.go @@ -47,6 +47,11 @@ func (a *AccountResourceAssert) HasAdminRsaPublicKeyString(expected string) *Acc return a } +func (a *AccountResourceAssert) HasAdminUserTypeString(expected string) *AccountResourceAssert { + a.AddAssertion(assert.ValueSet("admin_user_type", expected)) + return a +} + func (a *AccountResourceAssert) HasCommentString(expected string) *AccountResourceAssert { a.AddAssertion(assert.ValueSet("comment", expected)) return a @@ -126,6 +131,11 @@ func (a *AccountResourceAssert) HasNoAdminRsaPublicKey() *AccountResourceAssert return a } +func (a *AccountResourceAssert) 
HasNoAdminUserType() *AccountResourceAssert { + a.AddAssertion(assert.ValueNotSet("admin_user_type")) + return a +} + func (a *AccountResourceAssert) HasNoComment() *AccountResourceAssert { a.AddAssertion(assert.ValueNotSet("comment")) return a diff --git a/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/account_show_output_ext.go b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/account_show_output_ext.go new file mode 100644 index 0000000000..66a7a98a42 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert/account_show_output_ext.go @@ -0,0 +1,80 @@ +package resourceshowoutputassert + +import ( + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +func (a *AccountShowOutputAssert) HasAccountUrlNotEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValuePresent("account_url")) + return a +} + +func (a *AccountShowOutputAssert) HasCreatedOnNotEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValuePresent("created_on")) + return a +} + +func (a *AccountShowOutputAssert) HasAccountLocatorNotEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValuePresent("account_locator")) + return a +} + +func (a *AccountShowOutputAssert) HasAccountLocatorUrlNotEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValuePresent("account_locator_url")) + return a +} + +func (a *AccountShowOutputAssert) HasConsumptionBillingEntityNameNotEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValuePresent("consumption_billing_entity_name")) + return a +} + +func (a *AccountShowOutputAssert) HasMarketplaceProviderBillingEntityNameNotEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValuePresent("marketplace_provider_billing_entity_name")) + return a +} + +func (a *AccountShowOutputAssert) HasAccountOldUrlSavedOnEmpty() 
*AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("account_old_url_saved_on", "")) + return a +} + +func (a *AccountShowOutputAssert) HasAccountOldUrlLastUsedEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("account_old_url_last_used", "")) + return a +} + +func (a *AccountShowOutputAssert) HasOrganizationOldUrlSavedOnEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("organization_old_url_saved_on", "")) + return a +} + +func (a *AccountShowOutputAssert) HasOrganizationOldUrlLastUsedEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("organization_old_url_last_used", "")) + return a +} + +func (a *AccountShowOutputAssert) HasDroppedOnEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("dropped_on", "")) + return a +} + +func (a *AccountShowOutputAssert) HasScheduledDeletionTimeEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("scheduled_deletion_time", "")) + return a +} + +func (a *AccountShowOutputAssert) HasRestoredOnEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("restored_on", "")) + return a +} + +func (a *AccountShowOutputAssert) HasMovedOnEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("moved_on", "")) + return a +} + +func (a *AccountShowOutputAssert) HasOrganizationUrlExpirationOnEmpty() *AccountShowOutputAssert { + a.AddAssertion(assert.ResourceShowOutputValueSet("organization_url_expiration_on", "")) + return a +} diff --git a/pkg/acceptance/bettertestspoc/config/model/account_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/account_model_ext.go new file mode 100644 index 0000000000..4d81e2e589 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/account_model_ext.go @@ -0,0 +1,11 @@ +package model + +import ( + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" +) + +func (a *AccountModel) WithAdminUserTypeEnum(adminUserType sdk.UserType) *AccountModel { + a.AdminUserType = tfconfig.StringVariable(string(adminUserType)) + return a +} diff --git a/pkg/acceptance/bettertestspoc/config/model/account_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/account_model_gen.go index 87ddf58d4e..b1000b7931 100644 --- a/pkg/acceptance/bettertestspoc/config/model/account_model_gen.go +++ b/pkg/acceptance/bettertestspoc/config/model/account_model_gen.go @@ -13,6 +13,7 @@ type AccountModel struct { AdminName tfconfig.Variable `json:"admin_name,omitempty"` AdminPassword tfconfig.Variable `json:"admin_password,omitempty"` AdminRsaPublicKey tfconfig.Variable `json:"admin_rsa_public_key,omitempty"` + AdminUserType tfconfig.Variable `json:"admin_user_type,omitempty"` Comment tfconfig.Variable `json:"comment,omitempty"` Edition tfconfig.Variable `json:"edition,omitempty"` Email tfconfig.Variable `json:"email,omitempty"` @@ -38,12 +39,14 @@ func Account( adminName string, edition string, email string, + gracePeriodInDays int, name string, ) *AccountModel { a := &AccountModel{ResourceModelMeta: config.Meta(resourceName, resources.Account)} a.WithAdminName(adminName) a.WithEdition(edition) a.WithEmail(email) + a.WithGracePeriodInDays(gracePeriodInDays) a.WithName(name) return a } @@ -52,12 +55,14 @@ func AccountWithDefaultMeta( adminName string, edition string, email string, + gracePeriodInDays int, name string, ) *AccountModel { a := &AccountModel{ResourceModelMeta: config.DefaultMeta(resources.Account)} a.WithAdminName(adminName) a.WithEdition(edition) a.WithEmail(email) + a.WithGracePeriodInDays(gracePeriodInDays) a.WithName(name) return a } @@ -81,6 +86,11 @@ func (a *AccountModel) WithAdminRsaPublicKey(adminRsaPublicKey string) *AccountM return a } +func (a *AccountModel) WithAdminUserType(adminUserType 
string) *AccountModel { + a.AdminUserType = tfconfig.StringVariable(adminUserType) + return a +} + func (a *AccountModel) WithComment(comment string) *AccountModel { a.Comment = tfconfig.StringVariable(comment) return a @@ -111,8 +121,8 @@ func (a *AccountModel) WithGracePeriodInDays(gracePeriodInDays int) *AccountMode return a } -func (a *AccountModel) WithIsOrgAdmin(isOrgAdmin bool) *AccountModel { - a.IsOrgAdmin = tfconfig.BoolVariable(isOrgAdmin) +func (a *AccountModel) WithIsOrgAdmin(isOrgAdmin string) *AccountModel { + a.IsOrgAdmin = tfconfig.StringVariable(isOrgAdmin) return a } @@ -121,8 +131,8 @@ func (a *AccountModel) WithLastName(lastName string) *AccountModel { return a } -func (a *AccountModel) WithMustChangePassword(mustChangePassword bool) *AccountModel { - a.MustChangePassword = tfconfig.BoolVariable(mustChangePassword) +func (a *AccountModel) WithMustChangePassword(mustChangePassword string) *AccountModel { + a.MustChangePassword = tfconfig.StringVariable(mustChangePassword) return a } @@ -160,6 +170,11 @@ func (a *AccountModel) WithAdminRsaPublicKeyValue(value tfconfig.Variable) *Acco return a } +func (a *AccountModel) WithAdminUserTypeValue(value tfconfig.Variable) *AccountModel { + a.AdminUserType = value + return a +} + func (a *AccountModel) WithCommentValue(value tfconfig.Variable) *AccountModel { a.Comment = value return a diff --git a/pkg/acceptance/helpers/account_client.go b/pkg/acceptance/helpers/account_client.go index 96605949ab..b91d10579a 100644 --- a/pkg/acceptance/helpers/account_client.go +++ b/pkg/acceptance/helpers/account_client.go @@ -69,7 +69,7 @@ func (c *AccountClient) Create(t *testing.T) (*sdk.Account, func()) { func (c *AccountClient) CreateWithRequest(t *testing.T, id sdk.AccountObjectIdentifier, opts *sdk.CreateAccountOptions) (*sdk.Account, func()) { t.Helper() - err := c.client().Create(context.Background(), id, opts) + _, err := c.client().Create(context.Background(), id, opts) require.NoError(t, err) account, err 
:= c.client().ShowByID(context.Background(), id) @@ -141,7 +141,7 @@ func (c *AccountClient) CreateAndLogIn(t *testing.T) (*sdk.Account, *sdk.Client, newClient, err := sdk.NewClient(&gosnowflake.Config{ Account: fmt.Sprintf("%s-%s", account.OrganizationName, account.AccountName), User: name, - Host: strings.TrimPrefix(*account.AccountLocatorURL, `https://`), + Host: strings.TrimPrefix(*account.AccountLocatorUrl, `https://`), Authenticator: gosnowflake.AuthTypeJwt, PrivateKey: privateKey, Role: snowflakeroles.Accountadmin.Name(), diff --git a/pkg/acceptance/helpers/random/certs.go b/pkg/acceptance/helpers/random/certs.go index c0e0142d7c..b314a0cbfa 100644 --- a/pkg/acceptance/helpers/random/certs.go +++ b/pkg/acceptance/helpers/random/certs.go @@ -60,15 +60,6 @@ func GenerateRSAPublicKeyFromPrivateKey(t *testing.T, key *rsa.PrivateKey) (stri return encode(t, "RSA PUBLIC KEY", b), hash(t, b) } -func GenerateRSAPublicKeyBasedOnPrivateKey(t *testing.T, key *rsa.PrivateKey) (string, string) { - t.Helper() - - pub := key.Public() - b, err := x509.MarshalPKIXPublicKey(pub.(*rsa.PublicKey)) - require.NoError(t, err) - return encode(t, "RSA PUBLIC KEY", b), hash(t, b) -} - // GenerateRSAPrivateKey returns an RSA private key. func GenerateRSAPrivateKey(t *testing.T) *rsa.PrivateKey { t.Helper() diff --git a/pkg/acceptance/helpers/random/random_helpers.go b/pkg/acceptance/helpers/random/random_helpers.go index 5a9e270b13..978e044174 100644 --- a/pkg/acceptance/helpers/random/random_helpers.go +++ b/pkg/acceptance/helpers/random/random_helpers.go @@ -1,6 +1,8 @@ package random import ( + "strings" + "github.com/brianvoe/gofakeit/v6" "github.com/hashicorp/go-uuid" ) @@ -22,7 +24,7 @@ func Password() string { // 090088 (22000): ADMIN_NAME can only contain letters, numbers and underscores. // 090089 (22000): ADMIN_NAME must start with a letter. 
func AdminName() string { - return AlphaN(1) + AlphanumericN(11) + return strings.ToUpper(AlphaN(1) + AlphanumericN(11)) } func Bool() bool { diff --git a/pkg/datasources/accounts.go b/pkg/datasources/accounts.go index d5aff6abec..89efac8d7e 100644 --- a/pkg/datasources/accounts.go +++ b/pkg/datasources/accounts.go @@ -153,7 +153,7 @@ func ReadAccounts(ctx context.Context, d *schema.ResourceData, meta any) diag.Di m["created_on"] = account.CreatedOn.String() m["comment"] = account.Comment m["account_locator"] = account.AccountLocator - m["account_locator_url"] = account.AccountLocatorURL + m["account_locator_url"] = account.AccountLocatorUrl m["managed_accounts"] = account.ManagedAccounts m["consumption_billing_entity_name"] = account.ConsumptionBillingEntityName m["marketplace_consumer_billing_entity_name"] = account.MarketplaceConsumerBillingEntityName diff --git a/pkg/resources/account.go b/pkg/resources/account.go index 8b687c7cd8..2f5a1e0249 100644 --- a/pkg/resources/account.go +++ b/pkg/resources/account.go @@ -2,18 +2,19 @@ package resources import ( "context" + "errors" "fmt" - "log" "strings" - "time" - "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider/docs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/util" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" @@ -21,384 +22,460 @@ import ( 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/validation" ) -// Note: no test case was created for account since we cannot actually delete them after creation, which is a critical part of the test suite. Instead, this resource -// was manually tested - var accountSchema = map[string]*schema.Schema{ "name": { Type: schema.TypeString, Required: true, - Description: "Specifies the identifier (i.e. name) for the account; must be unique within an organization, regardless of which Snowflake Region the account is in. In addition, the identifier must start with an alphabetic character and cannot contain spaces or special characters except for underscores (_). Note that if the account name includes underscores, features that do not accept account names with underscores (e.g. Okta SSO or SCIM) can reference a version of the account name that substitutes hyphens (-) for the underscores.", - // Name is automatically uppercase by Snowflake - StateFunc: func(val interface{}) string { - return strings.ToUpper(val.(string)) - }, - ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "Specifies the identifier (i.e. name) for the account. It must be unique within an organization, regardless of which Snowflake Region the account is in and must start with an alphabetic character and cannot contain spaces or special characters except for underscores (_). Note that if the account name includes underscores, features that do not accept account names with underscores (e.g. Okta SSO or SCIM) can reference a version of the account name that substitutes hyphens (-) for the underscores.", }, "admin_name": { - Type: schema.TypeString, - Required: true, - Description: "Login name of the initial administrative user of the account. A new user is created in the new account with this name and password and granted the ACCOUNTADMIN role in the account. A login name can be any string consisting of letters, numbers, and underscores. 
Login names are always case-insensitive.", - // We have no way of assuming a role into this account to change the admin user name so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: externalChangesNotDetectedFieldDescription("Login name of the initial administrative user of the account. A new user is created in the new account with this name and password and granted the ACCOUNTADMIN role in the account. A login name can be any string consisting of letters, numbers, and underscores. Login names are always case-insensitive."), + DiffSuppressFunc: IgnoreAfterCreation, }, "admin_password": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: "Password for the initial administrative user of the account. Optional if the `ADMIN_RSA_PUBLIC_KEY` parameter is specified. For more information about passwords in Snowflake, see [Snowflake-provided Password Policy](https://docs.snowflake.com/en/sql-reference/sql/create-account.html#:~:text=Snowflake%2Dprovided%20Password%20Policy).", - AtLeastOneOf: []string{"admin_password", "admin_rsa_public_key"}, - // We have no way of assuming a role into this account to change the password so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. 
This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: externalChangesNotDetectedFieldDescription("Password for the initial administrative user of the account. Either admin_password or admin_rsa_public_key has to be specified. This field cannot be used whenever admin_user_type is set to SERVICE."), + DiffSuppressFunc: IgnoreAfterCreation, + AtLeastOneOf: []string{"admin_password", "admin_rsa_public_key"}, }, "admin_rsa_public_key": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: "Assigns a public key to the initial administrative user of the account in order to implement [key pair authentication](https://docs.snowflake.com/en/sql-reference/sql/create-account.html#:~:text=key%20pair%20authentication) for the user. Optional if the `ADMIN_PASSWORD` parameter is specified.", - AtLeastOneOf: []string{"admin_password", "admin_rsa_public_key"}, - // We have no way of assuming a role into this account to change the admin rsa public key so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, - }, - "email": { - Type: schema.TypeString, - Required: true, - Sensitive: true, - Description: "Email address of the initial administrative user of the account. 
This email address is used to send any notifications about the account.", - // We have no way of assuming a role into this account to change the admin email so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, + Type: schema.TypeString, + Optional: true, + Description: externalChangesNotDetectedFieldDescription("Assigns a public key to the initial administrative user of the account. Either admin_password or admin_rsa_public_key has to be specified."), + DiffSuppressFunc: IgnoreAfterCreation, + AtLeastOneOf: []string{"admin_password", "admin_rsa_public_key"}, }, - "edition": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - Description: "[Snowflake Edition](https://docs.snowflake.com/en/user-guide/intro-editions.html) of the account. Valid values are: STANDARD | ENTERPRISE | BUSINESS_CRITICAL", - ValidateFunc: validation.StringInSlice([]string{string(sdk.EditionStandard), string(sdk.EditionEnterprise), string(sdk.EditionBusinessCritical)}, false), + "admin_user_type": { + Type: schema.TypeString, + Optional: true, + Description: externalChangesNotDetectedFieldDescription(fmt.Sprintf("Used for setting the type of the first user that is assigned the ACCOUNTADMIN role during account creation. 
Valid options are: %s", docs.PossibleValuesListed(sdk.AllUserTypes))), + DiffSuppressFunc: SuppressIfAny(IgnoreAfterCreation, NormalizeAndCompare(sdk.ToUserType)), + ValidateDiagFunc: sdkValidation(sdk.ToUserType), }, "first_name": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: "First name of the initial administrative user of the account", - // We have no way of assuming a role into this account to change the admin first name so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: externalChangesNotDetectedFieldDescription("First name of the initial administrative user of the account. This field cannot be used whenever admin_user_type is set to SERVICE."), + DiffSuppressFunc: IgnoreAfterCreation, }, "last_name": { - Type: schema.TypeString, - Optional: true, - Sensitive: true, - Description: "Last name of the initial administrative user of the account", - // We have no way of assuming a role into this account to change the admin last name so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. 
This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, + Type: schema.TypeString, + Optional: true, + Sensitive: true, + Description: externalChangesNotDetectedFieldDescription("Last name of the initial administrative user of the account. This field cannot be used whenever admin_user_type is set to SERVICE."), + DiffSuppressFunc: IgnoreAfterCreation, + }, + "email": { + Type: schema.TypeString, + Required: true, + Sensitive: true, + Description: externalChangesNotDetectedFieldDescription("Email address of the initial administrative user of the account. This email address is used to send any notifications about the account."), + DiffSuppressFunc: IgnoreAfterCreation, }, "must_change_password": { - Type: schema.TypeBool, - Optional: true, - Default: false, - Description: "Specifies whether the new user created to administer the account is forced to change their password upon first login into the account.", - // We have no way of assuming a role into this account to change the admin password policy so this has to be ForceNew even though it's not ideal - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return old == "" - }, + Type: schema.TypeString, + Optional: true, + Default: BooleanDefault, + Description: externalChangesNotDetectedFieldDescription("Specifies whether the new user created to administer the account is forced to change their password upon first login into the account. 
This field cannot be used whenever admin_user_type is set to SERVICE."), + DiffSuppressFunc: IgnoreAfterCreation, + ValidateDiagFunc: validateBooleanString, + }, + "edition": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: fmt.Sprintf("Snowflake Edition of the account. See more about Snowflake Editions in the [official documentation](https://docs.snowflake.com/en/user-guide/intro-editions). Valid options are: %s", docs.PossibleValuesListed(sdk.AllAccountEditions)), + DiffSuppressFunc: NormalizeAndCompare(sdk.ToAccountEdition), + ValidateDiagFunc: sdkValidation(sdk.ToAccountEdition), }, "region_group": { - Type: schema.TypeString, - Optional: true, - Description: "ID of the Snowflake Region where the account is created. If no value is provided, Snowflake creates the account in the same Snowflake Region as the current account (i.e. the account in which the CREATE ACCOUNT statement is executed.)", - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return new == "" - }, + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "ID of the region group where the account is created. To retrieve the region group ID for existing accounts in your organization, execute the [SHOW REGIONS](https://docs.snowflake.com/en/sql-reference/sql/show-regions) command. For information about when you might need to specify region group, see [Region groups](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#label-region-groups).", }, "region": { - Type: schema.TypeString, - Optional: true, - Description: "ID of the Snowflake Region where the account is created. 
If no value is provided, Snowflake creates the account in the same Snowflake Region as the current account (i.e. the account in which the CREATE ACCOUNT statement is executed.)", - ForceNew: true, - DiffSuppressOnRefresh: true, - DiffSuppressFunc: func(k, old, new string, d *schema.ResourceData) bool { - // For new resources always show the diff - if d.Id() == "" { - return false - } - // This suppresses the diff if the old value is empty. This would happen in the event of importing existing accounts since we have no way of reading this value - return new == "" - }, + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "[Snowflake Region ID](https://docs.snowflake.com/en/user-guide/admin-account-identifier.html#label-snowflake-region-ids) of the region where the account is created. If no value is provided, Snowflake creates the account in the same Snowflake Region as the current account (i.e. the account in which the CREATE ACCOUNT statement is executed.)", }, "comment": { Type: schema.TypeString, Optional: true, - Description: "Specifies a comment for the account.", ForceNew: true, + Description: "Specifies a comment for the account.", + DiffSuppressFunc: SuppressIfAny( + IgnoreChangeToCurrentSnowflakeValueInShow("comment"), + func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return oldValue == "SNOWFLAKE" && newValue == "" + }, + ), }, "is_org_admin": { - Type: schema.TypeBool, - Computed: true, - Description: "Indicates whether the ORGADMIN role is enabled in an account. If TRUE, the role is enabled.", + Type: schema.TypeString, + Optional: true, + Default: BooleanDefault, + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShow("is_org_admin"), + ValidateDiagFunc: validateBooleanString, + Description: "Sets an account property that determines whether the ORGADMIN role is enabled in the account. Only an organization administrator (i.e. 
user with the ORGADMIN role) can set the property.", }, "grace_period_in_days": { - Type: schema.TypeInt, - Optional: true, - Default: 3, - Description: "Specifies the number of days to wait before dropping the account. The default is 3 days.", + Type: schema.TypeInt, + Required: true, + Description: "Specifies the number of days during which the account can be restored (“undropped”). The minimum is 3 days and the maximum is 90 days.", + ValidateDiagFunc: validation.ToDiagFunc(validation.IntAtLeast(3)), }, FullyQualifiedNameAttributeName: schemas.FullyQualifiedNameSchema, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW ACCOUNTS` for the given account.", + Elem: &schema.Resource{ + Schema: schemas.ShowAccountSchema, + }, + }, } func Account() *schema.Resource { return &schema.Resource{ Description: "The account resource allows you to create and manage Snowflake accounts.", CreateContext: TrackingCreateWrapper(resources.Account, CreateAccount), - ReadContext: TrackingReadWrapper(resources.Account, ReadAccount), + ReadContext: TrackingReadWrapper(resources.Account, ReadAccount(true)), UpdateContext: TrackingUpdateWrapper(resources.Account, UpdateAccount), DeleteContext: TrackingDeleteWrapper(resources.Account, DeleteAccount), CustomizeDiff: TrackingCustomDiffWrapper(resources.Account, customdiff.All( ComputedIfAnyAttributeChanged(accountSchema, FullyQualifiedNameAttributeName, "name"), + ComputedIfAnyAttributeChanged(accountSchema, ShowOutputAttributeName, "name", "is_org_admin"), )), Schema: accountSchema, Importer: &schema.ResourceImporter{ - StateContext: schema.ImportStatePassthroughContext, + StateContext: TrackingImportWrapper(resources.Account, ImportAccount), + }, + + SchemaVersion: 1, + StateUpgraders: []schema.StateUpgrader{ + { + Version: 0, + // setting type to cty.EmptyObject is a bit hacky here but following 
https://developer.hashicorp.com/terraform/plugin/framework/migrating/resources/state-upgrade#sdkv2-1 would require lots of repetitive code; this should work with cty.EmptyObject + Type: cty.EmptyObject, + Upgrade: v0_99_0_AccountStateUpgrader, + }, }, } } -// CreateAccount implements schema.CreateFunc. -func CreateAccount(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func ImportAccount(ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { + client := meta.(*provider.Context).Client + + isOrgAdmin, err := client.ContextFunctions.IsRoleInSession(ctx, snowflakeroles.Orgadmin) + if err != nil { + return nil, err + } + if !isOrgAdmin { + return nil, errors.New("current user doesn't have the orgadmin role in session") + } + + id, err := sdk.ParseAccountIdentifier(d.Id()) + if err != nil { + return nil, err + } + + account, err := client.Accounts.ShowByID(ctx, id.AsAccountObjectIdentifier()) + if err != nil { + return nil, err + } + + if _, err := ImportName[sdk.AccountIdentifier](context.Background(), d, nil); err != nil { + return nil, err + } + + if account.RegionGroup != nil { + if err = d.Set("region_group", *account.RegionGroup); err != nil { + return nil, err + } + } + + if err := errors.Join( + d.Set("edition", string(*account.Edition)), + d.Set("region", account.SnowflakeRegion), + d.Set("comment", *account.Comment), + d.Set("is_org_admin", booleanStringFromBool(*account.IsOrgAdmin)), + ); err != nil { + return nil, err + } + + return []*schema.ResourceData{d}, nil +} + +func CreateAccount(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - name := d.Get("name").(string) - objectIdentifier := sdk.NewAccountObjectIdentifier(name) + isOrgAdmin, err := client.ContextFunctions.IsRoleInSession(ctx, snowflakeroles.Orgadmin) + if err != nil { + return diag.FromErr(err) + } + if !isOrgAdmin { + return diag.FromErr(errors.New("current 
user doesn't have the orgadmin role in session")) + } + + id := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) - createOptions := &sdk.CreateAccountOptions{ + opts := &sdk.CreateAccountOptions{ AdminName: d.Get("admin_name").(string), Email: d.Get("email").(string), Edition: sdk.AccountEdition(d.Get("edition").(string)), } - // get optional fields. if v, ok := d.GetOk("admin_password"); ok { - createOptions.AdminPassword = sdk.String(v.(string)) + opts.AdminPassword = sdk.String(v.(string)) } if v, ok := d.GetOk("admin_rsa_public_key"); ok { - createOptions.AdminRSAPublicKey = sdk.String(v.(string)) + opts.AdminRSAPublicKey = sdk.String(v.(string)) + } + if v, ok := d.GetOk("admin_user_type"); ok { + userType, err := sdk.ToUserType(v.(string)) + if err != nil { + return diag.FromErr(err) + } + opts.AdminUserType = &userType } if v, ok := d.GetOk("first_name"); ok { - createOptions.FirstName = sdk.String(v.(string)) + opts.FirstName = sdk.String(v.(string)) } if v, ok := d.GetOk("last_name"); ok { - createOptions.LastName = sdk.String(v.(string)) + opts.LastName = sdk.String(v.(string)) } - - // Has default, don't fetch with GetOk because this can be falsey and valid - v := d.Get("must_change_password") - createOptions.MustChangePassword = sdk.Bool(v.(bool)) - - if v, ok := d.GetOk("region_group"); ok { - createOptions.RegionGroup = sdk.String(v.(string)) - } else { - // For organizations that have accounts in multiple region groups, returns . so we need to split on "." 
- currentRegion, err := client.ContextFunctions.CurrentRegion(ctx) + if v := d.Get("must_change_password"); v != BooleanDefault { + parsedBool, err := booleanStringToBool(v.(string)) if err != nil { return diag.FromErr(err) } - regionParts := strings.Split(currentRegion, ".") - if len(regionParts) == 2 { - createOptions.RegionGroup = sdk.String(regionParts[0]) - } + opts.MustChangePassword = &parsedBool + } + if v, ok := d.GetOk("region_group"); ok { + opts.RegionGroup = sdk.String(v.(string)) } if v, ok := d.GetOk("region"); ok { - createOptions.Region = sdk.String(v.(string)) - } else { - // For organizations that have accounts in multiple region groups, returns . so we need to split on "." - currentRegion, err := client.ContextFunctions.CurrentRegion(ctx) - if err != nil { - return diag.FromErr(err) - } - regionParts := strings.Split(currentRegion, ".") - if len(regionParts) == 2 { - createOptions.Region = sdk.String(regionParts[1]) - } else { - createOptions.Region = sdk.String(currentRegion) - } + opts.Region = sdk.String(v.(string)) } if v, ok := d.GetOk("comment"); ok { - createOptions.Comment = sdk.String(v.(string)) + opts.Comment = sdk.String(v.(string)) } - err := client.Accounts.Create(ctx, objectIdentifier, createOptions) + createResponse, err := client.Accounts.Create(ctx, id, opts) if err != nil { return diag.FromErr(err) } - var account *sdk.Account - err = util.Retry(5, 3*time.Second, func() (error, bool) { - account, err = client.Accounts.ShowByID(ctx, objectIdentifier) + d.SetId(helpers.EncodeResourceIdentifier(sdk.NewAccountIdentifier(createResponse.OrganizationName, createResponse.AccountName))) + + if v, ok := d.GetOk("is_org_admin"); ok && v == BooleanTrue { + err := client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{ + SetIsOrgAdmin: &sdk.AccountSetIsOrgAdmin{ + Name: id, + OrgAdmin: true, + }, + }) if err != nil { - log.Printf("[DEBUG] retryable operation resulted in error: %v\n", err) - return nil, false + return diag.FromErr(err) } - 
return nil, true - }) - if err != nil { - return diag.FromErr(err) } - d.SetId(helpers.EncodeSnowflakeID(account.AccountLocator)) - return ReadAccount(ctx, d, meta) + return ReadAccount(false)(ctx, d, meta) } -// ReadAccount implements schema.ReadFunc. -func ReadAccount(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - client := meta.(*provider.Context).Client +func ReadAccount(withExternalChangesMarking bool) schema.ReadContextFunc { + return func(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + isOrgAdmin, err := client.ContextFunctions.IsRoleInSession(ctx, snowflakeroles.Orgadmin) + if err != nil { + return diag.FromErr(err) + } + if !isOrgAdmin { + return diag.FromErr(errors.New("current user doesn't have the orgadmin role in session")) + } - var acc *sdk.Account - var err error - err = util.Retry(5, 3*time.Second, func() (error, bool) { - acc, err = client.Accounts.ShowByID(ctx, id) + id, err := sdk.ParseAccountIdentifier(d.Id()) if err != nil { - log.Printf("[DEBUG] retryable operation resulted in error: %v\n", err) - return nil, false + return diag.FromErr(err) } - return nil, true - }) - if err != nil { - return diag.FromErr(err) + + account, err := client.Accounts.ShowByID(ctx, id.AsAccountObjectIdentifier()) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query account. Marking the resource as removed.", + Detail: fmt.Sprintf("Account: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return diag.FromErr(err) + } + + if withExternalChangesMarking { + var regionGroup string + if account.RegionGroup != nil { + regionGroup = *account.RegionGroup + + // For organizations that have accounts in multiple region groups, returns . so we need to split on "." 
+ parts := strings.Split(regionGroup, ".") + if len(parts) == 2 { + regionGroup = parts[0] + } + } + if err = handleExternalChangesToObjectInShow(d, + outputMapping{"edition", "edition", *account.Edition, *account.Edition, nil}, + outputMapping{"is_org_admin", "is_org_admin", *account.IsOrgAdmin, booleanStringFromBool(*account.IsOrgAdmin), nil}, + outputMapping{"region_group", "region_group", regionGroup, regionGroup, nil}, + outputMapping{"snowflake_region", "region", account.SnowflakeRegion, account.SnowflakeRegion, nil}, + outputMapping{"comment", "comment", *account.Comment, *account.Comment, nil}, + ); err != nil { + return diag.FromErr(err) + } + } else { + if err = setStateToValuesFromConfig(d, accountSchema, []string{ + "name", + "admin_name", + "admin_password", + "admin_rsa_public_key", + "admin_user_type", + "first_name", + "last_name", + "email", + "must_change_password", + "edition", + "region_group", + "region", + "comment", + "is_org_admin", + "grace_period_in_days", + }); err != nil { + return diag.FromErr(err) + } + } + + if errs := errors.Join( + d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()), + d.Set(ShowOutputAttributeName, []map[string]any{schemas.AccountToSchema(account)}), + ); errs != nil { + return diag.FromErr(errs) + } + + return nil } +} - if err := d.Set(FullyQualifiedNameAttributeName, id.FullyQualifiedName()); err != nil { +func UpdateAccount(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + isOrgAdmin, err := client.ContextFunctions.IsRoleInSession(ctx, snowflakeroles.Orgadmin) + if err != nil { return diag.FromErr(err) } - - if err = d.Set("name", acc.AccountName); err != nil { - return diag.FromErr(fmt.Errorf("error setting name: %w", err)) + if !isOrgAdmin { + return diag.FromErr(errors.New("current user doesn't have the orgadmin role in session")) } - if err = d.Set("edition", acc.Edition); err != nil { - return diag.FromErr(fmt.Errorf("error 
setting edition: %w", err)) + id, err := sdk.ParseAccountIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) } - if err = d.Set("region_group", acc.RegionGroup); err != nil { - return diag.FromErr(fmt.Errorf("error setting region_group: %w", err)) - } + if d.HasChange("name") { + newId := sdk.NewAccountIdentifier(id.OrganizationName(), d.Get("name").(string)) - if err = d.Set("region", acc.SnowflakeRegion); err != nil { - return diag.FromErr(fmt.Errorf("error setting region: %w", err)) - } + err = client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{ + Rename: &sdk.AccountRename{ + Name: id.AsAccountObjectIdentifier(), + NewName: newId.AsAccountObjectIdentifier(), + }, + }) + if err != nil { + return diag.FromErr(err) + } - if err = d.Set("comment", acc.Comment); err != nil { - return diag.FromErr(fmt.Errorf("error setting comment: %w", err)) + d.SetId(helpers.EncodeResourceIdentifier(newId)) + id = newId } - if err = d.Set("is_org_admin", acc.IsOrgAdmin); err != nil { - return diag.FromErr(fmt.Errorf("error setting is_org_admin: %w", err)) - } + if d.HasChange("is_org_admin") { + oldIsOrgAdmin, newIsOrgAdmin := d.GetChange("is_org_admin") - return nil -} + // Setting from default to false and vice versa is not allowed because Snowflake throws an error on already disabled IsOrgAdmin + canUpdate := true + if (oldIsOrgAdmin.(string) == BooleanFalse && newIsOrgAdmin.(string) == BooleanDefault) || + (oldIsOrgAdmin.(string) == BooleanDefault && newIsOrgAdmin.(string) == BooleanFalse) { + canUpdate = false + } -// UpdateAccount implements schema.UpdateFunc. 
-func UpdateAccount(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { - /* - todo: comments may eventually work again for accounts, so this can be uncommented when that happens - client := meta.(*provider.Context).Client - client := sdk.NewClientFromDB(db) - ctx := context.Background() - - id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) - - // Change comment - if d.HasChange("comment") { - // changing comment isn't supported for accounts - err := client.Comments.Set(ctx, &sdk.SetCommentOptions{ - ObjectType: sdk.ObjectTypeAccount, - ObjectName: sdk.NewAccountObjectIdentifier(d.Get("name").(string)), - Value: sdk.String(d.Get("comment").(string)), - }) - if err != nil { - return err + if canUpdate { + if newIsOrgAdmin.(string) != BooleanDefault { + parsed, err := booleanStringToBool(newIsOrgAdmin.(string)) + if err != nil { + return diag.FromErr(err) + } + if err := client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{ + SetIsOrgAdmin: &sdk.AccountSetIsOrgAdmin{ + Name: id.AsAccountObjectIdentifier(), + OrgAdmin: parsed, + }, + }); err != nil { + return diag.FromErr(err) + } + } else { + // No unset available for this field (setting Snowflake default) + if err := client.Accounts.Alter(ctx, &sdk.AlterAccountOptions{ + SetIsOrgAdmin: &sdk.AccountSetIsOrgAdmin{ + Name: id.AsAccountObjectIdentifier(), + OrgAdmin: false, + }, + }); err != nil { + return diag.FromErr(err) + } } } - */ - return nil + } + + return ReadAccount(false)(ctx, d, meta) } -// DeleteAccount implements schema.DeleteFunc. 
-func DeleteAccount(ctx context.Context, d *schema.ResourceData, meta interface{}) diag.Diagnostics { +func DeleteAccount(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { client := meta.(*provider.Context).Client - gracePeriodInDays := d.Get("grace_period_in_days").(int) - err := client.Accounts.Drop(ctx, helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier), gracePeriodInDays, &sdk.DropAccountOptions{ + + isOrgAdmin, err := client.ContextFunctions.IsRoleInSession(ctx, snowflakeroles.Orgadmin) + if err != nil { + return diag.FromErr(err) + } + if !isOrgAdmin { + return diag.FromErr(errors.New("current user doesn't have the orgadmin role in session")) + } + + id, err := sdk.ParseAccountIdentifier(d.Id()) + if err != nil { + return diag.FromErr(err) + } + + err = client.Accounts.Drop(ctx, id.AsAccountObjectIdentifier(), d.Get("grace_period_in_days").(int), &sdk.DropAccountOptions{ IfExists: sdk.Bool(true), }) - return diag.FromErr(err) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + + return nil } diff --git a/pkg/resources/account_acceptance_test.go b/pkg/resources/account_acceptance_test.go index ceb1a5df64..3b2e699d9f 100644 --- a/pkg/resources/account_acceptance_test.go +++ b/pkg/resources/account_acceptance_test.go @@ -2,9 +2,24 @@ package resources_test import ( "fmt" + "regexp" "testing" + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert/resourceshowoutputassert" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config/model" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeenvs" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/snowflakeroles" + r "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/plancheck" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/testenvs" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" @@ -12,81 +27,650 @@ import ( "github.com/hashicorp/terraform-plugin-testing/tfversion" ) -func TestAcc_Account_complete(t *testing.T) { +func TestAcc_Account_Minimal(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + organizationName := acc.TestClient().Context.CurrentAccountId(t).OrganizationName() + id := random.AdminName() + accountId := sdk.NewAccountIdentifier(organizationName, id) + email := random.Email() + name := random.AdminName() + key, _ := random.GenerateRSAPublicKey(t) + region := acc.TestClient().Context.CurrentRegion(t) + + configModel := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). 
+ WithAdminRsaPublicKey(key) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, configModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModel.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminNameString(name). + HasAdminRsaPublicKeyString(key). + HasNoAdminUserType(). + HasEmailString(email). + HasNoFirstName(). + HasNoLastName(). + HasMustChangePasswordString(r.BooleanDefault). + HasNoRegionGroup(). + HasNoRegion(). + HasNoComment(). + HasIsOrgAdminString(r.BooleanDefault). + HasGracePeriodInDaysString("3"), + resourceshowoutputassert.AccountShowOutput(t, configModel.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id). + HasSnowflakeRegion(region). + HasRegionGroup(""). + HasEdition(sdk.EditionStandard). + HasAccountUrlNotEmpty(). + HasCreatedOnNotEmpty(). + HasComment("SNOWFLAKE"). + HasAccountLocatorNotEmpty(). + HasAccountLocatorUrlNotEmpty(). + HasManagedAccounts(0). + HasConsumptionBillingEntityNameNotEmpty(). + HasMarketplaceConsumerBillingEntityName(""). + HasMarketplaceProviderBillingEntityNameNotEmpty(). + HasOldAccountURL(""). + HasIsOrgAdmin(false). + HasAccountOldUrlSavedOnEmpty(). + HasAccountOldUrlLastUsedEmpty(). + HasOrganizationOldUrl(""). + HasOrganizationOldUrlSavedOnEmpty(). + HasOrganizationOldUrlLastUsedEmpty(). + HasIsEventsAccount(false). + HasIsOrganizationAccount(false). + HasDroppedOnEmpty(). + HasScheduledDeletionTimeEmpty(). + HasRestoredOnEmpty(). + HasMovedToOrganization(""). + HasMovedOn(""). 
+ HasOrganizationUrlExpirationOnEmpty(), + ), + }, + { + ResourceName: configModel.ResourceReference(), + Config: config.FromModel(t, configModel), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedAccountResource(t, helpers.EncodeResourceIdentifier(accountId)). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasNoAdminName(). + HasNoAdminRsaPublicKey(). + HasNoAdminUserType(). + HasNoEmail(). + HasNoFirstName(). + HasNoLastName(). + HasNoMustChangePassword(). + HasEditionString(string(sdk.EditionStandard)). + HasNoRegionGroup(). + HasRegionString(region). + HasCommentString("SNOWFLAKE"). + HasIsOrgAdminString(r.BooleanFalse). + HasNoGracePeriodInDays(), + ), + }, + }, + }) +} + +func TestAcc_Account_Complete(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) - id := acc.TestClient().Ids.RandomAccountObjectIdentifier() - password := acc.TestClient().Ids.AlphaContaining("123ABC") + organizationName := acc.TestClient().Context.CurrentAccountId(t).OrganizationName() + id := random.AdminName() + accountId := sdk.NewAccountIdentifier(organizationName, id) + firstName := acc.TestClient().Ids.Alpha() + lastName := acc.TestClient().Ids.Alpha() + email := random.Email() + name := random.AdminName() + key, _ := random.GenerateRSAPublicKey(t) + region := acc.TestClient().Context.CurrentRegion(t) + comment := random.Comment() + + configModel := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypePerson). + WithAdminRsaPublicKey(key). + WithFirstName(firstName). + WithLastName(lastName). + WithMustChangePassword(r.BooleanTrue). + WithRegionGroup("PUBLIC"). + WithRegion(region). + WithComment(comment). 
+ WithIsOrgAdmin(r.BooleanFalse) resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, - PreCheck: func() { acc.TestAccPreCheck(t) }, TerraformVersionChecks: []tfversion.TerraformVersionCheck{ tfversion.RequireAbove(tfversion.Version1_5_0), }, CheckDestroy: acc.CheckDestroy(t, resources.Account), - // this errors with: Error running post-test destroy, there may be dangling resources: exit status 1 - // unless we change the resource to return nil on destroy then this is unavoidable Steps: []resource.TestStep{ { - Config: accountConfig(id.Name(), password, "Terraform acceptance test", 3), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account.test", "name", id.Name()), - resource.TestCheckResourceAttr("snowflake_account.test", "fully_qualified_name", id.FullyQualifiedName()), - resource.TestCheckResourceAttr("snowflake_account.test", "admin_name", "someadmin"), - resource.TestCheckResourceAttr("snowflake_account.test", "first_name", "Ad"), - resource.TestCheckResourceAttr("snowflake_account.test", "last_name", "Min"), - resource.TestCheckResourceAttr("snowflake_account.test", "email", "admin@example.com"), - resource.TestCheckResourceAttr("snowflake_account.test", "must_change_password", "false"), - resource.TestCheckResourceAttr("snowflake_account.test", "edition", "BUSINESS_CRITICAL"), - resource.TestCheckResourceAttr("snowflake_account.test", "comment", "Terraform acceptance test"), - resource.TestCheckResourceAttr("snowflake_account.test", "grace_period_in_days", "3"), + Config: config.FromModel(t, configModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModel.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(sdk.NewAccountIdentifier(organizationName, id).FullyQualifiedName()). + HasAdminNameString(name). + HasAdminRsaPublicKeyString(key). + HasAdminUserType(sdk.UserTypePerson). + HasEmailString(email). 
+ HasFirstNameString(firstName). + HasLastNameString(lastName). + HasMustChangePasswordString(r.BooleanTrue). + HasRegionGroupString("PUBLIC"). + HasRegionString(region). + HasCommentString(comment). + HasIsOrgAdminString(r.BooleanFalse). + HasGracePeriodInDaysString("3"), + resourceshowoutputassert.AccountShowOutput(t, configModel.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id). + HasSnowflakeRegion(region). + HasRegionGroup(""). + HasEdition(sdk.EditionStandard). + HasAccountUrlNotEmpty(). + HasCreatedOnNotEmpty(). + HasComment(comment). + HasAccountLocatorNotEmpty(). + HasAccountLocatorUrlNotEmpty(). + HasManagedAccounts(0). + HasConsumptionBillingEntityNameNotEmpty(). + HasMarketplaceConsumerBillingEntityName(""). + HasMarketplaceProviderBillingEntityNameNotEmpty(). + HasOldAccountURL(""). + HasIsOrgAdmin(false). + HasAccountOldUrlSavedOnEmpty(). + HasAccountOldUrlLastUsedEmpty(). + HasOrganizationOldUrl(""). + HasOrganizationOldUrlSavedOnEmpty(). + HasOrganizationOldUrlLastUsedEmpty(). + HasIsEventsAccount(false). + HasIsOrganizationAccount(false). + HasDroppedOnEmpty(). + HasScheduledDeletionTimeEmpty(). + HasRestoredOnEmpty(). + HasMovedToOrganization(""). + HasMovedOn(""). + HasOrganizationUrlExpirationOnEmpty(), ), - Destroy: false, }, - // Change Grace Period In Days { - Config: accountConfig(id.Name(), password, "Terraform acceptance test", 4), - Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_account.test", "grace_period_in_days", "4"), + ResourceName: configModel.ResourceReference(), + Config: config.FromModel(t, configModel), + ImportState: true, + ImportStateCheck: assert.AssertThatImport(t, + resourceassert.ImportedAccountResource(t, helpers.EncodeResourceIdentifier(accountId)). + HasNameString(id). + HasFullyQualifiedNameString(sdk.NewAccountIdentifier(organizationName, id).FullyQualifiedName()). + HasNoAdminName(). + HasNoAdminRsaPublicKey(). + HasNoEmail(). + HasNoFirstName(). 
+ HasNoLastName(). + HasNoAdminUserType(). + HasNoMustChangePassword(). + HasEditionString(string(sdk.EditionStandard)). + HasNoRegionGroup(). + HasRegionString(region). + HasCommentString(comment). + HasIsOrgAdminString(r.BooleanFalse). + HasNoGracePeriodInDays(), ), }, - // IMPORT - { - ResourceName: "snowflake_account.test", - ImportState: true, - ImportStateVerify: true, - ImportStateVerifyIgnore: []string{ - "admin_name", - "admin_password", - "admin_rsa_public_key", - "email", - "must_change_password", - "first_name", - "last_name", - "grace_period_in_days", + }, + }) +} + +func TestAcc_Account_Rename(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + organizationName := acc.TestClient().Context.CurrentAccountId(t).OrganizationName() + id := random.AdminName() + accountId := sdk.NewAccountIdentifier(organizationName, id) + + newId := random.AdminName() + newAccountId := sdk.NewAccountIdentifier(organizationName, newId) + + email := random.Email() + name := random.AdminName() + key, _ := random.GenerateRSAPublicKey(t) + + configModel := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminRsaPublicKey(key) + newConfigModel := model.Account("test", name, string(sdk.EditionStandard), email, 3, newId). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminRsaPublicKey(key) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, configModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModel.ResourceReference()). + HasNameString(id). 
+ HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminUserType(sdk.UserTypeService), + resourceshowoutputassert.AccountShowOutput(t, configModel.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id), + ), + }, + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(newConfigModel.ResourceReference(), plancheck.ResourceActionUpdate), + }, }, + Config: config.FromModel(t, newConfigModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, newConfigModel.ResourceReference()). + HasNameString(newId). + HasFullyQualifiedNameString(newAccountId.FullyQualifiedName()). + HasAdminUserType(sdk.UserTypeService), + resourceshowoutputassert.AccountShowOutput(t, newConfigModel.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(newId), + ), }, }, }) } -func accountConfig(name string, password string, comment string, gracePeriodInDays int) string { - return fmt.Sprintf(` -data "snowflake_current_account" "current" {} +func TestAcc_Account_IsOrgAdmin(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + organizationName := acc.TestClient().Context.CurrentAccountId(t).OrganizationName() + id := random.AdminName() + accountId := sdk.NewAccountIdentifier(organizationName, id) + + email := random.Email() + name := random.AdminName() + key, _ := random.GenerateRSAPublicKey(t) + + configModelWithOrgAdminTrue := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminRsaPublicKey(key). + WithIsOrgAdmin(r.BooleanTrue) + + configModelWithOrgAdminFalse := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminRsaPublicKey(key). 
+ WithIsOrgAdmin(r.BooleanFalse) + + configModelWithoutOrgAdmin := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminRsaPublicKey(key) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + // Create with ORGADMIN enabled + { + Config: config.FromModel(t, configModelWithOrgAdminTrue), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModelWithOrgAdminTrue.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminUserType(sdk.UserTypeService). + HasIsOrgAdminString(r.BooleanTrue), + resourceshowoutputassert.AccountShowOutput(t, configModelWithOrgAdminTrue.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id). + HasIsOrgAdmin(true), + ), + }, + // Disable ORGADMIN + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(configModelWithOrgAdminFalse.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModel(t, configModelWithOrgAdminFalse), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModelWithOrgAdminFalse.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminUserType(sdk.UserTypeService). + HasIsOrgAdminString(r.BooleanFalse), + resourceshowoutputassert.AccountShowOutput(t, configModelWithOrgAdminFalse.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id). 
+ HasIsOrgAdmin(false), + ), + }, + // Remove is_org_admin from the config and go back to default (disabled) + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(configModelWithoutOrgAdmin.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModel(t, configModelWithoutOrgAdmin), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModelWithoutOrgAdmin.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminUserType(sdk.UserTypeService). + HasIsOrgAdminString(r.BooleanDefault), + resourceshowoutputassert.AccountShowOutput(t, configModelWithoutOrgAdmin.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id). + HasIsOrgAdmin(false), + ), + }, + // External change (enable ORGADMIN) + { + PreConfig: func() { + acc.TestClient().Account.Alter(t, &sdk.AlterAccountOptions{ + SetIsOrgAdmin: &sdk.AccountSetIsOrgAdmin{ + Name: accountId.AsAccountObjectIdentifier(), + OrgAdmin: true, + }, + }) + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(configModelWithoutOrgAdmin.ResourceReference(), plancheck.ResourceActionUpdate), + }, + }, + Config: config.FromModel(t, configModelWithoutOrgAdmin), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModelWithoutOrgAdmin.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminUserType(sdk.UserTypeService). + HasIsOrgAdminString(r.BooleanDefault), + resourceshowoutputassert.AccountShowOutput(t, configModelWithoutOrgAdmin.ResourceReference()). + HasOrganizationName(organizationName). + HasAccountName(id). 
+ HasIsOrgAdmin(false), + ), + }, + }, + }) +} + +func TestAcc_Account_IgnoreUpdateAfterCreationOnCertainFields(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + organizationName := acc.TestClient().Context.CurrentAccountId(t).OrganizationName() + id := random.AdminName() + accountId := sdk.NewAccountIdentifier(organizationName, id) + + firstName := random.AdminName() + lastName := random.AdminName() + email := random.Email() + name := random.AdminName() + pass := random.Password() + newFirstName := random.AdminName() + newLastName := random.AdminName() + newEmail := random.Email() + newName := random.AdminName() + newPass := random.Password() + + configModel := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypePerson). + WithFirstName(firstName). + WithLastName(lastName). + WithMustChangePassword(r.BooleanTrue). + WithAdminPassword(pass) + + newConfigModel := model.Account("test", newName, string(sdk.EditionStandard), newEmail, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminPassword(newPass). + WithFirstName(newFirstName). + WithLastName(newLastName) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, configModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModel.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminNameString(name). + HasAdminPasswordString(pass). + HasAdminUserType(sdk.UserTypePerson). + HasEmailString(email). + HasFirstNameString(firstName). + HasLastNameString(lastName). 
+ HasMustChangePasswordString(r.BooleanTrue), + ), + }, + { + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{ + plancheck.ExpectResourceAction(newConfigModel.ResourceReference(), plancheck.ResourceActionNoop), + }, + }, + Config: config.FromModel(t, newConfigModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, newConfigModel.ResourceReference()). + HasNameString(id). + HasFullyQualifiedNameString(accountId.FullyQualifiedName()). + HasAdminNameString(name). + HasAdminPasswordString(pass). + HasAdminUserType(sdk.UserTypePerson). + HasEmailString(email). + HasFirstNameString(firstName). + HasLastNameString(lastName). + HasMustChangePasswordString(r.BooleanTrue), + ), + }, + }, + }) +} + +func TestAcc_Account_TryToCreateWithoutOrgadmin(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + id := random.AdminName() + email := random.Email() + name := random.AdminName() + key, _ := random.GenerateRSAPublicKey(t) + + t.Setenv(string(testenvs.ConfigureClientOnce), "") + t.Setenv(snowflakeenvs.Role, snowflakeroles.Accountadmin.Name()) + + configModel := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). 
+ WithAdminRsaPublicKey(key) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, configModel), + ExpectError: regexp.MustCompile("Error: current user doesn't have the orgadmin role in session"), + }, + }, + }) +} + +func TestAcc_Account_InvalidValues(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + id := random.AdminName() + email := random.Email() + name := random.AdminName() + key, _ := random.GenerateRSAPublicKey(t) + + configModelInvalidUserType := model.Account("test", name, string(sdk.EditionStandard), email, 3, id). + WithAdminUserType("invalid_user_type"). + WithAdminRsaPublicKey(key) + + configModelInvalidAccountEdition := model.Account("test", name, "invalid_account_edition", email, 3, id). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminRsaPublicKey(key) + + configModelInvalidGracePeriodInDays := model.Account("test", name, string(sdk.EditionStandard), email, 2, id). + WithAdminUserTypeEnum(sdk.UserTypeService). 
+ WithAdminRsaPublicKey(key) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + { + Config: config.FromModel(t, configModelInvalidUserType), + ExpectError: regexp.MustCompile("invalid user type: invalid_user_type"), + }, + { + Config: config.FromModel(t, configModelInvalidAccountEdition), + ExpectError: regexp.MustCompile("unknown account edition: invalid_account_edition"), + }, + { + Config: config.FromModel(t, configModelInvalidGracePeriodInDays), + ExpectError: regexp.MustCompile(`Error: expected grace_period_in_days to be at least \(3\), got 2`), + }, + }, + }) +} + +func TestAcc_Account_UpgradeFrom_v0_99_0(t *testing.T) { + _ = testenvs.GetOrSkipTest(t, testenvs.EnableAcceptance) + _ = testenvs.GetOrSkipTest(t, testenvs.TestAccountCreate) + + email := random.Email() + name := random.AdminName() + adminName := random.AdminName() + adminPassword := random.Password() + firstName := random.AdminName() + lastName := random.AdminName() + region := acc.TestClient().Context.CurrentRegion(t) + comment := random.Comment() + + configModel := model.Account("test", adminName, string(sdk.EditionStandard), email, 3, name). + WithAdminUserTypeEnum(sdk.UserTypeService). + WithAdminPassword(adminPassword). + WithFirstName(firstName). + WithLastName(lastName). + WithMustChangePasswordValue(tfconfig.BoolVariable(true)). + WithRegion(region). + WithIsOrgAdmin(r.BooleanFalse). 
+ WithComment(comment) + + resource.Test(t, resource.TestCase{ + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: acc.CheckDestroy(t, resources.Account), + Steps: []resource.TestStep{ + { + ExternalProviders: map[string]resource.ExternalProvider{ + "snowflake": { + VersionConstraint: "=0.99.0", + Source: "Snowflake-Labs/snowflake", + }, + }, + Config: accountConfig_v0_99_0(name, adminName, adminPassword, email, sdk.EditionStandard, firstName, lastName, true, region, 3, comment), + }, + { + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + Config: config.FromModel(t, configModel), + Check: assert.AssertThat(t, + resourceassert.AccountResource(t, configModel.ResourceReference()). + HasNameString(name). + HasAdminNameString(adminName). + HasAdminPasswordString(adminPassword). + HasEmailString(email). + HasFirstNameString(firstName). + HasLastNameString(lastName). + HasMustChangePasswordString(r.BooleanTrue). + HasRegionGroupString(""). + HasRegionString(region). + HasCommentString(comment). + HasIsOrgAdminString(r.BooleanFalse). 
+ HasGracePeriodInDaysString("3"), + ), + }, + }, + }) +} + +func accountConfig_v0_99_0( + name string, + adminName string, + adminPassword string, + email string, + edition sdk.AccountEdition, + firstName string, + lastName string, + mustChangePassword bool, + region string, + gracePeriodInDays int, + comment string, +) string { + return fmt.Sprintf(` resource "snowflake_account" "test" { - name = "%s" - admin_name = "someadmin" - admin_password = "%s" - first_name = "Ad" - last_name = "Min" - email = "admin@example.com" - must_change_password = false - edition = "BUSINESS_CRITICAL" - comment = "%s" - region = data.snowflake_current_account.current.region - grace_period_in_days = %d + name = "%[1]s" + admin_name = "%[2]s" + admin_password = "%[3]s" + email = "%[4]s" + edition = "%[5]s" + first_name = "%[6]s" + last_name = "%[7]s" + must_change_password = %[8]t + region = "%[9]s" + grace_period_in_days = %[10]d + comment = "%[11]s" } -`, name, password, comment, gracePeriodInDays) +`, + name, + adminName, + adminPassword, + email, + edition, + firstName, + lastName, + mustChangePassword, + region, + gracePeriodInDays, + comment, + ) } diff --git a/pkg/resources/account_state_upgraders.go b/pkg/resources/account_state_upgraders.go new file mode 100644 index 0000000000..bfd0b60bba --- /dev/null +++ b/pkg/resources/account_state_upgraders.go @@ -0,0 +1,28 @@ +package resources + +import ( + "context" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" +) + +func v0_99_0_AccountStateUpgrader(ctx context.Context, state map[string]any, meta any) (map[string]any, error) { + if state == nil { + return state, nil + } + + client := meta.(*provider.Context).Client + state["must_change_password"] = booleanStringFromBool(state["must_change_password"].(bool)) + state["is_org_admin"] = 
booleanStringFromBool(state["is_org_admin"].(bool)) + account, err := client.Accounts.ShowByID(ctx, sdk.NewAccountObjectIdentifier(state["name"].(string))) + if err != nil { + return nil, err + } + + state["id"] = helpers.EncodeResourceIdentifier(sdk.NewAccountIdentifier(account.OrganizationName, account.AccountName)) + + return state, nil +} diff --git a/pkg/resources/common.go b/pkg/resources/common.go index 643524f9d9..4c84ac1c4c 100644 --- a/pkg/resources/common.go +++ b/pkg/resources/common.go @@ -60,7 +60,7 @@ func ctyValToSliceString(valueElems []cty.Value) []string { return elems } -func ImportName[T sdk.AccountObjectIdentifier | sdk.DatabaseObjectIdentifier | sdk.SchemaObjectIdentifier](ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { +func ImportName[T sdk.AccountObjectIdentifier | sdk.DatabaseObjectIdentifier | sdk.SchemaObjectIdentifier | sdk.AccountIdentifier](ctx context.Context, d *schema.ResourceData, meta any) ([]*schema.ResourceData, error) { switch any(new(T)).(type) { case *sdk.AccountObjectIdentifier: id, err := sdk.ParseAccountObjectIdentifier(d.Id()) @@ -101,6 +101,15 @@ func ImportName[T sdk.AccountObjectIdentifier | sdk.DatabaseObjectIdentifier | s if err := d.Set("schema", id.SchemaName()); err != nil { return nil, err } + case *sdk.AccountIdentifier: + id, err := sdk.ParseAccountIdentifier(d.Id()) + if err != nil { + return nil, err + } + + if err := d.Set("name", id.AccountName()); err != nil { + return nil, err + } } return []*schema.ResourceData{d}, nil diff --git a/pkg/schemas/account_gen.go b/pkg/schemas/account_gen.go index 715e1fb9cf..e6f4413875 100644 --- a/pkg/schemas/account_gen.go +++ b/pkg/schemas/account_gen.go @@ -17,11 +17,11 @@ var ShowAccountSchema = map[string]*schema.Schema{ Type: schema.TypeString, Computed: true, }, - "region_group": { + "snowflake_region": { Type: schema.TypeString, Computed: true, }, - "snowflake_region": { + "region_group": { Type: schema.TypeString, 
Computed: true, }, @@ -73,6 +73,58 @@ var ShowAccountSchema = map[string]*schema.Schema{ Type: schema.TypeBool, Computed: true, }, + "account_old_url_saved_on": { + Type: schema.TypeString, + Computed: true, + }, + "account_old_url_last_used": { + Type: schema.TypeString, + Computed: true, + }, + "organization_old_url": { + Type: schema.TypeString, + Computed: true, + }, + "organization_old_url_saved_on": { + Type: schema.TypeString, + Computed: true, + }, + "organization_old_url_last_used": { + Type: schema.TypeString, + Computed: true, + }, + "is_events_account": { + Type: schema.TypeBool, + Computed: true, + }, + "is_organization_account": { + Type: schema.TypeBool, + Computed: true, + }, + "dropped_on": { + Type: schema.TypeString, + Computed: true, + }, + "scheduled_deletion_time": { + Type: schema.TypeString, + Computed: true, + }, + "restored_on": { + Type: schema.TypeString, + Computed: true, + }, + "moved_to_organization": { + Type: schema.TypeString, + Computed: true, + }, + "moved_on": { + Type: schema.TypeString, + Computed: true, + }, + "organization_url_expiration_on": { + Type: schema.TypeString, + Computed: true, + }, } var _ = ShowAccountSchema @@ -81,20 +133,82 @@ func AccountToSchema(account *sdk.Account) map[string]any { accountSchema := make(map[string]any) accountSchema["organization_name"] = account.OrganizationName accountSchema["account_name"] = account.AccountName - accountSchema["region_group"] = account.RegionGroup accountSchema["snowflake_region"] = account.SnowflakeRegion - accountSchema["edition"] = account.Edition - accountSchema["account_url"] = account.AccountURL - accountSchema["created_on"] = account.CreatedOn.String() - accountSchema["comment"] = account.Comment + if account.RegionGroup != nil { + accountSchema["region_group"] = account.RegionGroup + } + if account.Edition != nil { + // Manually modified, please don't re-generate + accountSchema["edition"] = string(*account.Edition) + } + if account.AccountURL != nil { + 
accountSchema["account_url"] = account.AccountURL + } + if account.CreatedOn != nil { + accountSchema["created_on"] = account.CreatedOn.String() + } + if account.Comment != nil { + accountSchema["comment"] = account.Comment + } accountSchema["account_locator"] = account.AccountLocator - accountSchema["account_locator_url"] = account.AccountLocatorURL - accountSchema["managed_accounts"] = account.ManagedAccounts - accountSchema["consumption_billing_entity_name"] = account.ConsumptionBillingEntityName - accountSchema["marketplace_consumer_billing_entity_name"] = account.MarketplaceConsumerBillingEntityName - accountSchema["marketplace_provider_billing_entity_name"] = account.MarketplaceProviderBillingEntityName - accountSchema["old_account_url"] = account.OldAccountURL - accountSchema["is_org_admin"] = account.IsOrgAdmin + if account.AccountLocatorUrl != nil { + accountSchema["account_locator_url"] = account.AccountLocatorUrl + } + if account.ManagedAccounts != nil { + accountSchema["managed_accounts"] = account.ManagedAccounts + } + if account.ConsumptionBillingEntityName != nil { + accountSchema["consumption_billing_entity_name"] = account.ConsumptionBillingEntityName + } + if account.MarketplaceConsumerBillingEntityName != nil { + accountSchema["marketplace_consumer_billing_entity_name"] = account.MarketplaceConsumerBillingEntityName + } + if account.MarketplaceProviderBillingEntityName != nil { + accountSchema["marketplace_provider_billing_entity_name"] = account.MarketplaceProviderBillingEntityName + } + if account.OldAccountURL != nil { + accountSchema["old_account_url"] = account.OldAccountURL + } + if account.IsOrgAdmin != nil { + accountSchema["is_org_admin"] = account.IsOrgAdmin + } + if account.AccountOldUrlSavedOn != nil { + accountSchema["account_old_url_saved_on"] = account.AccountOldUrlSavedOn.String() + } + if account.AccountOldUrlLastUsed != nil { + accountSchema["account_old_url_last_used"] = account.AccountOldUrlLastUsed.String() + } + if 
account.OrganizationOldUrl != nil { + accountSchema["organization_old_url"] = account.OrganizationOldUrl + } + if account.OrganizationOldUrlSavedOn != nil { + accountSchema["organization_old_url_saved_on"] = account.OrganizationOldUrlSavedOn.String() + } + if account.OrganizationOldUrlLastUsed != nil { + accountSchema["organization_old_url_last_used"] = account.OrganizationOldUrlLastUsed.String() + } + if account.IsEventsAccount != nil { + accountSchema["is_events_account"] = account.IsEventsAccount + } + accountSchema["is_organization_account"] = account.IsOrganizationAccount + if account.DroppedOn != nil { + accountSchema["dropped_on"] = account.DroppedOn.String() + } + if account.ScheduledDeletionTime != nil { + accountSchema["scheduled_deletion_time"] = account.ScheduledDeletionTime.String() + } + if account.RestoredOn != nil { + accountSchema["restored_on"] = account.RestoredOn.String() + } + if account.MovedToOrganization != nil { + accountSchema["moved_to_organization"] = account.MovedToOrganization + } + if account.MovedOn != nil { + accountSchema["moved_on"] = account.MovedOn + } + if account.OrganizationUrlExpirationOn != nil { + accountSchema["organization_url_expiration_on"] = account.OrganizationUrlExpirationOn.String() + } return accountSchema } diff --git a/pkg/schemas/account_parameters.go b/pkg/schemas/account_parameters.go new file mode 100644 index 0000000000..e5885b7967 --- /dev/null +++ b/pkg/schemas/account_parameters.go @@ -0,0 +1,71 @@ +package schemas + +import ( + "slices" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var ( + ShowAccountParametersSchema = make(map[string]*schema.Schema) + accountParameters = []sdk.AccountParameter{ + // TODO(SNOW-1348092 - next prs): Add parameters + // session parameters + sdk.AccountParameterAbortDetachedQuery, + sdk.AccountParameterAutocommit, + sdk.AccountParameterBinaryInputFormat, + 
sdk.AccountParameterBinaryOutputFormat, + sdk.AccountParameterClientMetadataRequestUseConnectionCtx, + sdk.AccountParameterClientResultColumnCaseInsensitive, + sdk.AccountParameterDateInputFormat, + sdk.AccountParameterDateOutputFormat, + sdk.AccountParameterErrorOnNondeterministicMerge, + sdk.AccountParameterErrorOnNondeterministicUpdate, + sdk.AccountParameterGeographyOutputFormat, + sdk.AccountParameterLockTimeout, + sdk.AccountParameterLogLevel, + sdk.AccountParameterMultiStatementCount, + sdk.AccountParameterQueryTag, + sdk.AccountParameterQuotedIdentifiersIgnoreCase, + sdk.AccountParameterRowsPerResultset, + sdk.AccountParameterS3StageVpceDnsName, + sdk.AccountParameterStatementQueuedTimeoutInSeconds, + sdk.AccountParameterStatementTimeoutInSeconds, + sdk.AccountParameterTimestampDayIsAlways24h, + sdk.AccountParameterTimestampInputFormat, + sdk.AccountParameterTimestampLtzOutputFormat, + sdk.AccountParameterTimestampNtzOutputFormat, + sdk.AccountParameterTimestampOutputFormat, + sdk.AccountParameterTimestampTypeMapping, + sdk.AccountParameterTimestampTzOutputFormat, + sdk.AccountParameterTimezone, + sdk.AccountParameterTimeInputFormat, + sdk.AccountParameterTimeOutputFormat, + sdk.AccountParameterTraceLevel, + sdk.AccountParameterTransactionAbortOnError, + sdk.AccountParameterTransactionDefaultIsolationLevel, + sdk.AccountParameterTwoDigitCenturyStart, + sdk.AccountParameterUnsupportedDdlAction, + sdk.AccountParameterUseCachedResult, + sdk.AccountParameterWeekOfYearPolicy, + sdk.AccountParameterWeekStart, + } +) + +func init() { + for _, param := range accountParameters { + ShowAccountParametersSchema[strings.ToLower(string(param))] = ParameterListSchema + } +} + +func AccountParametersToSchema(parameters []*sdk.Parameter) map[string]any { + accountParametersValue := make(map[string]any) + for _, param := range parameters { + if slices.Contains(accountParameters, sdk.AccountParameter(param.Key)) { + accountParametersValue[strings.ToLower(param.Key)] = 
[]map[string]any{ParameterToSchema(param)} + } + } + return accountParametersValue +} diff --git a/pkg/sdk/accounts.go b/pkg/sdk/accounts.go index 00557cba7a..997c5f2086 100644 --- a/pkg/sdk/accounts.go +++ b/pkg/sdk/accounts.go @@ -3,9 +3,15 @@ package sdk import ( "context" "database/sql" + "encoding/json" "errors" + "fmt" + "log" + "strings" "time" + "github.com/snowflakedb/gosnowflake" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" ) @@ -16,7 +22,7 @@ var ( ) type Accounts interface { - Create(ctx context.Context, id AccountObjectIdentifier, opts *CreateAccountOptions) error + Create(ctx context.Context, id AccountObjectIdentifier, opts *CreateAccountOptions) (*AccountCreateResponse, error) Alter(ctx context.Context, opts *AlterAccountOptions) error Show(ctx context.Context, opts *ShowAccountOptions) ([]Account, error) ShowByID(ctx context.Context, id AccountObjectIdentifier) (*Account, error) @@ -39,6 +45,21 @@ var ( EditionBusinessCritical AccountEdition = "BUSINESS_CRITICAL" ) +var AllAccountEditions = []AccountEdition{ + EditionStandard, + EditionEnterprise, + EditionBusinessCritical, +} + +func ToAccountEdition(edition string) (AccountEdition, error) { + switch typedEdition := AccountEdition(strings.ToUpper(edition)); typedEdition { + case EditionStandard, EditionEnterprise, EditionBusinessCritical: + return typedEdition, nil + default: + return "", fmt.Errorf("unknown account edition: %s", edition) + } +} + // CreateAccountOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-account. type CreateAccountOptions struct { create bool `ddl:"static" sql:"CREATE"` @@ -81,12 +102,59 @@ func (opts *CreateAccountOptions) validate() error { return errors.Join(errs...) 
} -func (c *accounts) Create(ctx context.Context, id AccountObjectIdentifier, opts *CreateAccountOptions) error { +type AccountCreateResponse struct { + AccountLocator string `json:"accountLocator,omitempty"` + AccountLocatorUrl string `json:"accountLocatorUrl,omitempty"` + OrganizationName string + AccountName string `json:"accountName,omitempty"` + Url string `json:"url,omitempty"` + Edition AccountEdition `json:"edition,omitempty"` + RegionGroup string `json:"regionGroup,omitempty"` + Cloud string `json:"cloud,omitempty"` + Region string `json:"region,omitempty"` +} + +func ToAccountCreateResponse(v string) (*AccountCreateResponse, error) { + var res AccountCreateResponse + err := json.Unmarshal([]byte(v), &res) + if err != nil { + return nil, err + } + if len(res.Url) > 0 { + url := strings.TrimPrefix(res.Url, `https://`) + url = strings.TrimPrefix(url, `http://`) + parts := strings.SplitN(url, "-", 2) + if len(parts) == 2 { + res.OrganizationName = strings.ToUpper(parts[0]) + } + } + return &res, nil +} + +func (c *accounts) Create(ctx context.Context, id AccountObjectIdentifier, opts *CreateAccountOptions) (*AccountCreateResponse, error) { if opts == nil { opts = &CreateAccountOptions{} } opts.name = id - return validateAndExec(c.client, ctx, opts) + queryChanId := make(chan string, 1) + err := validateAndExec(c.client, gosnowflake.WithQueryIDChan(ctx, queryChanId), opts) + if err != nil { + return nil, err + } + + queryId := <-queryChanId + rows, err := c.client.QueryUnsafe(gosnowflake.WithFetchResultByID(ctx, queryId), "") + if err != nil { + log.Printf("[WARN] Unable to retrieve create account output, err = %v", err) + } + + if len(rows) == 1 && rows[0]["status"] != nil { + if status, ok := (*rows[0]["status"]).(string); ok { + return ToAccountCreateResponse(status) + } + } + + return nil, nil } // AlterAccountOptions is based on https://docs.snowflake.com/en/sql-reference/sql/alter-account. 
@@ -299,7 +367,7 @@ type Account struct { CreatedOn *time.Time Comment *string AccountLocator string - AccountLocatorURL *string + AccountLocatorUrl *string ManagedAccounts *int ConsumptionBillingEntityName *string MarketplaceConsumerBillingEntityName *string @@ -387,7 +455,7 @@ func (row accountDBRow) convert() *Account { acc.Comment = &row.Comment.String } if row.AccountLocatorURL.Valid { - acc.AccountLocatorURL = &row.AccountLocatorURL.String + acc.AccountLocatorUrl = &row.AccountLocatorURL.String } if row.ManagedAccounts.Valid { acc.ManagedAccounts = Int(int(row.ManagedAccounts.Int32)) diff --git a/pkg/sdk/accounts_test.go b/pkg/sdk/accounts_test.go index d275c82883..e072eabd71 100644 --- a/pkg/sdk/accounts_test.go +++ b/pkg/sdk/accounts_test.go @@ -1,10 +1,13 @@ package sdk import ( + "encoding/json" "fmt" "testing" "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestAccountCreate(t *testing.T) { @@ -404,3 +407,155 @@ func TestAccountShow(t *testing.T) { assertOptsValidAndSQLEquals(t, opts, `SHOW ACCOUNTS LIKE 'myaccount'`) }) } + +func TestToAccountCreateResponse(t *testing.T) { + testCases := []struct { + Name string + RawInput string + Input AccountCreateResponse + ExpectedOutput *AccountCreateResponse + Error string + }{ + { + Name: "validation: empty input", + RawInput: "", + Error: "unexpected end of JSON input", + }, + { + Name: "validation: only a few fields filled", + Input: AccountCreateResponse{ + AccountName: "acc_name", + Url: `https://org_name-acc_name.snowflakecomputing.com`, + Edition: EditionStandard, + RegionGroup: "region_group", + Cloud: "cloud", + Region: "region", + }, + ExpectedOutput: &AccountCreateResponse{ + AccountName: "acc_name", + Url: `https://org_name-acc_name.snowflakecomputing.com`, + OrganizationName: "ORG_NAME", + Edition: EditionStandard, + RegionGroup: "region_group", + Cloud: "cloud", + Region: 
"region", + }, + }, + { + Name: "validation: invalid url", + Input: AccountCreateResponse{ + Url: `https://org_name_acc_name.snowflake.computing.com`, + }, + ExpectedOutput: &AccountCreateResponse{ + Url: `https://org_name_acc_name.snowflake.computing.com`, + // OrganizationName is not filled + }, + }, + { + Name: "validation: valid url", + Input: AccountCreateResponse{ + Url: `https://org_name-acc_name.snowflakecomputing.com`, + }, + ExpectedOutput: &AccountCreateResponse{ + Url: `https://org_name-acc_name.snowflakecomputing.com`, + OrganizationName: "ORG_NAME", + }, + }, + { + Name: "validation: valid http url", + Input: AccountCreateResponse{ + Url: `http://org_name-acc_name.snowflakecomputing.com`, + }, + ExpectedOutput: &AccountCreateResponse{ + Url: `http://org_name-acc_name.snowflakecomputing.com`, + OrganizationName: "ORG_NAME", + }, + }, + { + Name: "complete", + Input: AccountCreateResponse{ + AccountLocator: "locator", + AccountLocatorUrl: "locator_url", + AccountName: "acc_name", + Url: `https://org_name-acc_name.snowflakecomputing.com`, + Edition: EditionBusinessCritical, + RegionGroup: "region_group", + Cloud: "cloud", + Region: "region", + }, + ExpectedOutput: &AccountCreateResponse{ + AccountLocator: "locator", + AccountLocatorUrl: "locator_url", + AccountName: "acc_name", + Url: `https://org_name-acc_name.snowflakecomputing.com`, + OrganizationName: "ORG_NAME", + Edition: EditionBusinessCritical, + RegionGroup: "region_group", + Cloud: "cloud", + Region: "region", + }, + }, + } + + for _, tc := range testCases { + t.Run(tc.Name, func(t *testing.T) { + input := tc.RawInput + if tc.Input != (AccountCreateResponse{}) { + bytes, err := json.Marshal(tc.Input) + if err != nil { + assert.Fail(t, err.Error()) + } + input = string(bytes) + } + + createResponse, err := ToAccountCreateResponse(input) + + if tc.Error != "" { + assert.EqualError(t, err, tc.Error) + assert.Nil(t, createResponse) + } else { + assert.NoError(t, err) + assert.Equal(t, 
tc.ExpectedOutput, createResponse) + } + }) + } +} + +func TestToAccountEdition(t *testing.T) { + type test struct { + input string + want AccountEdition + } + + valid := []test{ + // case insensitive. + {input: "standard", want: EditionStandard}, + + // Supported Values + {input: "STANDARD", want: EditionStandard}, + {input: "ENTERPRISE", want: EditionEnterprise}, + {input: "BUSINESS_CRITICAL", want: EditionBusinessCritical}, + } + + invalid := []test{ + // bad values + {input: ""}, + {input: "foo"}, + {input: "businesscritical"}, + } + + for _, tc := range valid { + t.Run(tc.input, func(t *testing.T) { + got, err := ToAccountEdition(tc.input) + require.NoError(t, err) + require.Equal(t, tc.want, got) + }) + } + + for _, tc := range invalid { + t.Run(tc.input, func(t *testing.T) { + _, err := ToAccountEdition(tc.input) + require.Error(t, err) + }) + } +} diff --git a/pkg/sdk/identifier_helpers.go b/pkg/sdk/identifier_helpers.go index 7e857fb8de..1609593d71 100644 --- a/pkg/sdk/identifier_helpers.go +++ b/pkg/sdk/identifier_helpers.go @@ -124,6 +124,10 @@ func (i AccountIdentifier) AccountName() string { return i.accountName } +func (i AccountIdentifier) AsAccountObjectIdentifier() AccountObjectIdentifier { + return NewAccountObjectIdentifier(i.accountName) +} + func (i AccountIdentifier) Name() string { if i.organizationName != "" && i.accountName != "" { return fmt.Sprintf("%s.%s", i.organizationName, i.accountName) diff --git a/pkg/sdk/testint/accounts_integration_test.go b/pkg/sdk/testint/accounts_integration_test.go index c4c864e445..6ed6b1ac5d 100644 --- a/pkg/sdk/testint/accounts_integration_test.go +++ b/pkg/sdk/testint/accounts_integration_test.go @@ -37,7 +37,7 @@ func TestInt_Account(t *testing.T) { assert.NotEmpty(t, *account.CreatedOn) assert.Equal(t, "SNOWFLAKE", *account.Comment) assert.NotEmpty(t, account.AccountLocator) - assert.NotEmpty(t, *account.AccountLocatorURL) + assert.NotEmpty(t, *account.AccountLocatorUrl) assert.Zero(t, 
*account.ManagedAccounts) assert.NotEmpty(t, *account.ConsumptionBillingEntityName) assert.Nil(t, account.MarketplaceConsumerBillingEntityName) @@ -65,7 +65,7 @@ func TestInt_Account(t *testing.T) { assert.Nil(t, account.AccountURL) assert.Nil(t, account.CreatedOn) assert.Nil(t, account.Comment) - assert.Nil(t, account.AccountLocatorURL) + assert.Nil(t, account.AccountLocatorUrl) assert.Nil(t, account.ManagedAccounts) assert.Nil(t, account.ConsumptionBillingEntityName) assert.Nil(t, account.MarketplaceConsumerBillingEntityName) @@ -92,13 +92,27 @@ func TestInt_Account(t *testing.T) { assert.Nil(t, account.OrganizationUrlExpirationOn) } + assertCreateResponse := func(t *testing.T, response *sdk.AccountCreateResponse, account sdk.Account) { + t.Helper() + require.NotNil(t, response) + assert.Equal(t, account.AccountLocator, response.AccountLocator) + assert.Equal(t, *account.AccountLocatorUrl, response.AccountLocatorUrl) + assert.Equal(t, account.AccountName, response.AccountName) + assert.Equal(t, *account.AccountURL, response.Url) + assert.Equal(t, account.OrganizationName, response.OrganizationName) + assert.Equal(t, *account.Edition, response.Edition) + assert.NotEmpty(t, response.RegionGroup) + assert.NotEmpty(t, response.Cloud) + assert.NotEmpty(t, response.Region) + } + t.Run("create: minimal", func(t *testing.T) { id := testClientHelper().Ids.RandomAccountObjectIdentifier() name := testClientHelper().Ids.Alpha() password := random.Password() email := random.Email() - err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ + createResponse, err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ AdminName: name, AdminPassword: sdk.String(password), Email: email, @@ -110,6 +124,7 @@ func TestInt_Account(t *testing.T) { acc, err := client.Accounts.ShowByID(ctx, id) require.NoError(t, err) require.Equal(t, id, acc.ID()) + assertCreateResponse(t, createResponse, *acc) }) t.Run("create: user type service", func(t *testing.T) { @@ -118,7 +133,7 
@@ func TestInt_Account(t *testing.T) { key, _ := random.GenerateRSAPublicKey(t) email := random.Email() - err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ + createResponse, err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ AdminName: name, AdminRSAPublicKey: sdk.String(key), AdminUserType: sdk.Pointer(sdk.UserTypeService), @@ -131,6 +146,7 @@ func TestInt_Account(t *testing.T) { acc, err := client.Accounts.ShowByID(ctx, id) require.NoError(t, err) require.Equal(t, id, acc.ID()) + assertCreateResponse(t, createResponse, *acc) }) t.Run("create: user type legacy service", func(t *testing.T) { @@ -139,7 +155,7 @@ func TestInt_Account(t *testing.T) { password := random.Password() email := random.Email() - err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ + createResponse, err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ AdminName: name, AdminPassword: sdk.String(password), AdminUserType: sdk.Pointer(sdk.UserTypeLegacyService), @@ -152,6 +168,7 @@ func TestInt_Account(t *testing.T) { acc, err := client.Accounts.ShowByID(ctx, id) require.NoError(t, err) require.Equal(t, id, acc.ID()) + assertCreateResponse(t, createResponse, *acc) }) t.Run("create: complete", func(t *testing.T) { @@ -167,7 +184,7 @@ func TestInt_Account(t *testing.T) { require.NoError(t, err) comment := random.Comment() - err = client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ + createResponse, err := client.Accounts.Create(ctx, id, &sdk.CreateAccountOptions{ AdminName: name, AdminPassword: sdk.String(password), FirstName: sdk.String("firstName"), @@ -187,6 +204,7 @@ func TestInt_Account(t *testing.T) { acc, err := client.Accounts.ShowByID(ctx, id) require.NoError(t, err) require.Equal(t, id, acc.ID()) + assertCreateResponse(t, createResponse, *acc) }) t.Run("alter: set / unset is org admin", func(t *testing.T) { diff --git a/templates/resources/account.md.tmpl b/templates/resources/account.md.tmpl index 973e844784..c05e6ff4bc 
100644 --- a/templates/resources/account.md.tmpl +++ b/templates/resources/account.md.tmpl @@ -9,26 +9,29 @@ description: |- {{- end }} --- +!> **V1 release candidate** This resource was reworked and is a release candidate for the V1. We do not expect significant changes in it before the V1. We will welcome any feedback and adjust the resource if needed. Any errors reported will be resolved with a higher priority. We encourage checking this resource out before the V1 release. Please follow the [migration guide](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/MIGRATION_GUIDE.md#v0990--v01000) to use it. + # {{.Name}} ({{.Type}}) {{ .Description | trimspace }} -!> **Warning** This resource cannot be destroyed!!! The only way to delete accounts is to go through [Snowflake Support](https://docs.snowflake.com/en/user-guide/organizations-manage-accounts.html#deleting-an-account) - -~> **Note** ORGADMIN priviliges are required for this resource +~> **Note** To use this resource you have to use an account with a privilege to use the ORGADMIN role. +{{ if .HasExample -}} ## Example Usage {{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} -> **Note** Instead of using fully_qualified_name, you can reference objects managed outside Terraform by constructing a correct ID, consult [identifiers guide](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs/guides/identifiers#new-computed-fully-qualified-name-field-in-resources). 
+{{- end }} + {{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} ## Import Import is supported using the following syntax: -```shell -terraform import snowflake_account.account -``` +{{ codefile "shell" (printf "examples/resources/%s/import.sh" .Name)}} +{{- end }} From 211ad46223f1bdf03b20cc7a06110bfce18a967e Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 11 Dec 2024 14:31:39 +0100 Subject: [PATCH 2/2] feat: Procedures schemas and generated sources (#3263) Continuation to https://github.com/Snowflake-Labs/terraform-provider-snowflake/pull/3262: - define resource schema for each procedure type - add base definition for each procedure - generate all resource assertions for each procedure resource - generate model builders for each procedure resource - generate docs for the new resources (without examples yet) Next PRs: - basic logic with tests for each resource - complex tests for different fields - migration guide/documentation improvements (examples and limitations) - data sources for functions and procedures --- docs/resources/function_java.md | 8 +- docs/resources/function_javascript.md | 4 +- docs/resources/function_python.md | 8 +- docs/resources/function_scala.md | 8 +- docs/resources/function_sql.md | 4 +- docs/resources/procedure_java.md | 102 +++++ docs/resources/procedure_javascript.md | 85 +++++ docs/resources/procedure_python.md | 101 +++++ docs/resources/procedure_scala.md | 102 +++++ docs/resources/procedure_sql.md | 85 +++++ .../resourceassert/gen/resource_schema_def.go | 20 + .../procedure_java_resource_gen.go | 277 ++++++++++++++ .../procedure_javascript_resource_gen.go | 197 ++++++++++ .../procedure_python_resource_gen.go | 267 +++++++++++++ .../procedure_scala_resource_gen.go | 277 ++++++++++++++ .../procedure_sql_resource_gen.go | 197 ++++++++++ .../config/model/procedure_java_model_ext.go | 16 + .../config/model/procedure_java_model_gen.go | 321 ++++++++++++++++ .../model/procedure_javascript_model_ext.go | 16 + 
.../model/procedure_javascript_model_gen.go | 233 ++++++++++++ .../model/procedure_python_model_ext.go | 16 + .../model/procedure_python_model_gen.go | 310 +++++++++++++++ .../config/model/procedure_scala_model_ext.go | 16 + .../config/model/procedure_scala_model_gen.go | 321 ++++++++++++++++ .../config/model/procedure_sql_model_ext.go | 16 + .../config/model/procedure_sql_model_gen.go | 233 ++++++++++++ pkg/provider/provider.go | 5 + pkg/provider/resources/resources.go | 5 + pkg/resources/function_commons.go | 352 +++++++++--------- pkg/resources/procedure_commons.go | 343 +++++++++++++++++ pkg/resources/procedure_java.go | 52 +++ pkg/resources/procedure_javascript.go | 52 +++ pkg/resources/procedure_python.go | 52 +++ pkg/resources/procedure_scala.go | 52 +++ pkg/resources/procedure_sql.go | 52 +++ pkg/sdk/common_types.go | 17 + pkg/sdk/common_types_test.go | 32 ++ 37 files changed, 4063 insertions(+), 191 deletions(-) create mode 100644 docs/resources/procedure_java.md create mode 100644 docs/resources/procedure_javascript.md create mode 100644 docs/resources/procedure_python.md create mode 100644 docs/resources/procedure_scala.md create mode 100644 docs/resources/procedure_sql.md create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_javascript_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_sql_resource_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go 
create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go create mode 100644 pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_gen.go create mode 100644 pkg/resources/procedure_commons.go create mode 100644 pkg/resources/procedure_java.go create mode 100644 pkg/resources/procedure_javascript.go create mode 100644 pkg/resources/procedure_python.go create mode 100644 pkg/resources/procedure_scala.go create mode 100644 pkg/resources/procedure_sql.go diff --git a/docs/resources/function_java.md b/docs/resources/function_java.md index eb8062232b..23ab3b5dc2 100644 --- a/docs/resources/function_java.md +++ b/docs/resources/function_java.md @@ -17,7 +17,7 @@ Resource used to manage java function objects. For more information, check [func ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Java source code. For more information, see [Introduction to Java UDFs](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction). 
+- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Java source code. For more information, see [Introduction to Java UDFs](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). 
@@ -30,14 +30,14 @@ Resource used to manage java function objects. For more information, check [func - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. - `imports` (Set of String) The location (stage), path, and name of the file(s) to import. A file can be a JAR file or another type of file. If the file is a JAR file, it can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). Java UDFs can also read non-JAR files. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#java). -- `is_secure` (String) Specifies that the function is secure.
By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure views (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. 
- `packages` (Set of String) The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`. - `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. - `runtime_version` (String) Specifies the Java JDK runtime version to use. The supported versions of Java are 11.x and 17.x. If RUNTIME_VERSION is not set, Java JDK 11 is used. -- `secrets` (Block Set) Assigns the names of secrets to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) +- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `target_path` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. 
- `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). @@ -63,7 +63,7 @@ Required: Required: -- `secret_id` (String) Fully qualified name of the allowed secret. You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. +- `secret_id` (String) Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. diff --git a/docs/resources/function_javascript.md b/docs/resources/function_javascript.md index 7925c74c59..2680ff6653 100644 --- a/docs/resources/function_javascript.md +++ b/docs/resources/function_javascript.md @@ -17,7 +17,7 @@ Resource used to manage javascript function objects. For more information, check ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be JavaScript source code. For more information, see [Introduction to JavaScript UDFs](https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction). 
+- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be JavaScript source code. For more information, see [Introduction to JavaScript UDFs](https://docs.snowflake.com/en/developer-guide/udf/javascript/udf-javascript-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to create a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). - `schema` (String) The schema in which to create the function.
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -27,7 +27,7 @@ Resource used to manage javascript function objects. For more information, check - `arguments` (Block List) List of the arguments for the function. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure views (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `is_secure` (String) Specifies that the function is secure.
By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. diff --git a/docs/resources/function_python.md b/docs/resources/function_python.md index 814e7a38de..5f68cfb014 100644 --- a/docs/resources/function_python.md +++ b/docs/resources/function_python.md @@ -17,7 +17,7 @@ Resource used to manage python function objects. For more information, check [fu ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
-- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Python source code. For more information, see [Introduction to Python UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-introduction). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Python source code. For more information, see [Introduction to Python UDFs](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler function or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a function name. If the handler code is in-line with the CREATE FUNCTION statement, you can use the function name alone. When the handler code is referenced at a stage, this value should be qualified with the module name, as in the following form: `my_module.my_function`. If the handler is for a tabular UDF, the HANDLER value should be the name of a handler class. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -32,13 +32,13 @@ Resource used to manage python function objects. For more information, check [fu - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. - `imports` (Set of String) The location (stage), path, and name of the file(s) to import. A file can be a `.py` file or another type of file. Python UDFs can also read non-Python files, such as text files. For an example, see [Reading a file](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-examples.html#label-udf-python-read-files). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#python). - `is_aggregate` (String) Specifies that the function is an aggregate function. 
For more information about user-defined aggregate functions, see [Python user-defined aggregate functions](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-aggregate-functions). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. -- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure views (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). 
- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of packages required as dependencies. The value should be of the form `package_name==version_number`. - `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. -- `secrets` (Block Set) Assigns the names of secrets to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) +- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). 
### Read-Only @@ -63,7 +63,7 @@ Required: Required: -- `secret_id` (String) Fully qualified name of the allowed secret. You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. +- `secret_id` (String) Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. diff --git a/docs/resources/function_scala.md b/docs/resources/function_scala.md index 5fffff6762..9ec48d3866 100644 --- a/docs/resources/function_scala.md +++ b/docs/resources/function_scala.md @@ -17,7 +17,7 @@ Resource used to manage scala function objects. For more information, check [fun ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Scala source code. For more information, see [Introduction to Scala UDFs](https://docs.snowflake.com/en/developer-guide/udf/scala/udf-scala-introduction). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be Scala source code. 
For more information, see [Introduction to Scala UDFs](https://docs.snowflake.com/en/developer-guide/udf/scala/udf-scala-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. - `handler` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. - `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). @@ -31,13 +31,13 @@ Resource used to manage scala function objects. For more information, check [fun - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). 
For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). - `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. - `imports` (Set of String) The location (stage), path, and name of the file(s) to import, such as a JAR or other kind of file. The JAR file might contain handler dependency libraries. It can contain one or more .class files and zero or more resource files. JNI (Java Native Interface) is not supported. Snowflake prohibits loading libraries that contain native code (as opposed to Java bytecode). A non-JAR file might be a file read by handler code. For an example, see [Reading a file specified statically in IMPORTS](https://docs.snowflake.com/en/developer-guide/udf/java/udf-java-cookbook.html#label-reading-file-from-java-udf-imports). Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#scala). -- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure views (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform.
Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). - `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. - `packages` (Set of String) The name and version number of Snowflake system packages required as dependencies. The value should be of the form `package_name:version_number`, where `package_name` is `snowflake_domain:package`. - `return_behavior` (String) Specifies the behavior of the function when returning results. Valid values are (case-insensitive): `VOLATILE` | `IMMUTABLE`. 
-- `secrets` (Block Set) Assigns the names of secrets to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) +- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) - `target_path` (String) The name of the handler method or class. If the handler is for a scalar UDF, returning a non-tabular value, the HANDLER value should be a method name, as in the following form: `MyClass.myMethod`. - `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). @@ -63,7 +63,7 @@ Required: Required: -- `secret_id` (String) Fully qualified name of the allowed secret. You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. +- `secret_id` (String) Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). 
You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. - `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. diff --git a/docs/resources/function_sql.md b/docs/resources/function_sql.md index e37e57514d..80d83727fb 100644 --- a/docs/resources/function_sql.md +++ b/docs/resources/function_sql.md @@ -17,7 +17,7 @@ Resource used to manage sql function objects. For more information, check [funct ### Required - `database` (String) The database in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. -- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be SQL source code. For more information, see [Introduction to SQL UDFs](https://docs.snowflake.com/en/developer-guide/udf/sql/udf-sql-introduction). +- `function_definition` (String) Defines the handler code executed when the UDF is called. Wrapping `$$` signs are added by the provider automatically; do not include them. The `function_definition` value must be SQL source code. For more information, see [Introduction to SQL UDFs](https://docs.snowflake.com/en/developer-guide/udf/sql/udf-sql-introduction). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. 
- `name` (String) The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. - `return_type` (String) Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages). - `schema` (String) The schema in which to create the function. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. @@ -27,7 +27,7 @@ Resource used to manage sql function objects. For more information, check [funct - `arguments` (Block List) List of the arguments for the function. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) - `comment` (String) Specifies a comment for the function. - `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonyous stored procs. This is a public parameter (similar to LOG_LEVEL). 
For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). -- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure views (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `is_secure` (String) Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions. Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. - `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). - `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). 
- `null_input_behavior` (String) Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. diff --git a/docs/resources/procedure_java.md b/docs/resources/procedure_java.md new file mode 100644 index 0000000000..94490ed21e --- /dev/null +++ b/docs/resources/procedure_java.md @@ -0,0 +1,102 @@ +--- +page_title: "snowflake_procedure_java Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage java procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. +--- + +# snowflake_procedure_java (Resource) + +Resource used to manage java procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). + + + + +## Schema + +### Required + +- `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `handler` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Java source code. For more information, see [Java (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Java Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-java-data-type-mappings)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). +- `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 11. +- `schema` (String) The schema in which to create the procedure. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `snowpark_package` (String) The Snowpark package is required for stored procedures, so it must always be present. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). + +### Optional + +- `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) +- `comment` (String) Specifies a comment for the procedure. +- `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`.
+- `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. +- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). 
+- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). +- `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. +- `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) +- `target_path` (String) Specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the `procedure_definition`. +- `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource.
For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `parameters` (List of Object) Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--parameters)) +- `procedure_language` (String) Specifies language for the procedure. Used to detect external changes. +- `show_output` (List of Object) Outputs the result of `SHOW PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `arguments` + +Required: + +- `arg_data_type` (String) The argument type. +- `arg_name` (String) The argument name. + + + +### Nested Schema for `secrets` + +Required: + +- `secret_id` (String) Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. +- `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. 
+ + + +### Nested Schema for `parameters` + +Read-Only: + +- `enable_console_output` (Boolean) +- `log_level` (String) +- `metric_level` (String) +- `trace_level` (String) + + + +### Nested Schema for `show_output` + +Read-Only: + +- `arguments_raw` (String) +- `catalog_name` (String) +- `created_on` (String) +- `description` (String) +- `external_access_integrations` (String) +- `is_aggregate` (Boolean) +- `is_ansi` (Boolean) +- `is_builtin` (Boolean) +- `is_secure` (Boolean) +- `is_table_function` (Boolean) +- `max_num_arguments` (Number) +- `min_num_arguments` (Number) +- `name` (String) +- `schema_name` (String) +- `secrets` (String) +- `valid_for_clustering` (Boolean) diff --git a/docs/resources/procedure_javascript.md b/docs/resources/procedure_javascript.md new file mode 100644 index 0000000000..a562ad589d --- /dev/null +++ b/docs/resources/procedure_javascript.md @@ -0,0 +1,85 @@ +--- +page_title: "snowflake_procedure_javascript Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage javascript procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. +--- + +# snowflake_procedure_javascript (Resource) + +Resource used to manage javascript procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). + + + + +## Schema + +### Required + +- `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
+- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be JavaScript source code. For more information, see [JavaScript](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL and JavaScript data type mapping](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript.html#label-stored-procedure-data-type-mapping)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). 
+- `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. + +### Optional + +- `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) +- `comment` (String) Specifies a comment for the procedure. +- `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure).
Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). +- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). +- `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. +- `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `parameters` (List of Object) Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--parameters)) +- `procedure_language` (String) Specifies language for the procedure. Used to detect external changes. +- `show_output` (List of Object) Outputs the result of `SHOW PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `arguments` + +Required: + +- `arg_data_type` (String) The argument type. +- `arg_name` (String) The argument name. 
+ + + +### Nested Schema for `parameters` + +Read-Only: + +- `enable_console_output` (Boolean) +- `log_level` (String) +- `metric_level` (String) +- `trace_level` (String) + + + +### Nested Schema for `show_output` + +Read-Only: + +- `arguments_raw` (String) +- `catalog_name` (String) +- `created_on` (String) +- `description` (String) +- `external_access_integrations` (String) +- `is_aggregate` (Boolean) +- `is_ansi` (Boolean) +- `is_builtin` (Boolean) +- `is_secure` (Boolean) +- `is_table_function` (Boolean) +- `max_num_arguments` (Number) +- `min_num_arguments` (Number) +- `name` (String) +- `schema_name` (String) +- `secrets` (String) +- `valid_for_clustering` (Boolean) diff --git a/docs/resources/procedure_python.md b/docs/resources/procedure_python.md new file mode 100644 index 0000000000..7b6759ef75 --- /dev/null +++ b/docs/resources/procedure_python.md @@ -0,0 +1,101 @@ +--- +page_title: "snowflake_procedure_python Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage python procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. +--- + +# snowflake_procedure_python (Resource) + +Resource used to manage python procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). + + + + +## Schema + +### Required + +- `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `handler` (String) Use the name of the stored procedure’s function or method. This can differ depending on whether the code is in-line or referenced at a stage. 
When the code is in-line, you can specify just the function name. When the code is imported from a stage, specify the fully-qualified handler function name as `.`. +- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Python source code. For more information, see [Python (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/python/procedure-python-overview). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Python Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-python-data-type-mappings)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. 
Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). +- `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 3.9, 3.10, and 3.11. +- `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `snowpark_package` (String) The Snowpark package is required for stored procedures, so it must always be present. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). + +### Optional + +- `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) +- `comment` (String) Specifies a comment for the procedure. +- `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure.
For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. +- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). +- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). +- `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. +- `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. (see [below for nested schema](#nestedblock--secrets)) +- `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource. 
For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `parameters` (List of Object) Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--parameters)) +- `procedure_language` (String) Specifies language for the procedure. Used to detect external changes. +- `show_output` (List of Object) Outputs the result of `SHOW PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `arguments` + +Required: + +- `arg_data_type` (String) The argument type. +- `arg_name` (String) The argument name. + + + +### Nested Schema for `secrets` + +Required: + +- `secret_id` (String) Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. +- `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. 
+ + + +### Nested Schema for `parameters` + +Read-Only: + +- `enable_console_output` (Boolean) +- `log_level` (String) +- `metric_level` (String) +- `trace_level` (String) + + + +### Nested Schema for `show_output` + +Read-Only: + +- `arguments_raw` (String) +- `catalog_name` (String) +- `created_on` (String) +- `description` (String) +- `external_access_integrations` (String) +- `is_aggregate` (Boolean) +- `is_ansi` (Boolean) +- `is_builtin` (Boolean) +- `is_secure` (Boolean) +- `is_table_function` (Boolean) +- `max_num_arguments` (Number) +- `min_num_arguments` (Number) +- `name` (String) +- `schema_name` (String) +- `secrets` (String) +- `valid_for_clustering` (Boolean) diff --git a/docs/resources/procedure_scala.md b/docs/resources/procedure_scala.md new file mode 100644 index 0000000000..1347bfb5cf --- /dev/null +++ b/docs/resources/procedure_scala.md @@ -0,0 +1,102 @@ +--- +page_title: "snowflake_procedure_scala Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage scala procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. +--- + +# snowflake_procedure_scala (Resource) + +Resource used to manage scala procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). + + + + +## Schema + +### Required + +- `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `handler` (String) Use the fully qualified name of the method or function for the stored procedure. 
This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be Scala source code. For more information, see [Scala (using Snowpark)](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL-Scala Data Type Mappings](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-types-to-scala-types)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... 
] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). +- `runtime_version` (String) The language runtime version to use. Currently, the supported versions are: 2.12. +- `schema` (String) The schema in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `snowpark_package` (String) The Snowpark package is required for stored procedures, so it must always be present. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). + +### Optional + +- `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) +- `comment` (String) Specifies a comment for the procedure. +- `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. 
If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `external_access_integrations` (Set of String) The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API. +- `imports` (Set of String) The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages. +- `is_secure` (String) Specifies that the procedure is secure. 
For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). +- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). +- `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. +- `packages` (Set of String) List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index). +- `secrets` (Block Set) Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter. 
(see [below for nested schema](#nestedblock--secrets)) +- `target_path` (String) Use the fully qualified name of the method or function for the stored procedure. This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`. +- `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `parameters` (List of Object) Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--parameters)) +- `procedure_language` (String) Specifies language for the procedure. Used to detect external changes. +- `show_output` (List of Object) Outputs the result of `SHOW PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `arguments` + +Required: + +- `arg_data_type` (String) The argument type. +- `arg_name` (String) The argument name. + + + +### Nested Schema for `secrets` + +Required: + +- `secret_id` (String) Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter. +- `secret_variable_name` (String) The variable that will be used in handler code when retrieving information from the secret. 
+ + + +### Nested Schema for `parameters` + +Read-Only: + +- `enable_console_output` (Boolean) +- `log_level` (String) +- `metric_level` (String) +- `trace_level` (String) + + + +### Nested Schema for `show_output` + +Read-Only: + +- `arguments_raw` (String) +- `catalog_name` (String) +- `created_on` (String) +- `description` (String) +- `external_access_integrations` (String) +- `is_aggregate` (Boolean) +- `is_ansi` (Boolean) +- `is_builtin` (Boolean) +- `is_secure` (Boolean) +- `is_table_function` (Boolean) +- `max_num_arguments` (Number) +- `min_num_arguments` (Number) +- `name` (String) +- `schema_name` (String) +- `secrets` (String) +- `valid_for_clustering` (Boolean) diff --git a/docs/resources/procedure_sql.md b/docs/resources/procedure_sql.md new file mode 100644 index 0000000000..3b078e3977 --- /dev/null +++ b/docs/resources/procedure_sql.md @@ -0,0 +1,85 @@ +--- +page_title: "snowflake_procedure_sql Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Resource used to manage sql procedure objects. For more information, check procedure documentation https://docs.snowflake.com/en/sql-reference/sql/create-procedure. +--- + +# snowflake_procedure_sql (Resource) + +Resource used to manage sql procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure). + + + + +## Schema + +### Required + +- `database` (String) The database in which to create the procedure. Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. 
+- `name` (String) The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading). Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. +- `procedure_definition` (String) Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be SQL source code. For more information, see [Snowflake Scripting](https://docs.snowflake.com/en/developer-guide/snowflake-scripting/index). To mitigate permadiff on this field, the provider replaces blank characters with a space. This can lead to false positives in cases where a change in case or run of whitespace is semantically significant. +- `return_type` (String) Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [SQL data type](https://docs.snowflake.com/en/sql-reference-data-types)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e. `TABLE ()`). +- `schema` (String) The schema in which to create the procedure. 
Due to technical limitations (read more [here](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/docs/technical-documentation/identifiers_rework_design_decisions.md#known-limitations-and-identifier-recommendations)), avoid using the following characters: `|`, `.`, `"`. + +### Optional + +- `arguments` (Block List) List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details. (see [below for nested schema](#nestedblock--arguments)) +- `comment` (String) Specifies a comment for the procedure. +- `enable_console_output` (Boolean) Enable stdout/stderr fast path logging for anonymous stored procs. This is a public parameter (similar to LOG_LEVEL). For more information, check [ENABLE_CONSOLE_OUTPUT docs](https://docs.snowflake.com/en/sql-reference/parameters#enable-console-output). +- `execute_as` (String) Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): `EXECUTE AS CALLER` | `EXECUTE AS OWNER`. +- `is_secure` (String) Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure). Available options are: "true" or "false". 
When the value is not set in the configuration the provider will put "default" there which means to use the Snowflake default for this value. +- `log_level` (String) LOG_LEVEL to use when filtering events For more information, check [LOG_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#log-level). +- `metric_level` (String) METRIC_LEVEL value to control whether to emit metrics to Event Table For more information, check [METRIC_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#metric-level). +- `null_input_behavior` (String) Specifies the behavior of the procedure when called with null inputs. Valid values are (case-insensitive): `CALLED ON NULL INPUT` | `RETURNS NULL ON NULL INPUT`. +- `trace_level` (String) Trace level value to use when generating/filtering trace events For more information, check [TRACE_LEVEL docs](https://docs.snowflake.com/en/sql-reference/parameters#trace-level). + +### Read-Only + +- `fully_qualified_name` (String) Fully qualified name of the resource. For more information, see [object name resolution](https://docs.snowflake.com/en/sql-reference/name-resolution). +- `id` (String) The ID of this resource. +- `parameters` (List of Object) Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--parameters)) +- `procedure_language` (String) Specifies language for the procedure. Used to detect external changes. +- `show_output` (List of Object) Outputs the result of `SHOW PROCEDURE` for the given procedure. (see [below for nested schema](#nestedatt--show_output)) + + +### Nested Schema for `arguments` + +Required: + +- `arg_data_type` (String) The argument type. +- `arg_name` (String) The argument name. 
+ + + +### Nested Schema for `parameters` + +Read-Only: + +- `enable_console_output` (Boolean) +- `log_level` (String) +- `metric_level` (String) +- `trace_level` (String) + + + +### Nested Schema for `show_output` + +Read-Only: + +- `arguments_raw` (String) +- `catalog_name` (String) +- `created_on` (String) +- `description` (String) +- `external_access_integrations` (String) +- `is_aggregate` (Boolean) +- `is_ansi` (Boolean) +- `is_builtin` (Boolean) +- `is_secure` (Boolean) +- `is_table_function` (Boolean) +- `max_num_arguments` (Number) +- `min_num_arguments` (Number) +- `name` (String) +- `schema_name` (String) +- `secrets` (String) +- `valid_for_clustering` (Boolean) diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go index 44fa5d5490..23d5e9d5d9 100644 --- a/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/gen/resource_schema_def.go @@ -141,4 +141,24 @@ var allResourceSchemaDefs = []ResourceSchemaDef{ name: "FunctionSql", schema: resources.FunctionSql().Schema, }, + { + name: "ProcedureJava", + schema: resources.ProcedureJava().Schema, + }, + { + name: "ProcedureJavascript", + schema: resources.ProcedureJavascript().Schema, + }, + { + name: "ProcedurePython", + schema: resources.ProcedurePython().Schema, + }, + { + name: "ProcedureScala", + schema: resources.ProcedureScala().Schema, + }, + { + name: "ProcedureSql", + schema: resources.ProcedureSql().Schema, + }, } diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_gen.go new file mode 100644 index 0000000000..4330077161 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_java_resource_gen.go @@ -0,0 +1,277 @@ +// Code generated by assertions generator; 
DO NOT EDIT. + +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type ProcedureJavaResourceAssert struct { + *assert.ResourceAssert +} + +func ProcedureJavaResource(t *testing.T, name string) *ProcedureJavaResourceAssert { + t.Helper() + + return &ProcedureJavaResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedProcedureJavaResource(t *testing.T, id string) *ProcedureJavaResourceAssert { + t.Helper() + + return &ProcedureJavaResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (p *ProcedureJavaResourceAssert) HasArgumentsString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("arguments", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasCommentString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("comment", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasDatabaseString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("database", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasEnableConsoleOutputString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("enable_console_output", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasExecuteAsString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("execute_as", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasExternalAccessIntegrationsString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("external_access_integrations", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasFullyQualifiedNameString(expected 
string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasHandlerString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("handler", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasImportsString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("imports", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasIsSecureString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("is_secure", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasLogLevelString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("log_level", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasMetricLevelString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("metric_level", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNameString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("name", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNullInputBehaviorString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("null_input_behavior", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasPackagesString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("packages", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasProcedureDefinitionString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_definition", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasProcedureLanguageString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_language", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) 
HasReturnTypeString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("return_type", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasRuntimeVersionString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("runtime_version", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasSchemaString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("schema", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasSecretsString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("secrets", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasSnowparkPackageString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("snowpark_package", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasTargetPathString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("target_path", expected)) + return p +} + +func (p *ProcedureJavaResourceAssert) HasTraceLevelString(expected string) *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueSet("trace_level", expected)) + return p +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (p *ProcedureJavaResourceAssert) HasNoArguments() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("arguments")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoComment() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("comment")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoDatabase() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("database")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoEnableConsoleOutput() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("enable_console_output")) + return p +} + +func (p *ProcedureJavaResourceAssert) 
HasNoExecuteAs() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("execute_as")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoExternalAccessIntegrations() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("external_access_integrations")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoFullyQualifiedName() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoHandler() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("handler")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoImports() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("imports")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoIsSecure() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("is_secure")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoLogLevel() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("log_level")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoMetricLevel() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("metric_level")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoName() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("name")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoNullInputBehavior() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("null_input_behavior")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoPackages() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("packages")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoProcedureDefinition() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_definition")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoProcedureLanguage() *ProcedureJavaResourceAssert { + 
p.AddAssertion(assert.ValueNotSet("procedure_language")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoReturnType() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("return_type")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoRuntimeVersion() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("runtime_version")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoSchema() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("schema")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoSecrets() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("secrets")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoSnowparkPackage() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("snowpark_package")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoTargetPath() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("target_path")) + return p +} + +func (p *ProcedureJavaResourceAssert) HasNoTraceLevel() *ProcedureJavaResourceAssert { + p.AddAssertion(assert.ValueNotSet("trace_level")) + return p +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_javascript_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_javascript_resource_gen.go new file mode 100644 index 0000000000..2539011ad4 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_javascript_resource_gen.go @@ -0,0 +1,197 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type ProcedureJavascriptResourceAssert struct { + *assert.ResourceAssert +} + +func ProcedureJavascriptResource(t *testing.T, name string) *ProcedureJavascriptResourceAssert { + t.Helper() + + return &ProcedureJavascriptResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedProcedureJavascriptResource(t *testing.T, id string) *ProcedureJavascriptResourceAssert { + t.Helper() + + return &ProcedureJavascriptResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (p *ProcedureJavascriptResourceAssert) HasArgumentsString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("arguments", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasCommentString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("comment", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasDatabaseString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("database", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasEnableConsoleOutputString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("enable_console_output", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasExecuteAsString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("execute_as", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasFullyQualifiedNameString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return 
p +} + +func (p *ProcedureJavascriptResourceAssert) HasIsSecureString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("is_secure", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasLogLevelString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("log_level", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasMetricLevelString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("metric_level", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNameString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("name", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNullInputBehaviorString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("null_input_behavior", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasProcedureDefinitionString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_definition", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasProcedureLanguageString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_language", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasReturnTypeString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("return_type", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasSchemaString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("schema", expected)) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasTraceLevelString(expected string) *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueSet("trace_level", expected)) + return p +} + +//////////////////////////// +// 
Attribute empty checks // +//////////////////////////// + +func (p *ProcedureJavascriptResourceAssert) HasNoArguments() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("arguments")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoComment() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("comment")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoDatabase() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("database")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoEnableConsoleOutput() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("enable_console_output")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoExecuteAs() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("execute_as")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoFullyQualifiedName() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoIsSecure() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("is_secure")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoLogLevel() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("log_level")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoMetricLevel() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("metric_level")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoName() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("name")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoNullInputBehavior() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("null_input_behavior")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoProcedureDefinition() 
*ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_definition")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoProcedureLanguage() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_language")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoReturnType() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("return_type")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoSchema() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("schema")) + return p +} + +func (p *ProcedureJavascriptResourceAssert) HasNoTraceLevel() *ProcedureJavascriptResourceAssert { + p.AddAssertion(assert.ValueNotSet("trace_level")) + return p +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_gen.go new file mode 100644 index 0000000000..119c04708c --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_python_resource_gen.go @@ -0,0 +1,267 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type ProcedurePythonResourceAssert struct { + *assert.ResourceAssert +} + +func ProcedurePythonResource(t *testing.T, name string) *ProcedurePythonResourceAssert { + t.Helper() + + return &ProcedurePythonResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedProcedurePythonResource(t *testing.T, id string) *ProcedurePythonResourceAssert { + t.Helper() + + return &ProcedurePythonResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (p *ProcedurePythonResourceAssert) HasArgumentsString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("arguments", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasCommentString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("comment", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasDatabaseString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("database", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasEnableConsoleOutputString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("enable_console_output", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasExecuteAsString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("execute_as", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasExternalAccessIntegrationsString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("external_access_integrations", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) 
HasFullyQualifiedNameString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasHandlerString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("handler", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasImportsString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("imports", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasIsSecureString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("is_secure", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasLogLevelString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("log_level", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasMetricLevelString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("metric_level", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNameString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("name", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNullInputBehaviorString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("null_input_behavior", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasPackagesString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("packages", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasProcedureDefinitionString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_definition", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasProcedureLanguageString(expected string) *ProcedurePythonResourceAssert { + 
p.AddAssertion(assert.ValueSet("procedure_language", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasReturnTypeString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("return_type", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasRuntimeVersionString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("runtime_version", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasSchemaString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("schema", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasSecretsString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("secrets", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasSnowparkPackageString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("snowpark_package", expected)) + return p +} + +func (p *ProcedurePythonResourceAssert) HasTraceLevelString(expected string) *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueSet("trace_level", expected)) + return p +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (p *ProcedurePythonResourceAssert) HasNoArguments() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("arguments")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoComment() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("comment")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoDatabase() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("database")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoEnableConsoleOutput() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("enable_console_output")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoExecuteAs() 
*ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("execute_as")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoExternalAccessIntegrations() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("external_access_integrations")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoFullyQualifiedName() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoHandler() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("handler")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoImports() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("imports")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoIsSecure() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("is_secure")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoLogLevel() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("log_level")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoMetricLevel() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("metric_level")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoName() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("name")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoNullInputBehavior() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("null_input_behavior")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoPackages() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("packages")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoProcedureDefinition() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_definition")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoProcedureLanguage() 
*ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_language")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoReturnType() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("return_type")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoRuntimeVersion() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("runtime_version")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoSchema() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("schema")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoSecrets() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("secrets")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoSnowparkPackage() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("snowpark_package")) + return p +} + +func (p *ProcedurePythonResourceAssert) HasNoTraceLevel() *ProcedurePythonResourceAssert { + p.AddAssertion(assert.ValueNotSet("trace_level")) + return p +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_gen.go new file mode 100644 index 0000000000..e668adf41e --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_scala_resource_gen.go @@ -0,0 +1,277 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type ProcedureScalaResourceAssert struct { + *assert.ResourceAssert +} + +func ProcedureScalaResource(t *testing.T, name string) *ProcedureScalaResourceAssert { + t.Helper() + + return &ProcedureScalaResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedProcedureScalaResource(t *testing.T, id string) *ProcedureScalaResourceAssert { + t.Helper() + + return &ProcedureScalaResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (p *ProcedureScalaResourceAssert) HasArgumentsString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("arguments", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasCommentString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("comment", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasDatabaseString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("database", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasEnableConsoleOutputString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("enable_console_output", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasExecuteAsString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("execute_as", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasExternalAccessIntegrationsString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("external_access_integrations", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) 
HasFullyQualifiedNameString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasHandlerString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("handler", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasImportsString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("imports", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasIsSecureString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("is_secure", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasLogLevelString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("log_level", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasMetricLevelString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("metric_level", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNameString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("name", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNullInputBehaviorString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("null_input_behavior", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasPackagesString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("packages", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasProcedureDefinitionString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_definition", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasProcedureLanguageString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_language", expected)) + 
return p +} + +func (p *ProcedureScalaResourceAssert) HasReturnTypeString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("return_type", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasRuntimeVersionString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("runtime_version", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasSchemaString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("schema", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasSecretsString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("secrets", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasSnowparkPackageString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("snowpark_package", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasTargetPathString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("target_path", expected)) + return p +} + +func (p *ProcedureScalaResourceAssert) HasTraceLevelString(expected string) *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueSet("trace_level", expected)) + return p +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (p *ProcedureScalaResourceAssert) HasNoArguments() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("arguments")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoComment() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("comment")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoDatabase() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("database")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoEnableConsoleOutput() *ProcedureScalaResourceAssert { + 
p.AddAssertion(assert.ValueNotSet("enable_console_output")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoExecuteAs() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("execute_as")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoExternalAccessIntegrations() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("external_access_integrations")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoFullyQualifiedName() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoHandler() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("handler")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoImports() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("imports")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoIsSecure() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("is_secure")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoLogLevel() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("log_level")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoMetricLevel() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("metric_level")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoName() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("name")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoNullInputBehavior() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("null_input_behavior")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoPackages() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("packages")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoProcedureDefinition() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_definition")) 
+ return p +} + +func (p *ProcedureScalaResourceAssert) HasNoProcedureLanguage() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_language")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoReturnType() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("return_type")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoRuntimeVersion() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("runtime_version")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoSchema() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("schema")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoSecrets() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("secrets")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoSnowparkPackage() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("snowpark_package")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoTargetPath() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("target_path")) + return p +} + +func (p *ProcedureScalaResourceAssert) HasNoTraceLevel() *ProcedureScalaResourceAssert { + p.AddAssertion(assert.ValueNotSet("trace_level")) + return p +} diff --git a/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_sql_resource_gen.go b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_sql_resource_gen.go new file mode 100644 index 0000000000..6beb382c31 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/assert/resourceassert/procedure_sql_resource_gen.go @@ -0,0 +1,197 @@ +// Code generated by assertions generator; DO NOT EDIT. 
+ +package resourceassert + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/assert" +) + +type ProcedureSqlResourceAssert struct { + *assert.ResourceAssert +} + +func ProcedureSqlResource(t *testing.T, name string) *ProcedureSqlResourceAssert { + t.Helper() + + return &ProcedureSqlResourceAssert{ + ResourceAssert: assert.NewResourceAssert(name, "resource"), + } +} + +func ImportedProcedureSqlResource(t *testing.T, id string) *ProcedureSqlResourceAssert { + t.Helper() + + return &ProcedureSqlResourceAssert{ + ResourceAssert: assert.NewImportedResourceAssert(id, "imported resource"), + } +} + +/////////////////////////////////// +// Attribute value string checks // +/////////////////////////////////// + +func (p *ProcedureSqlResourceAssert) HasArgumentsString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("arguments", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasCommentString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("comment", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasDatabaseString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("database", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasEnableConsoleOutputString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("enable_console_output", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasExecuteAsString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("execute_as", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasFullyQualifiedNameString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("fully_qualified_name", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasIsSecureString(expected string) *ProcedureSqlResourceAssert { + 
p.AddAssertion(assert.ValueSet("is_secure", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasLogLevelString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("log_level", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasMetricLevelString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("metric_level", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNameString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("name", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNullInputBehaviorString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("null_input_behavior", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasProcedureDefinitionString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_definition", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasProcedureLanguageString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("procedure_language", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasReturnTypeString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("return_type", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasSchemaString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("schema", expected)) + return p +} + +func (p *ProcedureSqlResourceAssert) HasTraceLevelString(expected string) *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueSet("trace_level", expected)) + return p +} + +//////////////////////////// +// Attribute empty checks // +//////////////////////////// + +func (p *ProcedureSqlResourceAssert) HasNoArguments() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("arguments")) + return p +} + +func (p *ProcedureSqlResourceAssert) 
HasNoComment() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("comment")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoDatabase() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("database")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoEnableConsoleOutput() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("enable_console_output")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoExecuteAs() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("execute_as")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoFullyQualifiedName() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("fully_qualified_name")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoIsSecure() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("is_secure")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoLogLevel() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("log_level")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoMetricLevel() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("metric_level")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoName() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("name")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoNullInputBehavior() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("null_input_behavior")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoProcedureDefinition() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_definition")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoProcedureLanguage() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("procedure_language")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoReturnType() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("return_type")) + 
return p +} + +func (p *ProcedureSqlResourceAssert) HasNoSchema() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("schema")) + return p +} + +func (p *ProcedureSqlResourceAssert) HasNoTraceLevel() *ProcedureSqlResourceAssert { + p.AddAssertion(assert.ValueNotSet("trace_level")) + return p +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go new file mode 100644 index 0000000000..1fa425aa28 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_ext.go @@ -0,0 +1,16 @@ +package model + +import ( + "encoding/json" +) + +func (f *ProcedureJavaModel) MarshalJSON() ([]byte, error) { + type Alias ProcedureJavaModel + return json.Marshal(&struct { + *Alias + DependsOn []string `json:"depends_on,omitempty"` + }{ + Alias: (*Alias)(f), + DependsOn: f.DependsOn(), + }) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go new file mode 100644 index 0000000000..5be880ae22 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_java_model_gen.go @@ -0,0 +1,321 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type ProcedureJavaModel struct { + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + ExecuteAs tfconfig.Variable `json:"execute_as,omitempty"` + ExternalAccessIntegrations tfconfig.Variable `json:"external_access_integrations,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Handler tfconfig.Variable `json:"handler,omitempty"` + Imports tfconfig.Variable `json:"imports,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + Packages tfconfig.Variable `json:"packages,omitempty"` + ProcedureDefinition tfconfig.Variable `json:"procedure_definition,omitempty"` + ProcedureLanguage tfconfig.Variable `json:"procedure_language,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + Secrets tfconfig.Variable `json:"secrets,omitempty"` + SnowparkPackage tfconfig.Variable `json:"snowpark_package,omitempty"` + TargetPath tfconfig.Variable `json:"target_path,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) 
// +///////////////////////////////////////////////// + +func ProcedureJava( + resourceName string, + database string, + handler string, + name string, + procedureDefinition string, + returnType string, + runtimeVersion string, + schema string, + snowparkPackage string, +) *ProcedureJavaModel { + p := &ProcedureJavaModel{ResourceModelMeta: config.Meta(resourceName, resources.ProcedureJava)} + p.WithDatabase(database) + p.WithHandler(handler) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithRuntimeVersion(runtimeVersion) + p.WithSchema(schema) + p.WithSnowparkPackage(snowparkPackage) + return p +} + +func ProcedureJavaWithDefaultMeta( + database string, + handler string, + name string, + procedureDefinition string, + returnType string, + runtimeVersion string, + schema string, + snowparkPackage string, +) *ProcedureJavaModel { + p := &ProcedureJavaModel{ResourceModelMeta: config.DefaultMeta(resources.ProcedureJava)} + p.WithDatabase(database) + p.WithHandler(handler) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithRuntimeVersion(runtimeVersion) + p.WithSchema(schema) + p.WithSnowparkPackage(snowparkPackage) + return p +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// arguments attribute type is not yet supported, so WithArguments can't be generated + +func (p *ProcedureJavaModel) WithComment(comment string) *ProcedureJavaModel { + p.Comment = tfconfig.StringVariable(comment) + return p +} + +func (p *ProcedureJavaModel) WithDatabase(database string) *ProcedureJavaModel { + p.Database = tfconfig.StringVariable(database) + return p +} + +func (p *ProcedureJavaModel) WithEnableConsoleOutput(enableConsoleOutput bool) *ProcedureJavaModel { + p.EnableConsoleOutput = tfconfig.BoolVariable(enableConsoleOutput) + return p +} + +func (p *ProcedureJavaModel) WithExecuteAs(executeAs string) 
*ProcedureJavaModel { + p.ExecuteAs = tfconfig.StringVariable(executeAs) + return p +} + +// external_access_integrations attribute type is not yet supported, so WithExternalAccessIntegrations can't be generated + +func (p *ProcedureJavaModel) WithFullyQualifiedName(fullyQualifiedName string) *ProcedureJavaModel { + p.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return p +} + +func (p *ProcedureJavaModel) WithHandler(handler string) *ProcedureJavaModel { + p.Handler = tfconfig.StringVariable(handler) + return p +} + +// imports attribute type is not yet supported, so WithImports can't be generated + +func (p *ProcedureJavaModel) WithIsSecure(isSecure string) *ProcedureJavaModel { + p.IsSecure = tfconfig.StringVariable(isSecure) + return p +} + +func (p *ProcedureJavaModel) WithLogLevel(logLevel string) *ProcedureJavaModel { + p.LogLevel = tfconfig.StringVariable(logLevel) + return p +} + +func (p *ProcedureJavaModel) WithMetricLevel(metricLevel string) *ProcedureJavaModel { + p.MetricLevel = tfconfig.StringVariable(metricLevel) + return p +} + +func (p *ProcedureJavaModel) WithName(name string) *ProcedureJavaModel { + p.Name = tfconfig.StringVariable(name) + return p +} + +func (p *ProcedureJavaModel) WithNullInputBehavior(nullInputBehavior string) *ProcedureJavaModel { + p.NullInputBehavior = tfconfig.StringVariable(nullInputBehavior) + return p +} + +// packages attribute type is not yet supported, so WithPackages can't be generated + +func (p *ProcedureJavaModel) WithProcedureDefinition(procedureDefinition string) *ProcedureJavaModel { + p.ProcedureDefinition = tfconfig.StringVariable(procedureDefinition) + return p +} + +func (p *ProcedureJavaModel) WithProcedureLanguage(procedureLanguage string) *ProcedureJavaModel { + p.ProcedureLanguage = tfconfig.StringVariable(procedureLanguage) + return p +} + +func (p *ProcedureJavaModel) WithReturnType(returnType string) *ProcedureJavaModel { + p.ReturnType = tfconfig.StringVariable(returnType) + 
return p +} + +func (p *ProcedureJavaModel) WithRuntimeVersion(runtimeVersion string) *ProcedureJavaModel { + p.RuntimeVersion = tfconfig.StringVariable(runtimeVersion) + return p +} + +func (p *ProcedureJavaModel) WithSchema(schema string) *ProcedureJavaModel { + p.Schema = tfconfig.StringVariable(schema) + return p +} + +// secrets attribute type is not yet supported, so WithSecrets can't be generated + +func (p *ProcedureJavaModel) WithSnowparkPackage(snowparkPackage string) *ProcedureJavaModel { + p.SnowparkPackage = tfconfig.StringVariable(snowparkPackage) + return p +} + +func (p *ProcedureJavaModel) WithTargetPath(targetPath string) *ProcedureJavaModel { + p.TargetPath = tfconfig.StringVariable(targetPath) + return p +} + +func (p *ProcedureJavaModel) WithTraceLevel(traceLevel string) *ProcedureJavaModel { + p.TraceLevel = tfconfig.StringVariable(traceLevel) + return p +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (p *ProcedureJavaModel) WithArgumentsValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Arguments = value + return p +} + +func (p *ProcedureJavaModel) WithCommentValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Comment = value + return p +} + +func (p *ProcedureJavaModel) WithDatabaseValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Database = value + return p +} + +func (p *ProcedureJavaModel) WithEnableConsoleOutputValue(value tfconfig.Variable) *ProcedureJavaModel { + p.EnableConsoleOutput = value + return p +} + +func (p *ProcedureJavaModel) WithExecuteAsValue(value tfconfig.Variable) *ProcedureJavaModel { + p.ExecuteAs = value + return p +} + +func (p *ProcedureJavaModel) WithExternalAccessIntegrationsValue(value tfconfig.Variable) *ProcedureJavaModel { + p.ExternalAccessIntegrations = value + return p +} + +func (p *ProcedureJavaModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *ProcedureJavaModel { + 
p.FullyQualifiedName = value + return p +} + +func (p *ProcedureJavaModel) WithHandlerValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Handler = value + return p +} + +func (p *ProcedureJavaModel) WithImportsValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Imports = value + return p +} + +func (p *ProcedureJavaModel) WithIsSecureValue(value tfconfig.Variable) *ProcedureJavaModel { + p.IsSecure = value + return p +} + +func (p *ProcedureJavaModel) WithLogLevelValue(value tfconfig.Variable) *ProcedureJavaModel { + p.LogLevel = value + return p +} + +func (p *ProcedureJavaModel) WithMetricLevelValue(value tfconfig.Variable) *ProcedureJavaModel { + p.MetricLevel = value + return p +} + +func (p *ProcedureJavaModel) WithNameValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Name = value + return p +} + +func (p *ProcedureJavaModel) WithNullInputBehaviorValue(value tfconfig.Variable) *ProcedureJavaModel { + p.NullInputBehavior = value + return p +} + +func (p *ProcedureJavaModel) WithPackagesValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Packages = value + return p +} + +func (p *ProcedureJavaModel) WithProcedureDefinitionValue(value tfconfig.Variable) *ProcedureJavaModel { + p.ProcedureDefinition = value + return p +} + +func (p *ProcedureJavaModel) WithProcedureLanguageValue(value tfconfig.Variable) *ProcedureJavaModel { + p.ProcedureLanguage = value + return p +} + +func (p *ProcedureJavaModel) WithReturnTypeValue(value tfconfig.Variable) *ProcedureJavaModel { + p.ReturnType = value + return p +} + +func (p *ProcedureJavaModel) WithRuntimeVersionValue(value tfconfig.Variable) *ProcedureJavaModel { + p.RuntimeVersion = value + return p +} + +func (p *ProcedureJavaModel) WithSchemaValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Schema = value + return p +} + +func (p *ProcedureJavaModel) WithSecretsValue(value tfconfig.Variable) *ProcedureJavaModel { + p.Secrets = value + return p +} + +func (p *ProcedureJavaModel) 
WithSnowparkPackageValue(value tfconfig.Variable) *ProcedureJavaModel { + p.SnowparkPackage = value + return p +} + +func (p *ProcedureJavaModel) WithTargetPathValue(value tfconfig.Variable) *ProcedureJavaModel { + p.TargetPath = value + return p +} + +func (p *ProcedureJavaModel) WithTraceLevelValue(value tfconfig.Variable) *ProcedureJavaModel { + p.TraceLevel = value + return p +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go new file mode 100644 index 0000000000..548259aa97 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_ext.go @@ -0,0 +1,16 @@ +package model + +import ( + "encoding/json" +) + +func (f *ProcedureJavascriptModel) MarshalJSON() ([]byte, error) { + type Alias ProcedureJavascriptModel + return json.Marshal(&struct { + *Alias + DependsOn []string `json:"depends_on,omitempty"` + }{ + Alias: (*Alias)(f), + DependsOn: f.DependsOn(), + }) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_gen.go new file mode 100644 index 0000000000..c90c47d6fb --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_javascript_model_gen.go @@ -0,0 +1,233 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type ProcedureJavascriptModel struct { + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + ExecuteAs tfconfig.Variable `json:"execute_as,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + ProcedureDefinition tfconfig.Variable `json:"procedure_definition,omitempty"` + ProcedureLanguage tfconfig.Variable `json:"procedure_language,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func ProcedureJavascript( + resourceName string, + database string, + name string, + procedureDefinition string, + returnType string, + schema string, +) *ProcedureJavascriptModel { + p := &ProcedureJavascriptModel{ResourceModelMeta: config.Meta(resourceName, resources.ProcedureJavascript)} + p.WithDatabase(database) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithSchema(schema) + return p +} + 
+func ProcedureJavascriptWithDefaultMeta( + database string, + name string, + procedureDefinition string, + returnType string, + schema string, +) *ProcedureJavascriptModel { + p := &ProcedureJavascriptModel{ResourceModelMeta: config.DefaultMeta(resources.ProcedureJavascript)} + p.WithDatabase(database) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithSchema(schema) + return p +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// arguments attribute type is not yet supported, so WithArguments can't be generated + +func (p *ProcedureJavascriptModel) WithComment(comment string) *ProcedureJavascriptModel { + p.Comment = tfconfig.StringVariable(comment) + return p +} + +func (p *ProcedureJavascriptModel) WithDatabase(database string) *ProcedureJavascriptModel { + p.Database = tfconfig.StringVariable(database) + return p +} + +func (p *ProcedureJavascriptModel) WithEnableConsoleOutput(enableConsoleOutput bool) *ProcedureJavascriptModel { + p.EnableConsoleOutput = tfconfig.BoolVariable(enableConsoleOutput) + return p +} + +func (p *ProcedureJavascriptModel) WithExecuteAs(executeAs string) *ProcedureJavascriptModel { + p.ExecuteAs = tfconfig.StringVariable(executeAs) + return p +} + +func (p *ProcedureJavascriptModel) WithFullyQualifiedName(fullyQualifiedName string) *ProcedureJavascriptModel { + p.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return p +} + +func (p *ProcedureJavascriptModel) WithIsSecure(isSecure string) *ProcedureJavascriptModel { + p.IsSecure = tfconfig.StringVariable(isSecure) + return p +} + +func (p *ProcedureJavascriptModel) WithLogLevel(logLevel string) *ProcedureJavascriptModel { + p.LogLevel = tfconfig.StringVariable(logLevel) + return p +} + +func (p *ProcedureJavascriptModel) WithMetricLevel(metricLevel string) *ProcedureJavascriptModel { + p.MetricLevel = tfconfig.StringVariable(metricLevel) + return p 
+} + +func (p *ProcedureJavascriptModel) WithName(name string) *ProcedureJavascriptModel { + p.Name = tfconfig.StringVariable(name) + return p +} + +func (p *ProcedureJavascriptModel) WithNullInputBehavior(nullInputBehavior string) *ProcedureJavascriptModel { + p.NullInputBehavior = tfconfig.StringVariable(nullInputBehavior) + return p +} + +func (p *ProcedureJavascriptModel) WithProcedureDefinition(procedureDefinition string) *ProcedureJavascriptModel { + p.ProcedureDefinition = tfconfig.StringVariable(procedureDefinition) + return p +} + +func (p *ProcedureJavascriptModel) WithProcedureLanguage(procedureLanguage string) *ProcedureJavascriptModel { + p.ProcedureLanguage = tfconfig.StringVariable(procedureLanguage) + return p +} + +func (p *ProcedureJavascriptModel) WithReturnType(returnType string) *ProcedureJavascriptModel { + p.ReturnType = tfconfig.StringVariable(returnType) + return p +} + +func (p *ProcedureJavascriptModel) WithSchema(schema string) *ProcedureJavascriptModel { + p.Schema = tfconfig.StringVariable(schema) + return p +} + +func (p *ProcedureJavascriptModel) WithTraceLevel(traceLevel string) *ProcedureJavascriptModel { + p.TraceLevel = tfconfig.StringVariable(traceLevel) + return p +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (p *ProcedureJavascriptModel) WithArgumentsValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.Arguments = value + return p +} + +func (p *ProcedureJavascriptModel) WithCommentValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.Comment = value + return p +} + +func (p *ProcedureJavascriptModel) WithDatabaseValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.Database = value + return p +} + +func (p *ProcedureJavascriptModel) WithEnableConsoleOutputValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.EnableConsoleOutput = value + return p +} + +func (p *ProcedureJavascriptModel) 
WithExecuteAsValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.ExecuteAs = value + return p +} + +func (p *ProcedureJavascriptModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.FullyQualifiedName = value + return p +} + +func (p *ProcedureJavascriptModel) WithIsSecureValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.IsSecure = value + return p +} + +func (p *ProcedureJavascriptModel) WithLogLevelValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.LogLevel = value + return p +} + +func (p *ProcedureJavascriptModel) WithMetricLevelValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.MetricLevel = value + return p +} + +func (p *ProcedureJavascriptModel) WithNameValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.Name = value + return p +} + +func (p *ProcedureJavascriptModel) WithNullInputBehaviorValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.NullInputBehavior = value + return p +} + +func (p *ProcedureJavascriptModel) WithProcedureDefinitionValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.ProcedureDefinition = value + return p +} + +func (p *ProcedureJavascriptModel) WithProcedureLanguageValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.ProcedureLanguage = value + return p +} + +func (p *ProcedureJavascriptModel) WithReturnTypeValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.ReturnType = value + return p +} + +func (p *ProcedureJavascriptModel) WithSchemaValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.Schema = value + return p +} + +func (p *ProcedureJavascriptModel) WithTraceLevelValue(value tfconfig.Variable) *ProcedureJavascriptModel { + p.TraceLevel = value + return p +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go new file mode 100644 index 0000000000..1bff75bcc2 --- /dev/null 
+++ b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_ext.go @@ -0,0 +1,16 @@ +package model + +import ( + "encoding/json" +) + +func (f *ProcedurePythonModel) MarshalJSON() ([]byte, error) { + type Alias ProcedurePythonModel + return json.Marshal(&struct { + *Alias + DependsOn []string `json:"depends_on,omitempty"` + }{ + Alias: (*Alias)(f), + DependsOn: f.DependsOn(), + }) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go new file mode 100644 index 0000000000..dfe2801f00 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_python_model_gen.go @@ -0,0 +1,310 @@ +// Code generated by config model builder generator; DO NOT EDIT. + +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type ProcedurePythonModel struct { + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + ExecuteAs tfconfig.Variable `json:"execute_as,omitempty"` + ExternalAccessIntegrations tfconfig.Variable `json:"external_access_integrations,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Handler tfconfig.Variable `json:"handler,omitempty"` + Imports tfconfig.Variable `json:"imports,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable 
`json:"null_input_behavior,omitempty"` + Packages tfconfig.Variable `json:"packages,omitempty"` + ProcedureDefinition tfconfig.Variable `json:"procedure_definition,omitempty"` + ProcedureLanguage tfconfig.Variable `json:"procedure_language,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + Secrets tfconfig.Variable `json:"secrets,omitempty"` + SnowparkPackage tfconfig.Variable `json:"snowpark_package,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func ProcedurePython( + resourceName string, + database string, + handler string, + name string, + procedureDefinition string, + returnType string, + runtimeVersion string, + schema string, + snowparkPackage string, +) *ProcedurePythonModel { + p := &ProcedurePythonModel{ResourceModelMeta: config.Meta(resourceName, resources.ProcedurePython)} + p.WithDatabase(database) + p.WithHandler(handler) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithRuntimeVersion(runtimeVersion) + p.WithSchema(schema) + p.WithSnowparkPackage(snowparkPackage) + return p +} + +func ProcedurePythonWithDefaultMeta( + database string, + handler string, + name string, + procedureDefinition string, + returnType string, + runtimeVersion string, + schema string, + snowparkPackage string, +) *ProcedurePythonModel { + p := &ProcedurePythonModel{ResourceModelMeta: config.DefaultMeta(resources.ProcedurePython)} + p.WithDatabase(database) + p.WithHandler(handler) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithRuntimeVersion(runtimeVersion) + p.WithSchema(schema) + 
p.WithSnowparkPackage(snowparkPackage) + return p +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// arguments attribute type is not yet supported, so WithArguments can't be generated + +func (p *ProcedurePythonModel) WithComment(comment string) *ProcedurePythonModel { + p.Comment = tfconfig.StringVariable(comment) + return p +} + +func (p *ProcedurePythonModel) WithDatabase(database string) *ProcedurePythonModel { + p.Database = tfconfig.StringVariable(database) + return p +} + +func (p *ProcedurePythonModel) WithEnableConsoleOutput(enableConsoleOutput bool) *ProcedurePythonModel { + p.EnableConsoleOutput = tfconfig.BoolVariable(enableConsoleOutput) + return p +} + +func (p *ProcedurePythonModel) WithExecuteAs(executeAs string) *ProcedurePythonModel { + p.ExecuteAs = tfconfig.StringVariable(executeAs) + return p +} + +// external_access_integrations attribute type is not yet supported, so WithExternalAccessIntegrations can't be generated + +func (p *ProcedurePythonModel) WithFullyQualifiedName(fullyQualifiedName string) *ProcedurePythonModel { + p.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return p +} + +func (p *ProcedurePythonModel) WithHandler(handler string) *ProcedurePythonModel { + p.Handler = tfconfig.StringVariable(handler) + return p +} + +// imports attribute type is not yet supported, so WithImports can't be generated + +func (p *ProcedurePythonModel) WithIsSecure(isSecure string) *ProcedurePythonModel { + p.IsSecure = tfconfig.StringVariable(isSecure) + return p +} + +func (p *ProcedurePythonModel) WithLogLevel(logLevel string) *ProcedurePythonModel { + p.LogLevel = tfconfig.StringVariable(logLevel) + return p +} + +func (p *ProcedurePythonModel) WithMetricLevel(metricLevel string) *ProcedurePythonModel { + p.MetricLevel = tfconfig.StringVariable(metricLevel) + return p +} + +func (p *ProcedurePythonModel) WithName(name string) *ProcedurePythonModel { + p.Name = 
tfconfig.StringVariable(name) + return p +} + +func (p *ProcedurePythonModel) WithNullInputBehavior(nullInputBehavior string) *ProcedurePythonModel { + p.NullInputBehavior = tfconfig.StringVariable(nullInputBehavior) + return p +} + +// packages attribute type is not yet supported, so WithPackages can't be generated + +func (p *ProcedurePythonModel) WithProcedureDefinition(procedureDefinition string) *ProcedurePythonModel { + p.ProcedureDefinition = tfconfig.StringVariable(procedureDefinition) + return p +} + +func (p *ProcedurePythonModel) WithProcedureLanguage(procedureLanguage string) *ProcedurePythonModel { + p.ProcedureLanguage = tfconfig.StringVariable(procedureLanguage) + return p +} + +func (p *ProcedurePythonModel) WithReturnType(returnType string) *ProcedurePythonModel { + p.ReturnType = tfconfig.StringVariable(returnType) + return p +} + +func (p *ProcedurePythonModel) WithRuntimeVersion(runtimeVersion string) *ProcedurePythonModel { + p.RuntimeVersion = tfconfig.StringVariable(runtimeVersion) + return p +} + +func (p *ProcedurePythonModel) WithSchema(schema string) *ProcedurePythonModel { + p.Schema = tfconfig.StringVariable(schema) + return p +} + +// secrets attribute type is not yet supported, so WithSecrets can't be generated + +func (p *ProcedurePythonModel) WithSnowparkPackage(snowparkPackage string) *ProcedurePythonModel { + p.SnowparkPackage = tfconfig.StringVariable(snowparkPackage) + return p +} + +func (p *ProcedurePythonModel) WithTraceLevel(traceLevel string) *ProcedurePythonModel { + p.TraceLevel = tfconfig.StringVariable(traceLevel) + return p +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (p *ProcedurePythonModel) WithArgumentsValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Arguments = value + return p +} + +func (p *ProcedurePythonModel) WithCommentValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Comment = value + 
return p +} + +func (p *ProcedurePythonModel) WithDatabaseValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Database = value + return p +} + +func (p *ProcedurePythonModel) WithEnableConsoleOutputValue(value tfconfig.Variable) *ProcedurePythonModel { + p.EnableConsoleOutput = value + return p +} + +func (p *ProcedurePythonModel) WithExecuteAsValue(value tfconfig.Variable) *ProcedurePythonModel { + p.ExecuteAs = value + return p +} + +func (p *ProcedurePythonModel) WithExternalAccessIntegrationsValue(value tfconfig.Variable) *ProcedurePythonModel { + p.ExternalAccessIntegrations = value + return p +} + +func (p *ProcedurePythonModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *ProcedurePythonModel { + p.FullyQualifiedName = value + return p +} + +func (p *ProcedurePythonModel) WithHandlerValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Handler = value + return p +} + +func (p *ProcedurePythonModel) WithImportsValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Imports = value + return p +} + +func (p *ProcedurePythonModel) WithIsSecureValue(value tfconfig.Variable) *ProcedurePythonModel { + p.IsSecure = value + return p +} + +func (p *ProcedurePythonModel) WithLogLevelValue(value tfconfig.Variable) *ProcedurePythonModel { + p.LogLevel = value + return p +} + +func (p *ProcedurePythonModel) WithMetricLevelValue(value tfconfig.Variable) *ProcedurePythonModel { + p.MetricLevel = value + return p +} + +func (p *ProcedurePythonModel) WithNameValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Name = value + return p +} + +func (p *ProcedurePythonModel) WithNullInputBehaviorValue(value tfconfig.Variable) *ProcedurePythonModel { + p.NullInputBehavior = value + return p +} + +func (p *ProcedurePythonModel) WithPackagesValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Packages = value + return p +} + +func (p *ProcedurePythonModel) WithProcedureDefinitionValue(value tfconfig.Variable) *ProcedurePythonModel { + 
p.ProcedureDefinition = value + return p +} + +func (p *ProcedurePythonModel) WithProcedureLanguageValue(value tfconfig.Variable) *ProcedurePythonModel { + p.ProcedureLanguage = value + return p +} + +func (p *ProcedurePythonModel) WithReturnTypeValue(value tfconfig.Variable) *ProcedurePythonModel { + p.ReturnType = value + return p +} + +func (p *ProcedurePythonModel) WithRuntimeVersionValue(value tfconfig.Variable) *ProcedurePythonModel { + p.RuntimeVersion = value + return p +} + +func (p *ProcedurePythonModel) WithSchemaValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Schema = value + return p +} + +func (p *ProcedurePythonModel) WithSecretsValue(value tfconfig.Variable) *ProcedurePythonModel { + p.Secrets = value + return p +} + +func (p *ProcedurePythonModel) WithSnowparkPackageValue(value tfconfig.Variable) *ProcedurePythonModel { + p.SnowparkPackage = value + return p +} + +func (p *ProcedurePythonModel) WithTraceLevelValue(value tfconfig.Variable) *ProcedurePythonModel { + p.TraceLevel = value + return p +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go new file mode 100644 index 0000000000..b7434a4250 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_ext.go @@ -0,0 +1,16 @@ +package model + +import ( + "encoding/json" +) + +func (f *ProcedureScalaModel) MarshalJSON() ([]byte, error) { + type Alias ProcedureScalaModel + return json.Marshal(&struct { + *Alias + DependsOn []string `json:"depends_on,omitempty"` + }{ + Alias: (*Alias)(f), + DependsOn: f.DependsOn(), + }) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go new file mode 100644 index 0000000000..01ff2f1107 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_scala_model_gen.go @@ -0,0 +1,321 @@ +// Code generated 
by config model builder generator; DO NOT EDIT. + +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type ProcedureScalaModel struct { + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + ExecuteAs tfconfig.Variable `json:"execute_as,omitempty"` + ExternalAccessIntegrations tfconfig.Variable `json:"external_access_integrations,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + Handler tfconfig.Variable `json:"handler,omitempty"` + Imports tfconfig.Variable `json:"imports,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + Packages tfconfig.Variable `json:"packages,omitempty"` + ProcedureDefinition tfconfig.Variable `json:"procedure_definition,omitempty"` + ProcedureLanguage tfconfig.Variable `json:"procedure_language,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + RuntimeVersion tfconfig.Variable `json:"runtime_version,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + Secrets tfconfig.Variable `json:"secrets,omitempty"` + SnowparkPackage tfconfig.Variable `json:"snowpark_package,omitempty"` + TargetPath tfconfig.Variable `json:"target_path,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// 
+// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func ProcedureScala( + resourceName string, + database string, + handler string, + name string, + procedureDefinition string, + returnType string, + runtimeVersion string, + schema string, + snowparkPackage string, +) *ProcedureScalaModel { + p := &ProcedureScalaModel{ResourceModelMeta: config.Meta(resourceName, resources.ProcedureScala)} + p.WithDatabase(database) + p.WithHandler(handler) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithRuntimeVersion(runtimeVersion) + p.WithSchema(schema) + p.WithSnowparkPackage(snowparkPackage) + return p +} + +func ProcedureScalaWithDefaultMeta( + database string, + handler string, + name string, + procedureDefinition string, + returnType string, + runtimeVersion string, + schema string, + snowparkPackage string, +) *ProcedureScalaModel { + p := &ProcedureScalaModel{ResourceModelMeta: config.DefaultMeta(resources.ProcedureScala)} + p.WithDatabase(database) + p.WithHandler(handler) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithRuntimeVersion(runtimeVersion) + p.WithSchema(schema) + p.WithSnowparkPackage(snowparkPackage) + return p +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// arguments attribute type is not yet supported, so WithArguments can't be generated + +func (p *ProcedureScalaModel) WithComment(comment string) *ProcedureScalaModel { + p.Comment = tfconfig.StringVariable(comment) + return p +} + +func (p *ProcedureScalaModel) WithDatabase(database string) *ProcedureScalaModel { + p.Database = tfconfig.StringVariable(database) + return p +} + +func (p *ProcedureScalaModel) WithEnableConsoleOutput(enableConsoleOutput bool) *ProcedureScalaModel { + p.EnableConsoleOutput = tfconfig.BoolVariable(enableConsoleOutput) + return p +} + +func 
(p *ProcedureScalaModel) WithExecuteAs(executeAs string) *ProcedureScalaModel { + p.ExecuteAs = tfconfig.StringVariable(executeAs) + return p +} + +// external_access_integrations attribute type is not yet supported, so WithExternalAccessIntegrations can't be generated + +func (p *ProcedureScalaModel) WithFullyQualifiedName(fullyQualifiedName string) *ProcedureScalaModel { + p.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return p +} + +func (p *ProcedureScalaModel) WithHandler(handler string) *ProcedureScalaModel { + p.Handler = tfconfig.StringVariable(handler) + return p +} + +// imports attribute type is not yet supported, so WithImports can't be generated + +func (p *ProcedureScalaModel) WithIsSecure(isSecure string) *ProcedureScalaModel { + p.IsSecure = tfconfig.StringVariable(isSecure) + return p +} + +func (p *ProcedureScalaModel) WithLogLevel(logLevel string) *ProcedureScalaModel { + p.LogLevel = tfconfig.StringVariable(logLevel) + return p +} + +func (p *ProcedureScalaModel) WithMetricLevel(metricLevel string) *ProcedureScalaModel { + p.MetricLevel = tfconfig.StringVariable(metricLevel) + return p +} + +func (p *ProcedureScalaModel) WithName(name string) *ProcedureScalaModel { + p.Name = tfconfig.StringVariable(name) + return p +} + +func (p *ProcedureScalaModel) WithNullInputBehavior(nullInputBehavior string) *ProcedureScalaModel { + p.NullInputBehavior = tfconfig.StringVariable(nullInputBehavior) + return p +} + +// packages attribute type is not yet supported, so WithPackages can't be generated + +func (p *ProcedureScalaModel) WithProcedureDefinition(procedureDefinition string) *ProcedureScalaModel { + p.ProcedureDefinition = tfconfig.StringVariable(procedureDefinition) + return p +} + +func (p *ProcedureScalaModel) WithProcedureLanguage(procedureLanguage string) *ProcedureScalaModel { + p.ProcedureLanguage = tfconfig.StringVariable(procedureLanguage) + return p +} + +func (p *ProcedureScalaModel) WithReturnType(returnType string) 
*ProcedureScalaModel { + p.ReturnType = tfconfig.StringVariable(returnType) + return p +} + +func (p *ProcedureScalaModel) WithRuntimeVersion(runtimeVersion string) *ProcedureScalaModel { + p.RuntimeVersion = tfconfig.StringVariable(runtimeVersion) + return p +} + +func (p *ProcedureScalaModel) WithSchema(schema string) *ProcedureScalaModel { + p.Schema = tfconfig.StringVariable(schema) + return p +} + +// secrets attribute type is not yet supported, so WithSecrets can't be generated + +func (p *ProcedureScalaModel) WithSnowparkPackage(snowparkPackage string) *ProcedureScalaModel { + p.SnowparkPackage = tfconfig.StringVariable(snowparkPackage) + return p +} + +func (p *ProcedureScalaModel) WithTargetPath(targetPath string) *ProcedureScalaModel { + p.TargetPath = tfconfig.StringVariable(targetPath) + return p +} + +func (p *ProcedureScalaModel) WithTraceLevel(traceLevel string) *ProcedureScalaModel { + p.TraceLevel = tfconfig.StringVariable(traceLevel) + return p +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (p *ProcedureScalaModel) WithArgumentsValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Arguments = value + return p +} + +func (p *ProcedureScalaModel) WithCommentValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Comment = value + return p +} + +func (p *ProcedureScalaModel) WithDatabaseValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Database = value + return p +} + +func (p *ProcedureScalaModel) WithEnableConsoleOutputValue(value tfconfig.Variable) *ProcedureScalaModel { + p.EnableConsoleOutput = value + return p +} + +func (p *ProcedureScalaModel) WithExecuteAsValue(value tfconfig.Variable) *ProcedureScalaModel { + p.ExecuteAs = value + return p +} + +func (p *ProcedureScalaModel) WithExternalAccessIntegrationsValue(value tfconfig.Variable) *ProcedureScalaModel { + p.ExternalAccessIntegrations = value + return p +} + +func (p 
*ProcedureScalaModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *ProcedureScalaModel { + p.FullyQualifiedName = value + return p +} + +func (p *ProcedureScalaModel) WithHandlerValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Handler = value + return p +} + +func (p *ProcedureScalaModel) WithImportsValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Imports = value + return p +} + +func (p *ProcedureScalaModel) WithIsSecureValue(value tfconfig.Variable) *ProcedureScalaModel { + p.IsSecure = value + return p +} + +func (p *ProcedureScalaModel) WithLogLevelValue(value tfconfig.Variable) *ProcedureScalaModel { + p.LogLevel = value + return p +} + +func (p *ProcedureScalaModel) WithMetricLevelValue(value tfconfig.Variable) *ProcedureScalaModel { + p.MetricLevel = value + return p +} + +func (p *ProcedureScalaModel) WithNameValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Name = value + return p +} + +func (p *ProcedureScalaModel) WithNullInputBehaviorValue(value tfconfig.Variable) *ProcedureScalaModel { + p.NullInputBehavior = value + return p +} + +func (p *ProcedureScalaModel) WithPackagesValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Packages = value + return p +} + +func (p *ProcedureScalaModel) WithProcedureDefinitionValue(value tfconfig.Variable) *ProcedureScalaModel { + p.ProcedureDefinition = value + return p +} + +func (p *ProcedureScalaModel) WithProcedureLanguageValue(value tfconfig.Variable) *ProcedureScalaModel { + p.ProcedureLanguage = value + return p +} + +func (p *ProcedureScalaModel) WithReturnTypeValue(value tfconfig.Variable) *ProcedureScalaModel { + p.ReturnType = value + return p +} + +func (p *ProcedureScalaModel) WithRuntimeVersionValue(value tfconfig.Variable) *ProcedureScalaModel { + p.RuntimeVersion = value + return p +} + +func (p *ProcedureScalaModel) WithSchemaValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Schema = value + return p +} + +func (p *ProcedureScalaModel) 
WithSecretsValue(value tfconfig.Variable) *ProcedureScalaModel { + p.Secrets = value + return p +} + +func (p *ProcedureScalaModel) WithSnowparkPackageValue(value tfconfig.Variable) *ProcedureScalaModel { + p.SnowparkPackage = value + return p +} + +func (p *ProcedureScalaModel) WithTargetPathValue(value tfconfig.Variable) *ProcedureScalaModel { + p.TargetPath = value + return p +} + +func (p *ProcedureScalaModel) WithTraceLevelValue(value tfconfig.Variable) *ProcedureScalaModel { + p.TraceLevel = value + return p +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go b/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go new file mode 100644 index 0000000000..8b5dc3afbf --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_ext.go @@ -0,0 +1,16 @@ +package model + +import ( + "encoding/json" +) + +func (f *ProcedureSqlModel) MarshalJSON() ([]byte, error) { + type Alias ProcedureSqlModel + return json.Marshal(&struct { + *Alias + DependsOn []string `json:"depends_on,omitempty"` + }{ + Alias: (*Alias)(f), + DependsOn: f.DependsOn(), + }) +} diff --git a/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_gen.go b/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_gen.go new file mode 100644 index 0000000000..274dfc5b50 --- /dev/null +++ b/pkg/acceptance/bettertestspoc/config/model/procedure_sql_model_gen.go @@ -0,0 +1,233 @@ +// Code generated by config model builder generator; DO NOT EDIT. 
+ +package model + +import ( + tfconfig "github.com/hashicorp/terraform-plugin-testing/config" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/bettertestspoc/config" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" +) + +type ProcedureSqlModel struct { + Arguments tfconfig.Variable `json:"arguments,omitempty"` + Comment tfconfig.Variable `json:"comment,omitempty"` + Database tfconfig.Variable `json:"database,omitempty"` + EnableConsoleOutput tfconfig.Variable `json:"enable_console_output,omitempty"` + ExecuteAs tfconfig.Variable `json:"execute_as,omitempty"` + FullyQualifiedName tfconfig.Variable `json:"fully_qualified_name,omitempty"` + IsSecure tfconfig.Variable `json:"is_secure,omitempty"` + LogLevel tfconfig.Variable `json:"log_level,omitempty"` + MetricLevel tfconfig.Variable `json:"metric_level,omitempty"` + Name tfconfig.Variable `json:"name,omitempty"` + NullInputBehavior tfconfig.Variable `json:"null_input_behavior,omitempty"` + ProcedureDefinition tfconfig.Variable `json:"procedure_definition,omitempty"` + ProcedureLanguage tfconfig.Variable `json:"procedure_language,omitempty"` + ReturnType tfconfig.Variable `json:"return_type,omitempty"` + Schema tfconfig.Variable `json:"schema,omitempty"` + TraceLevel tfconfig.Variable `json:"trace_level,omitempty"` + + *config.ResourceModelMeta +} + +///////////////////////////////////////////////// +// Basic builders (resource name and required) // +///////////////////////////////////////////////// + +func ProcedureSql( + resourceName string, + database string, + name string, + procedureDefinition string, + returnType string, + schema string, +) *ProcedureSqlModel { + p := &ProcedureSqlModel{ResourceModelMeta: config.Meta(resourceName, resources.ProcedureSql)} + p.WithDatabase(database) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithSchema(schema) + return p +} + +func ProcedureSqlWithDefaultMeta( + 
database string, + name string, + procedureDefinition string, + returnType string, + schema string, +) *ProcedureSqlModel { + p := &ProcedureSqlModel{ResourceModelMeta: config.DefaultMeta(resources.ProcedureSql)} + p.WithDatabase(database) + p.WithName(name) + p.WithProcedureDefinition(procedureDefinition) + p.WithReturnType(returnType) + p.WithSchema(schema) + return p +} + +///////////////////////////////// +// below all the proper values // +///////////////////////////////// + +// arguments attribute type is not yet supported, so WithArguments can't be generated + +func (p *ProcedureSqlModel) WithComment(comment string) *ProcedureSqlModel { + p.Comment = tfconfig.StringVariable(comment) + return p +} + +func (p *ProcedureSqlModel) WithDatabase(database string) *ProcedureSqlModel { + p.Database = tfconfig.StringVariable(database) + return p +} + +func (p *ProcedureSqlModel) WithEnableConsoleOutput(enableConsoleOutput bool) *ProcedureSqlModel { + p.EnableConsoleOutput = tfconfig.BoolVariable(enableConsoleOutput) + return p +} + +func (p *ProcedureSqlModel) WithExecuteAs(executeAs string) *ProcedureSqlModel { + p.ExecuteAs = tfconfig.StringVariable(executeAs) + return p +} + +func (p *ProcedureSqlModel) WithFullyQualifiedName(fullyQualifiedName string) *ProcedureSqlModel { + p.FullyQualifiedName = tfconfig.StringVariable(fullyQualifiedName) + return p +} + +func (p *ProcedureSqlModel) WithIsSecure(isSecure string) *ProcedureSqlModel { + p.IsSecure = tfconfig.StringVariable(isSecure) + return p +} + +func (p *ProcedureSqlModel) WithLogLevel(logLevel string) *ProcedureSqlModel { + p.LogLevel = tfconfig.StringVariable(logLevel) + return p +} + +func (p *ProcedureSqlModel) WithMetricLevel(metricLevel string) *ProcedureSqlModel { + p.MetricLevel = tfconfig.StringVariable(metricLevel) + return p +} + +func (p *ProcedureSqlModel) WithName(name string) *ProcedureSqlModel { + p.Name = tfconfig.StringVariable(name) + return p +} + +func (p *ProcedureSqlModel) 
WithNullInputBehavior(nullInputBehavior string) *ProcedureSqlModel { + p.NullInputBehavior = tfconfig.StringVariable(nullInputBehavior) + return p +} + +func (p *ProcedureSqlModel) WithProcedureDefinition(procedureDefinition string) *ProcedureSqlModel { + p.ProcedureDefinition = tfconfig.StringVariable(procedureDefinition) + return p +} + +func (p *ProcedureSqlModel) WithProcedureLanguage(procedureLanguage string) *ProcedureSqlModel { + p.ProcedureLanguage = tfconfig.StringVariable(procedureLanguage) + return p +} + +func (p *ProcedureSqlModel) WithReturnType(returnType string) *ProcedureSqlModel { + p.ReturnType = tfconfig.StringVariable(returnType) + return p +} + +func (p *ProcedureSqlModel) WithSchema(schema string) *ProcedureSqlModel { + p.Schema = tfconfig.StringVariable(schema) + return p +} + +func (p *ProcedureSqlModel) WithTraceLevel(traceLevel string) *ProcedureSqlModel { + p.TraceLevel = tfconfig.StringVariable(traceLevel) + return p +} + +////////////////////////////////////////// +// below it's possible to set any value // +////////////////////////////////////////// + +func (p *ProcedureSqlModel) WithArgumentsValue(value tfconfig.Variable) *ProcedureSqlModel { + p.Arguments = value + return p +} + +func (p *ProcedureSqlModel) WithCommentValue(value tfconfig.Variable) *ProcedureSqlModel { + p.Comment = value + return p +} + +func (p *ProcedureSqlModel) WithDatabaseValue(value tfconfig.Variable) *ProcedureSqlModel { + p.Database = value + return p +} + +func (p *ProcedureSqlModel) WithEnableConsoleOutputValue(value tfconfig.Variable) *ProcedureSqlModel { + p.EnableConsoleOutput = value + return p +} + +func (p *ProcedureSqlModel) WithExecuteAsValue(value tfconfig.Variable) *ProcedureSqlModel { + p.ExecuteAs = value + return p +} + +func (p *ProcedureSqlModel) WithFullyQualifiedNameValue(value tfconfig.Variable) *ProcedureSqlModel { + p.FullyQualifiedName = value + return p +} + +func (p *ProcedureSqlModel) WithIsSecureValue(value tfconfig.Variable) 
*ProcedureSqlModel { + p.IsSecure = value + return p +} + +func (p *ProcedureSqlModel) WithLogLevelValue(value tfconfig.Variable) *ProcedureSqlModel { + p.LogLevel = value + return p +} + +func (p *ProcedureSqlModel) WithMetricLevelValue(value tfconfig.Variable) *ProcedureSqlModel { + p.MetricLevel = value + return p +} + +func (p *ProcedureSqlModel) WithNameValue(value tfconfig.Variable) *ProcedureSqlModel { + p.Name = value + return p +} + +func (p *ProcedureSqlModel) WithNullInputBehaviorValue(value tfconfig.Variable) *ProcedureSqlModel { + p.NullInputBehavior = value + return p +} + +func (p *ProcedureSqlModel) WithProcedureDefinitionValue(value tfconfig.Variable) *ProcedureSqlModel { + p.ProcedureDefinition = value + return p +} + +func (p *ProcedureSqlModel) WithProcedureLanguageValue(value tfconfig.Variable) *ProcedureSqlModel { + p.ProcedureLanguage = value + return p +} + +func (p *ProcedureSqlModel) WithReturnTypeValue(value tfconfig.Variable) *ProcedureSqlModel { + p.ReturnType = value + return p +} + +func (p *ProcedureSqlModel) WithSchemaValue(value tfconfig.Variable) *ProcedureSqlModel { + p.Schema = value + return p +} + +func (p *ProcedureSqlModel) WithTraceLevelValue(value tfconfig.Variable) *ProcedureSqlModel { + p.TraceLevel = value + return p +} diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 5692d9a5eb..37e5316e39 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -522,6 +522,11 @@ func getResources() map[string]*schema.Resource { "snowflake_pipe": resources.Pipe(), "snowflake_primary_connection": resources.PrimaryConnection(), "snowflake_procedure": resources.Procedure(), + "snowflake_procedure_java": resources.ProcedureJava(), + "snowflake_procedure_javascript": resources.ProcedureJavascript(), + "snowflake_procedure_python": resources.ProcedurePython(), + "snowflake_procedure_scala": resources.ProcedureScala(), + "snowflake_procedure_sql": resources.ProcedureSql(), "snowflake_resource_monitor": 
resources.ResourceMonitor(), "snowflake_role": resources.Role(), "snowflake_row_access_policy": resources.RowAccessPolicy(), diff --git a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index a8173a6d96..8b43f012e7 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -55,6 +55,11 @@ const ( Pipe resource = "snowflake_pipe" PrimaryConnection resource = "snowflake_primary_connection" Procedure resource = "snowflake_procedure" + ProcedureJava resource = "snowflake_procedure_java" + ProcedureJavascript resource = "snowflake_procedure_javascript" + ProcedurePython resource = "snowflake_procedure_python" + ProcedureScala resource = "snowflake_procedure_scala" + ProcedureSql resource = "snowflake_procedure_sql" ResourceMonitor resource = "snowflake_resource_monitor" Role resource = "snowflake_role" RowAccessPolicy resource = "snowflake_row_access_policy" diff --git a/pkg/resources/function_commons.go b/pkg/resources/function_commons.go index d2e703a0e6..fd4d57913e 100644 --- a/pkg/resources/function_commons.go +++ b/pkg/resources/function_commons.go @@ -30,14 +30,14 @@ type functionSchemaDef struct { func setUpFunctionSchema(definition functionSchemaDef) map[string]*schema.Schema { currentSchema := make(map[string]*schema.Schema) - for k, v := range functionBaseSchema { + for k, v := range functionBaseSchema() { v := v if slices.Contains(definition.additionalArguments, k) || slices.Contains(commonFunctionArguments, k) { currentSchema[k] = &v } } if v, ok := currentSchema["function_definition"]; ok && v != nil { - v.Description = definition.functionDefinitionDescription + v.Description = diffSuppressStatementFieldDescription(definition.functionDefinitionDescription) } if v, ok := currentSchema["runtime_version"]; ok && v != nil { if definition.runtimeVersionRequired { @@ -158,187 +158,189 @@ var ( // TODO [SNOW-1348103]: currently database and schema are ForceNew but based on the docs it is possible to 
rename with moving to different db/schema // TODO [SNOW-1348103]: copyGrants and orReplace logic omitted for now, will be added to the limitations docs // TODO [SNOW-1348103]: temporary is not supported because it creates a per-session object; add to limitations/design decisions -var functionBaseSchema = map[string]schema.Schema{ - "database": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: suppressIdentifierQuoting, - Description: blocklistedCharactersFieldDescription("The database in which to create the function."), - }, - "schema": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: suppressIdentifierQuoting, - Description: blocklistedCharactersFieldDescription("The schema in which to create the function."), - }, - "name": { - Type: schema.TypeString, - Required: true, - Description: blocklistedCharactersFieldDescription("The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages)."), - DiffSuppressFunc: suppressIdentifierQuoting, - }, - "is_secure": { - Type: schema.TypeString, - Optional: true, - Default: BooleanDefault, - ValidateDiagFunc: validateBooleanString, - DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShow("is_secure"), - Description: booleanStringFieldDescription("Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure views (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. 
Use the role owning the function while managing secure functions."), - }, - "is_aggregate": { - Type: schema.TypeString, - Optional: true, - Default: BooleanDefault, - ValidateDiagFunc: validateBooleanString, - DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShow("is_aggregate"), - Description: booleanStringFieldDescription("Specifies that the function is an aggregate function. For more information about user-defined aggregate functions, see [Python user-defined aggregate functions](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-aggregate-functions)."), - }, - "arguments": { - Type: schema.TypeList, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "arg_name": { - Type: schema.TypeString, - Required: true, - // TODO [SNOW-1348103]: adjust diff suppression accordingly. - Description: "The argument name.", - }, - // TODO [SNOW-1348103]: after testing weird names add limitations to the docs and add validation here - "arg_data_type": { - Type: schema.TypeString, - Required: true, - ValidateDiagFunc: IsDataTypeValid, - DiffSuppressFunc: DiffSuppressDataTypes, - Description: "The argument type.", +func functionBaseSchema() map[string]schema.Schema { + return map[string]schema.Schema{ + "database": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppressIdentifierQuoting, + Description: blocklistedCharactersFieldDescription("The database in which to create the function."), + }, + "schema": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppressIdentifierQuoting, + Description: blocklistedCharactersFieldDescription("The schema in which to create the function."), + }, + "name": { + Type: schema.TypeString, + Required: true, + Description: blocklistedCharactersFieldDescription("The name of the function; the identifier does not need to be unique for the schema in which the function is created because UDFs are identified and resolved by the combination of the 
name and argument types. Check the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages)."), + DiffSuppressFunc: suppressIdentifierQuoting, + }, + "is_secure": { + Type: schema.TypeString, + Optional: true, + Default: BooleanDefault, + ValidateDiagFunc: validateBooleanString, + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShow("is_secure"), + Description: booleanStringFieldDescription("Specifies that the function is secure. By design, the Snowflake's `SHOW FUNCTIONS` command does not provide information about secure functions (consult [function docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#id1) and [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)) which is essential to manage/import function with Terraform. Use the role owning the function while managing secure functions."), + }, + "is_aggregate": { + Type: schema.TypeString, + Optional: true, + Default: BooleanDefault, + ValidateDiagFunc: validateBooleanString, + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShow("is_aggregate"), + Description: booleanStringFieldDescription("Specifies that the function is an aggregate function. For more information about user-defined aggregate functions, see [Python user-defined aggregate functions](https://docs.snowflake.com/en/developer-guide/udf/python/udf-python-aggregate-functions)."), + }, + "arguments": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "arg_name": { + Type: schema.TypeString, + Required: true, + // TODO [SNOW-1348103]: adjust diff suppression accordingly. 
+ Description: "The argument name.", + }, + // TODO [SNOW-1348103]: after testing weird names add limitations to the docs and add validation here + "arg_data_type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: IsDataTypeValid, + DiffSuppressFunc: DiffSuppressDataTypes, + Description: "The argument type.", + }, }, }, + Optional: true, + ForceNew: true, + Description: "List of the arguments for the function. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages) for more details.", }, - Optional: true, - ForceNew: true, - Description: "List of the arguments for the function. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages) for more details.", - }, - // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) - "return_type": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - ValidateDiagFunc: IsDataTypeValid, - DiffSuppressFunc: DiffSuppressDataTypes, - Description: "Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages).", - // TODO [SNOW-1348103]: adjust DiffSuppressFunc - }, - "null_input_behavior": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), - Description: fmt.Sprintf("Specifies the behavior of the function when called with null inputs. 
Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), - }, - "return_behavior": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - ValidateDiagFunc: sdkValidation(sdk.ToReturnResultsBehavior), - DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToReturnResultsBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("return_behavior")), - Description: fmt.Sprintf("Specifies the behavior of the function when returning results. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedReturnResultsBehaviors)), - }, - "runtime_version": { - Type: schema.TypeString, - ForceNew: true, - // TODO [SNOW-1348103]: may be optional for java without consequence because if it is not set, the describe is not returning any version. - }, - "comment": { - Type: schema.TypeString, - Optional: true, - // TODO [SNOW-1348103]: handle dynamic comment - this is a workaround for now - Default: "user-defined function", - Description: "Specifies a comment for the function.", - }, - // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage_name + target_path - "imports": { - Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, - Optional: true, - ForceNew: true, - }, - // TODO [SNOW-1348103]: what do we do with the version "latest". 
- "packages": { - Type: schema.TypeSet, - Elem: &schema.Schema{Type: schema.TypeString}, - Optional: true, - ForceNew: true, - }, - "handler": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - }, - // TODO [SNOW-1348103]: use suppress from network policies when adding logic - "external_access_integrations": { - Type: schema.TypeSet, - Elem: &schema.Schema{ + // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) + "return_type": { Type: schema.TypeString, - ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Required: true, + ForceNew: true, + ValidateDiagFunc: IsDataTypeValid, + DiffSuppressFunc: DiffSuppressDataTypes, + Description: "Specifies the results returned by the UDF, which determines the UDF type. Use `` to create a scalar UDF that returns a single value with the specified data type. Use `TABLE (col_name col_data_type, ...)` to creates a table UDF that returns tabular results with the specified table column(s) and column type(s). For the details, consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-function#all-languages).", + // TODO [SNOW-1348103]: adjust DiffSuppressFunc }, - Optional: true, - ForceNew: true, - Description: "The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. 
An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API.", - }, - "secrets": { - Type: schema.TypeSet, - Optional: true, - Elem: &schema.Resource{ - Schema: map[string]*schema.Schema{ - "secret_variable_name": { - Type: schema.TypeString, - Required: true, - Description: "The variable that will be used in handler code when retrieving information from the secret.", - }, - "secret_id": { - Type: schema.TypeString, - Required: true, - Description: "Fully qualified name of the allowed secret. You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter.", - DiffSuppressFunc: suppressIdentifierQuoting, + "null_input_behavior": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), + Description: fmt.Sprintf("Specifies the behavior of the function when called with null inputs. Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), + }, + "return_behavior": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: sdkValidation(sdk.ToReturnResultsBehavior), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToReturnResultsBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("return_behavior")), + Description: fmt.Sprintf("Specifies the behavior of the function when returning results. 
Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedReturnResultsBehaviors)), + }, + "runtime_version": { + Type: schema.TypeString, + ForceNew: true, + // TODO [SNOW-1348103]: may be optional for java without consequence because if it is not set, the describe is not returning any version. + }, + "comment": { + Type: schema.TypeString, + Optional: true, + // TODO [SNOW-1348103]: handle dynamic comment - this is a workaround for now + Default: "user-defined function", + Description: "Specifies a comment for the function.", + }, + // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage_name + target_path + "imports": { + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + ForceNew: true, + }, + // TODO [SNOW-1348103]: what do we do with the version "latest". + "packages": { + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + ForceNew: true, + }, + "handler": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + // TODO [SNOW-1348103]: use suppress from network policies when adding logic + "external_access_integrations": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + }, + Optional: true, + ForceNew: true, + Description: "The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this function’s handler code to access external networks. 
An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API.", + }, + "secrets": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "secret_variable_name": { + Type: schema.TypeString, + Required: true, + Description: "The variable that will be used in handler code when retrieving information from the secret.", + }, + "secret_id": { + Type: schema.TypeString, + Required: true, + Description: "Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter.", + DiffSuppressFunc: suppressIdentifierQuoting, + }, }, }, + Description: "Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", }, - Description: "Assigns the names of secrets to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. 
Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE FUNCTION command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", - }, - // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage + path - "target_path": { - Type: schema.TypeString, - Optional: true, - ForceNew: true, - }, - "function_definition": { - Type: schema.TypeString, - Required: true, - ForceNew: true, - DiffSuppressFunc: DiffSuppressStatement, - }, - "function_language": { - Type: schema.TypeString, - Computed: true, - Description: "Specifies language for the user. Used to detect external changes.", - }, - ShowOutputAttributeName: { - Type: schema.TypeList, - Computed: true, - Description: "Outputs the result of `SHOW FUNCTION` for the given function.", - Elem: &schema.Resource{ - Schema: schemas.ShowFunctionSchema, + // TODO [SNOW-1348103]: because of https://docs.snowflake.com/en/sql-reference/sql/create-function#id6, maybe it will be better to split into stage + path + "target_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, }, - }, - ParametersAttributeName: { - Type: schema.TypeList, - Computed: true, - Description: "Outputs the result of `SHOW PARAMETERS IN FUNCTION` for the given function.", - Elem: &schema.Resource{ - Schema: functionParametersSchema, + "function_definition": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: DiffSuppressStatement, + }, + "function_language": { + Type: schema.TypeString, + Computed: true, + Description: "Specifies language for the user. 
Used to detect external changes.", + }, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW FUNCTION` for the given function.", + Elem: &schema.Resource{ + Schema: schemas.ShowFunctionSchema, + }, }, - }, - FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, + ParametersAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW PARAMETERS IN FUNCTION` for the given function.", + Elem: &schema.Resource{ + Schema: functionParametersSchema, + }, + }, + FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, + } } diff --git a/pkg/resources/procedure_commons.go b/pkg/resources/procedure_commons.go new file mode 100644 index 0000000000..88e815978b --- /dev/null +++ b/pkg/resources/procedure_commons.go @@ -0,0 +1,343 @@ +package resources + +import ( + "fmt" + "slices" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/schemas" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func init() { + javaProcedureSchema = setUpProcedureSchema(javaProcedureSchemaDefinition) + javascriptProcedureSchema = setUpProcedureSchema(javascriptProcedureSchemaDefinition) + pythonProcedureSchema = setUpProcedureSchema(pythonProcedureSchemaDefinition) + scalaProcedureSchema = setUpProcedureSchema(scalaProcedureSchemaDefinition) + sqlProcedureSchema = setUpProcedureSchema(sqlProcedureSchemaDefinition) +} + +type procedureSchemaDef struct { + additionalArguments []string + procedureDefinitionDescription string + returnTypeLinkName string + returnTypeLinkUrl string + runtimeVersionDescription string + importsDescription string + handlerDescription string + targetPathDescription string +} + +func setUpProcedureSchema(definition procedureSchemaDef) map[string]*schema.Schema { + currentSchema := make(map[string]*schema.Schema) + for k, v := range 
procedureBaseSchema() { + v := v + if slices.Contains(definition.additionalArguments, k) || slices.Contains(commonProcedureArguments, k) { + currentSchema[k] = &v + } + } + if v, ok := currentSchema["procedure_definition"]; ok && v != nil { + v.Description = diffSuppressStatementFieldDescription(definition.procedureDefinitionDescription) + } + if v, ok := currentSchema["return_type"]; ok && v != nil { + v.Description = procedureReturnsTemplate(definition.returnTypeLinkName, definition.returnTypeLinkUrl) + } + if v, ok := currentSchema["runtime_version"]; ok && v != nil { + v.Description = definition.runtimeVersionDescription + } + if v, ok := currentSchema["imports"]; ok && v != nil { + v.Description = definition.importsDescription + } + if v, ok := currentSchema["handler"]; ok && v != nil { + v.Description = definition.handlerDescription + } + if v, ok := currentSchema["target_path"]; ok && v != nil { + v.Description = definition.targetPathDescription + } + return currentSchema +} + +func procedureDefinitionTemplate(language string, linkName string, linkUrl string) string { + return fmt.Sprintf("Defines the code executed by the stored procedure. The definition can consist of any valid code. Wrapping `$$` signs are added by the provider automatically; do not include them. The `procedure_definition` value must be %[1]s source code. For more information, see [%[2]s](%[3]s).", language, linkName, linkUrl) +} + +func procedureReturnsTemplate(linkName string, linkUrl string) string { + return fmt.Sprintf("Specifies the type of the result returned by the stored procedure. For ``, use the Snowflake data type that corresponds to the type of the language that you are using (see [%s](%s)). For `RETURNS TABLE ( [ col_name col_data_type [ , ... ] ] )`, if you know the Snowflake data types of the columns in the returned table, specify the column names and types. Otherwise (e.g. if you are determining the column types during run time), you can omit the column names and types (i.e.
`TABLE ()`).", linkName, linkUrl) +} + +var ( + commonProcedureArguments = []string{ + "database", + "schema", + "name", + "is_secure", + "arguments", + "return_type", + "null_input_behavior", + "comment", + "execute_as", + "procedure_definition", + "procedure_language", + ShowOutputAttributeName, + ParametersAttributeName, + FullyQualifiedNameAttributeName, + } + javaProcedureSchemaDefinition = procedureSchemaDef{ + additionalArguments: []string{ + "runtime_version", + "imports", + "snowpark_package", + "packages", + "handler", + "external_access_integrations", + "secrets", + "target_path", + }, + procedureDefinitionDescription: procedureDefinitionTemplate("Java", "Java (using Snowpark)", "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-java"), + returnTypeLinkName: "SQL-Java Data Type Mappings", + returnTypeLinkUrl: "https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-java-data-type-mappings", + runtimeVersionDescription: "The language runtime version to use. Currently, the supported versions are: 11.", + importsDescription: "The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages.", + handlerDescription: "Use the fully qualified name of the method or function for the stored procedure. 
This is typically in the following form `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`.", + targetPathDescription: "For stored procedures with inline handler code, specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the ``. If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. If you specify this clause you cannot set this to an existing file. Snowflake returns an error if the TARGET_PATH points to an existing file. If you specify both the IMPORTS and TARGET_PATH clauses, the file name in the TARGET_PATH clause must be different from each file name in the IMPORTS clause, even if the files are in different subdirectories or different stages. If you no longer need to use the stored procedure (e.g. if you drop the stored procedure), you must manually remove this JAR file.", + } + javascriptProcedureSchemaDefinition = procedureSchemaDef{ + additionalArguments: []string{}, + returnTypeLinkName: "SQL and JavaScript data type mapping", + returnTypeLinkUrl: "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript.html#label-stored-procedure-data-type-mapping", + procedureDefinitionDescription: procedureDefinitionTemplate("JavaScript", "JavaScript", "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-javascript"), + } + pythonProcedureSchemaDefinition = procedureSchemaDef{ + additionalArguments: []string{ + "runtime_version", + "imports", + "snowpark_package", + "packages", + "handler", + "external_access_integrations", + "secrets", + }, + procedureDefinitionDescription: procedureDefinitionTemplate("Python", "Python (using Snowpark)", "https://docs.snowflake.com/en/developer-guide/stored-procedure/python/procedure-python-overview"), + returnTypeLinkName: "SQL-Python
Data Type Mappings", + returnTypeLinkUrl: "https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-python-data-type-mappings", + runtimeVersionDescription: "The language runtime version to use. Currently, the supported versions are: 3.9, 3.10, and 3.11.", + importsDescription: "The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If your stored procedure’s code will be on a stage, you must also include a path to the module file your code is in. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages.", + handlerDescription: "Use the name of the stored procedure’s function or method. This can differ depending on whether the code is in-line or referenced at a stage. When the code is in-line, you can specify just the function name. 
When the code is imported from a stage, specify the fully-qualified handler function name as `.`.", + } + scalaProcedureSchemaDefinition = procedureSchemaDef{ + additionalArguments: []string{ + "runtime_version", + "imports", + "snowpark_package", + "packages", + "handler", + "external_access_integrations", + "secrets", + "target_path", + }, + procedureDefinitionDescription: procedureDefinitionTemplate("Scala", "Scala (using Snowpark)", "https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-scala"), + returnTypeLinkName: "SQL-Scala Data Type Mappings", + returnTypeLinkUrl: "https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-data-type-mapping.html#label-sql-types-to-scala-types", + runtimeVersionDescription: "The language runtime version to use. Currently, the supported versions are: 2.12.", + importsDescription: "The location (stage), path, and name of the file(s) to import. You must set the IMPORTS clause to include any files that your stored procedure depends on. If you are writing an in-line stored procedure, you can omit this clause, unless your code depends on classes defined outside the stored procedure or resource files. If you are writing a stored procedure with a staged handler, you must also include a path to the JAR file containing the stored procedure’s handler code. The IMPORTS definition cannot reference variables from arguments that are passed into the stored procedure. Each file in the IMPORTS clause must have a unique name, even if the files are in different subdirectories or different stages.", + handlerDescription: "Use the fully qualified name of the method or function for the stored procedure. 
This is typically in the following form: `com.my_company.my_package.MyClass.myMethod` where `com.my_company.my_package` corresponds to the package containing the object or class: `package com.my_company.my_package;`.", + targetPathDescription: "For stored procedures with inline handler code, specifies the location to which Snowflake should write the compiled code (JAR file) after compiling the source code specified in the procedure_definition. If this clause is omitted, Snowflake re-compiles the source code each time the code is needed. If you specify this clause you cannot set this to an existing file. Snowflake returns an error if the TARGET_PATH points to an existing file. If you specify both the IMPORTS and TARGET_PATH clauses, the file name in the TARGET_PATH clause must be different from each file name in the IMPORTS clause, even if the files are in different subdirectories or different stages. If you no longer need to use the stored procedure (e.g. if you drop the stored procedure), you must manually remove this JAR file.", + } + sqlProcedureSchemaDefinition = procedureSchemaDef{ + additionalArguments: []string{}, + procedureDefinitionDescription: procedureDefinitionTemplate("SQL", "Snowflake Scripting", "https://docs.snowflake.com/en/developer-guide/snowflake-scripting/index"), + returnTypeLinkName: "SQL data type", + returnTypeLinkUrl: "https://docs.snowflake.com/en/sql-reference-data-types", + } +) + +var ( + javaProcedureSchema map[string]*schema.Schema + javascriptProcedureSchema map[string]*schema.Schema + pythonProcedureSchema map[string]*schema.Schema + scalaProcedureSchema map[string]*schema.Schema + sqlProcedureSchema map[string]*schema.Schema +) + +// TODO [SNOW-1348103]: add null/not null +// TODO [SNOW-1348103]: currently database and schema are ForceNew but based on the docs it is possible to rename with moving to different db/schema +// TODO [SNOW-1348103]: copyGrants and orReplace logic omitted for now, will be added to the limitations docs 
+func procedureBaseSchema() map[string]schema.Schema { + return map[string]schema.Schema{ + "database": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppressIdentifierQuoting, + Description: blocklistedCharactersFieldDescription("The database in which to create the procedure."), + }, + "schema": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: suppressIdentifierQuoting, + Description: blocklistedCharactersFieldDescription("The schema in which to create the procedure."), + }, + "name": { + Type: schema.TypeString, + Required: true, + Description: blocklistedCharactersFieldDescription("The name of the procedure; the identifier does not need to be unique for the schema in which the procedure is created because stored procedures are [identified and resolved by the combination of the name and argument types](https://docs.snowflake.com/en/developer-guide/udf-stored-procedure-naming-conventions.html#label-procedure-function-name-overloading)."), + DiffSuppressFunc: suppressIdentifierQuoting, + }, + "is_secure": { + Type: schema.TypeString, + Optional: true, + Default: BooleanDefault, + ValidateDiagFunc: validateBooleanString, + DiffSuppressFunc: IgnoreChangeToCurrentSnowflakeValueInShow("is_secure"), + Description: booleanStringFieldDescription("Specifies that the procedure is secure. For more information about secure procedures, see [Protecting Sensitive Information with Secure UDFs and Stored Procedures](https://docs.snowflake.com/en/developer-guide/secure-udf-procedure)."), + }, + "arguments": { + Type: schema.TypeList, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "arg_name": { + Type: schema.TypeString, + Required: true, + // TODO [SNOW-1348103]: adjust diff suppression accordingly. 
+ Description: "The argument name.", + }, + "arg_data_type": { + Type: schema.TypeString, + Required: true, + ValidateDiagFunc: IsDataTypeValid, + DiffSuppressFunc: DiffSuppressDataTypes, + Description: "The argument type.", + }, + }, + }, + Optional: true, + ForceNew: true, + Description: "List of the arguments for the procedure. Consult the [docs](https://docs.snowflake.com/en/sql-reference/sql/create-procedure#all-languages) for more details.", + }, + // TODO [SNOW-1348103]: for now, the proposal is to leave return type as string, add TABLE to data types, and here always parse (easier handling and diff suppression) + "return_type": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + ValidateDiagFunc: IsDataTypeValid, + DiffSuppressFunc: DiffSuppressDataTypes, + }, + "null_input_behavior": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: sdkValidation(sdk.ToNullInputBehavior), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToNullInputBehavior), IgnoreChangeToCurrentSnowflakeValueInShow("null_input_behavior")), + Description: fmt.Sprintf("Specifies the behavior of the procedure when called with null inputs. 
Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedNullInputBehaviors)), + }, + // "return_behavior" removed because it is deprecated in the docs: https://docs.snowflake.com/en/sql-reference/sql/create-procedure#id1 + "runtime_version": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "comment": { + Type: schema.TypeString, + Optional: true, + // TODO [SNOW-1348103]: handle dynamic comment - this is a workaround for now + Default: "user-defined procedure", + Description: "Specifies a comment for the procedure.", + }, + "imports": { + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + ForceNew: true, + }, + "snowpark_package": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "The Snowpark package is required for stored procedures, so it must always be present. For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index).", + }, + // TODO [SNOW-1348103]: what do we do with the version "latest". + "packages": { + Type: schema.TypeSet, + Elem: &schema.Schema{Type: schema.TypeString}, + Optional: true, + ForceNew: true, + Description: "List of the names of packages deployed in Snowflake that should be included in the handler code’s execution environment. The Snowpark package is required for stored procedures, but is specified in the `snowpark_package` attribute. 
For more information about Snowpark, see [Snowpark API](https://docs.snowflake.com/en/developer-guide/snowpark/index).", + }, + "handler": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + }, + "external_access_integrations": { + Type: schema.TypeSet, + Elem: &schema.Schema{ + Type: schema.TypeString, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + }, + Optional: true, + ForceNew: true, + Description: "The names of [external access integrations](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) needed in order for this procedure’s handler code to access external networks. An external access integration specifies [network rules](https://docs.snowflake.com/en/sql-reference/sql/create-network-rule) and [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) that specify external locations and credentials (if any) allowed for use by handler code when making requests of an external network, such as an external REST API.", + }, + "secrets": { + Type: schema.TypeSet, + Optional: true, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "secret_variable_name": { + Type: schema.TypeString, + Required: true, + Description: "The variable that will be used in handler code when retrieving information from the secret.", + }, + "secret_id": { + Type: schema.TypeString, + Required: true, + Description: "Fully qualified name of the allowed [secret](https://docs.snowflake.com/en/sql-reference/sql/create-secret). You will receive an error if you specify a SECRETS value whose secret isn’t also included in an integration specified by the EXTERNAL_ACCESS_INTEGRATIONS parameter.", + DiffSuppressFunc: suppressIdentifierQuoting, + }, + }, + }, + Description: "Assigns the names of [secrets](https://docs.snowflake.com/en/sql-reference/sql/create-secret) to variables so that you can use the variables to reference the secrets when retrieving information from secrets in handler code. 
Secrets you specify here must be allowed by the [external access integration](https://docs.snowflake.com/en/sql-reference/sql/create-external-access-integration) specified as a value of this CREATE PROCEDURE command’s EXTERNAL_ACCESS_INTEGRATIONS parameter.", + }, + "target_path": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + }, + "execute_as": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: sdkValidation(sdk.ToExecuteAs), + DiffSuppressFunc: SuppressIfAny(NormalizeAndCompare(sdk.ToExecuteAs), IgnoreChangeToCurrentSnowflakeValueInShow("execute_as")), + Description: fmt.Sprintf("Specifies whether the stored procedure executes with the privileges of the owner (an “owner’s rights” stored procedure) or with the privileges of the caller (a “caller’s rights” stored procedure). If you execute the statement CREATE PROCEDURE … EXECUTE AS CALLER, then in the future the procedure will execute as a caller’s rights procedure. If you execute CREATE PROCEDURE … EXECUTE AS OWNER, then the procedure will execute as an owner’s rights procedure. For more information, see [Understanding caller’s rights and owner’s rights stored procedures](https://docs.snowflake.com/en/developer-guide/stored-procedure/stored-procedures-rights). Valid values are (case-insensitive): %s.", possibleValuesListed(sdk.AllAllowedExecuteAs)), + }, + "procedure_definition": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + DiffSuppressFunc: DiffSuppressStatement, + }, + "procedure_language": { + Type: schema.TypeString, + Computed: true, + Description: "Specifies language for the procedure.
Used to detect external changes.", + }, + ShowOutputAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW PROCEDURE` for the given procedure.", + Elem: &schema.Resource{ + Schema: schemas.ShowProcedureSchema, + }, + }, + ParametersAttributeName: { + Type: schema.TypeList, + Computed: true, + Description: "Outputs the result of `SHOW PARAMETERS IN PROCEDURE` for the given procedure.", + Elem: &schema.Resource{ + Schema: procedureParametersSchema, + }, + }, + FullyQualifiedNameAttributeName: *schemas.FullyQualifiedNameSchema, + } +} diff --git a/pkg/resources/procedure_java.go b/pkg/resources/procedure_java.go new file mode 100644 index 0000000000..8019e72689 --- /dev/null +++ b/pkg/resources/procedure_java.go @@ -0,0 +1,52 @@ +package resources + +import ( + "context" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func ProcedureJava() *schema.Resource { + return &schema.Resource{ + CreateContext: TrackingCreateWrapper(resources.ProcedureJava, CreateContextProcedureJava), + ReadContext: TrackingReadWrapper(resources.ProcedureJava, ReadContextProcedureJava), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureJava, UpdateContextProcedureJava), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureJava, DeleteContextProcedureJava), + Description: "Resource used to manage java procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", + + CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureJava, customdiff.All( + // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(javaProcedureSchema, ShowOutputAttributeName, ...), + ComputedIfAnyAttributeChanged(javaProcedureSchema, FullyQualifiedNameAttributeName, "name"), + ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), + procedureParametersCustomDiff, + // TODO[SNOW-1348103]: recreate when type changed externally + )), + + Schema: collections.MergeMaps(javaProcedureSchema, procedureParametersSchema), + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func ReadContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func UpdateContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func DeleteContextProcedureJava(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} diff --git a/pkg/resources/procedure_javascript.go b/pkg/resources/procedure_javascript.go new file mode 100644 index 0000000000..8c3958b99e --- /dev/null +++ b/pkg/resources/procedure_javascript.go @@ -0,0 +1,52 @@ +package resources + +import ( + "context" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func ProcedureJavascript() *schema.Resource { + return &schema.Resource{ + CreateContext: TrackingCreateWrapper(resources.ProcedureJavascript, CreateContextProcedureJavascript), + ReadContext: TrackingReadWrapper(resources.ProcedureJavascript, ReadContextProcedureJavascript), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureJavascript, UpdateContextProcedureJavascript), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureJavascript, DeleteContextProcedureJavascript), + Description: "Resource used to manage javascript procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", + + CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureJavascript, customdiff.All( + // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(javascriptProcedureSchema, ShowOutputAttributeName, ...), + ComputedIfAnyAttributeChanged(javascriptProcedureSchema, FullyQualifiedNameAttributeName, "name"), + ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), + procedureParametersCustomDiff, + // TODO[SNOW-1348103]: recreate when type changed externally + )), + + Schema: collections.MergeMaps(javascriptProcedureSchema, procedureParametersSchema), + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func ReadContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func UpdateContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func DeleteContextProcedureJavascript(ctx context.Context, d *schema.ResourceData, meta 
any) diag.Diagnostics { + return nil +} diff --git a/pkg/resources/procedure_python.go b/pkg/resources/procedure_python.go new file mode 100644 index 0000000000..48d70329e7 --- /dev/null +++ b/pkg/resources/procedure_python.go @@ -0,0 +1,52 @@ +package resources + +import ( + "context" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func ProcedurePython() *schema.Resource { + return &schema.Resource{ + CreateContext: TrackingCreateWrapper(resources.ProcedurePython, CreateContextProcedurePython), + ReadContext: TrackingReadWrapper(resources.ProcedurePython, ReadContextProcedurePython), + UpdateContext: TrackingUpdateWrapper(resources.ProcedurePython, UpdateContextProcedurePython), + DeleteContext: TrackingDeleteWrapper(resources.ProcedurePython, DeleteContextProcedurePython), + Description: "Resource used to manage python procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", + + CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedurePython, customdiff.All( + // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(pythonProcedureSchema, ShowOutputAttributeName, ...), + ComputedIfAnyAttributeChanged(pythonProcedureSchema, FullyQualifiedNameAttributeName, "name"), + ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), + procedureParametersCustomDiff, + // TODO[SNOW-1348103]: recreate when type changed externally + )), + + Schema: collections.MergeMaps(pythonProcedureSchema, procedureParametersSchema), + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func ReadContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func UpdateContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func DeleteContextProcedurePython(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} diff --git a/pkg/resources/procedure_scala.go b/pkg/resources/procedure_scala.go new file mode 100644 index 0000000000..3a7816b7d0 --- /dev/null +++ b/pkg/resources/procedure_scala.go @@ -0,0 +1,52 @@ +package resources + +import ( + "context" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + 
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func ProcedureScala() *schema.Resource { + return &schema.Resource{ + CreateContext: TrackingCreateWrapper(resources.ProcedureScala, CreateContextProcedureScala), + ReadContext: TrackingReadWrapper(resources.ProcedureScala, ReadContextProcedureScala), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureScala, UpdateContextProcedureScala), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureScala, DeleteContextProcedureScala), + Description: "Resource used to manage scala procedure objects. For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", + + CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureScala, customdiff.All( + // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(scalaProcedureSchema, ShowOutputAttributeName, ...), + ComputedIfAnyAttributeChanged(scalaProcedureSchema, FullyQualifiedNameAttributeName, "name"), + ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), + procedureParametersCustomDiff, + // TODO[SNOW-1348103]: recreate when type changed externally + )), + + Schema: collections.MergeMaps(scalaProcedureSchema, procedureParametersSchema), + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func ReadContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func UpdateContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func DeleteContextProcedureScala(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} diff --git a/pkg/resources/procedure_sql.go 
b/pkg/resources/procedure_sql.go new file mode 100644 index 0000000000..0488941f03 --- /dev/null +++ b/pkg/resources/procedure_sql.go @@ -0,0 +1,52 @@ +package resources + +import ( + "context" + "strings" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/collections" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +func ProcedureSql() *schema.Resource { + return &schema.Resource{ + CreateContext: TrackingCreateWrapper(resources.ProcedureSql, CreateContextProcedureSql), + ReadContext: TrackingReadWrapper(resources.ProcedureSql, ReadContextProcedureSql), + UpdateContext: TrackingUpdateWrapper(resources.ProcedureSql, UpdateContextProcedureSql), + DeleteContext: TrackingDeleteWrapper(resources.ProcedureSql, DeleteContextProcedureSql), + Description: "Resource used to manage sql procedure objects. 
For more information, check [procedure documentation](https://docs.snowflake.com/en/sql-reference/sql/create-procedure).", + + CustomizeDiff: TrackingCustomDiffWrapper(resources.ProcedureSql, customdiff.All( + // TODO[SNOW-1348103]: ComputedIfAnyAttributeChanged(sqlProcedureSchema, ShowOutputAttributeName, ...), + ComputedIfAnyAttributeChanged(sqlProcedureSchema, FullyQualifiedNameAttributeName, "name"), + ComputedIfAnyAttributeChanged(procedureParametersSchema, ParametersAttributeName, collections.Map(sdk.AsStringList(sdk.AllProcedureParameters), strings.ToLower)...), + procedureParametersCustomDiff, + // TODO[SNOW-1348103]: recreate when type changed externally + )), + + Schema: collections.MergeMaps(sqlProcedureSchema, procedureParametersSchema), + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func ReadContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func UpdateContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} + +func DeleteContextProcedureSql(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + return nil +} diff --git a/pkg/sdk/common_types.go b/pkg/sdk/common_types.go index 4276fe58d9..0621d9d995 100644 --- a/pkg/sdk/common_types.go +++ b/pkg/sdk/common_types.go @@ -220,11 +220,28 @@ func ExecuteAsPointer(v ExecuteAs) *ExecuteAs { return &v } +// TODO [SNOW-1348103]: fix SDK - constants should have only CALLER and OWNER (not the EXECUTE AS part) const ( ExecuteAsCaller ExecuteAs = "EXECUTE AS CALLER" ExecuteAsOwner ExecuteAs = "EXECUTE AS OWNER" ) +func ToExecuteAs(value string) (ExecuteAs, error) { + switch strings.ToUpper(value) { + case string(ExecuteAsCaller): + return ExecuteAsCaller, nil + case string(ExecuteAsOwner): + return 
ExecuteAsOwner, nil + default: + return "", fmt.Errorf("unknown execute as: %s", value) + } +} + +var AllAllowedExecuteAs = []ExecuteAs{ + ExecuteAsCaller, + ExecuteAsOwner, +} + type NullInputBehavior string func NullInputBehaviorPointer(v NullInputBehavior) *NullInputBehavior { diff --git a/pkg/sdk/common_types_test.go b/pkg/sdk/common_types_test.go index 2cb2f55665..7df8be779e 100644 --- a/pkg/sdk/common_types_test.go +++ b/pkg/sdk/common_types_test.go @@ -262,6 +262,38 @@ func TestToLogLevel(t *testing.T) { } } +func Test_ToExecuteAs(t *testing.T) { + testCases := []struct { + Name string + Input string + Expected ExecuteAs + Error string + }{ + {Input: string(ExecuteAsCaller), Expected: ExecuteAsCaller}, + {Input: string(ExecuteAsOwner), Expected: ExecuteAsOwner}, + {Name: "validation: incorrect execute as", Input: "incorrect", Error: "unknown execute as: incorrect"}, + {Name: "validation: empty input", Input: "", Error: "unknown execute as: "}, + {Name: "validation: lower case input", Input: "execute as caller", Expected: ExecuteAsCaller}, + } + + for _, testCase := range testCases { + name := testCase.Name + if name == "" { + name = fmt.Sprintf("%v execute as", testCase.Input) + } + t.Run(name, func(t *testing.T) { + value, err := ToExecuteAs(testCase.Input) + if testCase.Error != "" { + assert.Empty(t, value) + assert.ErrorContains(t, err, testCase.Error) + } else { + assert.NoError(t, err) + assert.Equal(t, testCase.Expected, value) + } + }) + } +} + func Test_ToNullInputBehavior(t *testing.T) { testCases := []struct { Name string