diff --git a/MIGRATION_GUIDE.md b/MIGRATION_GUIDE.md index 7b15f31f10..cc9ec137b2 100644 --- a/MIGRATION_GUIDE.md +++ b/MIGRATION_GUIDE.md @@ -4,6 +4,17 @@ This document is meant to help you migrate your Terraform config to the new newe describe deprecations or breaking changes and help you to change your configuration to keep the same (or similar) behavior across different versions. +## v0.91.0 ➞ v0.92.0 +### snowflake_database new alternatives +As part of the [preparation for v1](https://github.com/Snowflake-Labs/terraform-provider-snowflake/blob/main/ROADMAP.md#preparing-essential-ga-objects-for-the-provider-v1), we split up the database resource into multiple ones: +- Standard database (in progress) +- Shared database - can be used as `snowflake_shared_database` (used to create databases from externally defined shares) +- Secondary database - can be used as `snowflake_secondary_database` (used to create replicas of databases from external sources) +From now on, please migrate and use the new database resources for their unique use cases. For more information, see the documentation for those resources on the [Terraform Registry](https://registry.terraform.io/providers/Snowflake-Labs/snowflake/latest/docs). + +The split was done (and will be done for several objects during the refactor) to simplify the resource on maintainability and usage level. +Its purpose was also to divide the resources by their specific purpose rather than cramming every use case of an object into one resource. 
+ ## v0.89.0 ➞ v0.90.0 ### snowflake_table resource changes #### *(behavior change)* Validation to column type added @@ -23,7 +34,7 @@ resource "snowflake_tag_masking_policy_association" "name" { masking_policy_id = snowflake_masking_policy.example_masking_policy.id } ``` - + After ```terraform resource "snowflake_tag_masking_policy_association" "name" { diff --git a/docs/resources/secondary_database.md b/docs/resources/secondary_database.md new file mode 100644 index 0000000000..63383d4755 --- /dev/null +++ b/docs/resources/secondary_database.md @@ -0,0 +1,98 @@ +--- +page_title: "snowflake_secondary_database Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + A secondary database creates a replica of an existing primary database (i.e. a secondary database). For more information about database replication, see Introduction to database replication across multiple accounts https://docs.snowflake.com/en/user-guide/db-replication-intro. +--- + +# snowflake_secondary_database (Resource) + +A secondary database creates a replica of an existing primary database (i.e. a secondary database). For more information about database replication, see [Introduction to database replication across multiple accounts](https://docs.snowflake.com/en/user-guide/db-replication-intro). + +## Example Usage + +```terraform +# 1. Preparing primary database +resource "snowflake_database" "primary" { + provider = primary_account # notice the provider fields + name = "database_name" + replication_configuration { + accounts = ["."] + ignore_edition_check = true + } +} + +# 2. 
Creating secondary database +resource "snowflake_secondary_database" "test" { + provider = secondary_account + name = snowflake_database.primary.name # It's recommended to give a secondary database the same name as its primary database + as_replica_of = "..${snowflake_database.primary.name}" + is_transient = false + + data_retention_time_in_days { + value = 10 + } + + max_data_extension_time_in_days { + value = 20 + } + + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A secondary database" +} +``` + + +## Schema + +### Required + +- `as_replica_of` (String) A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `""."".""`. +- `name` (String) Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '..') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database. + +### Optional + +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `comment` (String) Specifies a comment for the database. +- `data_retention_time_in_days` (Block List, Max: 1) Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. 
For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel). (see [below for nested schema](#nestedblock--data_retention_time_in_days)) +- `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `is_transient` (Boolean) Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss. +- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `max_data_extension_time_in_days` (Block List, Max: 1) Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days). (see [below for nested schema](#nestedblock--max_data_extension_time_in_days)) +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. 
You can only set this parameter for tables that use an external Iceberg catalog. +- `storage_serialization_policy` (String) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). + +### Read-Only + +- `id` (String) The ID of this resource. + + +### Nested Schema for `data_retention_time_in_days` + +Required: + +- `value` (Number) + + + +### Nested Schema for `max_data_extension_time_in_days` + +Required: + +- `value` (Number) + +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_secondary_database.example 'secondary_database_name' +``` diff --git a/docs/resources/shared_database.md b/docs/resources/shared_database.md new file mode 100644 index 0000000000..271a1b7e01 --- /dev/null +++ b/docs/resources/shared_database.md @@ -0,0 +1,81 @@ +--- +page_title: "snowflake_shared_database Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see Introduction to Secure Data Sharing https://docs.snowflake.com/en/user-guide/data-sharing-intro. +--- + +# snowflake_shared_database (Resource) + +A shared database creates a database from a share provided by another Snowflake account. 
For more information about shares, see [Introduction to Secure Data Sharing](https://docs.snowflake.com/en/user-guide/data-sharing-intro). + +## Example Usage + +```terraform +# 1. Preparing database to share +resource "snowflake_share" "test" { + provider = primary_account # notice the provider fields + name = "share_name" + accounts = ["."] +} + +resource "snowflake_database" "test" { + provider = primary_account + name = "shared_database" +} + +resource "snowflake_grant_privileges_to_share" "test" { + provider = primary_account + to_share = snowflake_share.test.name + privileges = ["USAGE"] + on_database = snowflake_database.test.name +} + +# 2. Creating shared database +resource "snowflake_shared_database" "test" { + provider = secondary_account + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_database.test.name # shared database should have the same name as the "imported" one + from_share = "..${snowflake_share.test.name}" + is_transient = false + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A shared database" +} +``` + + +## Schema + +### Required + +- `from_share` (String) A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `"".""`. +- `name` (String) Specifies the identifier for the database; must be unique for your account. + +### Optional + +- `catalog` (String) The database parameter that specifies the default catalog to use for Iceberg tables. +- `comment` (String) Specifies a comment for the database. +- `default_ddl_collation` (String) Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. 
For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification). +- `external_volume` (String) The database parameter that specifies the default external volume to use for Iceberg tables. +- `log_level` (String) Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: [TRACE DEBUG INFO WARN ERROR FATAL OFF]. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level). +- `replace_invalid_characters` (Boolean) Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog. +- `storage_serialization_policy` (String) Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: [COMPATIBLE OPTIMIZED]. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake. +- `trace_level` (String) Controls how trace events are ingested into the event table. Valid options are: [ALWAYS ON_EVENT OFF]. For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level). + +### Read-Only + +- `id` (String) The ID of this resource. 
+ +## Import + +Import is supported using the following syntax: + +```shell +terraform import snowflake_shared_database.example 'shared_database_name' +``` diff --git a/examples/resources/snowflake_secondary_database/import.sh b/examples/resources/snowflake_secondary_database/import.sh new file mode 100644 index 0000000000..f183eac8ac --- /dev/null +++ b/examples/resources/snowflake_secondary_database/import.sh @@ -0,0 +1 @@ +terraform import snowflake_secondary_database.example 'secondary_database_name' diff --git a/examples/resources/snowflake_secondary_database/resource.tf b/examples/resources/snowflake_secondary_database/resource.tf new file mode 100644 index 0000000000..dd606162ef --- /dev/null +++ b/examples/resources/snowflake_secondary_database/resource.tf @@ -0,0 +1,34 @@ +# 1. Preparing primary database +resource "snowflake_database" "primary" { + provider = primary_account # notice the provider fields + name = "database_name" + replication_configuration { + accounts = ["."] + ignore_edition_check = true + } +} + +# 2. 
Creating secondary database +resource "snowflake_secondary_database" "test" { + provider = secondary_account + name = snowflake_database.primary.name # It's recommended to give a secondary database the same name as its primary database + as_replica_of = "..${snowflake_database.primary.name}" + is_transient = false + + data_retention_time_in_days { + value = 10 + } + + max_data_extension_time_in_days { + value = 20 + } + + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A secondary database" +} diff --git a/examples/resources/snowflake_shared_database/import.sh b/examples/resources/snowflake_shared_database/import.sh new file mode 100644 index 0000000000..6cf900566c --- /dev/null +++ b/examples/resources/snowflake_shared_database/import.sh @@ -0,0 +1 @@ +terraform import snowflake_shared_database.example 'shared_database_name' diff --git a/examples/resources/snowflake_shared_database/resource.tf b/examples/resources/snowflake_shared_database/resource.tf new file mode 100644 index 0000000000..7f506bccf9 --- /dev/null +++ b/examples/resources/snowflake_shared_database/resource.tf @@ -0,0 +1,35 @@ +# 1. Preparing database to share +resource "snowflake_share" "test" { + provider = primary_account # notice the provider fields + name = "share_name" + accounts = ["."] +} + +resource "snowflake_database" "test" { + provider = primary_account + name = "shared_database" +} + +resource "snowflake_grant_privileges_to_share" "test" { + provider = primary_account + to_share = snowflake_share.test.name + privileges = ["USAGE"] + on_database = snowflake_database.test.name +} + +# 2. 
Creating shared database +resource "snowflake_shared_database" "test" { + provider = secondary_account + depends_on = [snowflake_grant_privileges_to_share.test] + name = snowflake_database.test.name # shared database should have the same name as the "imported" one + from_share = "..${snowflake_share.test.name}" + is_transient = false + external_volume = "external_volume_name" + catalog = "catalog_name" + replace_invalid_characters = false + default_ddl_collation = "en_US" + storage_serialization_policy = "OPTIMIZED" + log_level = "OFF" + trace_level = "OFF" + comment = "A shared database" +} diff --git a/pkg/acceptance/check_destroy.go index 5e7b996222..31273ce056 100644 --- a/pkg/acceptance/check_destroy.go +++ b/pkg/acceptance/check_destroy.go @@ -136,12 +136,18 @@ var showByIdFunctions = map[resources.Resource]showByIdFunc{ resources.Schema: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Schemas.ShowByID) }, + resources.SecondaryDatabase: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Databases.ShowByID) + }, resources.Sequence: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Sequences.ShowByID) }, resources.Share: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Shares.ShowByID) }, + resources.SharedDatabase: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { + return runShowById(ctx, id, client.Databases.ShowByID) + }, resources.Stage: func(ctx context.Context, client *sdk.Client, id sdk.ObjectIdentifier) error { return runShowById(ctx, id, client.Stages.ShowByID) }, diff --git a/pkg/acceptance/helpers/database_client.go index 308130d8c8..ad050aff03 100644 --- 
a/pkg/acceptance/helpers/database_client.go +++ b/pkg/acceptance/helpers/database_client.go @@ -25,6 +25,30 @@ func (c *DatabaseClient) client() sdk.Databases { return c.context.client.Databases } +func (c *DatabaseClient) CreatePrimaryDatabase(t *testing.T, enableReplicationTo []sdk.AccountIdentifier) (*sdk.Database, sdk.ExternalObjectIdentifier, func()) { + t.Helper() + ctx := context.Background() + + primaryDatabase, primaryDatabaseCleanup := c.CreateDatabase(t) + + err := c.client().AlterReplication(ctx, primaryDatabase.ID(), &sdk.AlterDatabaseReplicationOptions{ + EnableReplication: &sdk.EnableReplication{ + ToAccounts: enableReplicationTo, + IgnoreEditionCheck: sdk.Bool(true), + }, + }) + require.NoError(t, err) + + organizationName, err := c.context.client.ContextFunctions.CurrentOrganizationName(ctx) + require.NoError(t, err) + + accountName, err := c.context.client.ContextFunctions.CurrentAccountName(ctx) + require.NoError(t, err) + + externalPrimaryId := sdk.NewExternalObjectIdentifier(sdk.NewAccountIdentifier(organizationName, accountName), primaryDatabase.ID()) + return primaryDatabase, externalPrimaryId, primaryDatabaseCleanup +} + func (c *DatabaseClient) CreateDatabase(t *testing.T) (*sdk.Database, func()) { t.Helper() return c.CreateDatabaseWithOptions(t, c.ids.RandomAccountObjectIdentifier(), &sdk.CreateDatabaseOptions{}) diff --git a/pkg/acceptance/helpers/table_client.go b/pkg/acceptance/helpers/table_client.go index 1c7b830817..627edd474f 100644 --- a/pkg/acceptance/helpers/table_client.go +++ b/pkg/acceptance/helpers/table_client.go @@ -76,7 +76,7 @@ func (c *TableClient) DropTableFunc(t *testing.T, id sdk.SchemaObjectIdentifier) return func() { // to prevent error when schema was removed before the table - _, err := c.context.client.Schemas.ShowByID(ctx, sdk.NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())) + _, err := c.context.client.Schemas.ShowByID(ctx, id.SchemaId()) if errors.Is(err, sdk.ErrObjectNotExistOrAuthorized) { 
return } diff --git a/pkg/datasources/alerts_acceptance_test.go b/pkg/datasources/alerts_acceptance_test.go index e48b348d51..ffa67019b2 100644 --- a/pkg/datasources/alerts_acceptance_test.go +++ b/pkg/datasources/alerts_acceptance_test.go @@ -6,12 +6,13 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" ) func TestAcc_Alerts(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + alertId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -22,33 +23,33 @@ func TestAcc_Alerts(t *testing.T) { CheckDestroy: nil, Steps: []resource.TestStep{ { - Config: alertsResourceConfig(name) + alertsDatasourceConfigNoOptionals(), + Config: alertsResourceConfig(alertId) + alertsDatasourceConfigNoOptionals(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.snowflake_alerts.test_datasource_alert", "alerts.#"), ), }, { - Config: alertsResourceConfig(name) + alertsDatasourceConfigDbOnly(), + Config: alertsResourceConfig(alertId) + alertsDatasourceConfigDbOnly(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.snowflake_alerts.test_datasource_alert", "alerts.#"), ), }, { - Config: alertsResourceConfig(name) + alertsDatasourceConfigDbAndSchema(), + Config: alertsResourceConfig(alertId) + alertsDatasourceConfigDbAndSchema(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.snowflake_alerts.test_datasource_alert", "alerts.#"), - resource.TestCheckResourceAttr("data.snowflake_alerts.test_datasource_alert", "alerts.0.name", name), + resource.TestCheckResourceAttr("data.snowflake_alerts.test_datasource_alert", "alerts.0.name", alertId.Name()), ), }, { - Config: 
alertsResourceConfig(name) + alertsDatasourceConfigAllOptionals(name), + Config: alertsResourceConfig(alertId) + alertsDatasourceConfigAllOptionals(alertId), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.snowflake_alerts.test_datasource_alert", "alerts.#"), - resource.TestCheckResourceAttr("data.snowflake_alerts.test_datasource_alert", "alerts.0.name", name), + resource.TestCheckResourceAttr("data.snowflake_alerts.test_datasource_alert", "alerts.0.name", alertId.Name()), ), }, { - Config: alertsResourceConfig(name) + alertsDatasourceConfigSchemaOnly(), + Config: alertsResourceConfig(alertId) + alertsDatasourceConfigSchemaOnly(), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttrSet("data.snowflake_alerts.test_datasource_alert", "alerts.#"), ), @@ -57,7 +58,7 @@ func TestAcc_Alerts(t *testing.T) { }) } -func alertsResourceConfig(name string) string { +func alertsResourceConfig(alertId sdk.SchemaObjectIdentifier) string { return fmt.Sprintf(` resource "snowflake_alert" "test_resource_alert" { name = "%s" @@ -72,7 +73,7 @@ resource "snowflake_alert" "test_resource_alert" { interval = "60" } } -`, name, acc.TestDatabaseName, acc.TestSchemaName, acc.TestWarehouseName) +`, alertId.Name(), alertId.DatabaseName(), alertId.SchemaName(), acc.TestWarehouseName) } func alertsDatasourceConfigNoOptionals() string { @@ -98,14 +99,14 @@ data "snowflake_alerts" "test_datasource_alert" { `, acc.TestDatabaseName, acc.TestSchemaName) } -func alertsDatasourceConfigAllOptionals(name string) string { +func alertsDatasourceConfigAllOptionals(alertId sdk.SchemaObjectIdentifier) string { return fmt.Sprintf(` data "snowflake_alerts" "test_datasource_alert" { database = "%s" schema = "%s" pattern = "%s" } -`, acc.TestDatabaseName, acc.TestSchemaName, name) +`, alertId.DatabaseName(), alertId.SchemaName(), alertId.Name()) } func alertsDatasourceConfigSchemaOnly() string { diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index 
457f3e0b14..90c3b25a44 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -494,9 +494,11 @@ func getResources() map[string]*schema.Resource { "snowflake_saml_integration": resources.SAMLIntegration(), "snowflake_schema": resources.Schema(), "snowflake_scim_integration": resources.SCIMIntegration(), + "snowflake_secondary_database": resources.SecondaryDatabase(), "snowflake_sequence": resources.Sequence(), "snowflake_session_parameter": resources.SessionParameter(), "snowflake_share": resources.Share(), + "snowflake_shared_database": resources.SharedDatabase(), "snowflake_stage": resources.Stage(), "snowflake_storage_integration": resources.StorageIntegration(), "snowflake_stream": resources.Stream(), diff --git a/pkg/provider/resources/resources.go b/pkg/provider/resources/resources.go index 29f436295e..550836803e 100644 --- a/pkg/provider/resources/resources.go +++ b/pkg/provider/resources/resources.go @@ -28,8 +28,10 @@ const ( Role resource = "snowflake_role" RowAccessPolicy resource = "snowflake_row_access_policy" Schema resource = "snowflake_schema" + SecondaryDatabase resource = "snowflake_secondary_database" Sequence resource = "snowflake_sequence" Share resource = "snowflake_share" + SharedDatabase resource = "snowflake_shared_database" Stage resource = "snowflake_stage" StorageIntegration resource = "snowflake_storage_integration" Stream resource = "snowflake_stream" diff --git a/pkg/resources/custom_diffs.go b/pkg/resources/custom_diffs.go new file mode 100644 index 0000000000..270766a957 --- /dev/null +++ b/pkg/resources/custom_diffs.go @@ -0,0 +1,49 @@ +package resources + +import ( + "context" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +// NestedIntValueAccountObjectComputedIf is 
NestedValueComputedIf, +// but dedicated for account level objects with integer-typed properties. +func NestedIntValueAccountObjectComputedIf(key string, parameter sdk.AccountParameter) schema.CustomizeDiffFunc { + return NestedValueComputedIf( + key, + func(client *sdk.Client) (*sdk.Parameter, error) { + return client.Parameters.ShowAccountParameter(context.Background(), parameter) + }, + func(v any) string { return strconv.Itoa(v.(int)) }, + ) +} + +// NestedValueComputedIf internally calls schema.ResourceDiff.SetNewComputed whenever the inner function returns true. +// It's main purpose was to use it with hierarchical values that are marked with Computed and Optional. Such values should +// be recomputed whenever the value is not in the configuration and the remote value is not equal to the value in state. +func NestedValueComputedIf(key string, showParam func(client *sdk.Client) (*sdk.Parameter, error), valueToString func(v any) string) schema.CustomizeDiffFunc { + return customdiff.ComputedIf(key, func(ctx context.Context, d *schema.ResourceDiff, meta interface{}) bool { + configValue, ok := d.GetRawConfig().AsValueMap()[key] + if ok && len(configValue.AsValueSlice()) == 1 { + return false + } + + client := meta.(*provider.Context).Client + + param, err := showParam(client) + if err != nil { + return false + } + + stateValue := d.Get(key).([]any) + if len(stateValue) != 1 { + return false + } + + return param.Value != valueToString(stateValue[0].(map[string]any)["value"]) + }) +} diff --git a/pkg/resources/custom_diffs_test.go b/pkg/resources/custom_diffs_test.go new file mode 100644 index 0000000000..bc7172dc03 --- /dev/null +++ b/pkg/resources/custom_diffs_test.go @@ -0,0 +1,152 @@ +package resources_test + +import ( + "context" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + 
"github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/go-cty/cty" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" + "github.com/hashicorp/terraform-plugin-sdk/v2/terraform" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestNestedValueComputedIf(t *testing.T) { + customDiff := resources.NestedValueComputedIf( + "nested_value", + func(client *sdk.Client) (*sdk.Parameter, error) { + return &sdk.Parameter{ + Key: "Parameter", + Value: "snow-value", + }, nil + }, + func(v any) string { return v.(string) }, + ) + providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeString, customDiff) + + t.Run("value set in the configuration and state", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapVal(map[string]cty.Value{ + "nested_value": cty.ListVal([]cty.Value{ + cty.MapVal(map[string]cty.Value{ + "value": cty.NumberIntVal(123), + }), + }), + }), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": 123, + }, + }, + }) + assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("value set only in the configuration", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapVal(map[string]cty.Value{ + "nested_value": cty.ListVal([]cty.Value{ + cty.MapVal(map[string]cty.Value{ + "value": cty.NumberIntVal(123), + }), + }), + }), map[string]any{}) + assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("value set in the state and not equals with parameter", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": "value-to-change", + }, + }, + }) + assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("value set in the state and equals with parameter", func(t *testing.T) { + 
diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": "snow-value", + }, + }, + }) + assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + }) +} + +func TestNestedIntValueAccountObjectComputedIf(t *testing.T) { + providerConfig := createProviderWithNestedValueAndCustomDiff(t, schema.TypeInt, resources.NestedIntValueAccountObjectComputedIf("nested_value", sdk.AccountParameterDataRetentionTimeInDays)) + + t.Run("different value than on the Snowflake side", func(t *testing.T) { + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": 999, // value outside of valid range + }, + }, + }) + assert.True(t, diff.Attributes["nested_value.#"].NewComputed) + }) + + t.Run("same value as in Snowflake", func(t *testing.T) { + dataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + diff := calculateDiff(t, providerConfig, cty.MapValEmpty(cty.Type{}), map[string]any{ + "nested_value": []any{ + map[string]any{ + "value": dataRetentionTimeInDays.Value, + }, + }, + }) + assert.False(t, diff.Attributes["nested_value.#"].NewComputed) + }) +} + +func createProviderWithNestedValueAndCustomDiff(t *testing.T, valueType schema.ValueType, customDiffFunc schema.CustomizeDiffFunc) *schema.Provider { + t.Helper() + return &schema.Provider{ + ResourcesMap: map[string]*schema.Resource{ + "test": { + Schema: map[string]*schema.Schema{ + "nested_value": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: valueType, + Required: true, + }, + }, + }, + Computed: true, + Optional: true, + }, + }, + CustomizeDiff: customDiffFunc, + }, + }, + } +} + +func calculateDiff(t *testing.T, providerConfig *schema.Provider, rawConfigValue 
cty.Value, stateValue map[string]any) *terraform.InstanceDiff { + t.Helper() + diff, err := providerConfig.ResourcesMap["test"].Diff( + context.Background(), + &terraform.InstanceState{ + RawConfig: rawConfigValue, + }, + &terraform.ResourceConfig{ + Config: stateValue, + }, + &provider.Context{Client: acc.Client(t)}, + ) + require.NoError(t, err) + return diff +} diff --git a/pkg/resources/database.go b/pkg/resources/database.go index 2738d9f712..d6fd261400 100644 --- a/pkg/resources/database.go +++ b/pkg/resources/database.go @@ -62,6 +62,32 @@ var databaseSchema = map[string]*schema.Schema{ ForceNew: true, ConflictsWith: []string{"from_share", "from_database"}, }, + // TODO: Add accounts for replication (it will promote local database to serve as a primary database for replication). + // "accounts for replication": { + // Type: schema.TypeList, + // Required: true, + // MinItems: 1, + // Elem: &schema.Schema{ + // Type: schema.TypeString, + // // TODO(ticket-number): Validate account identifiers. + // }, + // // TODO: Desc + // }, + // "accounts for failover": { + // Type: schema.TypeList, + // Required: true, + // MinItems: 1, + // Elem: &schema.Schema{ + // Type: schema.TypeString, + // // TODO(ticket-number): Validate account identifiers. 
+ // }, + // // TODO: Desc + // }, + // "ignore_edition_check": { + // Type: schema.TypeBool, + // // TODO: Desc + // Optional: true, + // }, "replication_configuration": { Type: schema.TypeList, Description: "When set, specifies the configurations for database replication.", diff --git a/pkg/resources/dynamic_table_acceptance_test.go b/pkg/resources/dynamic_table_acceptance_test.go index a5876a188f..092fa2185b 100644 --- a/pkg/resources/dynamic_table_acceptance_test.go +++ b/pkg/resources/dynamic_table_acceptance_test.go @@ -161,8 +161,8 @@ func TestAcc_DynamicTable_issue2173(t *testing.T) { tableName := dynamicTableName + "_table" tableId := acc.TestClient().Ids.NewSchemaObjectIdentifier(tableName) query := fmt.Sprintf(`select "ID" from %s`, tableId.FullyQualifiedName()) - otherSchema := acc.TestClient().Ids.Alpha() - otherSchemaId := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, otherSchema) + otherSchemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + otherSchemaName := otherSchemaId.Name() newDynamicTableId := acc.TestClient().Ids.NewSchemaObjectIdentifierInSchema(dynamicTableName, otherSchemaId) m := func() map[string]config.Variable { return map[string]config.Variable{ @@ -173,7 +173,7 @@ func TestAcc_DynamicTable_issue2173(t *testing.T) { "query": config.StringVariable(query), "comment": config.StringVariable("Terraform acceptance test for GH issue 2173"), "table_name": config.StringVariable(tableName), - "other_schema": config.StringVariable(otherSchema), + "other_schema": config.StringVariable(otherSchemaName), } } @@ -192,7 +192,7 @@ func TestAcc_DynamicTable_issue2173(t *testing.T) { PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, }, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr("snowflake_schema.other_schema", "name", otherSchema), + resource.TestCheckResourceAttr("snowflake_schema.other_schema", "name", otherSchemaName), resource.TestCheckResourceAttr("snowflake_table.t", "name", 
tableName), ), }, diff --git a/pkg/resources/dynamic_table_test.go b/pkg/resources/dynamic_table_test.go index f52bd8d3a3..fb362a40d3 100644 --- a/pkg/resources/dynamic_table_test.go +++ b/pkg/resources/dynamic_table_test.go @@ -19,7 +19,7 @@ func init() { } ctx := context.Background() dynamicTables, err := client.DynamicTables.Show(ctx, sdk.NewShowDynamicTableRequest().WithIn(&sdk.In{ - Schema: sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName), + Schema: acc.TestClient().Ids.SchemaId(), })) if err != nil { return fmt.Errorf("error getting dynamic tables during sweep: %w", err) diff --git a/pkg/resources/external_function_acceptance_test.go b/pkg/resources/external_function_acceptance_test.go index d4109fdc87..779bff1e00 100644 --- a/pkg/resources/external_function_acceptance_test.go +++ b/pkg/resources/external_function_acceptance_test.go @@ -226,7 +226,8 @@ func TestAcc_ExternalFunction_complete(t *testing.T) { } func TestAcc_ExternalFunction_migrateFromVersion085(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments([]sdk.DataType{sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR}) + name := id.Name() resourceName := "snowflake_external_function.f" resource.Test(t, resource.TestCase{ @@ -261,7 +262,7 @@ func TestAcc_ExternalFunction_migrateFromVersion085(t *testing.T) { PreApply: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()}, }, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR, sdk.DataTypeVARCHAR}).FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "name", name), resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), resource.TestCheckResourceAttr(resourceName, "schema", 
acc.TestSchemaName), diff --git a/pkg/resources/function_acceptance_test.go b/pkg/resources/function_acceptance_test.go index a07b5e131a..224d5985a7 100644 --- a/pkg/resources/function_acceptance_test.go +++ b/pkg/resources/function_acceptance_test.go @@ -185,7 +185,8 @@ func TestAcc_Function_complex(t *testing.T) { // proves issue https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2490 func TestAcc_Function_migrateFromVersion085(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments([]sdk.DataType{sdk.DataTypeVARCHAR}) + name := id.Name() comment := random.Comment() resourceName := "snowflake_function.f" @@ -220,7 +221,7 @@ func TestAcc_Function_migrateFromVersion085(t *testing.T) { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: functionConfig(acc.TestDatabaseName, acc.TestSchemaName, name, comment), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR}).FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "name", name), resource.TestCheckResourceAttr(resourceName, "database", acc.TestDatabaseName), resource.TestCheckResourceAttr(resourceName, "schema", acc.TestSchemaName), diff --git a/pkg/resources/grant_database_role_acceptance_test.go b/pkg/resources/grant_database_role_acceptance_test.go index 8965ae2900..d575f49e8c 100644 --- a/pkg/resources/grant_database_role_acceptance_test.go +++ b/pkg/resources/grant_database_role_acceptance_test.go @@ -6,7 +6,6 @@ import ( acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" - "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" "github.com/hashicorp/terraform-plugin-testing/config" 
"github.com/hashicorp/terraform-plugin-testing/helper/resource" "github.com/hashicorp/terraform-plugin-testing/tfversion" @@ -128,19 +127,18 @@ func TestAcc_GrantDatabaseRole_accountRole(t *testing.T) { // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2410 is fixed func TestAcc_GrantDatabaseRole_share(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - databaseRoleName := acc.TestClient().Ids.Alpha() - databaseRoleId := sdk.NewDatabaseObjectIdentifier(databaseName, databaseRoleName).FullyQualifiedName() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) shareId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - shareName := shareId.Name() - resourceName := "snowflake_grant_database_role.test" + configVariables := func() config.Variables { return config.Variables{ - "database": config.StringVariable(databaseName), - "database_role_name": config.StringVariable(databaseRoleName), - "share_name": config.StringVariable(shareName), + "database": config.StringVariable(databaseId.Name()), + "database_role_name": config.StringVariable(databaseRoleId.Name()), + "share_name": config.StringVariable(shareId.Name()), } } + resourceName := "snowflake_grant_database_role.test" resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -153,9 +151,9 @@ func TestAcc_GrantDatabaseRole_share(t *testing.T) { ConfigDirectory: config.StaticDirectory("testdata/TestAcc_GrantDatabaseRole/share"), ConfigVariables: configVariables(), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId), - resource.TestCheckResourceAttr(resourceName, "share_name", shareName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf(`%v|%v|%v`, databaseRoleId, "SHARE", 
shareId.FullyQualifiedName())), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "share_name", shareId.Name()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf(`%v|%v|%v`, databaseRoleId.FullyQualifiedName(), "SHARE", shareId.FullyQualifiedName())), ), }, // test import diff --git a/pkg/resources/grant_ownership_acceptance_test.go b/pkg/resources/grant_ownership_acceptance_test.go index d5c2532b4e..0ed9c3cdd3 100644 --- a/pkg/resources/grant_ownership_acceptance_test.go +++ b/pkg/resources/grant_ownership_acceptance_test.go @@ -120,8 +120,9 @@ func TestAcc_GrantOwnership_OnObject_Schema_ToAccountRole(t *testing.T) { databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() databaseName := databaseId.Name() - schemaName := acc.TestClient().Ids.Alpha() - schemaFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName).FullyQualifiedName() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + schemaName := schemaId.Name() + schemaFullyQualifiedName := schemaId.FullyQualifiedName() accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() @@ -168,12 +169,15 @@ func TestAcc_GrantOwnership_OnObject_Schema_ToAccountRole(t *testing.T) { } func TestAcc_GrantOwnership_OnObject_Schema_ToDatabaseRole(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - schemaFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName).FullyQualifiedName() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseName := databaseId.Name() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + schemaName := schemaId.Name() + schemaFullyQualifiedName := schemaId.FullyQualifiedName() - databaseRoleName := 
acc.TestClient().Ids.Alpha() - databaseRoleFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, databaseRoleName).FullyQualifiedName() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + databaseRoleName := databaseRoleId.Name() + databaseRoleFullyQualifiedName := databaseRoleId.FullyQualifiedName() configVariables := config.Variables{ "database_role_name": config.StringVariable(databaseRoleName), @@ -199,7 +203,7 @@ func TestAcc_GrantOwnership_OnObject_Schema_ToDatabaseRole(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToDatabaseRole|%s||OnObject|SCHEMA|%s", databaseRoleFullyQualifiedName, schemaFullyQualifiedName)), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ - DatabaseRole: sdk.NewDatabaseObjectIdentifier(databaseName, databaseRoleName), + DatabaseRole: databaseRoleId, }, }, sdk.ObjectTypeSchema, databaseRoleName, schemaFullyQualifiedName), ), @@ -218,19 +222,18 @@ func TestAcc_GrantOwnership_OnObject_Schema_ToDatabaseRole(t *testing.T) { func TestAcc_GrantOwnership_OnObject_Table_ToAccountRole(t *testing.T) { databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() databaseName := databaseId.Name() - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - tableFullyQualifiedName := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName).FullyQualifiedName() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + schemaName := schemaId.Name() + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() - accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() configVariables := config.Variables{ "account_role_name": config.StringVariable(accountRoleName), "database_name": config.StringVariable(databaseName), 
"schema_name": config.StringVariable(schemaName), - "table_name": config.StringVariable(tableName), + "table_name": config.StringVariable(tableId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -247,13 +250,13 @@ func TestAcc_GrantOwnership_OnObject_Table_ToAccountRole(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), resource.TestCheckResourceAttr(resourceName, "on.0.object_type", "TABLE"), - resource.TestCheckResourceAttr(resourceName, "on.0.object_name", tableFullyQualifiedName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|TABLE|%s", accountRoleFullyQualifiedName, tableFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "on.0.object_name", tableId.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|TABLE|%s", accountRoleId.FullyQualifiedName(), tableId.FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ Role: accountRoleId, }, - }, sdk.ObjectTypeTable, accountRoleName, tableFullyQualifiedName), + }, sdk.ObjectTypeTable, accountRoleName, tableId.FullyQualifiedName()), ), }, { @@ -268,13 +271,17 @@ func TestAcc_GrantOwnership_OnObject_Table_ToAccountRole(t *testing.T) { } func TestAcc_GrantOwnership_OnObject_Table_ToDatabaseRole(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - tableFullyQualifiedName := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName).FullyQualifiedName() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseName := databaseId.Name() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + schemaName := schemaId.Name() + tableId := 
acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + tableName := tableId.Name() + tableFullyQualifiedName := tableId.FullyQualifiedName() - databaseRoleName := acc.TestClient().Ids.Alpha() - databaseRoleFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, databaseRoleName).FullyQualifiedName() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + databaseRoleName := databaseRoleId.Name() + databaseRoleFullyQualifiedName := databaseRoleId.FullyQualifiedName() configVariables := config.Variables{ "database_role_name": config.StringVariable(databaseRoleName), @@ -301,7 +308,7 @@ func TestAcc_GrantOwnership_OnObject_Table_ToDatabaseRole(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToDatabaseRole|%s||OnObject|TABLE|%s", databaseRoleFullyQualifiedName, tableFullyQualifiedName)), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ - DatabaseRole: sdk.NewDatabaseObjectIdentifier(databaseName, databaseRoleName), + DatabaseRole: databaseRoleId, }, }, sdk.ObjectTypeTable, databaseRoleName, tableFullyQualifiedName), ), @@ -319,25 +326,17 @@ func TestAcc_GrantOwnership_OnObject_Table_ToDatabaseRole(t *testing.T) { func TestAcc_GrantOwnership_OnAll_InDatabase_ToAccountRole(t *testing.T) { databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - databaseName := databaseId.Name() - databaseFullyQualifiedName := databaseId.FullyQualifiedName() - accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - accountRoleName := accountRoleId.Name() - accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() - - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - secondTableName := acc.TestClient().Ids.Alpha() - tableFullyQualifiedName := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName).FullyQualifiedName() - secondTableFullyQualifiedName := 
sdk.NewSchemaObjectIdentifier(databaseName, schemaName, secondTableName).FullyQualifiedName() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + secondTableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) configVariables := config.Variables{ - "account_role_name": config.StringVariable(accountRoleName), - "database_name": config.StringVariable(databaseName), - "schema_name": config.StringVariable(schemaName), - "table_name": config.StringVariable(tableName), - "second_table_name": config.StringVariable(secondTableName), + "account_role_name": config.StringVariable(accountRoleId.Name()), + "database_name": config.StringVariable(databaseId.Name()), + "schema_name": config.StringVariable(schemaId.Name()), + "table_name": config.StringVariable(tableId.Name()), + "second_table_name": config.StringVariable(secondTableId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -352,15 +351,15 @@ func TestAcc_GrantOwnership_OnAll_InDatabase_ToAccountRole(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantOwnership/OnAll_InDatabase_ToAccountRole"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), + resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleId.Name()), resource.TestCheckResourceAttr(resourceName, "on.0.all.0.object_type_plural", "TABLES"), - resource.TestCheckResourceAttr(resourceName, "on.0.all.0.in_database", databaseName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnAll|TABLES|InDatabase|%s", accountRoleFullyQualifiedName, databaseFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "on.0.all.0.in_database", databaseId.Name()), + resource.TestCheckResourceAttr(resourceName, "id", 
fmt.Sprintf("ToAccountRole|%s||OnAll|TABLES|InDatabase|%s", accountRoleId.FullyQualifiedName(), databaseId.FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ Role: accountRoleId, }, - }, sdk.ObjectTypeTable, accountRoleName, tableFullyQualifiedName, secondTableFullyQualifiedName), + }, sdk.ObjectTypeTable, accountRoleId.Name(), tableId.FullyQualifiedName(), secondTableId.FullyQualifiedName()), ), }, { @@ -375,25 +374,20 @@ func TestAcc_GrantOwnership_OnAll_InDatabase_ToAccountRole(t *testing.T) { } func TestAcc_GrantOwnership_OnAll_InSchema_ToAccountRole(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - schemaFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName).FullyQualifiedName() - - tableName := acc.TestClient().Ids.Alpha() - secondTableName := acc.TestClient().Ids.Alpha() - tableFullyQualifiedName := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName).FullyQualifiedName() - secondTableFullyQualifiedName := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, secondTableName).FullyQualifiedName() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + secondTableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() - accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() configVariables := config.Variables{ "account_role_name": config.StringVariable(accountRoleName), - "database_name": config.StringVariable(databaseName), - "schema_name": config.StringVariable(schemaName), - "table_name": config.StringVariable(tableName), - "second_table_name": config.StringVariable(secondTableName), + 
"database_name": config.StringVariable(databaseId.Name()), + "schema_name": config.StringVariable(schemaId.Name()), + "table_name": config.StringVariable(tableId.Name()), + "second_table_name": config.StringVariable(secondTableId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -410,13 +404,13 @@ func TestAcc_GrantOwnership_OnAll_InSchema_ToAccountRole(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), resource.TestCheckResourceAttr(resourceName, "on.0.all.0.object_type_plural", "TABLES"), - resource.TestCheckResourceAttr(resourceName, "on.0.all.0.in_schema", schemaFullyQualifiedName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnAll|TABLES|InSchema|%s", accountRoleFullyQualifiedName, schemaFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "on.0.all.0.in_schema", schemaId.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnAll|TABLES|InSchema|%s", accountRoleId.FullyQualifiedName(), schemaId.FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ Role: accountRoleId, }, - }, sdk.ObjectTypeTable, accountRoleName, tableFullyQualifiedName, secondTableFullyQualifiedName), + }, sdk.ObjectTypeTable, accountRoleName, tableId.FullyQualifiedName(), secondTableId.FullyQualifiedName()), ), }, { @@ -480,9 +474,11 @@ func TestAcc_GrantOwnership_OnFuture_InDatabase_ToAccountRole(t *testing.T) { } func TestAcc_GrantOwnership_OnFuture_InSchema_ToAccountRole(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - schemaFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, schemaName).FullyQualifiedName() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseName := databaseId.Name() + schemaId := 
acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + schemaName := schemaId.Name() + schemaFullyQualifiedName := schemaId.FullyQualifiedName() accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() @@ -513,7 +509,7 @@ func TestAcc_GrantOwnership_OnFuture_InSchema_ToAccountRole(t *testing.T) { checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ Future: sdk.Bool(true), In: &sdk.ShowGrantsIn{ - Schema: sdk.Pointer(sdk.NewDatabaseObjectIdentifier(databaseName, schemaName)), + Schema: sdk.Pointer(schemaId), }, }, sdk.ObjectTypeTable, accountRoleName, fmt.Sprintf(`"%s"."%s".""`, databaseName, schemaName)), ), @@ -684,22 +680,23 @@ func TestAcc_GrantOwnership_AccountRoleRemovedOutsideTerraform(t *testing.T) { } func TestAcc_GrantOwnership_OnMaterializedView(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - materializedViewName := acc.TestClient().Ids.Alpha() - materializedViewFullyQualifiedName := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, materializedViewName).FullyQualifiedName() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseName := databaseId.Name() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + schemaName := schemaId.Name() + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + tableName := tableId.Name() + materializedViewId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() - accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() configVariables := config.Variables{ "account_role_name": config.StringVariable(accountRoleName), "database_name": config.StringVariable(databaseName), "schema_name": config.StringVariable(schemaName), 
"table_name": config.StringVariable(tableName), - "materialized_view_name": config.StringVariable(materializedViewName), + "materialized_view_name": config.StringVariable(materializedViewId.Name()), "warehouse_name": config.StringVariable(acc.TestWarehouseName), } resourceName := "snowflake_grant_ownership.test" @@ -717,13 +714,13 @@ func TestAcc_GrantOwnership_OnMaterializedView(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), resource.TestCheckResourceAttr(resourceName, "on.0.object_type", "MATERIALIZED VIEW"), - resource.TestCheckResourceAttr(resourceName, "on.0.object_name", materializedViewFullyQualifiedName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|MATERIALIZED VIEW|%s", accountRoleFullyQualifiedName, materializedViewFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "on.0.object_name", materializedViewId.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|MATERIALIZED VIEW|%s", accountRoleId.FullyQualifiedName(), materializedViewId.FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ Role: accountRoleId, }, - }, sdk.ObjectTypeMaterializedView, accountRoleName, materializedViewFullyQualifiedName), + }, sdk.ObjectTypeMaterializedView, accountRoleName, materializedViewId.FullyQualifiedName()), ), }, { @@ -925,12 +922,11 @@ func TestAcc_GrantOwnership_ForceOwnershipTransferOnCreate(t *testing.T) { func TestAcc_GrantOwnership_OnPipe(t *testing.T) { stageName := acc.TestClient().Ids.Alpha() tableName := acc.TestClient().Ids.Alpha() - pipeName := acc.TestClient().Ids.Alpha() accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() - pipeFullyQualifiedName := 
sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName, pipeName).FullyQualifiedName() + pipeId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() configVariables := config.Variables{ "account_role_name": config.StringVariable(accountRoleName), @@ -938,7 +934,7 @@ func TestAcc_GrantOwnership_OnPipe(t *testing.T) { "schema": config.StringVariable(acc.TestSchemaName), "stage": config.StringVariable(stageName), "table": config.StringVariable(tableName), - "pipe": config.StringVariable(pipeName), + "pipe": config.StringVariable(pipeId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -955,16 +951,16 @@ func TestAcc_GrantOwnership_OnPipe(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), resource.TestCheckResourceAttr(resourceName, "on.0.object_type", sdk.ObjectTypePipe.String()), - resource.TestCheckResourceAttr(resourceName, "on.0.object_name", pipeFullyQualifiedName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|PIPE|%s", accountRoleFullyQualifiedName, pipeFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "on.0.object_name", pipeId.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|PIPE|%s", accountRoleFullyQualifiedName, pipeId.FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ On: &sdk.ShowGrantsOn{ Object: &sdk.Object{ ObjectType: sdk.ObjectTypePipe, - Name: sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(pipeFullyQualifiedName), + Name: pipeId, }, }, - }, sdk.ObjectTypePipe, accountRoleName, pipeFullyQualifiedName), + }, sdk.ObjectTypePipe, accountRoleName, pipeId.FullyQualifiedName()), ), }, }, @@ -974,24 +970,20 @@ func TestAcc_GrantOwnership_OnPipe(t *testing.T) { func TestAcc_GrantOwnership_OnAllPipes(t *testing.T) { stageName := acc.TestClient().Ids.Alpha() tableName := 
acc.TestClient().Ids.Alpha() - pipeName := acc.TestClient().Ids.Alpha() - secondPipeName := acc.TestClient().Ids.Alpha() - pipeFullyQualifiedName := sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName, pipeName).FullyQualifiedName() - secondPipeFullyQualifiedName := sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName, secondPipeName).FullyQualifiedName() + pipeId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secondPipeId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() accountRoleName := accountRoleId.Name() accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() - schemaFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName).FullyQualifiedName() - configVariables := config.Variables{ "account_role_name": config.StringVariable(accountRoleName), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), "stage": config.StringVariable(stageName), "table": config.StringVariable(tableName), - "pipe": config.StringVariable(pipeName), - "second_pipe": config.StringVariable(secondPipeName), + "pipe": config.StringVariable(pipeId.Name()), + "second_pipe": config.StringVariable(secondPipeId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -1007,12 +999,12 @@ func TestAcc_GrantOwnership_OnAllPipes(t *testing.T) { ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnAll|PIPES|InSchema|%s", accountRoleFullyQualifiedName, schemaFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnAll|PIPES|InSchema|%s", accountRoleFullyQualifiedName, acc.TestClient().Ids.SchemaId().FullyQualifiedName())), 
checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ Role: accountRoleId, }, - }, sdk.ObjectTypePipe, accountRoleName, pipeFullyQualifiedName, secondPipeFullyQualifiedName), + }, sdk.ObjectTypePipe, accountRoleName, pipeId.FullyQualifiedName(), secondPipeId.FullyQualifiedName()), ), }, }, @@ -1020,18 +1012,14 @@ func TestAcc_GrantOwnership_OnAllPipes(t *testing.T) { } func TestAcc_GrantOwnership_OnTask(t *testing.T) { - taskName := acc.TestClient().Ids.Alpha() - taskFullyQualifiedName := sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName, taskName).FullyQualifiedName() - + taskId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - accountRoleName := accountRoleId.Name() - accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() configVariables := config.Variables{ - "account_role_name": config.StringVariable(accountRoleName), + "account_role_name": config.StringVariable(accountRoleId.Name()), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), - "task": config.StringVariable(taskName), + "task": config.StringVariable(taskId.Name()), "warehouse": config.StringVariable(acc.TestWarehouseName), } resourceName := "snowflake_grant_ownership.test" @@ -1047,18 +1035,18 @@ func TestAcc_GrantOwnership_OnTask(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantOwnership/OnTask"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), + resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleId.Name()), resource.TestCheckResourceAttr(resourceName, "on.0.object_type", sdk.ObjectTypeTask.String()), - resource.TestCheckResourceAttr(resourceName, "on.0.object_name", taskFullyQualifiedName), - resource.TestCheckResourceAttr(resourceName, "id", 
fmt.Sprintf("ToAccountRole|%s||OnObject|TASK|%s", accountRoleFullyQualifiedName, taskFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "on.0.object_name", taskId.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|TASK|%s", accountRoleId.FullyQualifiedName(), taskId.FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ On: &sdk.ShowGrantsOn{ Object: &sdk.Object{ ObjectType: sdk.ObjectTypeTask, - Name: sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(taskFullyQualifiedName), + Name: taskId, }, }, - }, sdk.ObjectTypeTask, accountRoleName, taskFullyQualifiedName), + }, sdk.ObjectTypeTask, accountRoleId.Name(), taskId.FullyQualifiedName()), ), }, }, @@ -1066,21 +1054,16 @@ func TestAcc_GrantOwnership_OnTask(t *testing.T) { } func TestAcc_GrantOwnership_OnAllTasks(t *testing.T) { - taskName := acc.TestClient().Ids.Alpha() - secondTaskName := acc.TestClient().Ids.Alpha() + taskId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() + secondTaskId := acc.TestClient().Ids.RandomSchemaObjectIdentifier() accountRoleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - accountRoleName := accountRoleId.Name() - accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() - schemaFullyQualifiedName := acc.TestClient().Ids.SchemaId().FullyQualifiedName() - taskFullyQualifiedName := sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName, taskName).FullyQualifiedName() - secondTaskFullyQualifiedName := sdk.NewSchemaObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName, secondTaskName).FullyQualifiedName() configVariables := config.Variables{ - "account_role_name": config.StringVariable(accountRoleName), + "account_role_name": config.StringVariable(accountRoleId.Name()), "database": config.StringVariable(acc.TestDatabaseName), "schema": config.StringVariable(acc.TestSchemaName), - "task": config.StringVariable(taskName), - "second_task": 
config.StringVariable(secondTaskName), + "task": config.StringVariable(taskId.Name()), + "second_task": config.StringVariable(secondTaskId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -1095,14 +1078,14 @@ func TestAcc_GrantOwnership_OnAllTasks(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantOwnership/OnAllTasks"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s|REVOKE|OnAll|TASKS|InSchema|%s", accountRoleFullyQualifiedName, schemaFullyQualifiedName)), + resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleId.Name()), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s|REVOKE|OnAll|TASKS|InSchema|%s", accountRoleId.FullyQualifiedName(), acc.TestClient().Ids.SchemaId().FullyQualifiedName())), checkResourceOwnershipIsGranted(&sdk.ShowGrantOptions{ To: &sdk.ShowGrantsTo{ Role: accountRoleId, }, }, - sdk.ObjectTypeTask, accountRoleName, taskFullyQualifiedName, secondTaskFullyQualifiedName), + sdk.ObjectTypeTask, accountRoleId.Name(), taskId.FullyQualifiedName(), secondTaskId.FullyQualifiedName()), ), }, }, @@ -1110,19 +1093,19 @@ func TestAcc_GrantOwnership_OnAllTasks(t *testing.T) { } func TestAcc_GrantOwnership_OnDatabaseRole(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseName := databaseId.Name() - databaseRoleName := acc.TestClient().Ids.Alpha() - databaseRoleFullyQualifiedName := sdk.NewDatabaseObjectIdentifier(databaseName, databaseRoleName).FullyQualifiedName() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + databaseRoleFullyQualifiedName := databaseRoleId.FullyQualifiedName() accountRoleId := 
acc.TestClient().Ids.RandomAccountObjectIdentifier() - accountRoleName := accountRoleId.Name() accountRoleFullyQualifiedName := accountRoleId.FullyQualifiedName() configVariables := config.Variables{ - "account_role_name": config.StringVariable(accountRoleName), + "account_role_name": config.StringVariable(accountRoleId.Name()), "database_name": config.StringVariable(databaseName), - "database_role_name": config.StringVariable(databaseRoleName), + "database_role_name": config.StringVariable(databaseRoleId.Name()), } resourceName := "snowflake_grant_ownership.test" @@ -1137,7 +1120,7 @@ func TestAcc_GrantOwnership_OnDatabaseRole(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantOwnership/OnObject_DatabaseRole_ToAccountRole"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleName), + resource.TestCheckResourceAttr(resourceName, "account_role_name", accountRoleId.Name()), resource.TestCheckResourceAttr(resourceName, "on.0.object_type", "DATABASE ROLE"), resource.TestCheckResourceAttr(resourceName, "on.0.object_name", databaseRoleFullyQualifiedName), resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("ToAccountRole|%s||OnObject|DATABASE ROLE|%s", accountRoleFullyQualifiedName, databaseRoleFullyQualifiedName)), @@ -1145,10 +1128,10 @@ func TestAcc_GrantOwnership_OnDatabaseRole(t *testing.T) { On: &sdk.ShowGrantsOn{ Object: &sdk.Object{ ObjectType: sdk.ObjectTypeDatabaseRole, - Name: sdk.NewDatabaseObjectIdentifierFromFullyQualifiedName(databaseRoleFullyQualifiedName), + Name: databaseRoleId, }, }, - }, sdk.ObjectTypeRole, accountRoleName, databaseRoleFullyQualifiedName), + }, sdk.ObjectTypeRole, accountRoleId.Name(), databaseRoleFullyQualifiedName), ), }, }, diff --git a/pkg/resources/grant_privileges_to_account_role_acceptance_test.go b/pkg/resources/grant_privileges_to_account_role_acceptance_test.go index 02f7916afa..1d98b760e8 
100644 --- a/pkg/resources/grant_privileges_to_account_role_acceptance_test.go +++ b/pkg/resources/grant_privileges_to_account_role_acceptance_test.go @@ -224,7 +224,7 @@ func TestAcc_GrantPrivilegesToAccountRole_OnSchema(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_account_role.test" - schemaName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName).FullyQualifiedName() + schemaId := acc.TestClient().Ids.SchemaId() resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -247,9 +247,9 @@ func TestAcc_GrantPrivilegesToAccountRole_OnSchema(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaPrivilegeCreateTable)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "on_schema.#", "1"), - resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", schemaName), + resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", schemaId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnSchema|%s", roleFullyQualifiedName, schemaName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnSchema|%s", roleFullyQualifiedName, schemaId.FullyQualifiedName())), ), }, { @@ -820,7 +820,8 @@ func TestAcc_GrantPrivilegesToAccountRole_UpdatePrivileges(t *testing.T) { func TestAcc_GrantPrivilegesToAccountRole_UpdatePrivileges_SnowflakeChecked(t *testing.T) { roleId := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := "test_database_role_schema_name" + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := func(allPrivileges bool, privileges []string, schemaName string) 
config.Variables { configVariables := config.Variables{ "name": config.StringVariable(roleId.FullyQualifiedName()), @@ -895,7 +896,7 @@ func TestAcc_GrantPrivilegesToAccountRole_UpdatePrivileges_SnowflakeChecked(t *t ConfigVariables: configVariables(false, []string{ sdk.SchemaPrivilegeCreateTask.String(), sdk.SchemaPrivilegeCreateExternalTable.String(), - }, schemaName), + }, schemaId.Name()), Check: queriedAccountRolePrivilegesEqualTo( roleId, sdk.SchemaPrivilegeCreateTask.String(), @@ -1201,7 +1202,7 @@ func TestAcc_GrantPrivilegesToAccountRole_MLPrivileges(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_account_role.test" - schemaName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName).FullyQualifiedName() + schemaId := acc.TestClient().Ids.SchemaId() resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, @@ -1224,9 +1225,9 @@ func TestAcc_GrantPrivilegesToAccountRole_MLPrivileges(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaPrivilegeCreateSnowflakeMlAnomalyDetection)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaPrivilegeCreateSnowflakeMlForecast)), resource.TestCheckResourceAttr(resourceName, "on_schema.#", "1"), - resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", schemaName), + resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", schemaId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SNOWFLAKE.ML.ANOMALY_DETECTION,CREATE SNOWFLAKE.ML.FORECAST|OnSchema|OnSchema|%s", roleFullyQualifiedName, schemaName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SNOWFLAKE.ML.ANOMALY_DETECTION,CREATE SNOWFLAKE.ML.FORECAST|OnSchema|OnSchema|%s", roleFullyQualifiedName, 
schemaId.FullyQualifiedName())), ), ConfigPlanChecks: resource.ConfigPlanChecks{ PostApplyPostRefresh: []plancheck.PlanCheck{ diff --git a/pkg/resources/grant_privileges_to_database_role_acceptance_test.go b/pkg/resources/grant_privileges_to_database_role_acceptance_test.go index 6ca99ccec4..9a9ed72871 100644 --- a/pkg/resources/grant_privileges_to_database_role_acceptance_test.go +++ b/pkg/resources/grant_privileges_to_database_role_acceptance_test.go @@ -17,9 +17,10 @@ import ( ) func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.AccountObjectPrivilegeCreateSchema)), config.StringVariable(string(sdk.AccountObjectPrivilegeModify)), @@ -30,9 +31,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -43,20 +41,20 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnDatabase"), ConfigVariables: configVariables, Check: 
resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "3"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.AccountObjectPrivilegeCreateSchema)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.AccountObjectPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "privileges.2", string(sdk.AccountObjectPrivilegeUsage)), - resource.TestCheckResourceAttr(resourceName, "on_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "true"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|true|false|CREATE SCHEMA,MODIFY,USAGE|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|true|false|CREATE SCHEMA,MODIFY,USAGE|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -71,9 +69,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase(t *testing.T) { } func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase_PrivilegesReversed(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.AccountObjectPrivilegeUsage)), config.StringVariable(string(sdk.AccountObjectPrivilegeModify)), @@ -84,9 +83,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase_PrivilegesReversed(t *test } resourceName := 
"snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -97,20 +93,20 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase_PrivilegesReversed(t *test Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnDatabase"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "3"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.AccountObjectPrivilegeCreateSchema)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.AccountObjectPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "privileges.2", string(sdk.AccountObjectPrivilegeUsage)), - resource.TestCheckResourceAttr(resourceName, "on_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "true"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|true|false|CREATE SCHEMA,MODIFY,USAGE|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", 
fmt.Sprintf("%s|true|false|CREATE SCHEMA,MODIFY,USAGE|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -125,9 +121,11 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnDatabase_PrivilegesReversed(t *test } func TestAcc_GrantPrivilegesToDatabaseRole_OnSchema(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + schemaName := acc.TestClient().Ids.SchemaId().FullyQualifiedName() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaPrivilegeCreateTable)), config.StringVariable(string(sdk.SchemaPrivilegeModify)), @@ -138,9 +136,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchema(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - schemaName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName).FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -151,20 +146,20 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchema(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnSchema"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + 
resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaPrivilegeCreateTable)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "on_schema.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", schemaName), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnSchema|%s", databaseRoleName, schemaName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnSchema|%s", databaseRoleId.FullyQualifiedName(), schemaName)), ), }, { @@ -197,9 +192,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchema_ExactlyOneOf(t *testing.T) { } func TestAcc_GrantPrivilegesToDatabaseRole_OnAllSchemasInDatabase(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaPrivilegeCreateTable)), config.StringVariable(string(sdk.SchemaPrivilegeModify)), @@ -209,9 +205,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnAllSchemasInDatabase(t *testing.T) } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { 
acc.TestAccPreCheck(t) }, @@ -222,20 +215,20 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnAllSchemasInDatabase(t *testing.T) Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnAllSchemasInDatabase"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaPrivilegeCreateTable)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "on_schema.#", "1"), - resource.TestCheckResourceAttr(resourceName, "on_schema.0.all_schemas_in_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_schema.0.all_schemas_in_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnAllSchemasInDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnAllSchemasInDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -250,9 +243,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnAllSchemasInDatabase(t *testing.T) } func 
TestAcc_GrantPrivilegesToDatabaseRole_OnFutureSchemasInDatabase(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaPrivilegeCreateTable)), config.StringVariable(string(sdk.SchemaPrivilegeModify)), @@ -262,9 +256,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnFutureSchemasInDatabase(t *testing. } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -275,20 +266,20 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnFutureSchemasInDatabase(t *testing. 
Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnFutureSchemasInDatabase"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaPrivilegeCreateTable)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "on_schema.#", "1"), - resource.TestCheckResourceAttr(resourceName, "on_schema.0.future_schemas_in_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_schema.0.future_schemas_in_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnFutureSchemasInDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE TABLE,MODIFY|OnSchema|OnFutureSchemasInDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -396,9 +387,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnObject_OwnershipPriv } func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_InDatabase(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := 
acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaObjectPrivilegeInsert)), config.StringVariable(string(sdk.SchemaObjectPrivilegeUpdate)), @@ -409,9 +401,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_InDatabase(t *te } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -422,22 +411,22 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_InDatabase(t *te Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnSchemaObject_OnAll_InDatabase"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaObjectPrivilegeInsert)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaObjectPrivilegeUpdate)), resource.TestCheckResourceAttr(resourceName, "on_schema_object.#", "1"), 
resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.object_type_plural", string(sdk.PluralObjectTypeTables)), - resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.in_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.in_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|INSERT,UPDATE|OnSchemaObject|OnAll|TABLES|InDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|INSERT,UPDATE|OnSchemaObject|OnAll|TABLES|InDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -452,9 +441,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_InDatabase(t *te } func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAllPipes(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaObjectPrivilegeMonitor)), ), @@ -463,9 +453,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAllPipes(t *testing. 
} resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -476,21 +463,21 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAllPipes(t *testing. Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnAllPipes"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaObjectPrivilegeMonitor)), resource.TestCheckResourceAttr(resourceName, "on_schema_object.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.object_type_plural", string(sdk.PluralObjectTypePipes)), - resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.in_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.in_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", 
fmt.Sprintf("%s|false|false|MONITOR|OnSchemaObject|OnAll|PIPES|InDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|MONITOR|OnSchemaObject|OnAll|PIPES|InDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -505,9 +492,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAllPipes(t *testing. } func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnFuture_InDatabase(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaObjectPrivilegeInsert)), config.StringVariable(string(sdk.SchemaObjectPrivilegeUpdate)), @@ -518,9 +506,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnFuture_InDatabase(t } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -531,22 +516,22 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnFuture_InDatabase(t Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnSchemaObject_OnFuture_InDatabase"), ConfigVariables: configVariables, Check: 
resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaObjectPrivilegeInsert)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaObjectPrivilegeUpdate)), resource.TestCheckResourceAttr(resourceName, "on_schema_object.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.future.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.future.0.object_type_plural", string(sdk.PluralObjectTypeTables)), - resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.future.0.in_database", databaseName), + resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.future.0.in_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|INSERT,UPDATE|OnSchemaObject|OnFuture|TABLES|InDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|INSERT,UPDATE|OnSchemaObject|OnFuture|TABLES|InDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -596,9 +581,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnFuture_Streamlits_In } func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_Streamlits_InDatabase(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), 
"privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaObjectPrivilegeUsage)), ), @@ -608,9 +594,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_Streamlits_InDat } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -621,21 +604,21 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_Streamlits_InDat Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnSchemaObject_OnAll_InDatabase"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaObjectPrivilegeUsage)), resource.TestCheckResourceAttr(resourceName, "on_schema_object.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.#", "1"), resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.object_type_plural", string(sdk.PluralObjectTypeStreamlits)), - resource.TestCheckResourceAttr(resourceName, "on_schema_object.0.all.0.in_database", databaseName), + resource.TestCheckResourceAttr(resourceName, 
"on_schema_object.0.all.0.in_database", acc.TestClient().Ids.DatabaseId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|USAGE|OnSchemaObject|OnAll|STREAMLITS|InDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|USAGE|OnSchemaObject|OnAll|STREAMLITS|InDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, }, @@ -643,10 +626,11 @@ func TestAcc_GrantPrivilegesToDatabaseRole_OnSchemaObject_OnAll_Streamlits_InDat } func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := func(allPrivileges bool, privileges []sdk.AccountObjectPrivilege) config.Variables { configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "database": config.StringVariable(acc.TestDatabaseName), } if allPrivileges { @@ -663,9 +647,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -676,7 +657,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := 
acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/UpdatePrivileges/privileges"), @@ -689,7 +670,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.AccountObjectPrivilegeCreateSchema)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.AccountObjectPrivilegeModify)), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SCHEMA,MODIFY|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SCHEMA,MODIFY|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -705,7 +686,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.AccountObjectPrivilegeCreateSchema)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.AccountObjectPrivilegeMonitor)), resource.TestCheckResourceAttr(resourceName, "privileges.2", string(sdk.AccountObjectPrivilegeUsage)), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SCHEMA,USAGE,MONITOR|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SCHEMA,USAGE,MONITOR|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -714,7 +695,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "all_privileges", "true"), 
resource.TestCheckResourceAttr(resourceName, "privileges.#", "0"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -728,7 +709,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.AccountObjectPrivilegeModify)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.AccountObjectPrivilegeMonitor)), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|MODIFY,MONITOR|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|MODIFY,MONITOR|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, }, @@ -736,11 +717,12 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges(t *testing.T) { } func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t *testing.T) { - name := acc.TestClient().Ids.Alpha() - schemaName := "test_database_role_schema_name" + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := func(allPrivileges bool, privileges []string, schemaName string) config.Variables { configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "database": config.StringVariable(acc.TestDatabaseName), } if allPrivileges { @@ -759,8 +741,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * return 
configVariables } - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name) - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -771,7 +751,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/UpdatePrivileges_SnowflakeChecked/privileges"), @@ -780,7 +760,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * sdk.AccountObjectPrivilegeModify.String(), }, ""), Check: queriedPrivilegesToDatabaseRoleEqualTo( - databaseRoleName, + databaseRoleId, sdk.AccountObjectPrivilegeCreateSchema.String(), sdk.AccountObjectPrivilegeModify.String(), ), @@ -789,7 +769,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/UpdatePrivileges_SnowflakeChecked/all_privileges"), ConfigVariables: configVariables(true, []string{}, ""), Check: queriedPrivilegesToDatabaseRoleContainAtLeast( - databaseRoleName, + databaseRoleId, sdk.AccountObjectPrivilegeCreateDatabaseRole.String(), sdk.AccountObjectPrivilegeCreateSchema.String(), sdk.AccountObjectPrivilegeModify.String(), @@ -804,7 +784,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * sdk.AccountObjectPrivilegeMonitor.String(), }, ""), Check: queriedPrivilegesToDatabaseRoleEqualTo( - databaseRoleName, + databaseRoleId, sdk.AccountObjectPrivilegeModify.String(), sdk.AccountObjectPrivilegeMonitor.String(), ), @@ -814,9 +794,9 @@ func 
TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * ConfigVariables: configVariables(false, []string{ sdk.SchemaPrivilegeCreateTask.String(), sdk.SchemaPrivilegeCreateExternalTable.String(), - }, schemaName), + }, schemaId.Name()), Check: queriedPrivilegesToDatabaseRoleEqualTo( - databaseRoleName, + databaseRoleId, sdk.SchemaPrivilegeCreateTask.String(), sdk.SchemaPrivilegeCreateExternalTable.String(), ), @@ -826,10 +806,11 @@ func TestAcc_GrantPrivilegesToDatabaseRole_UpdatePrivileges_SnowflakeChecked(t * } func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := func(alwaysApply bool) config.Variables { return config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "all_privileges": config.BoolVariable(true), "database": config.StringVariable(acc.TestDatabaseName), "always_apply": config.BoolVariable(alwaysApply), @@ -837,9 +818,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -850,7 +828,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/AlwaysApply"), @@ -862,7 +840,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "always_apply", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, { @@ -870,7 +848,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { ConfigVariables: configVariables(true), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "always_apply", "true"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), ExpectNonEmptyPlan: true, }, @@ -884,7 +862,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "always_apply", "true"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), ExpectNonEmptyPlan: true, }, @@ -898,7 +876,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "always_apply", "true"), - 
resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), ExpectNonEmptyPlan: true, }, @@ -912,7 +890,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { }, Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr(resourceName, "always_apply", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, }, @@ -921,9 +899,10 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply(t *testing.T) { // proved https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2651 func TestAcc_GrantPrivilegesToDatabaseRole_MLPrivileges(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "privileges": config.ListVariable( config.StringVariable(string(sdk.SchemaPrivilegeCreateSnowflakeMlAnomalyDetection)), config.StringVariable(string(sdk.SchemaPrivilegeCreateSnowflakeMlForecast)), @@ -934,9 +913,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_MLPrivileges(t *testing.T) { } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - schemaName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, acc.TestSchemaName).FullyQualifiedName() - resource.Test(t, 
resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -947,20 +923,20 @@ func TestAcc_GrantPrivilegesToDatabaseRole_MLPrivileges(t *testing.T) { Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/OnSchema"), ConfigVariables: configVariables, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleName), + resource.TestCheckResourceAttr(resourceName, "database_role_name", databaseRoleId.FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "2"), resource.TestCheckResourceAttr(resourceName, "privileges.0", string(sdk.SchemaPrivilegeCreateSnowflakeMlAnomalyDetection)), resource.TestCheckResourceAttr(resourceName, "privileges.1", string(sdk.SchemaPrivilegeCreateSnowflakeMlForecast)), resource.TestCheckResourceAttr(resourceName, "on_schema.#", "1"), - resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", schemaName), + resource.TestCheckResourceAttr(resourceName, "on_schema.0.schema_name", acc.TestClient().Ids.SchemaId().FullyQualifiedName()), resource.TestCheckResourceAttr(resourceName, "with_grant_option", "false"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SNOWFLAKE.ML.ANOMALY_DETECTION,CREATE SNOWFLAKE.ML.FORECAST|OnSchema|OnSchema|%s", databaseRoleName, schemaName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|false|CREATE SNOWFLAKE.ML.ANOMALY_DETECTION,CREATE SNOWFLAKE.ML.FORECAST|OnSchema|OnSchema|%s", databaseRoleId.FullyQualifiedName(), 
acc.TestClient().Ids.SchemaId().FullyQualifiedName())), ), ConfigPlanChecks: resource.ConfigPlanChecks{ PostApplyPostRefresh: []plancheck.PlanCheck{ @@ -974,7 +950,8 @@ func TestAcc_GrantPrivilegesToDatabaseRole_MLPrivileges(t *testing.T) { // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2459 is fixed func TestAcc_GrantPrivilegesToDatabaseRole_ChangeWithGrantOptionsOutsideOfTerraform_WithGrantOptions(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + name := databaseRoleId.Name() configVariables := config.Variables{ "name": config.StringVariable(name), "privileges": config.ListVariable( @@ -1008,7 +985,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_ChangeWithGrantOptionsOutsideOfTerraf { PreConfig: func() { revokeAndGrantPrivilegesOnDatabaseToDatabaseRole( - t, sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name), + t, databaseRoleId, acc.TestClient().Ids.DatabaseId(), []sdk.AccountObjectPrivilege{sdk.AccountObjectPrivilegeCreateSchema}, false, @@ -1028,7 +1005,8 @@ func TestAcc_GrantPrivilegesToDatabaseRole_ChangeWithGrantOptionsOutsideOfTerraf // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2459 is fixed func TestAcc_GrantPrivilegesToDatabaseRole_ChangeWithGrantOptionsOutsideOfTerraform_WithoutGrantOptions(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + name := databaseRoleId.Name() configVariables := config.Variables{ "name": config.StringVariable(name), "privileges": config.ListVariable( @@ -1062,7 +1040,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_ChangeWithGrantOptionsOutsideOfTerraf { PreConfig: func() { revokeAndGrantPrivilegesOnDatabaseToDatabaseRole( - t, sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name), + t, databaseRoleId, acc.TestClient().Ids.DatabaseId(), 
[]sdk.AccountObjectPrivilege{sdk.AccountObjectPrivilegeCreateSchema}, true, @@ -1170,10 +1148,11 @@ func TestAcc_GrantPrivilegesToDatabaseRole_RemoveDatabaseRoleOutsideTerraform(t // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2689 is fixed func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply_SetAfterCreate(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + databaseRoleId := acc.TestClient().Ids.RandomDatabaseObjectIdentifier() + configVariables := func(alwaysApply bool) config.Variables { return config.Variables{ - "name": config.StringVariable(name), + "name": config.StringVariable(databaseRoleId.Name()), "all_privileges": config.BoolVariable(true), "database": config.StringVariable(acc.TestDatabaseName), "always_apply": config.BoolVariable(alwaysApply), @@ -1181,9 +1160,6 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply_SetAfterCreate(t *testing } resourceName := "snowflake_grant_privileges_to_database_role.test" - databaseRoleName := sdk.NewDatabaseObjectIdentifier(acc.TestDatabaseName, name).FullyQualifiedName() - databaseName := acc.TestClient().Ids.DatabaseId().FullyQualifiedName() - resource.Test(t, resource.TestCase{ ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, PreCheck: func() { acc.TestAccPreCheck(t) }, @@ -1194,7 +1170,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply_SetAfterCreate(t *testing Steps: []resource.TestStep{ { PreConfig: func() { - _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, name) + _, databaseRoleCleanup := acc.TestClient().DatabaseRole.CreateDatabaseRoleWithName(t, databaseRoleId.Name()) t.Cleanup(databaseRoleCleanup) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToDatabaseRole/AlwaysApply"), @@ -1202,7 +1178,7 @@ func TestAcc_GrantPrivilegesToDatabaseRole_AlwaysApply_SetAfterCreate(t *testing ExpectNonEmptyPlan: true, Check: resource.ComposeTestCheckFunc( 
resource.TestCheckResourceAttr(resourceName, "always_apply", "true"), - resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleName, databaseName)), + resource.TestCheckResourceAttr(resourceName, "id", fmt.Sprintf("%s|false|true|ALL|OnDatabase|%s", databaseRoleId.FullyQualifiedName(), acc.TestClient().Ids.DatabaseId().FullyQualifiedName())), ), }, }, diff --git a/pkg/resources/grant_privileges_to_share_acceptance_test.go b/pkg/resources/grant_privileges_to_share_acceptance_test.go index 5ae2e86528..cefa39629d 100644 --- a/pkg/resources/grant_privileges_to_share_acceptance_test.go +++ b/pkg/resources/grant_privileges_to_share_acceptance_test.go @@ -64,15 +64,15 @@ func TestAcc_GrantPrivilegesToShare_OnDatabase(t *testing.T) { } func TestAcc_GrantPrivilegesToShare_OnSchema(t *testing.T) { - databaseName := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := sdk.NewDatabaseObjectIdentifier(databaseName.Name(), acc.TestClient().Ids.Alpha()) - shareName := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + shareId := acc.TestClient().Ids.RandomAccountObjectIdentifier() configVariables := func(withGrant bool) config.Variables { variables := config.Variables{ - "to_share": config.StringVariable(shareName.Name()), - "database": config.StringVariable(databaseName.Name()), - "schema": config.StringVariable(schemaName.Name()), + "to_share": config.StringVariable(shareId.Name()), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), } if withGrant { variables["privileges"] = config.ListVariable( @@ -94,10 +94,10 @@ func TestAcc_GrantPrivilegesToShare_OnSchema(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToShare/OnSchema"), ConfigVariables: 
configVariables(true), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "to_share", shareName.Name()), + resource.TestCheckResourceAttr(resourceName, "to_share", shareId.Name()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", sdk.ObjectPrivilegeUsage.String()), - resource.TestCheckResourceAttr(resourceName, "on_schema", schemaName.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "on_schema", schemaId.FullyQualifiedName()), ), }, { @@ -119,17 +119,17 @@ func TestAcc_GrantPrivilegesToShare_OnSchema(t *testing.T) { // TODO(SNOW-1021686): Add on_function test func TestAcc_GrantPrivilegesToShare_OnTable(t *testing.T) { - databaseName := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := sdk.NewDatabaseObjectIdentifier(databaseName.Name(), acc.TestClient().Ids.Alpha()) - tableName := sdk.NewSchemaObjectIdentifier(databaseName.Name(), schemaName.Name(), acc.TestClient().Ids.Alpha()) - shareName := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + shareId := acc.TestClient().Ids.RandomAccountObjectIdentifier() configVariables := func(withGrant bool) config.Variables { variables := config.Variables{ - "to_share": config.StringVariable(shareName.Name()), - "database": config.StringVariable(databaseName.Name()), - "schema": config.StringVariable(schemaName.Name()), - "on_table": config.StringVariable(tableName.Name()), + "to_share": config.StringVariable(shareId.Name()), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), + "on_table": config.StringVariable(tableId.Name()), } if withGrant { variables["privileges"] = 
config.ListVariable( @@ -151,10 +151,10 @@ func TestAcc_GrantPrivilegesToShare_OnTable(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToShare/OnTable"), ConfigVariables: configVariables(true), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "to_share", shareName.Name()), + resource.TestCheckResourceAttr(resourceName, "to_share", shareId.Name()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", sdk.ObjectPrivilegeSelect.String()), - resource.TestCheckResourceAttr(resourceName, "on_table", tableName.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "on_table", tableId.FullyQualifiedName()), ), }, { @@ -174,15 +174,15 @@ func TestAcc_GrantPrivilegesToShare_OnTable(t *testing.T) { } func TestAcc_GrantPrivilegesToShare_OnAllTablesInSchema(t *testing.T) { - databaseName := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := sdk.NewDatabaseObjectIdentifier(databaseName.Name(), acc.TestClient().Ids.Alpha()) - shareName := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + shareId := acc.TestClient().Ids.RandomAccountObjectIdentifier() configVariables := func(withGrant bool) config.Variables { variables := config.Variables{ - "to_share": config.StringVariable(shareName.Name()), - "database": config.StringVariable(databaseName.Name()), - "schema": config.StringVariable(schemaName.Name()), + "to_share": config.StringVariable(shareId.Name()), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), } if withGrant { variables["privileges"] = config.ListVariable( @@ -204,10 +204,10 @@ func TestAcc_GrantPrivilegesToShare_OnAllTablesInSchema(t *testing.T) { ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToShare/OnAllTablesInSchema"), ConfigVariables: configVariables(true), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "to_share", shareName.Name()), + resource.TestCheckResourceAttr(resourceName, "to_share", shareId.Name()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", sdk.ObjectPrivilegeSelect.String()), - resource.TestCheckResourceAttr(resourceName, "on_all_tables_in_schema", schemaName.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "on_all_tables_in_schema", schemaId.FullyQualifiedName()), ), }, { @@ -227,19 +227,19 @@ func TestAcc_GrantPrivilegesToShare_OnAllTablesInSchema(t *testing.T) { } func TestAcc_GrantPrivilegesToShare_OnView(t *testing.T) { - databaseName := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := sdk.NewDatabaseObjectIdentifier(databaseName.Name(), acc.TestClient().Ids.Alpha()) - tableName := sdk.NewSchemaObjectIdentifier(databaseName.Name(), schemaName.Name(), acc.TestClient().Ids.Alpha()) - viewName := sdk.NewSchemaObjectIdentifier(databaseName.Name(), schemaName.Name(), acc.TestClient().Ids.Alpha()) - shareName := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + viewId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + shareId := acc.TestClient().Ids.RandomAccountObjectIdentifier() configVariables := func(withGrant bool) config.Variables { variables := config.Variables{ - "to_share": config.StringVariable(shareName.Name()), - "database": config.StringVariable(databaseName.Name()), - "schema": config.StringVariable(schemaName.Name()), - "on_table": 
config.StringVariable(tableName.Name()), - "on_view": config.StringVariable(viewName.Name()), + "to_share": config.StringVariable(shareId.Name()), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), + "on_table": config.StringVariable(tableId.Name()), + "on_view": config.StringVariable(viewId.Name()), } if withGrant { variables["privileges"] = config.ListVariable( @@ -261,10 +261,10 @@ func TestAcc_GrantPrivilegesToShare_OnView(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToShare/OnView"), ConfigVariables: configVariables(true), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "to_share", shareName.Name()), + resource.TestCheckResourceAttr(resourceName, "to_share", shareId.Name()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", sdk.ObjectPrivilegeSelect.String()), - resource.TestCheckResourceAttr(resourceName, "on_view", viewName.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "on_view", viewId.FullyQualifiedName()), ), }, { @@ -284,17 +284,17 @@ func TestAcc_GrantPrivilegesToShare_OnView(t *testing.T) { } func TestAcc_GrantPrivilegesToShare_OnTag(t *testing.T) { - databaseName := acc.TestClient().Ids.RandomAccountObjectIdentifier() - schemaName := sdk.NewDatabaseObjectIdentifier(databaseName.Name(), acc.TestClient().Ids.Alpha()) - tagName := sdk.NewSchemaObjectIdentifier(databaseName.Name(), schemaName.Name(), acc.TestClient().Ids.Alpha()) - shareName := acc.TestClient().Ids.RandomAccountObjectIdentifier() + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tagId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) + shareId := acc.TestClient().Ids.RandomAccountObjectIdentifier() configVariables := 
func(withGrant bool) config.Variables { variables := config.Variables{ - "to_share": config.StringVariable(shareName.Name()), - "database": config.StringVariable(databaseName.Name()), - "schema": config.StringVariable(schemaName.Name()), - "on_tag": config.StringVariable(tagName.Name()), + "to_share": config.StringVariable(shareId.Name()), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), + "on_tag": config.StringVariable(tagId.Name()), } if withGrant { variables["privileges"] = config.ListVariable( @@ -316,10 +316,10 @@ func TestAcc_GrantPrivilegesToShare_OnTag(t *testing.T) { ConfigDirectory: acc.ConfigurationDirectory("TestAcc_GrantPrivilegesToShare/OnTag"), ConfigVariables: configVariables(true), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "to_share", shareName.Name()), + resource.TestCheckResourceAttr(resourceName, "to_share", shareId.Name()), resource.TestCheckResourceAttr(resourceName, "privileges.#", "1"), resource.TestCheckResourceAttr(resourceName, "privileges.0", sdk.ObjectPrivilegeRead.String()), - resource.TestCheckResourceAttr(resourceName, "on_tag", tagName.FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "on_tag", tagId.FullyQualifiedName()), ), }, { diff --git a/pkg/resources/helpers.go b/pkg/resources/helpers.go index d9b30ad651..9400cdb5aa 100644 --- a/pkg/resources/helpers.go +++ b/pkg/resources/helpers.go @@ -130,6 +130,38 @@ func GetPropertyAsPointer[T any](d *schema.ResourceData, property string) *T { return &typedValue } +// GetPropertyOfFirstNestedObjectByKey should be used for single objects defined in the Terraform schema as +// schema.TypeList with MaxItems set to one and inner schema with single value. To easily retrieve +// the inner value, you can specify the top-level property with propertyKey and the nested value with nestedValueKey. 
+func GetPropertyOfFirstNestedObjectByKey[T any](d *schema.ResourceData, propertyKey string, nestedValueKey string) (*T, error) { + value, ok := d.GetOk(propertyKey) + if !ok { + return nil, fmt.Errorf("nested property %s not found", propertyKey) + } + + typedValue, ok := value.([]any) + if !ok || len(typedValue) != 1 { + return nil, fmt.Errorf("nested property %s is not an array or has incorrect number of values: %d, expected: 1", propertyKey, len(typedValue)) + } + + typedNestedMap, ok := typedValue[0].(map[string]any) + if !ok { + return nil, fmt.Errorf("nested property %s is not of type map[string]any, got: %T", propertyKey, typedValue[0]) + } + + _, ok = typedNestedMap[nestedValueKey] + if !ok { + return nil, fmt.Errorf("nested value key %s couldn't be found in the nested property map %s", nestedValueKey, propertyKey) + } + + typedNestedValue, ok := typedNestedMap[nestedValueKey].(T) + if !ok { + return nil, fmt.Errorf("nested property %s.%s is not of type %T, got: %T", propertyKey, nestedValueKey, *new(T), typedNestedMap[nestedValueKey]) + } + + return &typedNestedValue, nil +} + type tags []tag func (t tags) toSnowflakeTagValues() []snowflake.TagValue { @@ -220,3 +252,21 @@ func getTags(from interface{}) (to tags) { } return to } + +func nestedProperty(innerType schema.ValueType, fieldDescription string) *schema.Schema { + return &schema.Schema{ + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: innerType, + Required: true, + }, + }, + }, + Computed: true, + Optional: true, + Description: fieldDescription, + } +} diff --git a/pkg/resources/helpers_test.go b/pkg/resources/helpers_test.go index d3a1c611e6..cc9534ae7f 100644 --- a/pkg/resources/helpers_test.go +++ b/pkg/resources/helpers_test.go @@ -98,3 +98,133 @@ func queriedPrivilegesContainAtLeast(query func(client *sdk.Client, ctx context. 
return nil } } + +func TestGetFirstNestedObjectByKey(t *testing.T) { + d := schema.TestResourceDataRaw(t, map[string]*schema.Schema{ + "int_property": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeInt, + }, + }, + }, + }, + "string_property": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeString, + }, + }, + }, + }, + "list_property": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "multiple_list_properties": { + Type: schema.TypeList, + MaxItems: 1, + Elem: &schema.Resource{ + Schema: map[string]*schema.Schema{ + "value": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + }, + }, + }, + "list": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "empty_list": { + Type: schema.TypeList, + Elem: &schema.Schema{ + Type: schema.TypeString, + }, + }, + "not_property": { + Type: schema.TypeString, + }, + }, map[string]any{ + "int_property": []any{ + map[string]any{ + "value": 123, + }, + }, + "string_property": []any{ + map[string]any{ + "value": "some string", + }, + }, + "list": []any{"one"}, + "empty_list": []any{}, + "list_property": []any{ + map[string]any{ + "value": []any{"one", "two", "three"}, + }, + }, + "multiple_list_properties": []any{ + map[string]any{ + "value": []any{"one", "two", "three"}, + }, + map[string]any{ + "value": []any{"one", "two", "three"}, + }, + }, + "not_property": "not a property", + }) + + intValue, err := resources.GetPropertyOfFirstNestedObjectByKey[int](d, "int_property", "value") + assert.NoError(t, err) + assert.Equal(t, 123, *intValue) + + stringValue, err := 
resources.GetPropertyOfFirstNestedObjectByKey[string](d, "string_property", "value") + assert.NoError(t, err) + assert.Equal(t, "some string", *stringValue) + + listValue, err := resources.GetPropertyOfFirstNestedObjectByKey[[]any](d, "list_property", "value") + assert.NoError(t, err) + assert.Equal(t, []any{"one", "two", "three"}, *listValue) + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "non_existing_property_key", "non_existing_value_key") + assert.ErrorContains(t, err, "nested property non_existing_property_key not found") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "not_property", "value") + assert.ErrorContains(t, err, "nested property not_property is not an array or has incorrect number of values: 0, expected: 1") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "empty_list", "value") + assert.ErrorContains(t, err, "nested property empty_list not found") // Empty list is a default value, so it's treated as "not set" + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "multiple_list_properties", "value") + assert.ErrorContains(t, err, "nested property multiple_list_properties is not an array or has incorrect number of values: 2, expected: 1") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "list", "value") + assert.ErrorContains(t, err, "nested property list is not of type map[string]any, got: string") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[any](d, "int_property", "non_existing_value_key") + assert.ErrorContains(t, err, "nested value key non_existing_value_key couldn't be found in the nested property map int_property") + + _, err = resources.GetPropertyOfFirstNestedObjectByKey[int](d, "string_property", "value") + assert.ErrorContains(t, err, "nested property string_property.value is not of type int, got: string") +} diff --git a/pkg/resources/procedure_acceptance_test.go b/pkg/resources/procedure_acceptance_test.go index ed406cb8d6..4eeecabfd8 
100644 --- a/pkg/resources/procedure_acceptance_test.go +++ b/pkg/resources/procedure_acceptance_test.go @@ -258,7 +258,8 @@ resource "snowflake_procedure" "p" { } func TestAcc_Procedure_proveArgsPermanentDiff(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments([]sdk.DataType{sdk.DataTypeVARCHAR, sdk.DataTypeNumber}) + name := id.Name() resourceName := "snowflake_procedure.p" resource.Test(t, resource.TestCase{ @@ -277,7 +278,7 @@ func TestAcc_Procedure_proveArgsPermanentDiff(t *testing.T) { }, Config: sqlProcedureConfigArgsPermanentDiff(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR, sdk.DataTypeNumber}).FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), ), ExpectNonEmptyPlan: true, ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -291,7 +292,7 @@ func TestAcc_Procedure_proveArgsPermanentDiff(t *testing.T) { PostApplyPreRefresh: []plancheck.PlanCheck{plancheck.ExpectEmptyPlan()}, }, Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR, sdk.DataTypeNumber}).FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), ), }, }, @@ -300,7 +301,8 @@ func TestAcc_Procedure_proveArgsPermanentDiff(t *testing.T) { // TODO [SNOW-1348106]: diff suppression for the return type (the same with functions); finish this test func TestAcc_Procedure_returnTypePermanentDiff(t *testing.T) { - name := acc.TestClient().Ids.Alpha() + id := acc.TestClient().Ids.RandomSchemaObjectIdentifierWithArguments([]sdk.DataType{sdk.DataTypeVARCHAR}) + name := 
id.Name() resourceName := "snowflake_procedure.p" resource.Test(t, resource.TestCase{ @@ -319,7 +321,7 @@ func TestAcc_Procedure_returnTypePermanentDiff(t *testing.T) { }, Config: sqlProcedureConfigReturnTypePermanentDiff(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR}).FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), ), ExpectNonEmptyPlan: true, ConfigPlanChecks: resource.ConfigPlanChecks{ @@ -330,7 +332,7 @@ func TestAcc_Procedure_returnTypePermanentDiff(t *testing.T) { ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, Config: sqlProcedureConfigReturnTypePermanentDiff(acc.TestDatabaseName, acc.TestSchemaName, name), Check: resource.ComposeTestCheckFunc( - resource.TestCheckResourceAttr(resourceName, "id", sdk.NewSchemaObjectIdentifierWithArguments(acc.TestDatabaseName, acc.TestSchemaName, name, []sdk.DataType{sdk.DataTypeVARCHAR}).FullyQualifiedName()), + resource.TestCheckResourceAttr(resourceName, "id", id.FullyQualifiedName()), ), // should be empty after SNOW-1348106 ExpectNonEmptyPlan: true, diff --git a/pkg/resources/secondary_database.go b/pkg/resources/secondary_database.go new file mode 100644 index 0000000000..3f1b64e7a1 --- /dev/null +++ b/pkg/resources/secondary_database.go @@ -0,0 +1,444 @@ +package resources + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/customdiff" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var 
secondaryDatabaseSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the identifier for the database; must be unique for your account. As a best practice for [Database Replication and Failover](https://docs.snowflake.com/en/user-guide/db-replication-intro), it is recommended to give each secondary database the same name as its primary database. This practice supports referencing fully-qualified objects (i.e. '<db>.<schema>.<object>') by other objects in the same database, such as querying a fully-qualified table name in a view. If a secondary database has a different name from the primary database, then these object references would break in the secondary database.", + }, + "as_replica_of": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "A fully qualified path to a database to create a replica from. A fully qualified path follows the format of `\"<organization_name>\".\"<account_name>\".\"<database_name>\"`.", + }, + "is_transient": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: "Specifies the database as transient. Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", + }, + "data_retention_time_in_days": nestedProperty( + schema.TypeInt, + "Specifies the number of days for which Time Travel actions (CLONE and UNDROP) can be performed on the database, as well as specifying the default Time Travel retention time for all schemas created in the database. For more details, see [Understanding & Using Time Travel](https://docs.snowflake.com/en/user-guide/data-time-travel).", + ), + "max_data_extension_time_in_days": nestedProperty( + schema.TypeInt, + "Object parameter that specifies the maximum number of days for which Snowflake can extend the data retention period for tables in the database to prevent streams on the tables from becoming stale. 
For a detailed description of this parameter, see [MAX_DATA_EXTENSION_TIME_IN_DAYS](https://docs.snowflake.com/en/sql-reference/parameters.html#label-max-data-extension-time-in-days).", + ), + // TODO: Below parameters should be nested properties + "external_volume": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", + }, + "catalog": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", + }, + "replace_invalid_characters": { + Type: schema.TypeBool, + Optional: true, + Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", + }, + "default_ddl_collation": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", + }, + "storage_serialization_policy": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), + Description: fmt.Sprintf("Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "" + }, + }, + "log_level": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), + }, + "trace_level": { + Type: schema.TypeString, + Optional: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. 
For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the database.", + }, +} + +func SecondaryDatabase() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateSecondaryDatabase, + UpdateContext: UpdateSecondaryDatabase, + ReadContext: ReadSecondaryDatabase, + DeleteContext: DeleteSecondaryDatabase, + Description: "A secondary database creates a replica of an existing primary database (i.e. a secondary database). For more information about database replication, see [Introduction to database replication across multiple accounts](https://docs.snowflake.com/en/user-guide/db-replication-intro).", + + CustomizeDiff: customdiff.All( + NestedIntValueAccountObjectComputedIf("data_retention_time_in_days", sdk.AccountParameterDataRetentionTimeInDays), + NestedIntValueAccountObjectComputedIf("max_data_extension_time_in_days", sdk.AccountParameterMaxDataExtensionTimeInDays), + ), + + Schema: secondaryDatabaseSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + secondaryDatabaseId := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + primaryDatabaseId := sdk.NewExternalObjectIdentifierFromFullyQualifiedName(d.Get("as_replica_of").(string)) + + dataRetentionTimeInDays, _ := GetPropertyOfFirstNestedObjectByKey[int](d, "data_retention_time_in_days", "value") + maxDataExtensionTimeInDays, _ := GetPropertyOfFirstNestedObjectByKey[int](d, "max_data_extension_time_in_days", "value") + + var externalVolume *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("external_volume"); ok { + externalVolume = 
sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var catalog *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("catalog"); ok { + catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var storageSerializationPolicy *sdk.StorageSerializationPolicy + if v, ok := d.GetOk("storage_serialization_policy"); ok { + storageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(v.(string))) + } + + var logLevel *sdk.LogLevel + if v, ok := d.GetOk("log_level"); ok { + logLevel = sdk.Pointer(sdk.LogLevel(v.(string))) + } + + var traceLevel *sdk.TraceLevel + if v, ok := d.GetOk("trace_level"); ok { + traceLevel = sdk.Pointer(sdk.TraceLevel(v.(string))) + } + + err := client.Databases.CreateSecondary(ctx, secondaryDatabaseId, primaryDatabaseId, &sdk.CreateSecondaryDatabaseOptions{ + Transient: GetPropertyAsPointer[bool](d, "is_transient"), + DataRetentionTimeInDays: dataRetentionTimeInDays, + MaxDataExtensionTimeInDays: maxDataExtensionTimeInDays, + ExternalVolume: externalVolume, + Catalog: catalog, + ReplaceInvalidCharacters: GetPropertyAsPointer[bool](d, "replace_invalid_characters"), + DefaultDDLCollation: GetPropertyAsPointer[string](d, "default_ddl_collation"), + StorageSerializationPolicy: storageSerializationPolicy, + LogLevel: logLevel, + TraceLevel: traceLevel, + Comment: GetPropertyAsPointer[string](d, "comment"), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(helpers.EncodeSnowflakeID(secondaryDatabaseId)) + + return ReadSecondaryDatabase(ctx, d, meta) +} + +func UpdateSecondaryDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + secondaryDatabaseId := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + if d.HasChange("name") { + newId := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + err := client.Databases.Alter(ctx, secondaryDatabaseId, &sdk.AlterDatabaseOptions{ + NewName: &newId, + }) + if err != 
nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeSnowflakeID(newId)) + secondaryDatabaseId = newId + } + + var databaseSetRequest sdk.DatabaseSet + var databaseUnsetRequest sdk.DatabaseUnset + + if d.HasChange("data_retention_time_in_days") { + dataRetentionObject, ok := d.GetOk("data_retention_time_in_days") + if ok && len(dataRetentionObject.([]any)) > 0 { + dataRetentionTimeInDays, err := GetPropertyOfFirstNestedObjectByKey[int](d, "data_retention_time_in_days", "value") + if err != nil { + return diag.FromErr(err) + } + databaseSetRequest.DataRetentionTimeInDays = dataRetentionTimeInDays + } else { + databaseUnsetRequest.DataRetentionTimeInDays = sdk.Bool(true) + } + } + + if d.HasChange("max_data_extension_time_in_days") { + maxDataExtensionTimeInDaysObject, ok := d.GetOk("max_data_extension_time_in_days") + if ok && len(maxDataExtensionTimeInDaysObject.([]any)) > 0 { + maxDataExtensionTimeInDays, err := GetPropertyOfFirstNestedObjectByKey[int](d, "max_data_extension_time_in_days", "value") + if err != nil { + return diag.FromErr(err) + } + databaseSetRequest.MaxDataExtensionTimeInDays = maxDataExtensionTimeInDays + } else { + databaseUnsetRequest.MaxDataExtensionTimeInDays = sdk.Bool(true) + } + } + + if d.HasChange("external_volume") { + externalVolume := d.Get("external_volume").(string) + if len(externalVolume) > 0 { + databaseSetRequest.ExternalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(externalVolume)) + } else { + databaseUnsetRequest.ExternalVolume = sdk.Bool(true) + } + } + + if d.HasChange("catalog") { + catalog := d.Get("catalog").(string) + if len(catalog) > 0 { + databaseSetRequest.Catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(catalog)) + } else { + databaseUnsetRequest.Catalog = sdk.Bool(true) + } + } + + if d.HasChange("replace_invalid_characters") { + if d.Get("replace_invalid_characters").(bool) { + databaseSetRequest.ReplaceInvalidCharacters = sdk.Bool(true) + } else { + 
databaseUnsetRequest.ReplaceInvalidCharacters = sdk.Bool(true) + } + } + + if d.HasChange("default_ddl_collation") { + defaultDdlCollation := d.Get("default_ddl_collation").(string) + if len(defaultDdlCollation) > 0 { + databaseSetRequest.DefaultDDLCollation = &defaultDdlCollation + } else { + databaseUnsetRequest.DefaultDDLCollation = sdk.Bool(true) + } + } + + if d.HasChange("storage_serialization_policy") { + storageSerializationPolicy := d.Get("storage_serialization_policy").(string) + if len(storageSerializationPolicy) > 0 { + databaseSetRequest.StorageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(storageSerializationPolicy)) + } else { + databaseUnsetRequest.StorageSerializationPolicy = sdk.Bool(true) + } + } + + if d.HasChange("log_level") { + logLevel := d.Get("log_level").(string) + if len(logLevel) > 0 { + databaseSetRequest.LogLevel = sdk.Pointer(sdk.LogLevel(logLevel)) + } else { + databaseUnsetRequest.LogLevel = sdk.Bool(true) + } + } + + if d.HasChange("trace_level") { + traceLevel := d.Get("trace_level").(string) + if len(traceLevel) > 0 { + databaseSetRequest.TraceLevel = sdk.Pointer(sdk.TraceLevel(traceLevel)) + } else { + databaseUnsetRequest.TraceLevel = sdk.Bool(true) + } + } + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + databaseSetRequest.Comment = &comment + } else { + databaseUnsetRequest.Comment = sdk.Bool(true) + } + } + + if (databaseSetRequest != sdk.DatabaseSet{}) { + err := client.Databases.Alter(ctx, secondaryDatabaseId, &sdk.AlterDatabaseOptions{ + Set: &databaseSetRequest, + }) + if err != nil { + return diag.FromErr(err) + } + } + + if (databaseUnsetRequest != sdk.DatabaseUnset{}) { + err := client.Databases.Alter(ctx, secondaryDatabaseId, &sdk.AlterDatabaseOptions{ + Unset: &databaseUnsetRequest, + }) + if err != nil { + return diag.FromErr(err) + } + } + + return ReadSecondaryDatabase(ctx, d, meta) +} + +func ReadSecondaryDatabase(ctx context.Context, d 
*schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + secondaryDatabaseId := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + secondaryDatabase, err := client.Databases.ShowByID(ctx, secondaryDatabaseId) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query secondary database. Marking the resource as removed.", + Detail: fmt.Sprintf("DatabaseName: %s, Err: %s", secondaryDatabaseId.FullyQualifiedName(), err), + }, + } + } + return diag.FromErr(err) + } + + secondaryDatabaseParameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: secondaryDatabaseId, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + replicationDatabases, err := client.ReplicationFunctions.ShowReplicationDatabases(ctx, &sdk.ShowReplicationDatabasesOptions{ + Like: &sdk.Like{ + Pattern: sdk.String(secondaryDatabaseId.Name()), + }, + }) + if err != nil { + return diag.FromErr(err) + } + + var replicationPrimaryDatabase *sdk.ReplicationDatabase + for _, replicationDatabase := range replicationDatabases { + replicationDatabase := replicationDatabase + if !replicationDatabase.IsPrimary && + replicationDatabase.AccountLocator == client.GetAccountLocator() && + replicationDatabase.Name == secondaryDatabaseId.Name() { + replicationPrimaryDatabase = &replicationDatabase + } + } + if replicationPrimaryDatabase == nil { + return diag.FromErr(fmt.Errorf("could not find replication database for %s", secondaryDatabaseId.Name())) + } + + if err := d.Set("name", secondaryDatabase.Name); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("as_replica_of", sdk.NewExternalObjectIdentifierFromFullyQualifiedName(replicationPrimaryDatabase.PrimaryDatabase).FullyQualifiedName()); err != nil { + return diag.FromErr(err) + } + + if err := 
d.Set("is_transient", secondaryDatabase.Transient); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("data_retention_time_in_days", []any{map[string]any{"value": secondaryDatabase.RetentionTime}}); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("comment", secondaryDatabase.Comment); err != nil { + return diag.FromErr(err) + } + + for _, secondaryDatabaseParameter := range secondaryDatabaseParameters { + switch secondaryDatabaseParameter.Key { + case "MAX_DATA_EXTENSION_TIME_IN_DAYS": + maxDataExtensionTimeInDays, err := strconv.Atoi(secondaryDatabaseParameter.Value) + if err != nil { + return diag.FromErr(err) + } + if err := d.Set("max_data_extension_time_in_days", []any{map[string]any{"value": maxDataExtensionTimeInDays}}); err != nil { + return diag.FromErr(err) + } + case "EXTERNAL_VOLUME": + if err := d.Set("external_volume", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "CATALOG": + if err := d.Set("catalog", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "DEFAULT_DDL_COLLATION": + if err := d.Set("default_ddl_collation", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "LOG_LEVEL": + if err := d.Set("log_level", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "TRACE_LEVEL": + if err := d.Set("trace_level", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + case "REPLACE_INVALID_CHARACTERS": + boolValue, err := strconv.ParseBool(secondaryDatabaseParameter.Value) + if err != nil { + return diag.FromErr(err) + } + if err := d.Set("replace_invalid_characters", boolValue); err != nil { + return diag.FromErr(err) + } + case "STORAGE_SERIALIZATION_POLICY": + if err := d.Set("storage_serialization_policy", secondaryDatabaseParameter.Value); err != nil { + return diag.FromErr(err) + } + } + } + + return nil +} + +func DeleteSecondaryDatabase(ctx 
context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + err := client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ + IfExists: sdk.Bool(true), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/secondary_database_acceptance_test.go b/pkg/resources/secondary_database_acceptance_test.go new file mode 100644 index 0000000000..cf58d60348 --- /dev/null +++ b/pkg/resources/secondary_database_acceptance_test.go @@ -0,0 +1,404 @@ +package resources_test + +import ( + "context" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stretchr/testify/require" +) + +func TestAcc_CreateSecondaryDatabase_minimal(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + _, externalPrimaryId, primaryDatabaseCleanup := acc.SecondaryTestClient().Database.CreatePrimaryDatabase(t, []sdk.AccountIdentifier{ + acc.TestClient().Account.GetAccountIdentifier(t), + }) + t.Cleanup(primaryDatabaseCleanup) + + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newComment := random.Comment() + + accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + accountMaxDataExtensionTimeInDays, err := 
acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterMaxDataExtensionTimeInDays) + require.NoError(t, err) + + configVariables := func(id sdk.AccountObjectIdentifier, primaryDatabaseName sdk.ExternalObjectIdentifier, comment string) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "as_replica_of": config.StringVariable(primaryDatabaseName.FullyQualifiedName()), + "comment": config.StringVariable(comment), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(id, externalPrimaryId, comment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", accountMaxDataExtensionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", ""), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), + ), + }, + // Rename + comment update + { + ConfigVariables: configVariables(newId, externalPrimaryId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", newId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", accountMaxDataExtensionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", newComment), + ), + }, + // Import all values + { + ConfigVariables: 
configVariables(newId, externalPrimaryId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/basic"), + ResourceName: "snowflake_secondary_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSecondaryDatabase_complete(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + _, externalPrimaryId, primaryDatabaseCleanup := acc.SecondaryTestClient().Database.CreatePrimaryDatabase(t, []sdk.AccountIdentifier{ + sdk.NewAccountIdentifierFromAccountLocator(acc.Client(t).GetAccountLocator()), + }) + t.Cleanup(primaryDatabaseCleanup) + + externalVolumeId, externalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(externalVolumeCleanup) + + catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(catalogCleanup) + + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newComment := random.Comment() + + newExternalVolumeId, newExternalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(newExternalVolumeCleanup) + + newCatalogId, newCatalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(newCatalogCleanup) + + accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays) + require.NoError(t, err) + + accountMaxDataExtensionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterMaxDataExtensionTimeInDays) + require.NoError(t, err) + + configVariables := func( + id sdk.AccountObjectIdentifier, + primaryDatabaseName sdk.ExternalObjectIdentifier, + transient bool, + dataRetentionTimeInDays *int, + maxDataExtensionTimeInDays *int, + externalVolume string, + catalog string, + replaceInvalidCharacters bool, + defaultDdlCollation string, + storageSerializationPolicy sdk.StorageSerializationPolicy, + logLevel 
sdk.LogLevel, + traceLevel sdk.TraceLevel, + comment string, + ) config.Variables { + variables := config.Variables{ + "name": config.StringVariable(id.Name()), + "as_replica_of": config.StringVariable(primaryDatabaseName.FullyQualifiedName()), + "transient": config.BoolVariable(transient), + "external_volume": config.StringVariable(externalVolume), + "catalog": config.StringVariable(catalog), + "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), + "default_ddl_collation": config.StringVariable(defaultDdlCollation), + "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), + "log_level": config.StringVariable(string(logLevel)), + "trace_level": config.StringVariable(string(traceLevel)), + "comment": config.StringVariable(comment), + } + if dataRetentionTimeInDays != nil { + variables["data_retention_time_in_days"] = config.IntegerVariable(*dataRetentionTimeInDays) + } + if maxDataExtensionTimeInDays != nil { + variables["max_data_extension_time_in_days"] = config.IntegerVariable(*maxDataExtensionTimeInDays) + } + return variables + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SecondaryDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables( + id, + externalPrimaryId, + false, + sdk.Int(2), + sdk.Int(5), + externalVolumeId.Name(), + catalogId.Name(), + true, + "en_US", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", id.Name()), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "is_transient", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "2"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", "5"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), + ), + }, + { + ConfigVariables: configVariables( + newId, + externalPrimaryId, + false, + nil, + nil, + newExternalVolumeId.Name(), + newCatalogId.Name(), + false, + "en_GB", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelDebug, + sdk.TraceLevelAlways, + newComment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", newId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "is_transient", "false"), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", accountMaxDataExtensionTimeInDays.Value), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", newExternalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", newCatalogId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", "en_GB"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", string(sdk.LogLevelDebug)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", string(sdk.TraceLevelAlways)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", newComment), + ), + }, + { + ConfigVariables: configVariables( + id, + externalPrimaryId, + false, + sdk.Int(2), + sdk.Int(5), + externalVolumeId.Name(), + catalogId.Name(), + true, + "en_US", + sdk.StorageSerializationPolicyCompatible, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "is_transient", "false"), + 
resource.TestCheckResourceAttr("snowflake_secondary_database.test", "as_replica_of", externalPrimaryId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "2"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "max_data_extension_time_in_days.0.value", "5"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "storage_serialization_policy", string(sdk.StorageSerializationPolicyCompatible)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "comment", comment), + ), + }, + // Import all values + { + ConfigVariables: configVariables( + id, + externalPrimaryId, + false, + sdk.Int(2), + sdk.Int(5), + externalVolumeId.Name(), + catalogId.Name(), + true, + "en_US", + sdk.StorageSerializationPolicyCompatible, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + ResourceName: "snowflake_secondary_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSecondaryDatabase_DataRetentionTimeInDays(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + + _, externalPrimaryId, primaryDatabaseCleanup := 
acc.SecondaryTestClient().Database.CreatePrimaryDatabase(t, []sdk.AccountIdentifier{
+		sdk.NewAccountIdentifierFromAccountLocator(acc.Client(t).GetAccountLocator()),
+	})
+	t.Cleanup(primaryDatabaseCleanup)
+
+	accountDataRetentionTimeInDays, err := acc.Client(t).Parameters.ShowAccountParameter(context.Background(), sdk.AccountParameterDataRetentionTimeInDays)
+	require.NoError(t, err)
+
+	configVariables := func(
+		id sdk.AccountObjectIdentifier,
+		primaryDatabaseName sdk.ExternalObjectIdentifier,
+		dataRetentionTimeInDays *int,
+	) config.Variables {
+		variables := config.Variables{
+			"name":                         config.StringVariable(id.Name()),
+			"as_replica_of":                config.StringVariable(primaryDatabaseName.FullyQualifiedName()),
+			"transient":                    config.BoolVariable(false),
+			"external_volume":              config.StringVariable(""),
+			"catalog":                      config.StringVariable(""),
+			"replace_invalid_characters":   config.BoolVariable(false),
+			"default_ddl_collation":        config.StringVariable(""),
+			"storage_serialization_policy": config.StringVariable("OPTIMIZED"),
+			"log_level":                    config.StringVariable("OFF"),
+			"trace_level":                  config.StringVariable("OFF"),
+			"comment":                      config.StringVariable(""),
+		}
+		if dataRetentionTimeInDays != nil {
+			variables["data_retention_time_in_days"] = config.IntegerVariable(*dataRetentionTimeInDays)
+			variables["max_data_extension_time_in_days"] = config.IntegerVariable(10)
+		}
+		return variables
+	}
+
+	var revertAccountParameterChange func()
+
+	resource.Test(t, resource.TestCase{
+		ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories,
+		TerraformVersionChecks: []tfversion.TerraformVersionCheck{
+			tfversion.RequireAbove(tfversion.Version1_5_0),
+		},
+		PreCheck:     func() { acc.TestAccPreCheck(t) },
+		CheckDestroy: acc.CheckDestroy(t, resources.SecondaryDatabase),
+		Steps: []resource.TestStep{
+			{
+				ConfigVariables: configVariables(id, externalPrimaryId, sdk.Int(2)),
+				ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"),
+				
Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "2"), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, sdk.Int(1)), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "1"), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + ), + }, + { + PreConfig: func() { + revertAccountParameterChange = acc.TestClient().Parameter.UpdateAccountParameterTemporarily(t, sdk.AccountParameterDataRetentionTimeInDays, "3") + t.Cleanup(revertAccountParameterChange) + }, + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "3"), + ), + }, + { + PreConfig: func() { + revertAccountParameterChange() + }, + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, sdk.Int(3)), + ConfigDirectory: 
acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-set"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", "3"), + ), + }, + { + ConfigVariables: configVariables(id, externalPrimaryId, nil), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SecondaryDatabase/complete-optionals-unset"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_secondary_database.test", "data_retention_time_in_days.0.value", accountDataRetentionTimeInDays.Value), + ), + }, + }, + }) +} diff --git a/pkg/resources/shared_database.go b/pkg/resources/shared_database.go new file mode 100644 index 0000000000..e6075a632b --- /dev/null +++ b/pkg/resources/shared_database.go @@ -0,0 +1,305 @@ +package resources + +import ( + "context" + "errors" + "fmt" + "strconv" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/helpers" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/internal/provider" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-sdk/v2/diag" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var sharedDatabaseSchema = map[string]*schema.Schema{ + "name": { + Type: schema.TypeString, + Required: true, + Description: "Specifies the identifier for the database; must be unique for your account.", + }, + "from_share": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "A fully qualified path to a share from which the database will be created. A fully qualified path follows the format of `\"\".\"\"`.", + }, + // TODO(SNOW-1325381): Add it as an item to discuss and either remove or uncomment (and implement) it + // "is_transient": { + // Type: schema.TypeBool, + // Optional: true, + // ForceNew: true, + // Description: "Specifies the database as transient. 
Transient databases do not have a Fail-safe period so they do not incur additional storage costs once they leave Time Travel; however, this means they are also not protected by Fail-safe in the event of a data loss.", + // }, + "external_volume": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default external volume to use for Iceberg tables.", + }, + "catalog": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: IsValidIdentifier[sdk.AccountObjectIdentifier](), + Description: "The database parameter that specifies the default catalog to use for Iceberg tables.", + }, + "replace_invalid_characters": { + Type: schema.TypeBool, + Optional: true, + ForceNew: true, + Description: "Specifies whether to replace invalid UTF-8 characters with the Unicode replacement character (�) in query results for an Iceberg table. You can only set this parameter for tables that use an external Iceberg catalog.", + }, + "default_ddl_collation": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + Description: "Specifies a default collation specification for all schemas and tables added to the database. It can be overridden on schema or table level. For more information, see [collation specification](https://docs.snowflake.com/en/sql-reference/collation#label-collation-specification).", + }, + "storage_serialization_policy": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllStorageSerializationPolicies), true), + Description: fmt.Sprintf("Specifies the storage serialization policy for Iceberg tables that use Snowflake as the catalog. Valid options are: %v. COMPATIBLE: Snowflake performs encoding and compression of data files that ensures interoperability with third-party compute engines. 
OPTIMIZED: Snowflake performs encoding and compression of data files that ensures the best table performance within Snowflake.", sdk.AsStringList(sdk.AllStorageSerializationPolicies)), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.StorageSerializationPolicyOptimized) && newValue == "" + }, + }, + "log_level": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllLogLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.LogLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Specifies the severity level of messages that should be ingested and made available in the active event table. Valid options are: %v. Messages at the specified level (and at more severe levels) are ingested. For more information, see [LOG_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-log-level).", sdk.AsStringList(sdk.AllLogLevels)), + }, + "trace_level": { + Type: schema.TypeString, + Optional: true, + ForceNew: true, + ValidateDiagFunc: StringInSlice(sdk.AsStringList(sdk.AllTraceLevels), true), + DiffSuppressFunc: func(k, oldValue, newValue string, d *schema.ResourceData) bool { + return d.Get(k).(string) == string(sdk.TraceLevelOff) && newValue == "" + }, + Description: fmt.Sprintf("Controls how trace events are ingested into the event table. Valid options are: %v. 
For information about levels, see [TRACE_LEVEL](https://docs.snowflake.com/en/sql-reference/parameters.html#label-trace-level).", sdk.AsStringList(sdk.AllTraceLevels)), + }, + "comment": { + Type: schema.TypeString, + Optional: true, + Description: "Specifies a comment for the database.", + }, +} + +func SharedDatabase() *schema.Resource { + return &schema.Resource{ + CreateContext: CreateSharedDatabase, + UpdateContext: UpdateSharedDatabase, + ReadContext: ReadSharedDatabase, + DeleteContext: DeleteSharedDatabase, + Description: "A shared database creates a database from a share provided by another Snowflake account. For more information about shares, see [Introduction to Secure Data Sharing](https://docs.snowflake.com/en/user-guide/data-sharing-intro).", + + Schema: sharedDatabaseSchema, + Importer: &schema.ResourceImporter{ + StateContext: schema.ImportStatePassthroughContext, + }, + } +} + +func CreateSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + + id := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + externalShareId := sdk.NewExternalObjectIdentifierFromFullyQualifiedName(d.Get("from_share").(string)) + + var externalVolume *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("external_volume"); ok { + externalVolume = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var catalog *sdk.AccountObjectIdentifier + if v, ok := d.GetOk("catalog"); ok { + catalog = sdk.Pointer(sdk.NewAccountObjectIdentifier(v.(string))) + } + + var storageSerializationPolicy *sdk.StorageSerializationPolicy + if v, ok := d.GetOk("storage_serialization_policy"); ok { + storageSerializationPolicy = sdk.Pointer(sdk.StorageSerializationPolicy(v.(string))) + } + + var logLevel *sdk.LogLevel + if v, ok := d.GetOk("log_level"); ok { + logLevel = sdk.Pointer(sdk.LogLevel(v.(string))) + } + + var traceLevel *sdk.TraceLevel + if v, ok := d.GetOk("trace_level"); ok { + traceLevel = 
sdk.Pointer(sdk.TraceLevel(v.(string))) + } + + err := client.Databases.CreateShared(ctx, id, externalShareId, &sdk.CreateSharedDatabaseOptions{ + // TODO(SNOW-1325381) + // Transient: GetPropertyAsPointer[bool](d, "is_transient"), + ExternalVolume: externalVolume, + Catalog: catalog, + ReplaceInvalidCharacters: GetPropertyAsPointer[bool](d, "replace_invalid_characters"), + DefaultDDLCollation: GetPropertyAsPointer[string](d, "default_ddl_collation"), + StorageSerializationPolicy: storageSerializationPolicy, + LogLevel: logLevel, + TraceLevel: traceLevel, + Comment: GetPropertyAsPointer[string](d, "comment"), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId(helpers.EncodeSnowflakeID(id)) + + return ReadSharedDatabase(ctx, d, meta) +} + +func UpdateSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + if d.HasChange("name") { + newName := sdk.NewAccountObjectIdentifier(d.Get("name").(string)) + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + NewName: &newName, + }) + if err != nil { + return diag.FromErr(err) + } + d.SetId(helpers.EncodeSnowflakeID(newName)) + id = newName + } + + if d.HasChange("comment") { + comment := d.Get("comment").(string) + if len(comment) > 0 { + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + Set: &sdk.DatabaseSet{ + Comment: &comment, + }, + }) + if err != nil { + return diag.FromErr(err) + } + } else { + err := client.Databases.Alter(ctx, id, &sdk.AlterDatabaseOptions{ + Unset: &sdk.DatabaseUnset{ + Comment: sdk.Bool(true), + }, + }) + if err != nil { + return diag.FromErr(err) + } + } + } + + return ReadSharedDatabase(ctx, d, meta) +} + +func ReadSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := 
helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + database, err := client.Databases.ShowByID(ctx, id) + if err != nil { + if errors.Is(err, sdk.ErrObjectNotFound) { + d.SetId("") + return diag.Diagnostics{ + diag.Diagnostic{ + Severity: diag.Warning, + Summary: "Failed to query shared database. Marking the resource as removed.", + Detail: fmt.Sprintf("DatabaseName: %s, Err: %s", id.FullyQualifiedName(), err), + }, + } + } + return diag.FromErr(err) + } + + parameters, err := client.Parameters.ShowParameters(ctx, &sdk.ShowParametersOptions{ + In: &sdk.ParametersIn{ + Database: id, + }, + }) + if err != nil { + return diag.FromErr(err) + } + + if err := d.Set("name", database.Name); err != nil { + return diag.FromErr(err) + } + + if err := d.Set("from_share", sdk.NewExternalObjectIdentifierFromFullyQualifiedName(database.Origin).FullyQualifiedName()); err != nil { + return diag.FromErr(err) + } + + // TODO(SNOW-1325381) + // if err := d.Set("is_transient", database.Transient); err != nil { + // return diag.FromErr(err) + //} + + if err := d.Set("comment", database.Comment); err != nil { + return diag.FromErr(err) + } + + for _, parameter := range parameters { + switch parameter.Key { + case "EXTERNAL_VOLUME": + if err := d.Set("external_volume", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "CATALOG": + if err := d.Set("catalog", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "DEFAULT_DDL_COLLATION": + if err := d.Set("default_ddl_collation", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "LOG_LEVEL": + if err := d.Set("log_level", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "TRACE_LEVEL": + if err := d.Set("trace_level", parameter.Value); err != nil { + return diag.FromErr(err) + } + case "REPLACE_INVALID_CHARACTERS": + boolValue, err := strconv.ParseBool(parameter.Value) + if err != nil { + return diag.FromErr(err) + } + if err := 
d.Set("replace_invalid_characters", boolValue); err != nil { + return diag.FromErr(err) + } + case "STORAGE_SERIALIZATION_POLICY": + if err := d.Set("storage_serialization_policy", parameter.Value); err != nil { + return diag.FromErr(err) + } + } + } + + return nil +} + +func DeleteSharedDatabase(ctx context.Context, d *schema.ResourceData, meta any) diag.Diagnostics { + client := meta.(*provider.Context).Client + id := helpers.DecodeSnowflakeID(d.Id()).(sdk.AccountObjectIdentifier) + + err := client.Databases.Drop(ctx, id, &sdk.DropDatabaseOptions{ + IfExists: sdk.Bool(true), + }) + if err != nil { + return diag.FromErr(err) + } + + d.SetId("") + return nil +} diff --git a/pkg/resources/shared_database_acceptance_test.go b/pkg/resources/shared_database_acceptance_test.go new file mode 100644 index 0000000000..6c90d5560c --- /dev/null +++ b/pkg/resources/shared_database_acceptance_test.go @@ -0,0 +1,267 @@ +package resources_test + +import ( + "context" + "regexp" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance/helpers/random" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/provider/resources" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stretchr/testify/require" +) + +func TestAcc_CreateSharedDatabase_minimal(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + + newId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + newComment := random.Comment() + + configVariables := func(id sdk.AccountObjectIdentifier, shareName sdk.ExternalObjectIdentifier, comment string) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + 
"from_share": config.StringVariable(shareName.FullyQualifiedName()), + "comment": config.StringVariable(comment), + } + } + + shareExternalId := createShareableDatabase(t) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables(id, shareExternalId, comment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", shareExternalId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + ), + }, + { + ConfigVariables: configVariables(newId, shareExternalId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", newId.Name()), + 
resource.TestCheckResourceAttr("snowflake_shared_database.test", "from_share", shareExternalId.FullyQualifiedName()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "false"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", ""), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "storage_serialization_policy", "OPTIMIZED"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", "OFF"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", newComment), + ), + }, + // Import all values + { + ConfigVariables: configVariables(newId, shareExternalId, newComment), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/basic"), + ResourceName: "snowflake_shared_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSharedDatabase_complete(t *testing.T) { + id := acc.TestClient().Ids.RandomAccountObjectIdentifier() + comment := random.Comment() + externalShareId := createShareableDatabase(t) + + externalVolumeId, externalVolumeCleanup := acc.TestClient().ExternalVolume.Create(t) + t.Cleanup(externalVolumeCleanup) + + catalogId, catalogCleanup := acc.TestClient().CatalogIntegration.Create(t) + t.Cleanup(catalogCleanup) + + configVariables := func( + id sdk.AccountObjectIdentifier, + shareName sdk.ExternalObjectIdentifier, + externalVolume sdk.AccountObjectIdentifier, + catalog sdk.AccountObjectIdentifier, + replaceInvalidCharacters bool, + defaultDdlCollation string, + storageSerializationPolicy sdk.StorageSerializationPolicy, + logLevel sdk.LogLevel, + traceLevel sdk.TraceLevel, + comment string, 
+ ) config.Variables { + return config.Variables{ + "name": config.StringVariable(id.Name()), + "from_share": config.StringVariable(shareName.FullyQualifiedName()), + "external_volume": config.StringVariable(externalVolume.Name()), + "catalog": config.StringVariable(catalog.Name()), + "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), + "default_ddl_collation": config.StringVariable(defaultDdlCollation), + "storage_serialization_policy": config.StringVariable(string(storageSerializationPolicy)), + "log_level": config.StringVariable(string(logLevel)), + "trace_level": config.StringVariable(string(traceLevel)), + "comment": config.StringVariable(comment), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) }, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables( + id, + externalShareId, + externalVolumeId, + catalogId, + true, + "en_US", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr("snowflake_shared_database.test", "name", id.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "external_volume", externalVolumeId.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "catalog", catalogId.Name()), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "replace_invalid_characters", "true"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "default_ddl_collation", "en_US"), + resource.TestCheckResourceAttr("snowflake_shared_database.test", 
"storage_serialization_policy", string(sdk.StorageSerializationPolicyOptimized)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "log_level", string(sdk.LogLevelInfo)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "trace_level", string(sdk.TraceLevelOnEvent)), + resource.TestCheckResourceAttr("snowflake_shared_database.test", "comment", comment), + ), + }, + // Import all values + { + ConfigVariables: configVariables( + id, + externalShareId, + externalVolumeId, + catalogId, + true, + "en_US", + sdk.StorageSerializationPolicyOptimized, + sdk.LogLevelInfo, + sdk.TraceLevelOnEvent, + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), + ResourceName: "snowflake_shared_database.test", + ImportState: true, + ImportStateVerify: true, + }, + }, + }) +} + +func TestAcc_CreateSharedDatabase_InvalidValues(t *testing.T) { + comment := random.Comment() + + configVariables := func( + replaceInvalidCharacters bool, + defaultDdlCollation string, + storageSerializationPolicy string, + logLevel string, + traceLevel string, + comment string, + ) config.Variables { + return config.Variables{ + "name": config.StringVariable(""), + "from_share": config.StringVariable(""), + "external_volume": config.StringVariable(""), + "catalog": config.StringVariable(""), + "replace_invalid_characters": config.BoolVariable(replaceInvalidCharacters), + "default_ddl_collation": config.StringVariable(defaultDdlCollation), + "storage_serialization_policy": config.StringVariable(storageSerializationPolicy), + "log_level": config.StringVariable(logLevel), + "trace_level": config.StringVariable(traceLevel), + "comment": config.StringVariable(comment), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + PreCheck: func() { acc.TestAccPreCheck(t) 
}, + CheckDestroy: acc.CheckDestroy(t, resources.SharedDatabase), + Steps: []resource.TestStep{ + { + ConfigVariables: configVariables( + true, + "en_US", + "invalid_value", + "invalid_value", + "invalid_value", + comment, + ), + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_SharedDatabase/complete"), + ExpectError: regexp.MustCompile(`(expected \[{{} log_level}\] to be one of \[\"TRACE\" \"DEBUG\" \"INFO\" \"WARN\" \"ERROR\" \"FATAL\" \"OFF\"\], got invalid_value)|` + + `(expected \[{{} trace_level}\] to be one of \[\"ALWAYS\" \"ON_EVENT\" \"OFF\"\], got invalid_value)|` + + `(expected \[{{} storage_serialization_policy}\] to be one of \[\"COMPATIBLE\" \"OPTIMIZED\"\], got invalid_value)`), + }, + }, + }) +} + +// createShareableDatabase creates a database on the secondary account and enables database sharing on the primary account. +// TODO(SNOW-1431726): Later on, this function should be moved to more sophisticated helpers. +func createShareableDatabase(t *testing.T) sdk.ExternalObjectIdentifier { + t.Helper() + + ctx := context.Background() + + share, shareCleanup := acc.SecondaryTestClient().Share.CreateShare(t) + t.Cleanup(shareCleanup) + + sharedDatabase, sharedDatabaseCleanup := acc.SecondaryTestClient().Database.CreateDatabase(t) + t.Cleanup(sharedDatabaseCleanup) + + err := acc.SecondaryClient(t).Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: sharedDatabase.ID(), + }, share.ID()) + require.NoError(t, err) + t.Cleanup(func() { + err := acc.SecondaryClient(t).Grants.RevokePrivilegeFromShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ + Database: sharedDatabase.ID(), + }, share.ID()) + require.NoError(t, err) + }) + + err = acc.SecondaryClient(t).Shares.Alter(ctx, share.ID(), &sdk.AlterShareOptions{ + IfExists: sdk.Bool(true), + Set: &sdk.ShareSet{ + Accounts: []sdk.AccountIdentifier{ + acc.TestClient().Account.GetAccountIdentifier(t), + }, + }, + }) + 
require.NoError(t, err) + + return sdk.NewExternalObjectIdentifier(acc.SecondaryTestClient().Account.GetAccountIdentifier(t), share.ID()) +} diff --git a/pkg/resources/table_acceptance_test.go b/pkg/resources/table_acceptance_test.go index 9bdbfde50e..87e82e64a6 100644 --- a/pkg/resources/table_acceptance_test.go +++ b/pkg/resources/table_acceptance_test.go @@ -1461,16 +1461,15 @@ func TestAcc_Table_MaskingPolicy(t *testing.T) { // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - id := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName) + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) configWithDatabaseDataRetentionSet := func(databaseDataRetentionTime int) config.Variables { return config.Variables{ - "database": config.StringVariable(databaseName), - "schema": config.StringVariable(schemaName), - "table": config.StringVariable(tableName), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), + "table": config.StringVariable(tableId.Name()), "database_data_retention_time": config.IntegerVariable(databaseDataRetentionTime), } } @@ -1500,7 +1499,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithDatabaseDataRetentionSet(5), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 5, 5, 5), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 5, 5, 5), ), }, { @@ -1508,7 +1507,7 @@ func 
TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithSchemaDataRetentionSet(5, 10), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 5, 10, 10), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 5, 10, 10), ), }, { @@ -1516,7 +1515,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 3, 5), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "5"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 5), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 5), ), }, { @@ -1524,7 +1523,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 3, 15), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "15"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 15), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 15), ), }, { @@ -1532,7 +1531,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithSchemaDataRetentionSet(10, 3), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 3), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 3), ), }, { @@ -1540,7 +1539,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithDatabaseDataRetentionSet(10), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 10, 10), + 
checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 10, 10), ), }, { @@ -1548,7 +1547,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 5, 0), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "0"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 5, 0), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 5, 0), ), }, { @@ -1556,7 +1555,7 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 5, 3), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "3"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 5, 3), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 5, 3), ), }, }, @@ -1565,16 +1564,15 @@ func TestAcc_Table_DefaultDataRetentionTime(t *testing.T) { // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. 
func TestAcc_Table_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - id := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName) + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) configWithDatabaseDataRetentionSet := func(databaseDataRetentionTime int) config.Variables { return config.Variables{ - "database": config.StringVariable(databaseName), - "schema": config.StringVariable(schemaName), - "table": config.StringVariable(tableName), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), + "table": config.StringVariable(tableId.Name()), "database_data_retention_time": config.IntegerVariable(databaseDataRetentionTime), } } @@ -1599,18 +1597,18 @@ func TestAcc_Table_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) ConfigVariables: configWithDatabaseDataRetentionSet(5), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 5, 5, 5), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 5, 5, 5), ), }, { PreConfig: func() { - acc.TestClient().Table.SetDataRetentionTime(t, id, 20) + acc.TestClient().Table.SetDataRetentionTime(t, tableId, 20) }, ConfigDirectory: acc.ConfigurationDirectory("TestAcc_Table_DefaultDataRetentionTime/WithDatabaseDataRetentionSet"), ConfigVariables: configWithDatabaseDataRetentionSet(5), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 5, 5, 5), + 
checkDatabaseSchemaAndTableDataRetentionTime(tableId, 5, 5, 5), ), }, { @@ -1618,7 +1616,7 @@ func TestAcc_Table_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) ConfigVariables: configWithTableDataRetentionSet(5, 10, 3), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "3"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 5, 10, 3), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 5, 10, 3), ), ConfigPlanChecks: resource.ConfigPlanChecks{ PostApplyPostRefresh: []plancheck.PlanCheck{ @@ -1632,16 +1630,15 @@ func TestAcc_Table_DefaultDataRetentionTime_SetOutsideOfTerraform(t *testing.T) // proves https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2356 issue is fixed. func TestAcc_Table_DefaultDataRetentionTimeSettingUnsetting(t *testing.T) { - databaseName := acc.TestClient().Ids.Alpha() - schemaName := acc.TestClient().Ids.Alpha() - tableName := acc.TestClient().Ids.Alpha() - id := sdk.NewSchemaObjectIdentifier(databaseName, schemaName, tableName) + databaseId := acc.TestClient().Ids.RandomAccountObjectIdentifier() + schemaId := acc.TestClient().Ids.RandomDatabaseObjectIdentifierInDatabase(databaseId) + tableId := acc.TestClient().Ids.RandomSchemaObjectIdentifierInSchema(schemaId) configWithDatabaseDataRetentionSet := func(databaseDataRetentionTime int) config.Variables { return config.Variables{ - "database": config.StringVariable(databaseName), - "schema": config.StringVariable(schemaName), - "table": config.StringVariable(tableName), + "database": config.StringVariable(databaseId.Name()), + "schema": config.StringVariable(schemaId.Name()), + "table": config.StringVariable(tableId.Name()), "database_data_retention_time": config.IntegerVariable(databaseDataRetentionTime), } } @@ -1671,7 +1668,7 @@ func TestAcc_Table_DefaultDataRetentionTimeSettingUnsetting(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 3, 5), Check: 
resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "5"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 5), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 5), ), }, { @@ -1679,7 +1676,7 @@ func TestAcc_Table_DefaultDataRetentionTimeSettingUnsetting(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 3, -1), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 3), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 3), ), }, { @@ -1687,7 +1684,7 @@ func TestAcc_Table_DefaultDataRetentionTimeSettingUnsetting(t *testing.T) { ConfigVariables: configWithSchemaDataRetentionSet(10, 3), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 3), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 3), ), }, { @@ -1695,7 +1692,7 @@ func TestAcc_Table_DefaultDataRetentionTimeSettingUnsetting(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 3, -1), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "-1"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 3), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 3), ), }, { @@ -1703,7 +1700,7 @@ func TestAcc_Table_DefaultDataRetentionTimeSettingUnsetting(t *testing.T) { ConfigVariables: configWithTableDataRetentionSet(10, 3, 5), Check: resource.ComposeTestCheckFunc( resource.TestCheckResourceAttr("snowflake_table.test", "data_retention_time_in_days", "5"), - checkDatabaseSchemaAndTableDataRetentionTime(id, 10, 3, 5), + checkDatabaseSchemaAndTableDataRetentionTime(tableId, 10, 3, 5), ), }, }, diff --git 
a/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/test.tf new file mode 100644 index 0000000000..4fb82e93a2 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_secondary_database" "test" { + name = var.name + as_replica_of = var.as_replica_of + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/variables.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/variables.tf new file mode 100644 index 0000000000..2fce70aa2f --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/basic/variables.tf @@ -0,0 +1,12 @@ +variable "name" { + type = string +} + +variable "as_replica_of" { + type = string +} + +variable "comment" { + type = string +} + diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/test.tf new file mode 100644 index 0000000000..4cefd5d621 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/test.tf @@ -0,0 +1,22 @@ +resource "snowflake_secondary_database" "test" { + name = var.name + as_replica_of = var.as_replica_of + is_transient = var.transient + + data_retention_time_in_days { + value = var.data_retention_time_in_days + } + + max_data_extension_time_in_days { + value = var.max_data_extension_time_in_days + } + + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf 
b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf new file mode 100644 index 0000000000..cfe7514845 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-set/variables.tf @@ -0,0 +1,51 @@ +variable "name" { + type = string +} + +variable "as_replica_of" { + type = string +} + +variable "transient" { + type = bool +} + +variable "data_retention_time_in_days" { + type = string +} + +variable "max_data_extension_time_in_days" { + type = string +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = string +} + +variable "replace_invalid_characters" { + type = string +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf new file mode 100644 index 0000000000..5aa60d21ed --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/test.tf @@ -0,0 +1,13 @@ +resource "snowflake_secondary_database" "test" { + name = var.name + as_replica_of = var.as_replica_of + is_transient = var.transient + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/variables.tf b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/variables.tf new file mode 100644 index 0000000000..977a6bdfe1 --- /dev/null 
+++ b/pkg/resources/testdata/TestAcc_SecondaryDatabase/complete-optionals-unset/variables.tf @@ -0,0 +1,43 @@ +variable "name" { + type = string +} + +variable "as_replica_of" { + type = string +} + +variable "transient" { + type = bool +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = string +} + +variable "replace_invalid_characters" { + type = string +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/basic/test.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/test.tf new file mode 100644 index 0000000000..31b366401b --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_shared_database" "test" { + name = var.name + from_share = var.from_share + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/basic/variables.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/variables.tf new file mode 100644 index 0000000000..dc80047760 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/basic/variables.tf @@ -0,0 +1,12 @@ +variable "name" { + type = string +} + +variable "from_share" { + type = string +} + +variable "comment" { + type = string +} + diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf new file mode 100644 index 0000000000..5c2f7493b6 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/test.tf @@ -0,0 +1,12 @@ +resource "snowflake_shared_database" "test" { + name = var.name + from_share = var.from_share + external_volume = var.external_volume + catalog = var.catalog + replace_invalid_characters = var.replace_invalid_characters + 
default_ddl_collation = var.default_ddl_collation + storage_serialization_policy = var.storage_serialization_policy + log_level = var.log_level + trace_level = var.trace_level + comment = var.comment +} diff --git a/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf new file mode 100644 index 0000000000..b704eb8dfe --- /dev/null +++ b/pkg/resources/testdata/TestAcc_SharedDatabase/complete/variables.tf @@ -0,0 +1,39 @@ +variable "name" { + type = string +} + +variable "from_share" { + type = string +} + +variable "external_volume" { + type = string +} + +variable "catalog" { + type = string +} + +variable "replace_invalid_characters" { + type = bool +} + +variable "default_ddl_collation" { + type = string +} + +variable "storage_serialization_policy" { + type = string +} + +variable "log_level" { + type = string +} + +variable "trace_level" { + type = string +} + +variable "comment" { + type = string +} diff --git a/pkg/sdk/accounts_test.go b/pkg/sdk/accounts_test.go index 55ed1386bc..7c6379351f 100644 --- a/pkg/sdk/accounts_test.go +++ b/pkg/sdk/accounts_test.go @@ -103,21 +103,23 @@ func TestAccountAlter(t *testing.T) { }) t.Run("with set password policy", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := &AlterAccountOptions{ Set: &AccountSet{ - PasswordPolicy: NewSchemaObjectIdentifier("db", "schema", "passpol"), + PasswordPolicy: id, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT SET PASSWORD POLICY "db"."schema"."passpol"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT SET PASSWORD POLICY %s`, id.FullyQualifiedName()) }) t.Run("with set session policy", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := &AlterAccountOptions{ Set: &AccountSet{ - SessionPolicy: NewSchemaObjectIdentifier("db", "schema", "sesspol"), + SessionPolicy: id, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT SET SESSION POLICY 
"db"."schema"."sesspol"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT SET SESSION POLICY %s`, id.FullyQualifiedName()) }) t.Run("with unset password policy", func(t *testing.T) { @@ -139,28 +141,31 @@ func TestAccountAlter(t *testing.T) { }) t.Run("with set tag", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &AlterAccountOptions{ SetTag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "tag2"), + Name: tagId2, Value: "v2", }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT SET TAG "db"."schema"."tag1" = 'v1', "db"."schema"."tag2" = 'v2'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT SET TAG %s = 'v1', %s = 'v2'`, tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("with unset tag", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := &AlterAccountOptions{ UnsetTag: []ObjectIdentifier{ - NewSchemaObjectIdentifier("db", "schema", "tag1"), + id, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT UNSET TAG "db"."schema"."tag1"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER ACCOUNT UNSET TAG %s`, id.FullyQualifiedName()) }) t.Run("rename", func(t *testing.T) { diff --git a/pkg/sdk/alerts.go b/pkg/sdk/alerts.go index 90a3fd2428..ae7d1b815b 100644 --- a/pkg/sdk/alerts.go +++ b/pkg/sdk/alerts.go @@ -292,7 +292,7 @@ func (v *alerts) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Aler Pattern: String(id.Name()), }, In: &In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }, }) if err != nil { diff --git a/pkg/sdk/alerts_test.go b/pkg/sdk/alerts_test.go index 80ee4970cc..977cb51148 100644 --- a/pkg/sdk/alerts_test.go +++ b/pkg/sdk/alerts_test.go @@ -185,16 +185,15 @@ func TestAlertShow(t *testing.T) { }) t.Run("with like and in schema", 
func(t *testing.T) { - schemaIdentifier := NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()) opts := &ShowAlertOptions{ Like: &Like{ Pattern: String(id.Name()), }, In: &In{ - Schema: schemaIdentifier, + Schema: id.SchemaId(), }, } - assertOptsValidAndSQLEquals(t, opts, "SHOW ALERTS LIKE '%s' IN SCHEMA %s", id.Name(), schemaIdentifier.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "SHOW ALERTS LIKE '%s' IN SCHEMA %s", id.Name(), id.SchemaId().FullyQualifiedName()) }) t.Run("with 'starts with'", func(t *testing.T) { diff --git a/pkg/sdk/application_roles_gen_test.go b/pkg/sdk/application_roles_gen_test.go index 65f1b86b4b..055b515d75 100644 --- a/pkg/sdk/application_roles_gen_test.go +++ b/pkg/sdk/application_roles_gen_test.go @@ -19,7 +19,7 @@ func TestApplicationRoles_Grant(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -75,7 +75,7 @@ func TestApplicationRoles_Revoke(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/comments_test.go b/pkg/sdk/comments_test.go index fbcb7864b5..4fa38f1237 100644 --- a/pkg/sdk/comments_test.go +++ b/pkg/sdk/comments_test.go @@ -6,31 +6,32 @@ import ( func TestComments(t *testing.T) { t.Run("set on schema", func(t *testing.T) { - id := NewDatabaseObjectIdentifier("db1", "schema2") + id := randomDatabaseObjectIdentifier() opts := &SetCommentOptions{ ObjectType: ObjectTypeSchema, ObjectName: &id, Value: String("mycomment"), } - assertOptsValidAndSQLEquals(t, opts, `COMMENT ON SCHEMA "db1"."schema2" IS 'mycomment'`) + 
assertOptsValidAndSQLEquals(t, opts, `COMMENT ON SCHEMA %s IS 'mycomment'`, id.FullyQualifiedName()) }) t.Run("set if exists", func(t *testing.T) { - id := NewAccountObjectIdentifier("maskpol") + id := randomAccountObjectIdentifier() opts := &SetCommentOptions{ IfExists: Bool(true), ObjectType: ObjectTypeMaskingPolicy, ObjectName: &id, Value: String("mycomment2"), } - assertOptsValidAndSQLEquals(t, opts, `COMMENT IF EXISTS ON MASKING POLICY "maskpol" IS 'mycomment2'`) + assertOptsValidAndSQLEquals(t, opts, `COMMENT IF EXISTS ON MASKING POLICY %s IS 'mycomment2'`, id.FullyQualifiedName()) }) t.Run("set column comment", func(t *testing.T) { + id := randomDatabaseObjectIdentifier() opts := &SetColumnCommentOptions{ - Column: NewDatabaseObjectIdentifier("table3", "column4"), + Column: id, Value: String("mycomment3"), } - assertOptsValidAndSQLEquals(t, opts, `COMMENT ON COLUMN "table3"."column4" IS 'mycomment3'`) + assertOptsValidAndSQLEquals(t, opts, `COMMENT ON COLUMN %s IS 'mycomment3'`, id.FullyQualifiedName()) }) } diff --git a/pkg/sdk/common_types.go b/pkg/sdk/common_types.go index aaba933466..e95d15671d 100644 --- a/pkg/sdk/common_types.go +++ b/pkg/sdk/common_types.go @@ -234,6 +234,16 @@ const ( LogLevelOff LogLevel = "OFF" ) +var AllLogLevels = []LogLevel{ + LogLevelTrace, + LogLevelDebug, + LogLevelInfo, + LogLevelWarn, + LogLevelError, + LogLevelFatal, + LogLevelOff, +} + type TraceLevel string const ( @@ -241,3 +251,9 @@ const ( TraceLevelOnEvent TraceLevel = "ON_EVENT" TraceLevelOff TraceLevel = "OFF" ) + +var AllTraceLevels = []TraceLevel{ + TraceLevelAlways, + TraceLevelOnEvent, + TraceLevelOff, +} diff --git a/pkg/sdk/context_functions.go b/pkg/sdk/context_functions.go index b21d69d862..e0eb339db8 100644 --- a/pkg/sdk/context_functions.go +++ b/pkg/sdk/context_functions.go @@ -12,6 +12,8 @@ import ( type ContextFunctions interface { // Session functions. 
CurrentAccount(ctx context.Context) (string, error) + CurrentOrganizationName(ctx context.Context) (string, error) + CurrentAccountName(ctx context.Context) (string, error) CurrentRole(ctx context.Context) (AccountObjectIdentifier, error) CurrentSecondaryRoles(ctx context.Context) (*CurrentSecondaryRoles, error) CurrentRegion(ctx context.Context) (string, error) @@ -70,6 +72,28 @@ func (c *contextFunctions) CurrentAccount(ctx context.Context) (string, error) { return s.CurrentAccount, nil } +func (c *contextFunctions) CurrentOrganizationName(ctx context.Context) (string, error) { + s := &struct { + CurrentOrganizationName string `db:"CURRENT_ORGANIZATION_NAME"` + }{} + err := c.client.queryOne(ctx, s, "SELECT CURRENT_ORGANIZATION_NAME() as CURRENT_ORGANIZATION_NAME") + if err != nil { + return "", err + } + return s.CurrentOrganizationName, nil +} + +func (c *contextFunctions) CurrentAccountName(ctx context.Context) (string, error) { + s := &struct { + CurrentAccountName string `db:"CURRENT_ACCOUNT_NAME"` + }{} + err := c.client.queryOne(ctx, s, "SELECT CURRENT_ACCOUNT_NAME() as CURRENT_ACCOUNT_NAME") + if err != nil { + return "", err + } + return s.CurrentAccountName, nil +} + func (c *contextFunctions) CurrentRole(ctx context.Context) (AccountObjectIdentifier, error) { s := &struct { CurrentRole string `db:"CURRENT_ROLE"` diff --git a/pkg/sdk/database_role_test.go b/pkg/sdk/database_role_test.go index 9bd95efe37..423ff0599e 100644 --- a/pkg/sdk/database_role_test.go +++ b/pkg/sdk/database_role_test.go @@ -20,7 +20,7 @@ func TestDatabaseRoleCreate(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -33,7 +33,7 @@ func TestDatabaseRoleCreate(t *testing.T) { t.Run("validation: multiple errors", func(t *testing.T) { opts := defaultOpts() - opts.name = 
NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier opts.IfNotExists = Bool(true) opts.OrReplace = Bool(true) assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier, errOneOf("createDatabaseRoleOptions", "OrReplace", "IfNotExists")) @@ -69,7 +69,7 @@ func TestDatabaseRoleAlter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -92,7 +92,7 @@ func TestDatabaseRoleAlter(t *testing.T) { t.Run("validation: invalid new name", func(t *testing.T) { opts := defaultOpts() opts.Rename = &DatabaseRoleRename{ - Name: NewDatabaseObjectIdentifier("", ""), + Name: emptyDatabaseObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -169,7 +169,7 @@ func TestDatabaseRoleDrop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -243,7 +243,7 @@ func TestDatabaseRoles_Grant(t *testing.T) { t.Run("validation: invalid identifier", func(t *testing.T) { opts := setUpOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -292,7 +292,7 @@ func TestDatabaseRoles_Revoke(t *testing.T) { t.Run("validation: invalid identifier", func(t *testing.T) { opts := setUpOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -341,7 +341,7 @@ func TestDatabaseRoles_GrantToShare(t *testing.T) { t.Run("validation: invalid identifier", func(t *testing.T) { opts := setUpOpts() 
- opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -376,7 +376,7 @@ func TestDatabaseRoles_RevokeFromShare(t *testing.T) { t.Run("validation: invalid identifier", func(t *testing.T) { opts := setUpOpts() - opts.name = NewDatabaseObjectIdentifier("", "") + opts.name = emptyDatabaseObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/databases.go b/pkg/sdk/databases.go index b7be7579d1..cdefbc81d8 100644 --- a/pkg/sdk/databases.go +++ b/pkg/sdk/databases.go @@ -137,24 +137,38 @@ func (row databaseRow) convert() *Database { return database } +type StorageSerializationPolicy string + +const ( + StorageSerializationPolicyCompatible StorageSerializationPolicy = "COMPATIBLE" + StorageSerializationPolicyOptimized StorageSerializationPolicy = "OPTIMIZED" +) + +var AllStorageSerializationPolicies = []StorageSerializationPolicy{ + StorageSerializationPolicyCompatible, + StorageSerializationPolicyOptimized, +} + // CreateDatabaseOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-database. 
type CreateDatabaseOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Transient *bool `ddl:"keyword" sql:"TRANSIENT"` - database bool `ddl:"static" sql:"DATABASE"` - IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` - name AccountObjectIdentifier `ddl:"identifier"` - Clone *Clone `ddl:"-"` - DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` - MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Transient *bool `ddl:"keyword" sql:"TRANSIENT"` + database bool `ddl:"static" sql:"DATABASE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name AccountObjectIdentifier `ddl:"identifier"` + Clone *Clone `ddl:"-"` + DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` + MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + 
LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` } func (opts *CreateDatabaseOptions) validate() error { @@ -200,20 +214,22 @@ func (v *databases) Create(ctx context.Context, id AccountObjectIdentifier, opts // CreateSharedDatabaseOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-database. type CreateSharedDatabaseOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Transient *bool `ddl:"keyword" sql:"TRANSIENT"` - database bool `ddl:"static" sql:"DATABASE"` - IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` - name AccountObjectIdentifier `ddl:"identifier"` - fromShare ExternalObjectIdentifier `ddl:"identifier" sql:"FROM SHARE"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` - Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + Transient *bool `ddl:"keyword" sql:"TRANSIENT"` + database bool `ddl:"static" sql:"DATABASE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name AccountObjectIdentifier `ddl:"identifier"` + fromShare ExternalObjectIdentifier `ddl:"identifier" sql:"FROM SHARE"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" 
sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + Tag []TagAssociation `ddl:"keyword,parentheses" sql:"TAG"` } func (opts *CreateSharedDatabaseOptions) validate() error { @@ -260,21 +276,23 @@ func (v *databases) CreateShared(ctx context.Context, id AccountObjectIdentifier // CreateSecondaryDatabaseOptions is based on https://docs.snowflake.com/en/sql-reference/sql/create-database. type CreateSecondaryDatabaseOptions struct { - create bool `ddl:"static" sql:"CREATE"` - OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` - Transient *bool `ddl:"keyword" sql:"TRANSIENT"` - database bool `ddl:"static" sql:"DATABASE"` - IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` - name AccountObjectIdentifier `ddl:"identifier"` - primaryDatabase ExternalObjectIdentifier `ddl:"identifier" sql:"AS REPLICA OF"` - DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` - MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + create bool `ddl:"static" sql:"CREATE"` + OrReplace *bool `ddl:"keyword" sql:"OR REPLACE"` + 
Transient *bool `ddl:"keyword" sql:"TRANSIENT"` + database bool `ddl:"static" sql:"DATABASE"` + IfNotExists *bool `ddl:"keyword" sql:"IF NOT EXISTS"` + name AccountObjectIdentifier `ddl:"identifier"` + primaryDatabase ExternalObjectIdentifier `ddl:"identifier" sql:"AS REPLICA OF"` + DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` + MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` } func (opts *CreateSecondaryDatabaseOptions) validate() error { @@ -362,14 +380,16 @@ func (opts *AlterDatabaseOptions) validate() error { } type DatabaseSet struct { - DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` - MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` - ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` - Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` - DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` - LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` - TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` - Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` + DataRetentionTimeInDays *int `ddl:"parameter" sql:"DATA_RETENTION_TIME_IN_DAYS"` 
+ MaxDataExtensionTimeInDays *int `ddl:"parameter" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` + ExternalVolume *AccountObjectIdentifier `ddl:"identifier,equals" sql:"EXTERNAL_VOLUME"` + Catalog *AccountObjectIdentifier `ddl:"identifier,equals" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"parameter" sql:"REPLACE_INVALID_CHARACTERS"` + DefaultDDLCollation *string `ddl:"parameter,single_quotes" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *StorageSerializationPolicy `ddl:"parameter" sql:"STORAGE_SERIALIZATION_POLICY"` + LogLevel *LogLevel `ddl:"parameter,single_quotes" sql:"LOG_LEVEL"` + TraceLevel *TraceLevel `ddl:"parameter,single_quotes" sql:"TRACE_LEVEL"` + Comment *string `ddl:"parameter,single_quotes" sql:"COMMENT"` } func (v *DatabaseSet) validate() error { @@ -380,8 +400,8 @@ func (v *DatabaseSet) validate() error { if v.Catalog != nil && !ValidObjectIdentifier(v.Catalog) { errs = append(errs, errInvalidIdentifier("DatabaseSet", "Catalog")) } - if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.DefaultDDLCollation, v.LogLevel, v.TraceLevel, v.Comment) { - errs = append(errs, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.ReplaceInvalidCharacters, v.DefaultDDLCollation, v.StorageSerializationPolicy, v.LogLevel, v.TraceLevel, v.Comment) { + errs = append(errs, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) } return errors.Join(errs...) 
} @@ -391,7 +411,9 @@ type DatabaseUnset struct { MaxDataExtensionTimeInDays *bool `ddl:"keyword" sql:"MAX_DATA_EXTENSION_TIME_IN_DAYS"` ExternalVolume *bool `ddl:"keyword" sql:"EXTERNAL_VOLUME"` Catalog *bool `ddl:"keyword" sql:"CATALOG"` + ReplaceInvalidCharacters *bool `ddl:"keyword" sql:"REPLACE_INVALID_CHARACTERS"` DefaultDDLCollation *bool `ddl:"keyword" sql:"DEFAULT_DDL_COLLATION"` + StorageSerializationPolicy *bool `ddl:"keyword" sql:"STORAGE_SERIALIZATION_POLICY"` LogLevel *bool `ddl:"keyword" sql:"LOG_LEVEL"` TraceLevel *bool `ddl:"keyword" sql:"TRACE_LEVEL"` Comment *bool `ddl:"keyword" sql:"COMMENT"` @@ -399,8 +421,8 @@ type DatabaseUnset struct { func (v *DatabaseUnset) validate() error { var errs []error - if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.DefaultDDLCollation, v.LogLevel, v.TraceLevel, v.Comment) { - errs = append(errs, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + if !anyValueSet(v.DataRetentionTimeInDays, v.MaxDataExtensionTimeInDays, v.ExternalVolume, v.Catalog, v.ReplaceInvalidCharacters, v.DefaultDDLCollation, v.StorageSerializationPolicy, v.LogLevel, v.TraceLevel, v.Comment) { + errs = append(errs, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) } return errors.Join(errs...) 
} diff --git a/pkg/sdk/databases_test.go b/pkg/sdk/databases_test.go index 579c77556b..e3b1d36687 100644 --- a/pkg/sdk/databases_test.go +++ b/pkg/sdk/databases_test.go @@ -15,14 +15,14 @@ func TestDatabasesCreate(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: invalid clone", func(t *testing.T) { opts := defaultOpts() opts.Clone = &Clone{ - SourceObject: NewAccountObjectIdentifier(""), + SourceObject: emptyAccountObjectIdentifier, At: &TimeTravel{ Timestamp: Pointer(time.Now()), Offset: Int(123), @@ -44,8 +44,8 @@ func TestDatabasesCreate(t *testing.T) { t.Run("validation: invalid external volume and catalog", func(t *testing.T) { opts := defaultOpts() - opts.ExternalVolume = Pointer(NewAccountObjectIdentifier("")) - opts.Catalog = Pointer(NewAccountObjectIdentifier("")) + opts.ExternalVolume = Pointer(emptyAccountObjectIdentifier) + opts.Catalog = Pointer(emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("CreateDatabaseOptions", "ExternalVolume"), errInvalidIdentifier("CreateDatabaseOptions", "Catalog"), @@ -74,17 +74,20 @@ func TestDatabasesCreate(t *testing.T) { opts.MaxDataExtensionTimeInDays = Int(1) opts.ExternalVolume = &externalVolumeId opts.Catalog = &catalogId + opts.ReplaceInvalidCharacters = Bool(true) opts.DefaultDDLCollation = String("en_US") + opts.StorageSerializationPolicy = Pointer(StorageSerializationPolicyCompatible) opts.LogLevel = Pointer(LogLevelInfo) opts.TraceLevel = Pointer(TraceLevelOnEvent) opts.Comment = String("comment") + tagId := randomAccountObjectIdentifier() opts.Tag = []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag1"), + Name: tagId, Value: "v1", }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE TRANSIENT DATABASE IF NOT EXISTS %s 
DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 1 EXTERNAL_VOLUME = %s CATALOG = %s DEFAULT_DDL_COLLATION = 'en_US' LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE TRANSIENT DATABASE IF NOT EXISTS %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 1 EXTERNAL_VOLUME = %s CATALOG = %s REPLACE_INVALID_CHARACTERS = true DEFAULT_DDL_COLLATION = 'en_US' STORAGE_SERIALIZATION_POLICY = COMPATIBLE LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG (%s = 'v1')`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName(), tagId.FullyQualifiedName()) }) } @@ -98,13 +101,13 @@ func TestDatabasesCreateShared(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: invalid from share name", func(t *testing.T) { opts := defaultOpts() - opts.fromShare = NewExternalObjectIdentifier(NewAccountIdentifier("", ""), NewAccountObjectIdentifier("")) + opts.fromShare = NewExternalObjectIdentifier(NewAccountIdentifier("", ""), emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("CreateSharedDatabaseOptions", "fromShare")) }) @@ -119,8 +122,8 @@ func TestDatabasesCreateShared(t *testing.T) { t.Run("validation: invalid external volume and catalog", func(t *testing.T) { opts := defaultOpts() opts.name = NewAccountObjectIdentifier("db") - opts.ExternalVolume = Pointer(NewAccountObjectIdentifier("")) - opts.Catalog = Pointer(NewAccountObjectIdentifier("")) + opts.ExternalVolume = Pointer(emptyAccountObjectIdentifier) + opts.Catalog = 
Pointer(emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("CreateSharedDatabaseOptions", "ExternalVolume"), errInvalidIdentifier("CreateSharedDatabaseOptions", "Catalog"), @@ -141,17 +144,20 @@ func TestDatabasesCreateShared(t *testing.T) { opts.OrReplace = Bool(true) opts.ExternalVolume = &externalVolumeId opts.Catalog = &catalogId + opts.ReplaceInvalidCharacters = Bool(false) opts.DefaultDDLCollation = String("en_US") + opts.StorageSerializationPolicy = Pointer(StorageSerializationPolicyOptimized) opts.LogLevel = Pointer(LogLevelInfo) opts.TraceLevel = Pointer(TraceLevelOnEvent) opts.Comment = String("comment") + tagId := randomAccountObjectIdentifier() opts.Tag = []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag1"), + Name: tagId, Value: "v1", }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE DATABASE %s FROM SHARE %s EXTERNAL_VOLUME = %s CATALOG = %s DEFAULT_DDL_COLLATION = 'en_US' LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`, opts.name.FullyQualifiedName(), opts.fromShare.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE DATABASE %s FROM SHARE %s EXTERNAL_VOLUME = %s CATALOG = %s REPLACE_INVALID_CHARACTERS = false DEFAULT_DDL_COLLATION = 'en_US' STORAGE_SERIALIZATION_POLICY = OPTIMIZED LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment' TAG (%s = 'v1')`, opts.name.FullyQualifiedName(), opts.fromShare.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName(), tagId.FullyQualifiedName()) }) } @@ -165,13 +171,13 @@ func TestDatabasesCreateSecondary(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, 
ErrInvalidObjectIdentifier) }) t.Run("validation: invalid primary database", func(t *testing.T) { opts := defaultOpts() - opts.primaryDatabase = NewExternalObjectIdentifier(NewAccountIdentifier("", ""), NewAccountObjectIdentifier("")) + opts.primaryDatabase = NewExternalObjectIdentifier(NewAccountIdentifier("", ""), emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("CreateSecondaryDatabaseOptions", "primaryDatabase")) }) @@ -184,8 +190,8 @@ func TestDatabasesCreateSecondary(t *testing.T) { t.Run("validation: invalid external volume and catalog", func(t *testing.T) { opts := defaultOpts() - opts.ExternalVolume = Pointer(NewAccountObjectIdentifier("")) - opts.Catalog = Pointer(NewAccountObjectIdentifier("")) + opts.ExternalVolume = Pointer(emptyAccountObjectIdentifier) + opts.Catalog = Pointer(emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("CreateSecondaryDatabaseOptions", "ExternalVolume"), errInvalidIdentifier("CreateSecondaryDatabaseOptions", "Catalog"), @@ -210,11 +216,13 @@ func TestDatabasesCreateSecondary(t *testing.T) { opts.MaxDataExtensionTimeInDays = Int(10) opts.ExternalVolume = &externalVolumeId opts.Catalog = &catalogId + opts.ReplaceInvalidCharacters = Bool(true) opts.DefaultDDLCollation = String("en_US") + opts.StorageSerializationPolicy = Pointer(StorageSerializationPolicyOptimized) opts.LogLevel = Pointer(LogLevelInfo) opts.TraceLevel = Pointer(TraceLevelOnEvent) opts.Comment = String("comment") - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TRANSIENT DATABASE %s AS REPLICA OF %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 10 EXTERNAL_VOLUME = %s CATALOG = %s DEFAULT_DDL_COLLATION = 'en_US' LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment'`, opts.name.FullyQualifiedName(), primaryDatabaseId.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, 
`CREATE OR REPLACE TRANSIENT DATABASE %s AS REPLICA OF %s DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 10 EXTERNAL_VOLUME = %s CATALOG = %s REPLACE_INVALID_CHARACTERS = true DEFAULT_DDL_COLLATION = 'en_US' STORAGE_SERIALIZATION_POLICY = OPTIMIZED LOG_LEVEL = 'INFO' TRACE_LEVEL = 'ON_EVENT' COMMENT = 'comment'`, opts.name.FullyQualifiedName(), primaryDatabaseId.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) }) } @@ -227,15 +235,15 @@ func TestDatabasesAlter(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: invalid external volume and catalog", func(t *testing.T) { opts := defaultOpts() opts.Set = &DatabaseSet{ - ExternalVolume: Pointer(NewAccountObjectIdentifier("")), - Catalog: Pointer(NewAccountObjectIdentifier("")), + ExternalVolume: Pointer(emptyAccountObjectIdentifier), + Catalog: Pointer(emptyAccountObjectIdentifier), } assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("DatabaseSet", "ExternalVolume"), errInvalidIdentifier("DatabaseSet", "Catalog")) }) @@ -255,19 +263,19 @@ func TestDatabasesAlter(t *testing.T) { t.Run("validation: at least one set option", func(t *testing.T) { opts := defaultOpts() opts.Set = &DatabaseSet{} - assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseSet", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) }) t.Run("validation: at least one unset option", func(t 
*testing.T) { opts := defaultOpts() opts.Unset = &DatabaseUnset{} - assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "DefaultDDLCollation", "LogLevel", "TraceLevel", "Comment")) + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("DatabaseUnset", "DataRetentionTimeInDays", "MaxDataExtensionTimeInDays", "ExternalVolume", "Catalog", "ReplaceInvalidCharacters", "DefaultDDLCollation", "StorageSerializationPolicy", "LogLevel", "TraceLevel", "Comment")) }) t.Run("validation: invalid external volume identifier", func(t *testing.T) { opts := defaultOpts() opts.Set = &DatabaseSet{ - ExternalVolume: Pointer(NewAccountObjectIdentifier("")), + ExternalVolume: Pointer(emptyAccountObjectIdentifier), } assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("DatabaseSet", "ExternalVolume")) }) @@ -275,20 +283,20 @@ func TestDatabasesAlter(t *testing.T) { t.Run("validation: invalid catalog integration identifier", func(t *testing.T) { opts := defaultOpts() opts.Set = &DatabaseSet{ - Catalog: Pointer(NewAccountObjectIdentifier("")), + Catalog: Pointer(emptyAccountObjectIdentifier), } assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("DatabaseSet", "Catalog")) }) t.Run("validation: invalid NewName identifier", func(t *testing.T) { opts := defaultOpts() - opts.NewName = Pointer(NewAccountObjectIdentifier("")) + opts.NewName = Pointer(emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("AlterDatabaseOptions", "NewName")) }) t.Run("validation: invalid SwapWith identifier", func(t *testing.T) { opts := defaultOpts() - opts.SwapWith = Pointer(NewAccountObjectIdentifier("")) + opts.SwapWith = Pointer(emptyAccountObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("AlterDatabaseOptions", "SwapWith")) }) @@ -314,12 +322,14 @@ func TestDatabasesAlter(t *testing.T) { MaxDataExtensionTimeInDays: Int(1), 
ExternalVolume: &externalVolumeId, Catalog: &catalogId, + ReplaceInvalidCharacters: Bool(true), DefaultDDLCollation: String("en_US"), + StorageSerializationPolicy: Pointer(StorageSerializationPolicyCompatible), LogLevel: Pointer(LogLevelError), TraceLevel: Pointer(TraceLevelOnEvent), Comment: String("comment"), } - assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s SET DATA_RETENTION_TIME_IN_DAYS = 1, MAX_DATA_EXTENSION_TIME_IN_DAYS = 1, EXTERNAL_VOLUME = %s, CATALOG = %s, DEFAULT_DDL_COLLATION = 'en_US', LOG_LEVEL = 'ERROR', TRACE_LEVEL = 'ON_EVENT', COMMENT = 'comment'`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s SET DATA_RETENTION_TIME_IN_DAYS = 1, MAX_DATA_EXTENSION_TIME_IN_DAYS = 1, EXTERNAL_VOLUME = %s, CATALOG = %s, REPLACE_INVALID_CHARACTERS = true, DEFAULT_DDL_COLLATION = 'en_US', STORAGE_SERIALIZATION_POLICY = COMPATIBLE, LOG_LEVEL = 'ERROR', TRACE_LEVEL = 'ON_EVENT', COMMENT = 'comment'`, opts.name.FullyQualifiedName(), externalVolumeId.FullyQualifiedName(), catalogId.FullyQualifiedName()) }) t.Run("unset", func(t *testing.T) { @@ -329,35 +339,40 @@ func TestDatabasesAlter(t *testing.T) { MaxDataExtensionTimeInDays: Bool(true), ExternalVolume: Bool(true), Catalog: Bool(true), + ReplaceInvalidCharacters: Bool(true), DefaultDDLCollation: Bool(true), + StorageSerializationPolicy: Bool(true), LogLevel: Bool(true), TraceLevel: Bool(true), Comment: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, EXTERNAL_VOLUME, CATALOG, DEFAULT_DDL_COLLATION, LOG_LEVEL, TRACE_LEVEL, COMMENT`, opts.name.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, EXTERNAL_VOLUME, CATALOG, REPLACE_INVALID_CHARACTERS, DEFAULT_DDL_COLLATION, STORAGE_SERIALIZATION_POLICY, LOG_LEVEL, 
TRACE_LEVEL, COMMENT`, opts.name.FullyQualifiedName()) }) t.Run("with set tag", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := defaultOpts() opts.SetTag = []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "tag2"), + Name: tagId2, Value: "v2", }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s SET TAG "db"."schema"."tag1" = 'v1', "db"."schema"."tag2" = 'v2'`, opts.name.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s SET TAG %s = 'v1', %s = 'v2'`, opts.name.FullyQualifiedName(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("with unset tag", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := defaultOpts() opts.UnsetTag = []ObjectIdentifier{ - NewSchemaObjectIdentifier("db", "schema", "tag1"), + id, } - assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s UNSET TAG "db"."schema"."tag1"`, opts.name.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER DATABASE %s UNSET TAG %s`, opts.name.FullyQualifiedName(), id.FullyQualifiedName()) }) } @@ -370,7 +385,7 @@ func TestDatabasesAlterReplication(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -423,7 +438,7 @@ func TestDatabasesAlterFailover(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -475,7 +490,7 @@ func TestDatabasesDrop(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() 
- opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -515,7 +530,7 @@ func TestDatabasesUndrop(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -579,7 +594,7 @@ func TestDatabasesDescribe(t *testing.T) { t.Run("validation: invalid name", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/dynamic_table_impl.go b/pkg/sdk/dynamic_table_impl.go index c4563652ec..55bbb38a02 100644 --- a/pkg/sdk/dynamic_table_impl.go +++ b/pkg/sdk/dynamic_table_impl.go @@ -47,7 +47,7 @@ func (v *dynamicTables) Show(ctx context.Context, request *ShowDynamicTableReque } func (v *dynamicTables) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*DynamicTable, error) { - request := NewShowDynamicTableRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowDynamicTableRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) dynamicTables, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/dynamic_table_test.go b/pkg/sdk/dynamic_table_test.go index a08ac27a67..1bf104d61a 100644 --- a/pkg/sdk/dynamic_table_test.go +++ b/pkg/sdk/dynamic_table_test.go @@ -25,7 +25,7 @@ func TestDynamicTableCreate(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -60,7 +60,7 
@@ func TestDynamicTableAlter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -122,7 +122,7 @@ func TestDynamicTableDrop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -198,7 +198,7 @@ func TestDynamicTableDescribe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/event_tables_gen_test.go b/pkg/sdk/event_tables_gen_test.go index 6f795f557e..5224bc09d1 100644 --- a/pkg/sdk/event_tables_gen_test.go +++ b/pkg/sdk/event_tables_gen_test.go @@ -20,7 +20,7 @@ func TestEventTables_Create(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -106,7 +106,7 @@ func TestEventTables_Describe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -133,7 +133,7 @@ func TestEventTables_Alter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier 
assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/event_tables_impl_gen.go b/pkg/sdk/event_tables_impl_gen.go index 518b54db5f..e20443f951 100644 --- a/pkg/sdk/event_tables_impl_gen.go +++ b/pkg/sdk/event_tables_impl_gen.go @@ -28,7 +28,7 @@ func (v *eventTables) Show(ctx context.Context, request *ShowEventTableRequest) } func (v *eventTables) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*EventTable, error) { - request := NewShowEventTableRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowEventTableRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) eventTables, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/external_functions_gen_test.go b/pkg/sdk/external_functions_gen_test.go index f4bfd77691..073d9b3caf 100644 --- a/pkg/sdk/external_functions_gen_test.go +++ b/pkg/sdk/external_functions_gen_test.go @@ -20,7 +20,7 @@ func TestExternalFunctions_Create(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -34,9 +34,9 @@ func TestExternalFunctions_Create(t *testing.T) { opts.As = "as" integration := emptyAccountObjectIdentifier opts.ApiIntegration = &integration - rt := NewSchemaObjectIdentifier("", "", "") + rt := emptySchemaObjectIdentifier opts.RequestTranslator = &rt - st := NewSchemaObjectIdentifier("", "", "") + st := emptySchemaObjectIdentifier opts.ResponseTranslator = &st assertOptsInvalidJoinedErrors(t, opts, errNotSet("CreateExternalFunctionOptions", "ApiIntegration")) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("CreateExternalFunctionOptions", "RequestTranslator")) @@ -84,9 +84,9 @@ func 
TestExternalFunctions_Create(t *testing.T) { } opts.MaxBatchRows = Int(100) opts.Compression = String("GZIP") - rt := NewSchemaObjectIdentifier("db", "schema", "request_translator") + rt := randomSchemaObjectIdentifier() opts.RequestTranslator = &rt - rs := NewSchemaObjectIdentifier("db", "schema", "response_translator") + rs := randomSchemaObjectIdentifier() opts.ResponseTranslator = &rs opts.As = "https://xyz.execute-api.us-west-2.amazonaws.com/prod/remote_echo" assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE SECURE EXTERNAL FUNCTION %s (id NUMBER, name VARCHAR) RETURNS VARCHAR NOT NULL CALLED ON NULL INPUT IMMUTABLE COMMENT = 'comment' API_INTEGRATION = "api_integration" HEADERS = ('header1' = 'value1', 'header2' = 'value2') CONTEXT_HEADERS = (CURRENT_ACCOUNT, CURRENT_USER) MAX_BATCH_ROWS = 100 COMPRESSION = GZIP REQUEST_TRANSLATOR = %s RESPONSE_TRANSLATOR = %s AS 'https://xyz.execute-api.us-west-2.amazonaws.com/prod/remote_echo'`, id.FullyQualifiedName(), rt.FullyQualifiedName(), rs.FullyQualifiedName()) @@ -111,7 +111,7 @@ func TestExternalFunctions_Alter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -307,7 +307,7 @@ func TestExternalFunctions_Describe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/external_functions_impl_gen.go b/pkg/sdk/external_functions_impl_gen.go index 3f1838e270..471d45f844 100644 --- a/pkg/sdk/external_functions_impl_gen.go +++ b/pkg/sdk/external_functions_impl_gen.go @@ -36,7 +36,7 @@ func (v *externalFunctions) Show(ctx context.Context, request *ShowExternalFunct func (v 
*externalFunctions) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*ExternalFunction, error) { arguments := id.Arguments() externalFunctions, err := v.Show(ctx, NewShowExternalFunctionRequest(). - WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}). + WithIn(&In{Schema: id.SchemaId()}). WithLike(&Like{Pattern: String(id.Name())})) if err != nil { return nil, err diff --git a/pkg/sdk/external_tables_impl.go b/pkg/sdk/external_tables_impl.go index 6e0a257d3f..a7bc20d7bd 100644 --- a/pkg/sdk/external_tables_impl.go +++ b/pkg/sdk/external_tables_impl.go @@ -55,7 +55,7 @@ func (v *externalTables) ShowByID(ctx context.Context, id SchemaObjectIdentifier } externalTables, err := v.client.ExternalTables.Show(ctx, NewShowExternalTableRequest(). - WithIn(*NewShowExternalTableInRequest().WithSchema(NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()))). + WithIn(*NewShowExternalTableInRequest().WithSchema(id.SchemaId())). WithLike(id.Name())) if err != nil { return nil, err diff --git a/pkg/sdk/external_tables_test.go b/pkg/sdk/external_tables_test.go index 325bd87557..a4a835dacc 100644 --- a/pkg/sdk/external_tables_test.go +++ b/pkg/sdk/external_tables_test.go @@ -5,10 +5,12 @@ import ( ) func TestExternalTablesCreate(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("basic options", func(t *testing.T) { opts := &CreateExternalTableOptions{ IfNotExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -31,13 +33,14 @@ func TestExternalTablesCreate(t *testing.T) { }, }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE IF NOT EXISTS "db"."schema"."external_table" (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) INTEGRATION = '123' LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON)`) + assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE IF NOT EXISTS %s 
(column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) INTEGRATION = '123' LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON)`, id.FullyQualifiedName()) }) t.Run("every optional field", func(t *testing.T) { + rowAccessPolicyId := randomSchemaObjectIdentifier() opts := &CreateExternalTableOptions{ OrReplace: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -62,7 +65,7 @@ func TestExternalTablesCreate(t *testing.T) { AwsSnsTopic: String("aws_sns_topic"), CopyGrants: Bool(true), RowAccessPolicy: &TableRowAccessPolicy{ - Name: NewSchemaObjectIdentifier("db", "schema", "row_access_policy"), + Name: rowAccessPolicyId, On: []string{"value1", "value2"}, }, Tag: []TagAssociation{ @@ -77,14 +80,14 @@ func TestExternalTablesCreate(t *testing.T) { }, Comment: String("some_comment"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE "db"."schema"."external_table" (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) INTEGRATION = '123' LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON) AWS_SNS_TOPIC = 'aws_sns_topic' COPY GRANTS COMMENT = 'some_comment' ROW ACCESS POLICY "db"."schema"."row_access_policy" ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE %s (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) INTEGRATION = '123' LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON) AWS_SNS_TOPIC = 'aws_sns_topic' COPY GRANTS COMMENT = 'some_comment' ROW ACCESS POLICY %s ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`, id.FullyQualifiedName(), rowAccessPolicyId.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &CreateExternalTableOptions{ OrReplace: Bool(true), IfNotExists: Bool(true), - name: NewSchemaObjectIdentifier("", "", ""), + name: 
emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors( t, opts, @@ -97,7 +100,7 @@ func TestExternalTablesCreate(t *testing.T) { t.Run("raw file format", func(t *testing.T) { opts := &CreateExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -113,12 +116,12 @@ func TestExternalTablesCreate(t *testing.T) { Location: "@s1/logs/", RawFileFormat: &RawFileFormat{Format: "TYPE = JSON"}, } - assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE "db"."schema"."external_table" (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON)`) + assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE %s (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON)`, id.FullyQualifiedName()) }) t.Run("validation: neither raw file format is set, nor file format", func(t *testing.T) { opts := &CreateExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -138,10 +141,13 @@ func TestExternalTablesCreate(t *testing.T) { } func TestExternalTablesCreateWithManualPartitioning(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("valid options", func(t *testing.T) { + rowAccessPolicyId := randomSchemaObjectIdentifier() opts := &CreateWithManualPartitioningExternalTableOptions{ OrReplace: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -165,7 +171,7 @@ func TestExternalTablesCreateWithManualPartitioning(t *testing.T) { }, CopyGrants: Bool(true), RowAccessPolicy: &TableRowAccessPolicy{ - Name: NewSchemaObjectIdentifier("db", "schema", "row_access_policy"), + Name: rowAccessPolicyId, On: []string{"value1", "value2"}, }, 
Tag: []TagAssociation{ @@ -180,14 +186,14 @@ func TestExternalTablesCreateWithManualPartitioning(t *testing.T) { }, Comment: String("some_comment"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE "db"."schema"."external_table" (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) INTEGRATION = '123' LOCATION = @s1/logs/ PARTITION_TYPE = USER_SPECIFIED FILE_FORMAT = (TYPE = JSON) COPY GRANTS COMMENT = 'some_comment' ROW ACCESS POLICY "db"."schema"."row_access_policy" ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE %s (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) INTEGRATION = '123' LOCATION = @s1/logs/ PARTITION_TYPE = USER_SPECIFIED FILE_FORMAT = (TYPE = JSON) COPY GRANTS COMMENT = 'some_comment' ROW ACCESS POLICY %s ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`, id.FullyQualifiedName(), rowAccessPolicyId.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &CreateWithManualPartitioningExternalTableOptions{ OrReplace: Bool(true), IfNotExists: Bool(true), - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors( t, opts, @@ -200,7 +206,7 @@ func TestExternalTablesCreateWithManualPartitioning(t *testing.T) { t.Run("raw file format", func(t *testing.T) { opts := &CreateWithManualPartitioningExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -216,12 +222,12 @@ func TestExternalTablesCreateWithManualPartitioning(t *testing.T) { Location: "@s1/logs/", RawFileFormat: &RawFileFormat{Format: "TYPE = JSON"}, } - assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE "db"."schema"."external_table" (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint 
UNIQUE) LOCATION = @s1/logs/ PARTITION_TYPE = USER_SPECIFIED FILE_FORMAT = (TYPE = JSON)`) + assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE %s (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) LOCATION = @s1/logs/ PARTITION_TYPE = USER_SPECIFIED FILE_FORMAT = (TYPE = JSON)`, id.FullyQualifiedName()) }) t.Run("validation: neither raw file format is set, nor file format", func(t *testing.T) { opts := &CreateWithManualPartitioningExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -241,10 +247,13 @@ func TestExternalTablesCreateWithManualPartitioning(t *testing.T) { } func TestExternalTablesCreateDeltaLake(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("valid options", func(t *testing.T) { + rowAccessPolicyId := randomSchemaObjectIdentifier() opts := &CreateDeltaLakeExternalTableOptions{ OrReplace: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -265,7 +274,7 @@ func TestExternalTablesCreateDeltaLake(t *testing.T) { }, CopyGrants: Bool(true), RowAccessPolicy: &TableRowAccessPolicy{ - Name: NewSchemaObjectIdentifier("db", "schema", "row_access_policy"), + Name: rowAccessPolicyId, On: []string{"value1", "value2"}, }, Tag: []TagAssociation{ @@ -280,14 +289,14 @@ func TestExternalTablesCreateDeltaLake(t *testing.T) { }, Comment: String("some_comment"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE "db"."schema"."external_table" (column varchar AS (value::column::varchar)) INTEGRATION = '123' PARTITION BY (column) LOCATION = @s1/logs/ FILE_FORMAT = (FORMAT_NAME = 'JSON') TABLE_FORMAT = DELTA COPY GRANTS COMMENT = 'some_comment' ROW ACCESS POLICY "db"."schema"."row_access_policy" ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`) + assertOptsValidAndSQLEquals(t, opts, 
`CREATE OR REPLACE EXTERNAL TABLE %s (column varchar AS (value::column::varchar)) INTEGRATION = '123' PARTITION BY (column) LOCATION = @s1/logs/ FILE_FORMAT = (FORMAT_NAME = 'JSON') TABLE_FORMAT = DELTA COPY GRANTS COMMENT = 'some_comment' ROW ACCESS POLICY %s ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`, id.FullyQualifiedName(), rowAccessPolicyId.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &CreateDeltaLakeExternalTableOptions{ OrReplace: Bool(true), IfNotExists: Bool(true), - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors( t, opts, @@ -300,7 +309,7 @@ func TestExternalTablesCreateDeltaLake(t *testing.T) { t.Run("raw file format", func(t *testing.T) { opts := &CreateDeltaLakeExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -316,12 +325,12 @@ func TestExternalTablesCreateDeltaLake(t *testing.T) { Location: "@s1/logs/", RawFileFormat: &RawFileFormat{Format: "TYPE = JSON"}, } - assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE "db"."schema"."external_table" (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON) TABLE_FORMAT = DELTA`) + assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE %s (column varchar AS (value::column::varchar) NOT NULL CONSTRAINT my_constraint UNIQUE) LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON) TABLE_FORMAT = DELTA`, id.FullyQualifiedName()) }) t.Run("validation: neither raw file format is set, nor file format", func(t *testing.T) { opts := &CreateDeltaLakeExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Columns: []ExternalTableColumn{ { Name: "column", @@ -341,10 +350,13 @@ func TestExternalTablesCreateDeltaLake(t *testing.T) { } func 
TestExternalTableUsingTemplateOpts(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("valid options", func(t *testing.T) { + rowAccessPolicyId := randomSchemaObjectIdentifier() opts := &CreateExternalTableUsingTemplateOptions{ OrReplace: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, CopyGrants: Bool(true), Query: []string{"query statement"}, CloudProviderParams: &CloudProviderParams{ @@ -359,7 +371,7 @@ func TestExternalTableUsingTemplateOpts(t *testing.T) { }, Comment: String("some_comment"), RowAccessPolicy: &TableRowAccessPolicy{ - Name: NewSchemaObjectIdentifier("db", "schema", "row_access_policy"), + Name: rowAccessPolicyId, On: []string{"value1", "value2"}, }, Tag: []TagAssociation{ @@ -373,12 +385,12 @@ func TestExternalTableUsingTemplateOpts(t *testing.T) { }, }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE "db"."schema"."external_table" COPY GRANTS USING TEMPLATE (query statement) INTEGRATION = '123' PARTITION BY (column) LOCATION = @s1/logs/ FILE_FORMAT = (FORMAT_NAME = 'JSON') COMMENT = 'some_comment' ROW ACCESS POLICY "db"."schema"."row_access_policy" ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE EXTERNAL TABLE %s COPY GRANTS USING TEMPLATE (query statement) INTEGRATION = '123' PARTITION BY (column) LOCATION = @s1/logs/ FILE_FORMAT = (FORMAT_NAME = 'JSON') COMMENT = 'some_comment' ROW ACCESS POLICY %s ON (value1, value2) TAG ("tag1" = 'value1', "tag2" = 'value2')`, id.FullyQualifiedName(), rowAccessPolicyId.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &CreateExternalTableUsingTemplateOptions{ - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors( t, opts, @@ -391,19 +403,19 @@ func TestExternalTableUsingTemplateOpts(t *testing.T) { t.Run("raw file format", func(t *testing.T) { opts := 
&CreateExternalTableUsingTemplateOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Location: "@s1/logs/", Query: []string{ "query statement", }, RawFileFormat: &RawFileFormat{Format: "TYPE = JSON"}, } - assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE "db"."schema"."external_table" USING TEMPLATE (query statement) LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON)`) + assertOptsValidAndSQLEquals(t, opts, `CREATE EXTERNAL TABLE %s USING TEMPLATE (query statement) LOCATION = @s1/logs/ FILE_FORMAT = (TYPE = JSON)`, id.FullyQualifiedName()) }) t.Run("validation: neither raw file format is set, nor file format", func(t *testing.T) { opts := &CreateExternalTableUsingTemplateOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Location: "@s1/logs/", Query: []string{ "query statement", @@ -414,59 +426,61 @@ func TestExternalTableUsingTemplateOpts(t *testing.T) { } func TestExternalTablesAlter(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("refresh without path", func(t *testing.T) { opts := &AlterExternalTableOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Refresh: &RefreshExternalTable{}, } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS "db"."schema"."external_table" REFRESH ''`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS %s REFRESH ''`, id.FullyQualifiedName()) }) t.Run("refresh with path", func(t *testing.T) { opts := &AlterExternalTableOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, Refresh: &RefreshExternalTable{ Path: "some/path", }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS "db"."schema"."external_table" REFRESH 'some/path'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS %s REFRESH 'some/path'`, id.FullyQualifiedName()) }) t.Run("add 
files", func(t *testing.T) { opts := &AlterExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, AddFiles: []ExternalTableFile{ {Name: "one/file.txt"}, {Name: "second/file.txt"}, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE "db"."schema"."external_table" ADD FILES ('one/file.txt', 'second/file.txt')`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE %s ADD FILES ('one/file.txt', 'second/file.txt')`, id.FullyQualifiedName()) }) t.Run("remove files", func(t *testing.T) { opts := &AlterExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, RemoveFiles: []ExternalTableFile{ {Name: "one/file.txt"}, {Name: "second/file.txt"}, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE "db"."schema"."external_table" REMOVE FILES ('one/file.txt', 'second/file.txt')`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE %s REMOVE FILES ('one/file.txt', 'second/file.txt')`, id.FullyQualifiedName()) }) t.Run("set auto refresh", func(t *testing.T) { opts := &AlterExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, AutoRefresh: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE "db"."schema"."external_table" SET AUTO_REFRESH = true`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE %s SET AUTO_REFRESH = true`, id.FullyQualifiedName()) }) t.Run("set tag", func(t *testing.T) { opts := &AlterExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, SetTag: []TagAssociation{ { Name: NewAccountObjectIdentifier("tag1"), @@ -478,23 +492,23 @@ func TestExternalTablesAlter(t *testing.T) { }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE "db"."schema"."external_table" SET TAG "tag1" = 'tag_value1', "tag2" = 'tag_value2'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE %s SET TAG 
"tag1" = 'tag_value1', "tag2" = 'tag_value2'`, id.FullyQualifiedName()) }) t.Run("unset tag", func(t *testing.T) { opts := &AlterExternalTableOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, UnsetTag: []ObjectIdentifier{ NewAccountObjectIdentifier("tag1"), NewAccountObjectIdentifier("tag2"), }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE "db"."schema"."external_table" UNSET TAG "tag1", "tag2"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE %s UNSET TAG "tag1", "tag2"`, id.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &AlterExternalTableOptions{ - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, AddFiles: []ExternalTableFile{{Name: "some file"}}, RemoveFiles: []ExternalTableFile{{Name: "some other file"}}, } @@ -507,9 +521,11 @@ func TestExternalTablesAlter(t *testing.T) { } func TestExternalTablesAlterPartitions(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("add partition", func(t *testing.T) { opts := &AlterExternalTablePartitionOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, IfExists: Bool(true), AddPartitions: []Partition{ { @@ -523,22 +539,22 @@ func TestExternalTablesAlterPartitions(t *testing.T) { }, Location: "123", } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS "db"."schema"."external_table" ADD PARTITION (one = 'one_value', two = 'two_value') LOCATION '123'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS %s ADD PARTITION (one = 'one_value', two = 'two_value') LOCATION '123'`, id.FullyQualifiedName()) }) t.Run("remove partition", func(t *testing.T) { opts := &AlterExternalTablePartitionOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, IfExists: Bool(true), DropPartition: Bool(true), Location: "partition_location", } - assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL 
TABLE IF EXISTS "db"."schema"."external_table" DROP PARTITION LOCATION 'partition_location'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER EXTERNAL TABLE IF EXISTS %s DROP PARTITION LOCATION 'partition_location'`, id.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &AlterExternalTablePartitionOptions{ - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, AddPartitions: []Partition{{ColumnName: "colName", Value: "value"}}, DropPartition: Bool(true), } @@ -551,31 +567,33 @@ func TestExternalTablesAlterPartitions(t *testing.T) { } func TestExternalTablesDrop(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("restrict", func(t *testing.T) { opts := &DropExternalTableOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, DropOption: &ExternalTableDropOption{ Restrict: Bool(true), }, } - assertOptsValidAndSQLEquals(t, opts, `DROP EXTERNAL TABLE IF EXISTS "db"."schema"."external_table" RESTRICT`) + assertOptsValidAndSQLEquals(t, opts, `DROP EXTERNAL TABLE IF EXISTS %s RESTRICT`, id.FullyQualifiedName()) }) t.Run("cascade", func(t *testing.T) { opts := &DropExternalTableOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, DropOption: &ExternalTableDropOption{ Cascade: Bool(true), }, } - assertOptsValidAndSQLEquals(t, opts, `DROP EXTERNAL TABLE IF EXISTS "db"."schema"."external_table" CASCADE`) + assertOptsValidAndSQLEquals(t, opts, `DROP EXTERNAL TABLE IF EXISTS %s CASCADE`, id.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &DropExternalTableOptions{ - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, DropOption: &ExternalTableDropOption{ Restrict: Bool(true), Cascade: Bool(true), @@ -620,18 +638,19 @@ func TestExternalTablesShow(t *testing.T) { }) t.Run("in schema", func(t *testing.T) { + id := 
randomDatabaseObjectIdentifier() opts := &ShowExternalTableOptions{ Terse: Bool(true), In: &In{ - Schema: NewDatabaseObjectIdentifier("database_name", "schema_name"), + Schema: id, }, } - assertOptsValidAndSQLEquals(t, opts, `SHOW TERSE EXTERNAL TABLES IN SCHEMA "database_name"."schema_name"`) + assertOptsValidAndSQLEquals(t, opts, `SHOW TERSE EXTERNAL TABLES IN SCHEMA %s`, id.FullyQualifiedName()) }) t.Run("invalid options", func(t *testing.T) { opts := &DropExternalTableOptions{ - name: NewSchemaObjectIdentifier("", "", ""), + name: emptySchemaObjectIdentifier, DropOption: &ExternalTableDropOption{ Restrict: Bool(true), Cascade: Bool(true), @@ -647,17 +666,19 @@ func TestExternalTablesShow(t *testing.T) { } func TestExternalTablesDescribe(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("type columns", func(t *testing.T) { opts := &describeExternalTableColumnsOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, } - assertOptsValidAndSQLEquals(t, opts, `DESCRIBE EXTERNAL TABLE "db"."schema"."external_table" TYPE = COLUMNS`) + assertOptsValidAndSQLEquals(t, opts, `DESCRIBE EXTERNAL TABLE %s TYPE = COLUMNS`, id.FullyQualifiedName()) }) t.Run("type stage", func(t *testing.T) { opts := &describeExternalTableStageOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "external_table"), + name: id, } - assertOptsValidAndSQLEquals(t, opts, `DESCRIBE EXTERNAL TABLE "db"."schema"."external_table" TYPE = STAGE`) + assertOptsValidAndSQLEquals(t, opts, `DESCRIBE EXTERNAL TABLE %s TYPE = STAGE`, id.FullyQualifiedName()) }) } diff --git a/pkg/sdk/file_format.go b/pkg/sdk/file_format.go index 826e83ff79..e1c5eb0de9 100644 --- a/pkg/sdk/file_format.go +++ b/pkg/sdk/file_format.go @@ -651,7 +651,7 @@ func (v *fileFormats) ShowByID(ctx context.Context, id SchemaObjectIdentifier) ( Pattern: String(id.Name()), }, In: &In{ - Schema: NewDatabaseObjectIdentifier(id.databaseName, id.schemaName), + Schema: id.SchemaId(), }, }) if err 
!= nil { diff --git a/pkg/sdk/file_format_test.go b/pkg/sdk/file_format_test.go index ff99155de8..8beb8e4998 100644 --- a/pkg/sdk/file_format_test.go +++ b/pkg/sdk/file_format_test.go @@ -5,19 +5,21 @@ import ( ) func TestFileFormatsCreate(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("minimal", func(t *testing.T) { opts := &CreateFileFormatOptions{ - name: NewSchemaObjectIdentifier("db1", "schema2", "format3"), + name: id, Type: FileFormatTypeCSV, } - assertOptsValidAndSQLEquals(t, opts, `CREATE FILE FORMAT "db1"."schema2"."format3" TYPE = CSV`) + assertOptsValidAndSQLEquals(t, opts, `CREATE FILE FORMAT %s TYPE = CSV`, id.FullyQualifiedName()) }) t.Run("complete CSV", func(t *testing.T) { opts := &CreateFileFormatOptions{ OrReplace: Bool(true), Temporary: Bool(true), - name: NewSchemaObjectIdentifier("db4", "schema5", "format6"), + name: id, IfNotExists: Bool(true), Type: FileFormatTypeCSV, @@ -47,14 +49,14 @@ func TestFileFormatsCreate(t *testing.T) { CSVEncoding: &CSVEncodingISO2022KR, }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS "db4"."schema5"."format6" TYPE = CSV COMPRESSION = BZ2 RECORD_DELIMITER = '-' FIELD_DELIMITER = ':' FILE_EXTENSION = 'csv' SKIP_HEADER = 5 SKIP_BLANK_LINES = true DATE_FORMAT = 'YYYY-MM-DD' TIME_FORMAT = 'HH:mm:SS' TIMESTAMP_FORMAT = 'time' BINARY_FORMAT = UTF8 ESCAPE = '\\' ESCAPE_UNENCLOSED_FIELD = '§' TRIM_SPACE = true FIELD_OPTIONALLY_ENCLOSED_BY = '\"' NULL_IF = ('nul', 'nulll') ERROR_ON_COLUMN_COUNT_MISMATCH = true REPLACE_INVALID_CHARACTERS = true EMPTY_FIELD_AS_NULL = true SKIP_BYTE_ORDER_MARK = true ENCODING = 'ISO2022KR'`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS %s TYPE = CSV COMPRESSION = BZ2 RECORD_DELIMITER = '-' FIELD_DELIMITER = ':' FILE_EXTENSION = 'csv' SKIP_HEADER = 5 SKIP_BLANK_LINES = true DATE_FORMAT = 'YYYY-MM-DD' TIME_FORMAT = 'HH:mm:SS' TIMESTAMP_FORMAT = 'time' BINARY_FORMAT = UTF8 ESCAPE 
= '\\' ESCAPE_UNENCLOSED_FIELD = '§' TRIM_SPACE = true FIELD_OPTIONALLY_ENCLOSED_BY = '\"' NULL_IF = ('nul', 'nulll') ERROR_ON_COLUMN_COUNT_MISMATCH = true REPLACE_INVALID_CHARACTERS = true EMPTY_FIELD_AS_NULL = true SKIP_BYTE_ORDER_MARK = true ENCODING = 'ISO2022KR'`, id.FullyQualifiedName()) }) t.Run("complete JSON", func(t *testing.T) { opts := &CreateFileFormatOptions{ OrReplace: Bool(true), Temporary: Bool(true), - name: NewSchemaObjectIdentifier("db4", "schema5", "format6"), + name: id, IfNotExists: Bool(true), Type: FileFormatTypeJSON, @@ -78,14 +80,14 @@ func TestFileFormatsCreate(t *testing.T) { JSONSkipByteOrderMark: Bool(true), }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS "db4"."schema5"."format6" TYPE = JSON COMPRESSION = BROTLI DATE_FORMAT = 'YYYY-MM-DD' TIME_FORMAT = 'HH:mm:SS' TIMESTAMP_FORMAT = 'aze' BINARY_FORMAT = HEX TRIM_SPACE = true NULL_IF = ('c1', 'c2') FILE_EXTENSION = 'json' ENABLE_OCTAL = true ALLOW_DUPLICATE = true STRIP_OUTER_ARRAY = true STRIP_NULL_VALUES = true REPLACE_INVALID_CHARACTERS = true SKIP_BYTE_ORDER_MARK = true`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS %s TYPE = JSON COMPRESSION = BROTLI DATE_FORMAT = 'YYYY-MM-DD' TIME_FORMAT = 'HH:mm:SS' TIMESTAMP_FORMAT = 'aze' BINARY_FORMAT = HEX TRIM_SPACE = true NULL_IF = ('c1', 'c2') FILE_EXTENSION = 'json' ENABLE_OCTAL = true ALLOW_DUPLICATE = true STRIP_OUTER_ARRAY = true STRIP_NULL_VALUES = true REPLACE_INVALID_CHARACTERS = true SKIP_BYTE_ORDER_MARK = true`, id.FullyQualifiedName()) }) t.Run("complete Avro", func(t *testing.T) { opts := &CreateFileFormatOptions{ OrReplace: Bool(true), Temporary: Bool(true), - name: NewSchemaObjectIdentifier("db4", "schema5", "format6"), + name: id, IfNotExists: Bool(true), Type: FileFormatTypeAvro, @@ -96,14 +98,14 @@ func TestFileFormatsCreate(t *testing.T) { AvroNullIf: &[]NullString{{"nul"}}, }, } - assertOptsValidAndSQLEquals(t, opts, 
`CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS "db4"."schema5"."format6" TYPE = AVRO COMPRESSION = DEFLATE TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nul')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS %s TYPE = AVRO COMPRESSION = DEFLATE TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nul')`, id.FullyQualifiedName()) }) t.Run("complete ORC", func(t *testing.T) { opts := &CreateFileFormatOptions{ OrReplace: Bool(true), Temporary: Bool(true), - name: NewSchemaObjectIdentifier("db4", "schema5", "format6"), + name: id, IfNotExists: Bool(true), Type: FileFormatTypeORC, @@ -113,14 +115,14 @@ func TestFileFormatsCreate(t *testing.T) { ORCNullIf: &[]NullString{{"nul"}}, }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS "db4"."schema5"."format6" TYPE = ORC TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nul')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS %s TYPE = ORC TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nul')`, id.FullyQualifiedName()) }) t.Run("complete Parquet", func(t *testing.T) { opts := &CreateFileFormatOptions{ OrReplace: Bool(true), Temporary: Bool(true), - name: NewSchemaObjectIdentifier("db4", "schema5", "format6"), + name: id, IfNotExists: Bool(true), Type: FileFormatTypeParquet, @@ -132,14 +134,14 @@ func TestFileFormatsCreate(t *testing.T) { ParquetNullIf: &[]NullString{{"nil"}}, }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS "db4"."schema5"."format6" TYPE = PARQUET COMPRESSION = LZO BINARY_AS_TEXT = true TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS %s TYPE = PARQUET COMPRESSION = LZO BINARY_AS_TEXT = true TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true 
NULL_IF = ('nil')`, id.FullyQualifiedName()) }) t.Run("complete XML", func(t *testing.T) { opts := &CreateFileFormatOptions{ OrReplace: Bool(true), Temporary: Bool(true), - name: NewSchemaObjectIdentifier("db4", "schema5", "format6"), + name: id, IfNotExists: Bool(true), Type: FileFormatTypeXML, FileFormatTypeOptions: FileFormatTypeOptions{ @@ -153,12 +155,12 @@ func TestFileFormatsCreate(t *testing.T) { }, Comment: String("test file format"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS "db4"."schema5"."format6" TYPE = XML COMPRESSION = ZSTD IGNORE_UTF8_ERRORS = true PRESERVE_SPACE = true STRIP_OUTER_ELEMENT = true DISABLE_SNOWFLAKE_DATA = true DISABLE_AUTO_CONVERT = true SKIP_BYTE_ORDER_MARK = true COMMENT = 'test file format'`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE TEMPORARY FILE FORMAT IF NOT EXISTS %s TYPE = XML COMPRESSION = ZSTD IGNORE_UTF8_ERRORS = true PRESERVE_SPACE = true STRIP_OUTER_ELEMENT = true DISABLE_SNOWFLAKE_DATA = true DISABLE_AUTO_CONVERT = true SKIP_BYTE_ORDER_MARK = true COMMENT = 'test file format'`, id.FullyQualifiedName()) }) t.Run("previous test", func(t *testing.T) { opts := &CreateFileFormatOptions{ - name: NewSchemaObjectIdentifier("test_db", "test_schema", "test_file_format"), + name: id, Type: FileFormatTypeCSV, FileFormatTypeOptions: FileFormatTypeOptions{ CSVNullIf: &[]NullString{{"NULL"}}, @@ -171,26 +173,29 @@ func TestFileFormatsCreate(t *testing.T) { }, Comment: String("great comment"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE FILE FORMAT "test_db"."test_schema"."test_file_format" TYPE = CSV SKIP_BLANK_LINES = false TRIM_SPACE = false NULL_IF = ('NULL') ERROR_ON_COLUMN_COUNT_MISMATCH = true REPLACE_INVALID_CHARACTERS = false EMPTY_FIELD_AS_NULL = false SKIP_BYTE_ORDER_MARK = false COMMENT = 'great comment'`) + assertOptsValidAndSQLEquals(t, opts, `CREATE FILE FORMAT %s TYPE = CSV SKIP_BLANK_LINES = false TRIM_SPACE = false NULL_IF = ('NULL') 
ERROR_ON_COLUMN_COUNT_MISMATCH = true REPLACE_INVALID_CHARACTERS = false EMPTY_FIELD_AS_NULL = false SKIP_BYTE_ORDER_MARK = false COMMENT = 'great comment'`, id.FullyQualifiedName()) }) } func TestFileFormatsAlter(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("rename", func(t *testing.T) { + newId := randomSchemaObjectIdentifier() opts := &AlterFileFormatOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "fileformat"), + name: id, Rename: &AlterFileFormatRenameOptions{ - NewName: NewSchemaObjectIdentifier("new_db", "new_schema", "new_fileformat"), + NewName: newId, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS "db"."schema"."fileformat" RENAME TO "new_db"."new_schema"."new_fileformat"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS %s RENAME TO %s`, id.FullyQualifiedName(), newId.FullyQualifiedName()) }) t.Run("set", func(t *testing.T) { opts := &AlterFileFormatOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "fileformat"), + name: id, Set: &FileFormatTypeOptions{ AvroCompression: &AvroCompressionBrotli, AvroTrimSpace: Bool(true), @@ -198,13 +203,13 @@ func TestFileFormatsAlter(t *testing.T) { AvroNullIf: &[]NullString{{"nil"}}, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS "db"."schema"."fileformat" SET COMPRESSION = BROTLI TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`) + assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS %s SET COMPRESSION = BROTLI TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`, id.FullyQualifiedName()) }) t.Run("set comment", func(t *testing.T) { opts := &AlterFileFormatOptions{ IfExists: Bool(true), - name: NewSchemaObjectIdentifier("db", "schema", "fileformat"), + name: id, Set: &FileFormatTypeOptions{ AvroCompression: &AvroCompressionBrotli, AvroTrimSpace: Bool(true), @@ -213,24 +218,26 @@ func TestFileFormatsAlter(t 
*testing.T) { Comment: String("some comment"), }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS "db"."schema"."fileformat" SET COMMENT = 'some comment' COMPRESSION = BROTLI TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`) + assertOptsValidAndSQLEquals(t, opts, `ALTER FILE FORMAT IF EXISTS %s SET COMMENT = 'some comment' COMPRESSION = BROTLI TRIM_SPACE = true REPLACE_INVALID_CHARACTERS = true NULL_IF = ('nil')`, id.FullyQualifiedName()) }) } func TestFileFormatsDrop(t *testing.T) { + id := randomSchemaObjectIdentifier() + t.Run("only name", func(t *testing.T) { opts := &DropFileFormatOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "ff"), + name: id, } - assertOptsValidAndSQLEquals(t, opts, `DROP FILE FORMAT "db"."schema"."ff"`) + assertOptsValidAndSQLEquals(t, opts, `DROP FILE FORMAT %s`, id.FullyQualifiedName()) }) t.Run("with IfExists", func(t *testing.T) { opts := &DropFileFormatOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "ff"), + name: id, IfExists: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `DROP FILE FORMAT IF EXISTS "db"."schema"."ff"`) + assertOptsValidAndSQLEquals(t, opts, `DROP FILE FORMAT IF EXISTS %s`, id.FullyQualifiedName()) }) } @@ -241,36 +248,39 @@ func TestFileFormatsShow(t *testing.T) { }) t.Run("with show options", func(t *testing.T) { + id := randomDatabaseObjectIdentifier() opts := &ShowFileFormatsOptions{ Like: &Like{ Pattern: String("test"), }, In: &In{ - Schema: NewDatabaseObjectIdentifier("db", "schema"), + Schema: id, }, } - assertOptsValidAndSQLEquals(t, opts, `SHOW FILE FORMATS LIKE 'test' IN SCHEMA "db"."schema"`) + assertOptsValidAndSQLEquals(t, opts, `SHOW FILE FORMATS LIKE 'test' IN SCHEMA %s`, id.FullyQualifiedName()) }) } func TestFileFormatsShowById(t *testing.T) { t.Run("simple", func(t *testing.T) { - id := NewSchemaObjectIdentifier("db", "schema", "ff") + id := randomSchemaObjectIdentifier() opts := &ShowFileFormatsOptions{ Like: &Like{ Pattern: 
String(id.Name()), }, In: &In{ - Schema: NewDatabaseObjectIdentifier(id.databaseName, id.schemaName), + Schema: id.SchemaId(), }, } - assertOptsValidAndSQLEquals(t, opts, `SHOW FILE FORMATS LIKE 'ff' IN SCHEMA "db"."schema"`) + assertOptsValidAndSQLEquals(t, opts, `SHOW FILE FORMATS LIKE '%s' IN SCHEMA %s`, id.Name(), id.SchemaId().FullyQualifiedName()) }) } func TestFileFormatsDescribe(t *testing.T) { + id := randomSchemaObjectIdentifier() + opts := &describeFileFormatOptions{ - name: NewSchemaObjectIdentifier("db", "schema", "ff"), + name: id, } - assertOptsValidAndSQLEquals(t, opts, `DESCRIBE FILE FORMAT "db"."schema"."ff"`) + assertOptsValidAndSQLEquals(t, opts, `DESCRIBE FILE FORMAT %s`, id.FullyQualifiedName()) } diff --git a/pkg/sdk/functions_gen_test.go b/pkg/sdk/functions_gen_test.go index 13b7bff262..985d825b94 100644 --- a/pkg/sdk/functions_gen_test.go +++ b/pkg/sdk/functions_gen_test.go @@ -20,7 +20,7 @@ func TestFunctions_CreateForJava(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -135,7 +135,7 @@ func TestFunctions_CreateForJavascript(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -198,7 +198,7 @@ func TestFunctions_CreateForPython(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -303,7 +303,7 @@ func TestFunctions_CreateForScala(t *testing.T) { t.Run("validation: incorrect identifier", func(t 
*testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -372,7 +372,7 @@ func TestFunctions_CreateForSQL(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -446,7 +446,7 @@ func TestFunctions_Drop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -483,7 +483,7 @@ func TestFunctions_Alter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -630,7 +630,7 @@ func TestFunctions_Describe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/functions_impl_gen.go b/pkg/sdk/functions_impl_gen.go index d2746933f5..7a30c62da6 100644 --- a/pkg/sdk/functions_impl_gen.go +++ b/pkg/sdk/functions_impl_gen.go @@ -58,7 +58,7 @@ func (v *functions) Show(ctx context.Context, request *ShowFunctionRequest) ([]F } func (v *functions) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Function, error) { - request := NewShowFunctionRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request 
:= NewShowFunctionRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) functions, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/grants_impl.go b/pkg/sdk/grants_impl.go index 1be95f66c7..0a67f82164 100644 --- a/pkg/sdk/grants_impl.go +++ b/pkg/sdk/grants_impl.go @@ -47,7 +47,7 @@ func (v *grants) GrantPrivilegesToAccountRole(ctx context.Context, privileges *A SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypePipe, - Name: NewSchemaObjectIdentifier(pipe.DatabaseName, pipe.SchemaName, pipe.Name), + Name: pipe.ID(), }, }, }, @@ -90,7 +90,7 @@ func (v *grants) RevokePrivilegesFromAccountRole(ctx context.Context, privileges SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypePipe, - Name: NewSchemaObjectIdentifier(pipe.DatabaseName, pipe.SchemaName, pipe.Name), + Name: pipe.ID(), }, }, }, @@ -131,7 +131,7 @@ func (v *grants) GrantPrivilegesToDatabaseRole(ctx context.Context, privileges * SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypePipe, - Name: NewSchemaObjectIdentifier(pipe.DatabaseName, pipe.SchemaName, pipe.Name), + Name: pipe.ID(), }, }, }, @@ -172,7 +172,7 @@ func (v *grants) RevokePrivilegesFromDatabaseRole(ctx context.Context, privilege SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypePipe, - Name: NewSchemaObjectIdentifier(pipe.DatabaseName, pipe.SchemaName, pipe.Name), + Name: pipe.ID(), }, }, }, @@ -234,7 +234,7 @@ func (v *grants) GrantOwnership(ctx context.Context, on OwnershipGrantOn, to Own OwnershipGrantOn{ Object: &Object{ ObjectType: ObjectTypePipe, - Name: NewSchemaObjectIdentifier(pipe.DatabaseName, pipe.SchemaName, pipe.Name), + Name: pipe.ID(), }, }, to, diff --git a/pkg/sdk/grants_test.go b/pkg/sdk/grants_test.go index 3f1db203d5..82dfc4fff4 100644 --- a/pkg/sdk/grants_test.go +++ b/pkg/sdk/grants_test.go @@ -96,18 +96,19 @@ func 
TestGrantPrivilegesToAccountRole(t *testing.T) { }) t.Run("on schema", func(t *testing.T) { + id := randomDatabaseObjectIdentifier() opts := &GrantPrivilegesToAccountRoleOptions{ privileges: &AccountRoleGrantPrivileges{ SchemaPrivileges: []SchemaPrivilege{SchemaPrivilegeCreateAlert}, }, on: &AccountRoleGrantOn{ Schema: &GrantOnSchema{ - Schema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + Schema: Pointer(id), }, }, accountRole: NewAccountObjectIdentifier("role1"), } - assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON SCHEMA "db1"."schema1" TO ROLE "role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON SCHEMA %s TO ROLE "role1"`, id.FullyQualifiedName()) }) t.Run("on all schemas in database", func(t *testing.T) { @@ -141,6 +142,7 @@ func TestGrantPrivilegesToAccountRole(t *testing.T) { }) t.Run("on schema object", func(t *testing.T) { + tableId := randomSchemaObjectIdentifier() opts := &GrantPrivilegesToAccountRoleOptions{ privileges: &AccountRoleGrantPrivileges{ SchemaObjectPrivileges: []SchemaObjectPrivilege{SchemaObjectPrivilegeApply}, @@ -149,13 +151,13 @@ func TestGrantPrivilegesToAccountRole(t *testing.T) { SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypeTable, - Name: NewSchemaObjectIdentifier("db1", "schema1", "table1"), + Name: tableId, }, }, }, accountRole: NewAccountObjectIdentifier("role1"), } - assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON TABLE "db1"."schema1"."table1" TO ROLE "role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON TABLE %s TO ROLE "role1"`, tableId.FullyQualifiedName()) }) t.Run("on future schema object in database", func(t *testing.T) { @@ -177,6 +179,7 @@ func TestGrantPrivilegesToAccountRole(t *testing.T) { }) t.Run("on future schema object in schema", func(t *testing.T) { + id := randomDatabaseObjectIdentifier() opts := &GrantPrivilegesToAccountRoleOptions{ privileges: &AccountRoleGrantPrivileges{ SchemaObjectPrivileges: 
[]SchemaObjectPrivilege{SchemaObjectPrivilegeApply}, @@ -185,17 +188,19 @@ func TestGrantPrivilegesToAccountRole(t *testing.T) { SchemaObject: &GrantOnSchemaObject{ Future: &GrantOnSchemaObjectIn{ PluralObjectType: PluralObjectTypeTables, - InSchema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + InSchema: Pointer(id), }, }, }, accountRole: NewAccountObjectIdentifier("role1"), } - assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON FUTURE TABLES IN SCHEMA "db1"."schema1" TO ROLE "role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON FUTURE TABLES IN SCHEMA %s TO ROLE "role1"`, id.FullyQualifiedName()) }) } func TestRevokePrivilegesFromAccountRole(t *testing.T) { + schemaId := randomDatabaseObjectIdentifier() + t.Run("on account", func(t *testing.T) { opts := &RevokePrivilegesFromAccountRoleOptions{ privileges: &AccountRoleGrantPrivileges{ @@ -246,12 +251,12 @@ func TestRevokePrivilegesFromAccountRole(t *testing.T) { }, on: &AccountRoleGrantOn{ Schema: &GrantOnSchema{ - Schema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + Schema: Pointer(schemaId), }, }, accountRole: NewAccountObjectIdentifier("role1"), } - assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON SCHEMA "db1"."schema1" FROM ROLE "role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON SCHEMA %s FROM ROLE "role1"`, schemaId.FullyQualifiedName()) }) t.Run("on all schemas in database + restrict", func(t *testing.T) { @@ -287,6 +292,7 @@ func TestRevokePrivilegesFromAccountRole(t *testing.T) { }) t.Run("on schema object", func(t *testing.T) { + tableId := randomSchemaObjectIdentifier() opts := &RevokePrivilegesFromAccountRoleOptions{ privileges: &AccountRoleGrantPrivileges{ SchemaObjectPrivileges: []SchemaObjectPrivilege{SchemaObjectPrivilegeSelect, SchemaObjectPrivilegeUpdate}, @@ -295,13 +301,13 @@ func TestRevokePrivilegesFromAccountRole(t *testing.T) { SchemaObject: &GrantOnSchemaObject{ 
SchemaObject: &Object{ ObjectType: ObjectTypeTable, - Name: NewSchemaObjectIdentifier("db1", "schema1", "table1"), + Name: tableId, }, }, }, accountRole: NewAccountObjectIdentifier("role1"), } - assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON TABLE "db1"."schema1"."table1" FROM ROLE "role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON TABLE %s FROM ROLE "role1"`, tableId.FullyQualifiedName()) }) t.Run("on future schema object in database", func(t *testing.T) { @@ -323,6 +329,7 @@ func TestRevokePrivilegesFromAccountRole(t *testing.T) { }) t.Run("on future schema object in schema", func(t *testing.T) { + id := randomDatabaseObjectIdentifier() opts := &RevokePrivilegesFromAccountRoleOptions{ privileges: &AccountRoleGrantPrivileges{ SchemaObjectPrivileges: []SchemaObjectPrivilege{SchemaObjectPrivilegeSelect, SchemaObjectPrivilegeUpdate}, @@ -331,18 +338,20 @@ func TestRevokePrivilegesFromAccountRole(t *testing.T) { SchemaObject: &GrantOnSchemaObject{ Future: &GrantOnSchemaObjectIn{ PluralObjectType: PluralObjectTypeTables, - InSchema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + InSchema: Pointer(id), }, }, }, accountRole: NewAccountObjectIdentifier("role1"), } - assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON FUTURE TABLES IN SCHEMA "db1"."schema1" FROM ROLE "role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON FUTURE TABLES IN SCHEMA %s FROM ROLE "role1"`, id.FullyQualifiedName()) }) } func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { - dbId := NewAccountObjectIdentifier("db1") + dbId := randomAccountObjectIdentifier() + databaseRoleId := randomDatabaseObjectIdentifierInDatabase(dbId) + schemaId := randomDatabaseObjectIdentifierInDatabase(dbId) defaultGrantsForDb := func() *GrantPrivilegesToDatabaseRoleOptions { return &GrantPrivilegesToDatabaseRoleOptions{ @@ -352,7 +361,7 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { on: &DatabaseRoleGrantOn{ 
Database: &dbId, }, - databaseRole: NewDatabaseObjectIdentifier("db1", "role1"), + databaseRole: databaseRoleId, } } @@ -363,13 +372,13 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { }, on: &DatabaseRoleGrantOn{ Schema: &GrantOnSchema{ - Schema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + Schema: Pointer(schemaId), }, }, - databaseRole: NewDatabaseObjectIdentifier("db1", "role1"), + databaseRole: databaseRoleId, } } - + tableId := randomSchemaObjectIdentifier() defaultGrantsForSchemaObject := func() *GrantPrivilegesToDatabaseRoleOptions { return &GrantPrivilegesToDatabaseRoleOptions{ privileges: &DatabaseRoleGrantPrivileges{ @@ -379,11 +388,11 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypeTable, - Name: NewSchemaObjectIdentifier("db1", "schema1", "table1"), + Name: tableId, }, }, }, - databaseRole: NewDatabaseObjectIdentifier("db1", "role1"), + databaseRole: databaseRoleId, } } @@ -425,7 +434,7 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { opts.on = &DatabaseRoleGrantOn{ Database: &dbId, Schema: &GrantOnSchema{ - Schema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + Schema: Pointer(schemaId), }, } assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("DatabaseRoleGrantOn", "Database", "Schema", "SchemaObject")) @@ -475,33 +484,33 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { t.Run("on database", func(t *testing.T) { opts := defaultGrantsForDb() - assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE SCHEMA ON DATABASE "db1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE SCHEMA ON DATABASE %s TO DATABASE ROLE %s`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on schema", func(t *testing.T) { opts := defaultGrantsForSchema() - assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON SCHEMA "db1"."schema1" TO DATABASE ROLE 
"db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON SCHEMA %s TO DATABASE ROLE %s`, schemaId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on all schemas in database", func(t *testing.T) { opts := defaultGrantsForSchema() opts.on.Schema = &GrantOnSchema{ - AllSchemasInDatabase: Pointer(NewAccountObjectIdentifier("db1")), + AllSchemasInDatabase: Pointer(dbId), } - assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON ALL SCHEMAS IN DATABASE "db1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON ALL SCHEMAS IN DATABASE %s TO DATABASE ROLE %s`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on all future schemas in database", func(t *testing.T) { opts := defaultGrantsForSchema() opts.on.Schema = &GrantOnSchema{ - FutureSchemasInDatabase: Pointer(NewAccountObjectIdentifier("db1")), + FutureSchemasInDatabase: Pointer(dbId), } - assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON FUTURE SCHEMAS IN DATABASE "db1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT CREATE ALERT ON FUTURE SCHEMAS IN DATABASE %s TO DATABASE ROLE %s`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on schema object", func(t *testing.T) { opts := defaultGrantsForSchemaObject() - assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON TABLE "db1"."schema1"."table1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON TABLE %s TO DATABASE ROLE %s`, tableId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on future schema object in database", func(t *testing.T) { @@ -509,10 +518,10 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { opts.on.SchemaObject = &GrantOnSchemaObject{ Future: &GrantOnSchemaObjectIn{ PluralObjectType: PluralObjectTypeTables, - InDatabase: Pointer(NewAccountObjectIdentifier("db1")), + InDatabase: Pointer(dbId), }, } - 
assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON FUTURE TABLES IN DATABASE "db1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON FUTURE TABLES IN DATABASE %s TO DATABASE ROLE %s`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on future schema object in schema", func(t *testing.T) { @@ -520,10 +529,10 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { opts.on.SchemaObject = &GrantOnSchemaObject{ Future: &GrantOnSchemaObjectIn{ PluralObjectType: PluralObjectTypeTables, - InSchema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + InSchema: Pointer(schemaId), }, } - assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON FUTURE TABLES IN SCHEMA "db1"."schema1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT APPLY ON FUTURE TABLES IN SCHEMA %s TO DATABASE ROLE %s`, schemaId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("grant all privileges", func(t *testing.T) { @@ -531,12 +540,15 @@ func TestGrants_GrantPrivilegesToDatabaseRole(t *testing.T) { opts.privileges = &DatabaseRoleGrantPrivileges{ AllPrivileges: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `GRANT ALL PRIVILEGES ON TABLE "db1"."schema1"."table1" TO DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `GRANT ALL PRIVILEGES ON TABLE %s TO DATABASE ROLE %s`, tableId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) } func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { - dbId := NewAccountObjectIdentifier("db1") + dbId := randomAccountObjectIdentifier() + databaseRoleId := randomDatabaseObjectIdentifierInDatabase(dbId) + schemaId := randomDatabaseObjectIdentifierInDatabase(dbId) + tableId := randomSchemaObjectIdentifierInSchema(schemaId) defaultGrantsForDb := func() *RevokePrivilegesFromDatabaseRoleOptions { return &RevokePrivilegesFromDatabaseRoleOptions{ @@ -546,7 +558,7 @@ func 
TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { on: &DatabaseRoleGrantOn{ Database: &dbId, }, - databaseRole: NewDatabaseObjectIdentifier("db1", "role1"), + databaseRole: databaseRoleId, } } @@ -557,10 +569,10 @@ func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { }, on: &DatabaseRoleGrantOn{ Schema: &GrantOnSchema{ - Schema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + Schema: Pointer(schemaId), }, }, - databaseRole: NewDatabaseObjectIdentifier("db1", "role1"), + databaseRole: databaseRoleId, } } @@ -573,11 +585,11 @@ func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { SchemaObject: &GrantOnSchemaObject{ SchemaObject: &Object{ ObjectType: ObjectTypeTable, - Name: NewSchemaObjectIdentifier("db1", "schema1", "table1"), + Name: tableId, }, }, }, - databaseRole: NewDatabaseObjectIdentifier("db1", "role1"), + databaseRole: databaseRoleId, } } @@ -619,7 +631,7 @@ func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { opts.on = &DatabaseRoleGrantOn{ Database: &dbId, Schema: &GrantOnSchema{ - Schema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + Schema: Pointer(schemaId), }, } assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("DatabaseRoleGrantOn", "Database", "Schema", "SchemaObject")) @@ -669,35 +681,35 @@ func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { t.Run("on database", func(t *testing.T) { opts := defaultGrantsForDb() - assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE SCHEMA ON DATABASE "db1" FROM DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE SCHEMA ON DATABASE %s FROM DATABASE ROLE %s`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on schema", func(t *testing.T) { opts := defaultGrantsForSchema() - assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON SCHEMA "db1"."schema1" FROM DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, 
`REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON SCHEMA %s FROM DATABASE ROLE %s`, schemaId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on all schemas in database + restrict", func(t *testing.T) { opts := defaultGrantsForSchema() opts.on.Schema = &GrantOnSchema{ - AllSchemasInDatabase: Pointer(NewAccountObjectIdentifier("db1")), + AllSchemasInDatabase: Pointer(dbId), } opts.Restrict = Bool(true) - assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON ALL SCHEMAS IN DATABASE "db1" FROM DATABASE ROLE "db1"."role1" RESTRICT`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON ALL SCHEMAS IN DATABASE %s FROM DATABASE ROLE %s RESTRICT`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on all future schemas in database + cascade", func(t *testing.T) { opts := defaultGrantsForSchema() opts.on.Schema = &GrantOnSchema{ - FutureSchemasInDatabase: Pointer(NewAccountObjectIdentifier("db1")), + FutureSchemasInDatabase: Pointer(dbId), } opts.Cascade = Bool(true) - assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON FUTURE SCHEMAS IN DATABASE "db1" FROM DATABASE ROLE "db1"."role1" CASCADE`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE CREATE ALERT, ADD SEARCH OPTIMIZATION ON FUTURE SCHEMAS IN DATABASE %s FROM DATABASE ROLE %s CASCADE`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on schema object", func(t *testing.T) { opts := defaultGrantsForSchemaObject() - assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON TABLE "db1"."schema1"."table1" FROM DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON TABLE %s FROM DATABASE ROLE %s`, tableId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on future schema object in database", func(t *testing.T) { @@ -705,10 +717,10 @@ func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t 
*testing.T) { opts.on.SchemaObject = &GrantOnSchemaObject{ Future: &GrantOnSchemaObjectIn{ PluralObjectType: PluralObjectTypeTables, - InDatabase: Pointer(NewAccountObjectIdentifier("db1")), + InDatabase: Pointer(dbId), }, } - assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON FUTURE TABLES IN DATABASE "db1" FROM DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON FUTURE TABLES IN DATABASE %s FROM DATABASE ROLE %s`, dbId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) t.Run("on future schema object in schema", func(t *testing.T) { @@ -716,10 +728,10 @@ func TestGrants_RevokePrivilegesFromDatabaseRoleRole(t *testing.T) { opts.on.SchemaObject = &GrantOnSchemaObject{ Future: &GrantOnSchemaObjectIn{ PluralObjectType: PluralObjectTypeTables, - InSchema: Pointer(NewDatabaseObjectIdentifier("db1", "schema1")), + InSchema: Pointer(schemaId), }, } - assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON FUTURE TABLES IN SCHEMA "db1"."schema1" FROM DATABASE ROLE "db1"."role1"`) + assertOptsValidAndSQLEquals(t, opts, `REVOKE SELECT, UPDATE ON FUTURE TABLES IN SCHEMA %s FROM DATABASE ROLE %s`, schemaId.FullyQualifiedName(), databaseRoleId.FullyQualifiedName()) }) } @@ -857,23 +869,24 @@ func TestRevokePrivilegeFromShare(t *testing.T) { }) t.Run("on tag", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() opts := &revokePrivilegeFromShareOptions{ privileges: []ObjectPrivilege{ObjectPrivilegeRead}, On: &ShareGrantOn{ - Tag: NewSchemaObjectIdentifier("database-name", "schema-name", "tag-name"), + Tag: tagId, }, from: id, } - assertOptsValidAndSQLEquals(t, opts, "REVOKE READ ON TAG \"database-name\".\"schema-name\".\"tag-name\" FROM SHARE %s", id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "REVOKE READ ON TAG %s FROM SHARE %s", tagId.FullyQualifiedName(), id.FullyQualifiedName()) }) } func TestGrants_GrantOwnership(t *testing.T) { - dbId := NewAccountObjectIdentifier("db1") - 
schemaId := NewDatabaseObjectIdentifier("db1", "schema1") - roleId := NewAccountObjectIdentifier("role1") - databaseRoleId := NewDatabaseObjectIdentifier("db1", "role1") - tableId := NewSchemaObjectIdentifier("db1", "schema1", "table1") + dbId := randomAccountObjectIdentifier() + schemaId := randomDatabaseObjectIdentifierInDatabase(dbId) + roleId := randomAccountObjectIdentifier() + databaseRoleId := randomDatabaseObjectIdentifierInDatabase(dbId) + tableId := randomSchemaObjectIdentifierInSchema(schemaId) defaultOpts := func() *GrantOwnershipOptions { return &GrantOwnershipOptions{ diff --git a/pkg/sdk/identifier_helpers.go b/pkg/sdk/identifier_helpers.go index 056fda4c5b..0aae547e01 100644 --- a/pkg/sdk/identifier_helpers.go +++ b/pkg/sdk/identifier_helpers.go @@ -123,7 +123,7 @@ func NewAccountIdentifier(organizationName, accountName string) AccountIdentifie func NewAccountIdentifierFromAccountLocator(accountLocator string) AccountIdentifier { return AccountIdentifier{ - accountLocator: accountLocator, + accountLocator: strings.Trim(accountLocator, `"`), } } @@ -369,3 +369,7 @@ func (i TableColumnIdentifier) FullyQualifiedName() string { } return fmt.Sprintf(`"%v"."%v"."%v"."%v"`, i.databaseName, i.schemaName, i.tableName, i.columnName) } + +func (i TableColumnIdentifier) SchemaObjectId() SchemaObjectIdentifier { + return NewSchemaObjectIdentifier(i.databaseName, i.schemaName, i.tableName) +} diff --git a/pkg/sdk/masking_policy.go b/pkg/sdk/masking_policy.go index 78ededbeec..7255c1c484 100644 --- a/pkg/sdk/masking_policy.go +++ b/pkg/sdk/masking_policy.go @@ -289,7 +289,7 @@ func (v *maskingPolicies) ShowByID(ctx context.Context, id SchemaObjectIdentifie Pattern: String(id.Name()), }, In: &In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }, }) if err != nil { diff --git a/pkg/sdk/masking_policy_test.go b/pkg/sdk/masking_policy_test.go index 926163a409..56e14d1cb4 100644 --- a/pkg/sdk/masking_policy_test.go 
+++ b/pkg/sdk/masking_policy_test.go @@ -189,16 +189,15 @@ func TestMaskingPolicyShow(t *testing.T) { }) t.Run("with like and in schema", func(t *testing.T) { - schemaIdentifier := NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()) opts := &ShowMaskingPolicyOptions{ Like: &Like{ Pattern: String(id.Name()), }, In: &In{ - Schema: schemaIdentifier, + Schema: id.SchemaId(), }, } - assertOptsValidAndSQLEquals(t, opts, "SHOW MASKING POLICIES LIKE '%s' IN SCHEMA %s", id.Name(), schemaIdentifier.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "SHOW MASKING POLICIES LIKE '%s' IN SCHEMA %s", id.Name(), id.SchemaId().FullyQualifiedName()) }) t.Run("with limit", func(t *testing.T) { diff --git a/pkg/sdk/materialized_views_gen_test.go b/pkg/sdk/materialized_views_gen_test.go index e0fa17dac3..881730aee1 100644 --- a/pkg/sdk/materialized_views_gen_test.go +++ b/pkg/sdk/materialized_views_gen_test.go @@ -21,7 +21,7 @@ func TestMaterializedViews_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -35,7 +35,7 @@ func TestMaterializedViews_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.RowAccessPolicy.RowAccessPolicy]", func(t *testing.T) { opts := defaultOpts() opts.RowAccessPolicy = &MaterializedViewRowAccessPolicy{ - RowAccessPolicy: NewSchemaObjectIdentifier("", "", ""), + RowAccessPolicy: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -115,7 +115,7 @@ func TestMaterializedViews_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, 
ErrInvalidObjectIdentifier) }) @@ -249,7 +249,7 @@ func TestMaterializedViews_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -304,7 +304,7 @@ func TestMaterializedViews_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/materialized_views_impl_gen.go b/pkg/sdk/materialized_views_impl_gen.go index 1914973bf3..b37c0534c6 100644 --- a/pkg/sdk/materialized_views_impl_gen.go +++ b/pkg/sdk/materialized_views_impl_gen.go @@ -38,7 +38,7 @@ func (v *materializedViews) Show(ctx context.Context, request *ShowMaterializedV } func (v *materializedViews) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*MaterializedView, error) { - request := NewShowMaterializedViewRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowMaterializedViewRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) materializedViews, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/network_rule_gen_test.go b/pkg/sdk/network_rule_gen_test.go index 48911422cc..14a1944575 100644 --- a/pkg/sdk/network_rule_gen_test.go +++ b/pkg/sdk/network_rule_gen_test.go @@ -25,7 +25,7 @@ func TestNetworkRules_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, 
ErrInvalidObjectIdentifier) }) @@ -59,7 +59,7 @@ func TestNetworkRules_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -119,7 +119,7 @@ func TestNetworkRules_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -184,7 +184,7 @@ func TestNetworkRules_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/network_rule_impl_gen.go b/pkg/sdk/network_rule_impl_gen.go index 3e4cd49980..e3613bc7c6 100644 --- a/pkg/sdk/network_rule_impl_gen.go +++ b/pkg/sdk/network_rule_impl_gen.go @@ -40,7 +40,7 @@ func (v *networkRules) Show(ctx context.Context, request *ShowNetworkRuleRequest func (v *networkRules) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*NetworkRule, error) { networkRules, err := v.Show(ctx, NewShowNetworkRuleRequest().WithIn(&In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }).WithLike(&Like{ Pattern: String(id.Name()), })) diff --git a/pkg/sdk/parameters.go b/pkg/sdk/parameters.go index f5dac7eed2..0d44afc777 100644 --- a/pkg/sdk/parameters.go +++ b/pkg/sdk/parameters.go @@ -474,6 +474,8 @@ const ( ObjectParameterUserTaskTimeoutMs ObjectParameter = "USER_TASK_TIMEOUT_MS" ObjectParameterCatalog ObjectParameter = "CATALOG" ObjectParameterExternalVolume ObjectParameter = "EXTERNAL_VOLUME" + 
ObjectParameterReplaceInvalidCharacters ObjectParameter = "REPLACE_INVALID_CHARACTERS" + ObjectParameterStorageSerializationPolicy ObjectParameter = "STORAGE_SERIALIZATION_POLICY" // User Parameters ObjectParameterEnableUnredactedQuerySyntaxError ObjectParameter = "ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR" diff --git a/pkg/sdk/password_policy.go b/pkg/sdk/password_policy.go index 16af471c64..36b97aae4f 100644 --- a/pkg/sdk/password_policy.go +++ b/pkg/sdk/password_policy.go @@ -324,7 +324,7 @@ func (v *passwordPolicies) ShowByID(ctx context.Context, id SchemaObjectIdentifi Pattern: String(id.Name()), }, In: &In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }, }) if err != nil { diff --git a/pkg/sdk/password_policy_test.go b/pkg/sdk/password_policy_test.go index 447b53a432..235893bce2 100644 --- a/pkg/sdk/password_policy_test.go +++ b/pkg/sdk/password_policy_test.go @@ -182,16 +182,15 @@ func TestPasswordPolicyShow(t *testing.T) { }) t.Run("with like and in schema", func(t *testing.T) { - schemaIdentifier := NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()) opts := &ShowPasswordPolicyOptions{ Like: &Like{ Pattern: String(id.Name()), }, In: &In{ - Schema: schemaIdentifier, + Schema: id.SchemaId(), }, } - assertOptsValidAndSQLEquals(t, opts, "SHOW PASSWORD POLICIES LIKE '%s' IN SCHEMA %s", id.Name(), schemaIdentifier.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, "SHOW PASSWORD POLICIES LIKE '%s' IN SCHEMA %s", id.Name(), id.SchemaId().FullyQualifiedName()) }) t.Run("with limit", func(t *testing.T) { diff --git a/pkg/sdk/pipes_test.go b/pkg/sdk/pipes_test.go index d3198706f3..4948d0313a 100644 --- a/pkg/sdk/pipes_test.go +++ b/pkg/sdk/pipes_test.go @@ -21,7 +21,7 @@ func TestPipesCreate(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier 
assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -64,7 +64,7 @@ func TestPipesAlter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -194,7 +194,7 @@ func TestPipesDrop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -212,8 +212,6 @@ func TestPipesDrop(t *testing.T) { func TestPipesShow(t *testing.T) { id := randomSchemaObjectIdentifier() - databaseIdentifier := NewAccountObjectIdentifier(id.DatabaseName()) - schemaIdentifier := NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()) defaultOpts := func() *ShowPipeOptions { return &ShowPipeOptions{} @@ -240,7 +238,7 @@ func TestPipesShow(t *testing.T) { opts := defaultOpts() opts.In = &In{ Account: Bool(true), - Database: databaseIdentifier, + Database: id.DatabaseId(), } assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("ShowPipeOptions.In", "Account", "Database", "Schema")) }) @@ -269,17 +267,17 @@ func TestPipesShow(t *testing.T) { t.Run("in database", func(t *testing.T) { opts := defaultOpts() opts.In = &In{ - Database: databaseIdentifier, + Database: id.DatabaseId(), } - assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES IN DATABASE %s`, databaseIdentifier.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES IN DATABASE %s`, id.DatabaseId().FullyQualifiedName()) }) t.Run("in schema", func(t *testing.T) { opts := defaultOpts() opts.In = &In{ - Schema: schemaIdentifier, + Schema: id.SchemaId(), } - assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES IN SCHEMA %s`, schemaIdentifier.FullyQualifiedName()) + 
assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES IN SCHEMA %s`, id.SchemaId().FullyQualifiedName()) }) t.Run("with like and in account", func(t *testing.T) { @@ -299,9 +297,9 @@ func TestPipesShow(t *testing.T) { Pattern: String(id.Name()), } opts.In = &In{ - Database: databaseIdentifier, + Database: id.DatabaseId(), } - assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES LIKE '%s' IN DATABASE %s`, id.Name(), databaseIdentifier.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES LIKE '%s' IN DATABASE %s`, id.Name(), id.DatabaseId().FullyQualifiedName()) }) t.Run("with like and in schema", func(t *testing.T) { @@ -310,9 +308,9 @@ func TestPipesShow(t *testing.T) { Pattern: String(id.Name()), } opts.In = &In{ - Schema: schemaIdentifier, + Schema: id.SchemaId(), } - assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES LIKE '%s' IN SCHEMA %s`, id.Name(), schemaIdentifier.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `SHOW PIPES LIKE '%s' IN SCHEMA %s`, id.Name(), id.SchemaId().FullyQualifiedName()) }) } @@ -332,7 +330,7 @@ func TestPipesDescribe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/poc/README.md b/pkg/sdk/poc/README.md index ac4c624de3..44af1e130b 100644 --- a/pkg/sdk/poc/README.md +++ b/pkg/sdk/poc/README.md @@ -108,6 +108,7 @@ find a better solution to solve the issue (add more logic to the templates ?) 
- better handling of list of strings/identifiers - there should be no need to define custom types every time - more clear definition of lists that can be empty vs cannot be empty +- add empty ids in generated tests (TODO in random_test.go) ##### Known issues - generating two converts when Show and Desc use the same data structure diff --git a/pkg/sdk/policy_references_test.go b/pkg/sdk/policy_references_test.go index 2154f957c4..d67aac1eea 100644 --- a/pkg/sdk/policy_references_test.go +++ b/pkg/sdk/policy_references_test.go @@ -1,6 +1,7 @@ package sdk import ( + "strings" "testing" ) @@ -52,15 +53,16 @@ func TestPolicyReferencesGetForEntity(t *testing.T) { }) t.Run("table domain", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := &getForEntityPolicyReferenceOptions{ parameters: &policyReferenceParameters{ arguments: &policyReferenceFunctionArguments{ - refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("db", "schema", "table")}, + refEntityName: []ObjectIdentifier{id}, refEntityDomain: Pointer(PolicyEntityDomainTable), }, }, } - assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.POLICY_REFERENCES (REF_ENTITY_NAME => '\"db\".\"schema\".\"table\"', REF_ENTITY_DOMAIN => 'TABLE'))`) + assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.POLICY_REFERENCES (REF_ENTITY_NAME => '%s', REF_ENTITY_DOMAIN => 'TABLE'))`, temporaryReplace(id)) }) t.Run("account domain", func(t *testing.T) { @@ -88,26 +90,33 @@ func TestPolicyReferencesGetForEntity(t *testing.T) { }) t.Run("tag domain", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := &getForEntityPolicyReferenceOptions{ parameters: &policyReferenceParameters{ arguments: &policyReferenceFunctionArguments{ - refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("db", "schema", "tag_name")}, + refEntityName: []ObjectIdentifier{id}, refEntityDomain: Pointer(PolicyEntityDomainTag), }, }, } - 
assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.POLICY_REFERENCES (REF_ENTITY_NAME => '\"db\".\"schema\".\"tag_name\"', REF_ENTITY_DOMAIN => 'TAG'))`) + assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.POLICY_REFERENCES (REF_ENTITY_NAME => '%s', REF_ENTITY_DOMAIN => 'TAG'))`, temporaryReplace(id)) }) t.Run("view domain", func(t *testing.T) { + id := randomSchemaObjectIdentifier() opts := &getForEntityPolicyReferenceOptions{ parameters: &policyReferenceParameters{ arguments: &policyReferenceFunctionArguments{ - refEntityName: []ObjectIdentifier{NewSchemaObjectIdentifier("db", "schema", "view_name")}, + refEntityName: []ObjectIdentifier{id}, refEntityDomain: Pointer(PolicyEntityDomainView), }, }, } - assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.POLICY_REFERENCES (REF_ENTITY_NAME => '\"db\".\"schema\".\"view_name\"', REF_ENTITY_DOMAIN => 'VIEW'))`) + assertOptsValidAndSQLEquals(t, opts, `SELECT * FROM TABLE (SNOWFLAKE.INFORMATION_SCHEMA.POLICY_REFERENCES (REF_ENTITY_NAME => '%s', REF_ENTITY_DOMAIN => 'VIEW'))`, temporaryReplace(id)) }) } + +// TODO [SNOW-999049]: check during the identifiers rework +func temporaryReplace(id SchemaObjectIdentifier) string { + return strings.ReplaceAll(id.FullyQualifiedName(), `"`, `\"`) +} diff --git a/pkg/sdk/privileges.go b/pkg/sdk/privileges.go index eb0a3ba97e..db4d4278ef 100644 --- a/pkg/sdk/privileges.go +++ b/pkg/sdk/privileges.go @@ -123,6 +123,7 @@ const ( SchemaPrivilegeAddSearchOptimization SchemaPrivilege = "ADD SEARCH OPTIMIZATION" SchemaPrivilegeApplyBudget SchemaPrivilege = "APPLYBUDGET" SchemaPrivilegeCreateAlert SchemaPrivilege = "CREATE ALERT" + SchemaPrivilegeCreateDataset SchemaPrivilege = "CREATE DATASET" SchemaPrivilegeCreateFileFormat SchemaPrivilege = "CREATE FILE FORMAT" SchemaPrivilegeCreateFunction SchemaPrivilege = "CREATE FUNCTION" SchemaPrivilegeCreateGitRepository SchemaPrivilege = 
"CREATE GIT REPOSITORY" diff --git a/pkg/sdk/procedures_gen_test.go b/pkg/sdk/procedures_gen_test.go index bef49aff9f..015dc6075b 100644 --- a/pkg/sdk/procedures_gen_test.go +++ b/pkg/sdk/procedures_gen_test.go @@ -20,7 +20,7 @@ func TestProcedures_CreateForJava(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -128,7 +128,7 @@ func TestProcedures_CreateForJavaScript(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -175,7 +175,7 @@ func TestProcedures_CreateForPython(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -269,7 +269,7 @@ func TestProcedures_CreateForScala(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -356,7 +356,7 @@ func TestProcedures_CreateForSQL(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -417,7 +417,7 @@ func TestProcedures_Drop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = 
NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -452,7 +452,7 @@ func TestProcedures_Alter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -577,7 +577,7 @@ func TestProcedures_Describe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -609,7 +609,7 @@ func TestProcedures_Call(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/procedures_impl_gen.go b/pkg/sdk/procedures_impl_gen.go index 8e11f7e896..c665829e7e 100644 --- a/pkg/sdk/procedures_impl_gen.go +++ b/pkg/sdk/procedures_impl_gen.go @@ -58,7 +58,7 @@ func (v *procedures) Show(ctx context.Context, request *ShowProcedureRequest) ([ } func (v *procedures) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Procedure, error) { - request := NewShowProcedureRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowProcedureRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) procedures, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/random_test.go b/pkg/sdk/random_test.go index 406d1bc57f..8b7c981744 100644 --- a/pkg/sdk/random_test.go +++ b/pkg/sdk/random_test.go @@ -6,7 +6,12 @@ import ( var ( 
invalidAccountObjectIdentifier = NewAccountObjectIdentifier(random.StringN(256)) - invalidSchemaObjectIdentifier = NewSchemaObjectIdentifier(random.StringN(255), random.StringN(255), random.StringN(255)) + longSchemaObjectIdentifier = NewSchemaObjectIdentifier(random.StringN(255), random.StringN(255), random.StringN(255)) + + // TODO: Add to the generator + emptyAccountObjectIdentifier = NewAccountObjectIdentifier("") + emptyDatabaseObjectIdentifier = NewDatabaseObjectIdentifier("", "") + emptySchemaObjectIdentifier = NewSchemaObjectIdentifier("", "", "") ) func randomSchemaObjectIdentifier() SchemaObjectIdentifier { @@ -32,3 +37,11 @@ func randomDatabaseObjectIdentifierInDatabase(databaseId AccountObjectIdentifier func randomAccountObjectIdentifier() AccountObjectIdentifier { return NewAccountObjectIdentifier(random.StringN(12)) } + +func randomTableColumnIdentifier() TableColumnIdentifier { + return NewTableColumnIdentifier(random.StringN(12), random.StringN(12), random.StringN(12), random.StringN(12)) +} + +func randomTableColumnIdentifierInSchemaObject(objectId SchemaObjectIdentifier) TableColumnIdentifier { + return NewTableColumnIdentifier(objectId.DatabaseName(), objectId.SchemaName(), objectId.Name(), random.StringN(12)) +} diff --git a/pkg/sdk/roles_test.go b/pkg/sdk/roles_test.go index 7ae9455ae9..c435a838ce 100644 --- a/pkg/sdk/roles_test.go +++ b/pkg/sdk/roles_test.go @@ -14,18 +14,19 @@ func TestRolesCreate(t *testing.T) { }) t.Run("all options", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() opts := &CreateRoleOptions{ name: NewAccountObjectIdentifier("new_role"), OrReplace: Bool(true), Tag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag1"), + Name: tagId, Value: "v1", }, }, Comment: String("comment"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE ROLE "new_role" COMMENT = 'comment' TAG ("db1"."schema1"."tag1" = 'v1')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE ROLE "new_role" 
COMMENT = 'comment' TAG (%s = 'v1')`, tagId.FullyQualifiedName()) }) t.Run("validation: invalid identifier", func(t *testing.T) { diff --git a/pkg/sdk/row_access_policies_gen_test.go b/pkg/sdk/row_access_policies_gen_test.go index c14bd7734f..71e8e68395 100644 --- a/pkg/sdk/row_access_policies_gen_test.go +++ b/pkg/sdk/row_access_policies_gen_test.go @@ -24,7 +24,7 @@ func TestRowAccessPolicies_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -89,7 +89,7 @@ func TestRowAccessPolicies_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -173,7 +173,7 @@ func TestRowAccessPolicies_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -234,7 +234,7 @@ func TestRowAccessPolicies_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/row_access_policies_impl_gen.go b/pkg/sdk/row_access_policies_impl_gen.go index 0a020b2e40..c5ac6fa88a 100644 --- a/pkg/sdk/row_access_policies_impl_gen.go +++ b/pkg/sdk/row_access_policies_impl_gen.go @@ -38,7 +38,7 @@ func (v *rowAccessPolicies) Show(ctx context.Context, request *ShowRowAccessPoli } 
func (v *rowAccessPolicies) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*RowAccessPolicy, error) { - request := NewShowRowAccessPolicyRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowRowAccessPolicyRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) rowAccessPolicies, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/schemas_test.go b/pkg/sdk/schemas_test.go index ad6a7884ef..da4d8ebcf1 100644 --- a/pkg/sdk/schemas_test.go +++ b/pkg/sdk/schemas_test.go @@ -23,6 +23,7 @@ func TestSchemasCreate(t *testing.T) { }) t.Run("complete", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() opts := &CreateSchemaOptions{ name: id, Transient: Bool(true), @@ -33,38 +34,41 @@ func TestSchemasCreate(t *testing.T) { DefaultDDLCollation: String("en_US-trim"), Tag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag1"), + Name: tagId, Value: "v1", }, }, Comment: String("comment"), } - assertOptsValidAndSQLEquals(t, opts, `CREATE TRANSIENT SCHEMA IF NOT EXISTS %s WITH MANAGED ACCESS DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 1 DEFAULT_DDL_COLLATION = 'en_US-trim' TAG ("db1"."schema1"."tag1" = 'v1') COMMENT = 'comment'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `CREATE TRANSIENT SCHEMA IF NOT EXISTS %s WITH MANAGED ACCESS DATA_RETENTION_TIME_IN_DAYS = 1 MAX_DATA_EXTENSION_TIME_IN_DAYS = 1 DEFAULT_DDL_COLLATION = 'en_US-trim' TAG (%s = 'v1') COMMENT = 'comment'`, id.FullyQualifiedName(), tagId.FullyQualifiedName()) }) } func TestSchemasAlter(t *testing.T) { + schemaId := randomDatabaseObjectIdentifier() + newSchemaId := randomDatabaseObjectIdentifierInDatabase(schemaId.DatabaseId()) + t.Run("rename to", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: 
schemaId, IfExists: Bool(true), - NewName: NewDatabaseObjectIdentifier("database_name", "new_schema_name"), + NewName: newSchemaId, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA IF EXISTS "database_name"."schema_name" RENAME TO "database_name"."new_schema_name"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA IF EXISTS %s RENAME TO %s`, schemaId.FullyQualifiedName(), newSchemaId.FullyQualifiedName()) }) t.Run("swap with", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, IfExists: Bool(false), - SwapWith: NewDatabaseObjectIdentifier("database_name", "target_schema_name"), + SwapWith: newSchemaId, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" SWAP WITH "database_name"."target_schema_name"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s SWAP WITH %s`, schemaId.FullyQualifiedName(), newSchemaId.FullyQualifiedName()) }) t.Run("set options", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, Set: &SchemaSet{ DataRetentionTimeInDays: Int(3), MaxDataExtensionTimeInDays: Int(2), @@ -72,12 +76,12 @@ func TestSchemasAlter(t *testing.T) { Comment: String("comment"), }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" SET DATA_RETENTION_TIME_IN_DAYS = 3, MAX_DATA_EXTENSION_TIME_IN_DAYS = 2, DEFAULT_DDL_COLLATION = 'en_US-trim', COMMENT = 'comment'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s SET DATA_RETENTION_TIME_IN_DAYS = 3, MAX_DATA_EXTENSION_TIME_IN_DAYS = 2, DEFAULT_DDL_COLLATION = 'en_US-trim', COMMENT = 'comment'`, schemaId.FullyQualifiedName()) }) t.Run("set tags", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, SetTag: []TagAssociation{ { Name: NewAccountObjectIdentifier("tag1"), @@ -89,23 +93,23 @@ 
func TestSchemasAlter(t *testing.T) { }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" SET TAG "tag1" = 'value1', "tag2" = 'value2'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s SET TAG "tag1" = 'value1', "tag2" = 'value2'`, schemaId.FullyQualifiedName()) }) t.Run("unset tags", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, UnsetTag: []ObjectIdentifier{ NewAccountObjectIdentifier("tag1"), NewAccountObjectIdentifier("tag2"), }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" UNSET TAG "tag1", "tag2"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s UNSET TAG "tag1", "tag2"`, schemaId.FullyQualifiedName()) }) t.Run("unset options", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, Unset: &SchemaUnset{ DataRetentionTimeInDays: Bool(true), MaxDataExtensionTimeInDays: Bool(true), @@ -113,57 +117,63 @@ func TestSchemasAlter(t *testing.T) { Comment: Bool(true), }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, DEFAULT_DDL_COLLATION, COMMENT`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s UNSET DATA_RETENTION_TIME_IN_DAYS, MAX_DATA_EXTENSION_TIME_IN_DAYS, DEFAULT_DDL_COLLATION, COMMENT`, schemaId.FullyQualifiedName()) }) t.Run("enable managed access", func(t *testing.T) { opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, EnableManagedAccess: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" ENABLE MANAGED ACCESS`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s ENABLE MANAGED ACCESS`, schemaId.FullyQualifiedName()) }) t.Run("disable managed access", func(t *testing.T) { 
opts := &AlterSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, DisableManagedAccess: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA "database_name"."schema_name" DISABLE MANAGED ACCESS`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SCHEMA %s DISABLE MANAGED ACCESS`, schemaId.FullyQualifiedName()) }) } func TestSchemasDrop(t *testing.T) { + schemaId := randomDatabaseObjectIdentifier() + t.Run("cascade", func(t *testing.T) { opts := &DropSchemaOptions{ IfExists: Bool(true), - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, Cascade: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `DROP SCHEMA IF EXISTS "database_name"."schema_name" CASCADE`) + assertOptsValidAndSQLEquals(t, opts, `DROP SCHEMA IF EXISTS %s CASCADE`, schemaId.FullyQualifiedName()) }) t.Run("restrict", func(t *testing.T) { opts := &DropSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, Restrict: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `DROP SCHEMA "database_name"."schema_name" RESTRICT`) + assertOptsValidAndSQLEquals(t, opts, `DROP SCHEMA %s RESTRICT`, schemaId.FullyQualifiedName()) }) } func TestSchemasUndrop(t *testing.T) { + schemaId := randomDatabaseObjectIdentifier() + opts := &undropSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, } - assertOptsValidAndSQLEquals(t, opts, `UNDROP SCHEMA "database_name"."schema_name"`) + assertOptsValidAndSQLEquals(t, opts, `UNDROP SCHEMA %s`, schemaId.FullyQualifiedName()) } func TestSchemasDescribe(t *testing.T) { + schemaId := randomDatabaseObjectIdentifier() + opts := &describeSchemaOptions{ - name: NewDatabaseObjectIdentifier("database_name", "schema_name"), + name: schemaId, } - assertOptsValidAndSQLEquals(t, opts, `DESCRIBE SCHEMA "database_name"."schema_name"`) + assertOptsValidAndSQLEquals(t, opts, `DESCRIBE SCHEMA %s`, 
schemaId.FullyQualifiedName()) } func TestSchemasShow(t *testing.T) { diff --git a/pkg/sdk/security_integrations_gen_test.go b/pkg/sdk/security_integrations_gen_test.go index 4da4ebf27a..5a82b9e6b5 100644 --- a/pkg/sdk/security_integrations_gen_test.go +++ b/pkg/sdk/security_integrations_gen_test.go @@ -909,7 +909,7 @@ func TestSecurityIntegrations_AlterOauthForPartnerApplications(t *testing.T) { opts.Set = &OauthForPartnerApplicationsIntegrationSet{ Enabled: Pointer(true), } - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -1017,7 +1017,7 @@ func TestSecurityIntegrations_AlterOauthForCustomClients(t *testing.T) { opts.Set = &OauthForCustomClientsIntegrationSet{ Enabled: Pointer(true), } - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -1137,7 +1137,7 @@ func TestSecurityIntegrations_AlterSaml2(t *testing.T) { opts.Set = &Saml2IntegrationSet{ Enabled: Pointer(true), } - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -1257,7 +1257,7 @@ func TestSecurityIntegrations_AlterScim(t *testing.T) { opts.Set = &ScimIntegrationSet{ Enabled: Pointer(true), } - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -1351,7 +1351,7 @@ func TestSecurityIntegrations_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -1379,7 +1379,7 @@ func TestSecurityIntegrations_Describe(t *testing.T) { t.Run("validation: valid identifier 
for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewAccountObjectIdentifier("") + opts.name = emptyAccountObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/sequences_gen_test.go b/pkg/sdk/sequences_gen_test.go index 310757e3c3..6d4c81ff1d 100644 --- a/pkg/sdk/sequences_gen_test.go +++ b/pkg/sdk/sequences_gen_test.go @@ -20,7 +20,7 @@ func TestSequences_Create(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -59,7 +59,7 @@ func TestSequences_Alter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -152,7 +152,7 @@ func TestSequences_Describe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -177,7 +177,7 @@ func TestSequences_Drop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/sequences_impl_gen.go b/pkg/sdk/sequences_impl_gen.go index 4cac023003..79e1d356ab 100644 --- a/pkg/sdk/sequences_impl_gen.go +++ b/pkg/sdk/sequences_impl_gen.go @@ -33,7 +33,7 @@ func (v *sequences) Show(ctx context.Context, request *ShowSequenceRequest) ([]S } func (v *sequences) ShowByID(ctx context.Context, id 
SchemaObjectIdentifier) (*Sequence, error) { - request := NewShowSequenceRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowSequenceRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) sequences, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/session_policies_gen_test.go b/pkg/sdk/session_policies_gen_test.go index 9a98972ad4..f6cb6c5f4c 100644 --- a/pkg/sdk/session_policies_gen_test.go +++ b/pkg/sdk/session_policies_gen_test.go @@ -19,7 +19,7 @@ func TestSessionPolicies_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -62,7 +62,7 @@ func TestSessionPolicies_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -159,7 +159,7 @@ func TestSessionPolicies_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -203,7 +203,7 @@ func TestSessionPolicies_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/shares_test.go b/pkg/sdk/shares_test.go index 613d4c011a..a5ca26535c 
100644 --- a/pkg/sdk/shares_test.go +++ b/pkg/sdk/shares_test.go @@ -73,17 +73,18 @@ func TestShareAlter(t *testing.T) { }) t.Run("with set tag", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() opts := &AlterShareOptions{ IfExists: Bool(true), name: NewAccountObjectIdentifier("myshare"), SetTag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "tag"), + Name: tagId, Value: "v1", }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SHARE IF EXISTS "myshare" SET TAG "db"."schema"."tag" = 'v1'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SHARE IF EXISTS "myshare" SET TAG %s = 'v1'`, tagId.FullyQualifiedName()) }) t.Run("with unset", func(t *testing.T) { @@ -98,14 +99,15 @@ func TestShareAlter(t *testing.T) { }) t.Run("with unset tag", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() opts := &AlterShareOptions{ IfExists: Bool(true), name: NewAccountObjectIdentifier("myshare"), UnsetTag: []ObjectIdentifier{ - NewSchemaObjectIdentifier("db", "schema", "tag"), + tagId, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER SHARE IF EXISTS "myshare" UNSET TAG "db"."schema"."tag"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER SHARE IF EXISTS "myshare" UNSET TAG %s`, tagId.FullyQualifiedName()) }) } diff --git a/pkg/sdk/stages_gen_test.go b/pkg/sdk/stages_gen_test.go index c6c46f2e95..84d15939a5 100644 --- a/pkg/sdk/stages_gen_test.go +++ b/pkg/sdk/stages_gen_test.go @@ -341,7 +341,7 @@ func TestStages_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.RenameTo] if set", func(t *testing.T) { opts := defaultOpts() - newId := NewSchemaObjectIdentifier("", "", "") + newId := emptySchemaObjectIdentifier opts.RenameTo = &newId assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -362,7 +362,7 @@ func TestStages_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = 
emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -417,7 +417,7 @@ func TestStages_AlterInternalStage(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -461,7 +461,7 @@ func TestStages_AlterExternalS3Stage(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -536,7 +536,7 @@ func TestStages_AlterExternalGCSStage(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -588,7 +588,7 @@ func TestStages_AlterExternalAzureStage(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -650,7 +650,7 @@ func TestStages_AlterDirectoryTable(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -690,7 +690,7 @@ func TestStages_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = 
emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -718,7 +718,7 @@ func TestStages_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -729,6 +729,7 @@ func TestStages_Describe(t *testing.T) { } func TestStages_Show(t *testing.T) { + schemaId := randomDatabaseObjectIdentifier() // Minimal valid ShowStageOptions defaultOpts := func() *ShowStageOptions { return &ShowStageOptions{} @@ -750,8 +751,8 @@ func TestStages_Show(t *testing.T) { Pattern: String("some pattern"), } opts.In = &In{ - Schema: NewDatabaseObjectIdentifier("db", "schema"), + Schema: schemaId, } - assertOptsValidAndSQLEquals(t, opts, `SHOW STAGES LIKE 'some pattern' IN SCHEMA "db"."schema"`) + assertOptsValidAndSQLEquals(t, opts, `SHOW STAGES LIKE 'some pattern' IN SCHEMA %s`, schemaId.FullyQualifiedName()) }) } diff --git a/pkg/sdk/stages_impl_gen.go b/pkg/sdk/stages_impl_gen.go index 1ee8f78706..686820f8b3 100644 --- a/pkg/sdk/stages_impl_gen.go +++ b/pkg/sdk/stages_impl_gen.go @@ -99,7 +99,7 @@ func (v *stages) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Stag Pattern: String(id.Name()), }). 
WithIn(&In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), })) if err != nil { return nil, err diff --git a/pkg/sdk/streamlits_gen_test.go b/pkg/sdk/streamlits_gen_test.go index 8b0cdc38d5..b69b3cf69c 100644 --- a/pkg/sdk/streamlits_gen_test.go +++ b/pkg/sdk/streamlits_gen_test.go @@ -18,7 +18,7 @@ func TestStreamlits_Create(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -58,7 +58,7 @@ func TestStreamlits_Alter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -97,7 +97,7 @@ func TestStreamlits_Drop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -167,7 +167,7 @@ func TestStreamlits_Describe(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/streamlits_impl_gen.go b/pkg/sdk/streamlits_impl_gen.go index 7ee780cf83..0e26844c5f 100644 --- a/pkg/sdk/streamlits_impl_gen.go +++ b/pkg/sdk/streamlits_impl_gen.go @@ -38,7 +38,7 @@ func (v *streamlits) Show(ctx context.Context, request *ShowStreamlitRequest) ([ } func (v *streamlits) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Streamlit, error) { - request := 
NewShowStreamlitRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowStreamlitRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) streamlits, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/streams_gen_test.go b/pkg/sdk/streams_gen_test.go index ed0b59d56d..c06691129b 100644 --- a/pkg/sdk/streams_gen_test.go +++ b/pkg/sdk/streams_gen_test.go @@ -27,13 +27,13 @@ func TestStreams_CreateOnTable(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: valid identifier for [opts.TableId]", func(t *testing.T) { opts := defaultOpts() - opts.TableId = NewSchemaObjectIdentifier("", "", "") + opts.TableId = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -105,13 +105,13 @@ func TestStreams_CreateOnExternalTable(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: valid identifier for [opts.ExternalTableId]", func(t *testing.T) { opts := defaultOpts() - opts.ExternalTableId = NewSchemaObjectIdentifier("", "", "") + opts.ExternalTableId = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -176,13 +176,13 @@ func TestStreams_CreateOnDirectoryTable(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = 
emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: valid identifier for [opts.StageId]", func(t *testing.T) { opts := defaultOpts() - opts.StageId = NewSchemaObjectIdentifier("", "", "") + opts.StageId = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -232,13 +232,13 @@ func TestStreams_CreateOnView(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: valid identifier for [opts.viewId]", func(t *testing.T) { opts := defaultOpts() - opts.ViewId = NewSchemaObjectIdentifier("", "", "") + opts.ViewId = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -304,7 +304,7 @@ func TestStreams_Clone(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -338,7 +338,7 @@ func TestStreams_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -411,7 +411,7 @@ func TestStreams_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -467,7 +467,7 @@ func TestStreams_Describe(t *testing.T) { 
t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/streams_impl_gen.go b/pkg/sdk/streams_impl_gen.go index 689a1b4f4d..de15c02dcc 100644 --- a/pkg/sdk/streams_impl_gen.go +++ b/pkg/sdk/streams_impl_gen.go @@ -60,7 +60,7 @@ func (v *streams) Show(ctx context.Context, request *ShowStreamRequest) ([]Strea func (v *streams) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Stream, error) { streams, err := v.Show(ctx, NewShowStreamRequest(). WithIn(&In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }). WithLike(&Like{Pattern: String(id.Name())})) if err != nil { diff --git a/pkg/sdk/tables_impl.go b/pkg/sdk/tables_impl.go index f320d0a0f6..7736ecc418 100644 --- a/pkg/sdk/tables_impl.go +++ b/pkg/sdk/tables_impl.go @@ -78,7 +78,7 @@ func (v *tables) Show(ctx context.Context, request *ShowTableRequest) ([]Table, } func (v *tables) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Table, error) { - request := NewShowTableRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLikePattern(id.Name()) + request := NewShowTableRequest().WithIn(&In{Schema: id.SchemaId()}).WithLikePattern(id.Name()) returnedTables, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/tables_test.go b/pkg/sdk/tables_test.go index 380c6e8a32..bcd807a75f 100644 --- a/pkg/sdk/tables_test.go +++ b/pkg/sdk/tables_test.go @@ -54,7 +54,7 @@ func TestTableCreate(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableOptions", 
"name")) }) @@ -107,7 +107,7 @@ func TestTableCreate(t *testing.T) { Columns: []TableColumn{{ Name: "a", MaskingPolicy: &ColumnMaskingPolicy{ - Name: NewSchemaObjectIdentifier("", "", ""), + Name: emptySchemaObjectIdentifier, }, }}, } @@ -122,7 +122,7 @@ func TestTableCreate(t *testing.T) { Name: "a", Tags: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("", "", ""), + Name: emptySchemaObjectIdentifier, Value: "v1", }, }, @@ -143,7 +143,7 @@ func TestTableCreate(t *testing.T) { t.Run("validation: rowAccessPolicy's incorrect identifier", func(t *testing.T) { opts := defaultOpts() opts.RowAccessPolicy = &TableRowAccessPolicy{ - Name: NewSchemaObjectIdentifier("", "", ""), + Name: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("TableRowAccessPolicy", "Name")) }) @@ -292,7 +292,7 @@ func TestTableCreate(t *testing.T) { outOfLineConstraint := OutOfLineConstraint{ Type: ColumnConstraintTypeForeignKey, ForeignKey: &OutOfLineForeignKey{ - TableName: NewSchemaObjectIdentifier("", "", ""), + TableName: emptySchemaObjectIdentifier, }, } opts := defaultOptsWithColumnOutOfLineConstraint(&outOfLineConstraint) @@ -371,6 +371,10 @@ func TestTableCreate(t *testing.T) { }) t.Run("with complete options", func(t *testing.T) { + columnTagId1 := randomSchemaObjectIdentifier() + columnTagId2 := randomSchemaObjectIdentifierInSchema(columnTagId1.SchemaId()) + tableTagId1 := randomSchemaObjectIdentifierInSchema(columnTagId1.SchemaId()) + tableTagId2 := randomSchemaObjectIdentifierInSchema(columnTagId1.SchemaId()) columnComment := random.Comment() tableComment := random.Comment() collation := "de" @@ -382,22 +386,22 @@ func TestTableCreate(t *testing.T) { } columnTags := []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "column_tag1"), + Name: columnTagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "column_tag2"), + Name: columnTagId2, Value: "v2", }, } tableTags := []TagAssociation{ { - Name: 
NewSchemaObjectIdentifier("db", "schema", "table_tag1"), + Name: tableTagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "table_tag2"), + Name: tableTagId2, Value: "v2", }, } @@ -476,14 +480,18 @@ func TestTableCreate(t *testing.T) { Comment: &tableComment, } assertOptsValidAndSQLEquals(t, opts, - `CREATE TABLE %s (%s %s CONSTRAINT INLINE_CONSTRAINT PRIMARY KEY NOT NULL COLLATE 'de' IDENTITY START 10 INCREMENT 1 ORDER MASKING POLICY %s USING (FOO, BAR) TAG ("db"."schema"."column_tag1" = 'v1', "db"."schema"."column_tag2" = 'v2') COMMENT '%s', CONSTRAINT OUT_OF_LINE_CONSTRAINT FOREIGN KEY (COLUMN_1, COLUMN_2) REFERENCES %s (COLUMN_3, COLUMN_4) MATCH FULL ON UPDATE SET NULL ON DELETE RESTRICT, UNIQUE (COLUMN_1) ENFORCED DEFERRABLE INITIALLY DEFERRED ENABLE RELY) CLUSTER BY (COLUMN_1, COLUMN_2) ENABLE_SCHEMA_EVOLUTION = true STAGE_FILE_FORMAT = (TYPE = CSV COMPRESSION = AUTO) STAGE_COPY_OPTIONS = (ON_ERROR = SKIP_FILE) DATA_RETENTION_TIME_IN_DAYS = 10 MAX_DATA_EXTENSION_TIME_IN_DAYS = 100 CHANGE_TRACKING = true DEFAULT_DDL_COLLATION = 'en' COPY GRANTS ROW ACCESS POLICY %s ON (COLUMN_1, COLUMN_2) TAG ("db"."schema"."table_tag1" = 'v1', "db"."schema"."table_tag2" = 'v2') COMMENT = '%s'`, + `CREATE TABLE %s (%s %s CONSTRAINT INLINE_CONSTRAINT PRIMARY KEY NOT NULL COLLATE 'de' IDENTITY START 10 INCREMENT 1 ORDER MASKING POLICY %s USING (FOO, BAR) TAG (%s = 'v1', %s = 'v2') COMMENT '%s', CONSTRAINT OUT_OF_LINE_CONSTRAINT FOREIGN KEY (COLUMN_1, COLUMN_2) REFERENCES %s (COLUMN_3, COLUMN_4) MATCH FULL ON UPDATE SET NULL ON DELETE RESTRICT, UNIQUE (COLUMN_1) ENFORCED DEFERRABLE INITIALLY DEFERRED ENABLE RELY) CLUSTER BY (COLUMN_1, COLUMN_2) ENABLE_SCHEMA_EVOLUTION = true STAGE_FILE_FORMAT = (TYPE = CSV COMPRESSION = AUTO) STAGE_COPY_OPTIONS = (ON_ERROR = SKIP_FILE) DATA_RETENTION_TIME_IN_DAYS = 10 MAX_DATA_EXTENSION_TIME_IN_DAYS = 100 CHANGE_TRACKING = true DEFAULT_DDL_COLLATION = 'en' COPY GRANTS ROW ACCESS POLICY %s ON (COLUMN_1, COLUMN_2) TAG (%s = 
'v1', %s = 'v2') COMMENT = '%s'`, id.FullyQualifiedName(), columnName, columnType, maskingPolicy.Name.FullyQualifiedName(), + columnTagId1.FullyQualifiedName(), + columnTagId2.FullyQualifiedName(), columnComment, outOfLineConstraint1.ForeignKey.TableName.FullyQualifiedName(), rowAccessPolicy.Name.FullyQualifiedName(), + tableTagId1.FullyQualifiedName(), + tableTagId2.FullyQualifiedName(), tableComment, ) }) @@ -524,7 +532,7 @@ func TestTableCreateAsSelect(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableAsSelectOptions", "name")) }) @@ -592,7 +600,7 @@ func TestTableCreateUsingTemplate(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableUsingTemplateOptions", "name")) }) @@ -625,13 +633,13 @@ func TestTableCreateLike(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableLikeOptions", "name")) }) t.Run("validation: source table's incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.SourceTable = NewSchemaObjectIdentifier("", "", "") + opts.SourceTable = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableLikeOptions", "SourceTable")) }) @@ -671,13 +679,13 @@ func TestTableCreateClone(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = 
emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableCloneOptions", "name")) }) t.Run("validation: source table's incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.SourceTable = NewSchemaObjectIdentifier("", "", "") + opts.SourceTable = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("createTableCloneOptions", "SourceTable")) }) @@ -727,27 +735,27 @@ func TestTableAlter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("alterTableOptions", "name")) }) t.Run("validation: both NewName and SwapWith are present ", func(t *testing.T) { opts := defaultOpts() - opts.NewName = Pointer(NewSchemaObjectIdentifier("test", "test", "test")) - opts.SwapWith = Pointer(NewSchemaObjectIdentifier("test", "test", "test")) + opts.NewName = Pointer(randomSchemaObjectIdentifier()) + opts.SwapWith = Pointer(randomSchemaObjectIdentifier()) assertOptsInvalidJoinedErrors(t, opts, errExactlyOneOf("alterTableOptions", "NewName", "SwapWith", "ClusteringAction", "ColumnAction", "ConstraintAction", "ExternalTableAction", "SearchOptimizationAction", "Set", "SetTags", "UnsetTags", "Unset", "AddRowAccessPolicy", "DropRowAccessPolicy", "DropAndAddRowAccessPolicy", "DropAllAccessRowPolicies")) }) t.Run("validation: NewName's incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.NewName = Pointer(NewSchemaObjectIdentifier("", "", "")) + opts.NewName = Pointer(emptySchemaObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("alterTableOptions", "NewName")) }) t.Run("validation: SwapWith incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.SwapWith = Pointer(NewSchemaObjectIdentifier("", "", "")) + opts.SwapWith = 
Pointer(emptySchemaObjectIdentifier) assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("alterTableOptions", "SwapWith")) }) @@ -1078,13 +1086,15 @@ func TestTableAlter(t *testing.T) { }) t.Run("alter: set tags", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) columnTags := []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "column_tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "column_tag2"), + Name: tagId2, Value: "v2", }, } @@ -1097,13 +1107,15 @@ func TestTableAlter(t *testing.T) { }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s ALTER COLUMN COLUMN_1 SET TAG "db"."schema"."column_tag1" = 'v1', "db"."schema"."column_tag2" = 'v2'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s ALTER COLUMN COLUMN_1 SET TAG %s = 'v1', %s = 'v2'`, id.FullyQualifiedName(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("alter: unset tags", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) columnTags := []ObjectIdentifier{ - NewSchemaObjectIdentifier("db", "schema", "column_tag1"), - NewSchemaObjectIdentifier("db", "schema", "column_tag2"), + tagId1, + tagId2, } opts := &alterTableOptions{ name: id, @@ -1114,7 +1126,7 @@ func TestTableAlter(t *testing.T) { }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s ALTER COLUMN COLUMN_1 UNSET TAG "db"."schema"."column_tag1", "db"."schema"."column_tag2"`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s ALTER COLUMN COLUMN_1 UNSET TAG %s, %s`, id.FullyQualifiedName(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("alter: drop columns", func(t *testing.T) { @@ -1337,31 +1349,35 @@ func TestTableAlter(t *testing.T) { }) t.Run("set tags", func(t *testing.T) { + tagId1 := 
randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &alterTableOptions{ name: id, SetTags: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "table_tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "table_tag2"), + Name: tagId2, Value: "v2", }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s SET TAG "db"."schema"."table_tag1" = 'v1', "db"."schema"."table_tag2" = 'v2'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s SET TAG %s = 'v1', %s = 'v2'`, id.FullyQualifiedName(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("unset tags", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &alterTableOptions{ name: id, UnsetTags: []ObjectIdentifier{ - NewSchemaObjectIdentifier("db", "schema", "table_tag1"), - NewSchemaObjectIdentifier("db", "schema", "table_tag2"), + tagId1, + tagId2, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s UNSET TAG "db"."schema"."table_tag1", "db"."schema"."table_tag2"`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s UNSET TAG %s, %s`, id.FullyQualifiedName(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("unset: complete options", func(t *testing.T) { @@ -1448,7 +1464,7 @@ func TestTableDrop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("dropTableOptions", "name")) }) @@ -1518,7 +1534,7 @@ func TestTableDescribeColumns(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier 
assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("describeTableColumnsOptions", "name")) }) @@ -1595,7 +1611,7 @@ func TestTableDescribeStage(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, errInvalidIdentifier("describeTableStageOptions", "name")) }) diff --git a/pkg/sdk/tags_impl.go b/pkg/sdk/tags_impl.go index 67b47ecf5d..86392b2154 100644 --- a/pkg/sdk/tags_impl.go +++ b/pkg/sdk/tags_impl.go @@ -34,7 +34,7 @@ func (v *tags) Show(ctx context.Context, request *ShowTagRequest) ([]Tag, error) func (v *tags) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*Tag, error) { request := NewShowTagRequest().WithIn(&In{ - Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }).WithLike(id.Name()) tags, err := v.Show(ctx, request) @@ -115,7 +115,7 @@ func (s *SetTagRequest) toOpts() *setTagOptions { id, ok := o.objectName.(TableColumnIdentifier) if ok { o.objectType = ObjectTypeTable - o.objectName = NewSchemaObjectIdentifier(id.DatabaseName(), id.SchemaName(), id.TableName()) + o.objectName = id.SchemaObjectId() o.column = String(id.Name()) } } @@ -132,7 +132,7 @@ func (s *UnsetTagRequest) toOpts() *unsetTagOptions { id, ok := o.objectName.(TableColumnIdentifier) if ok { o.objectType = ObjectTypeTable - o.objectName = NewSchemaObjectIdentifier(id.DatabaseName(), id.SchemaName(), id.TableName()) + o.objectName = id.SchemaObjectId() o.column = String(id.Name()) } } diff --git a/pkg/sdk/tags_test.go b/pkg/sdk/tags_test.go index 42866b00fb..a72d636dc7 100644 --- a/pkg/sdk/tags_test.go +++ b/pkg/sdk/tags_test.go @@ -1,7 +1,6 @@ package sdk import ( - "fmt" "testing" ) @@ -38,7 +37,7 @@ func TestTagCreate(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = 
NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -59,7 +58,7 @@ func TestTagCreate(t *testing.T) { t.Run("validation: multiple errors", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier opts.IfNotExists = Bool(true) opts.OrReplace = Bool(true) assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier, errOneOf("createTagOptions", "OrReplace", "IfNotExists")) @@ -81,7 +80,7 @@ func TestTagDrop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -111,7 +110,7 @@ func TestTagUndrop(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -259,7 +258,7 @@ func TestTagAlter(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -282,7 +281,7 @@ func TestTagAlter(t *testing.T) { t.Run("validation: invalid new name", func(t *testing.T) { opts := defaultOpts() opts.Rename = &TagRename{ - Name: NewSchemaObjectIdentifier("", "", ""), + Name: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -330,7 +329,7 @@ func TestTagSet(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.objectName = NewSchemaObjectIdentifier("", "", "") + opts.objectName = 
emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -346,16 +345,16 @@ func TestTagSet(t *testing.T) { }) t.Run("set with column", func(t *testing.T) { - objectName := NewTableColumnIdentifier("db1", "schema1", "table1", "column1") - tableName := NewSchemaObjectIdentifier("db1", "schema1", "table1") - request := NewSetTagRequest(ObjectTypeColumn, objectName).WithSetTags([]TagAssociation{ + objectId := randomTableColumnIdentifierInSchemaObject(id) + tagId := randomSchemaObjectIdentifier() + request := NewSetTagRequest(ObjectTypeColumn, objectId).WithSetTags([]TagAssociation{ { - Name: NewAccountObjectIdentifier("tag1"), + Name: tagId, Value: "value1", }, }) opts := request.toOpts() - assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s MODIFY COLUMN "%s" SET TAG "tag1" = 'value1'`, tableName.FullyQualifiedName(), objectName.columnName) + assertOptsValidAndSQLEquals(t, opts, `ALTER TABLE %s MODIFY COLUMN "%s" SET TAG %s = 'value1'`, id.FullyQualifiedName(), objectId.columnName, tagId.FullyQualifiedName()) }) } @@ -375,7 +374,7 @@ func TestTagUnset(t *testing.T) { t.Run("validation: incorrect identifier", func(t *testing.T) { opts := defaultOpts() - opts.objectName = NewSchemaObjectIdentifier("", "", "") + opts.objectName = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -389,17 +388,18 @@ func TestTagUnset(t *testing.T) { }) t.Run("unset with column", func(t *testing.T) { - table, column := NewSchemaObjectIdentifier("db1", "schema1", "table1"), "column1" - objectName := NewObjectIdentifierFromFullyQualifiedName(fmt.Sprintf("%s.%s.%s.%s", table.DatabaseName(), table.SchemaName(), table.Name(), column)) + objectId := randomTableColumnIdentifierInSchemaObject(id) + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) request := UnsetTagRequest{ objectType: ObjectTypeColumn, - objectName: objectName, + objectName: 
objectId, UnsetTags: []ObjectIdentifier{ - NewAccountObjectIdentifier("tag1"), - NewAccountObjectIdentifier("tag2"), + tagId1, + tagId2, }, } opts := request.toOpts() - assertOptsValidAndSQLEquals(t, opts, `ALTER %s %s MODIFY COLUMN "%s" UNSET TAG "tag1", "tag2"`, opts.objectType, table.FullyQualifiedName(), column) + assertOptsValidAndSQLEquals(t, opts, `ALTER %s %s MODIFY COLUMN "%s" UNSET TAG %s, %s`, opts.objectType, id.FullyQualifiedName(), objectId.Name(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) } diff --git a/pkg/sdk/tasks_gen_test.go b/pkg/sdk/tasks_gen_test.go index cc32112fbd..ee312de048 100644 --- a/pkg/sdk/tasks_gen_test.go +++ b/pkg/sdk/tasks_gen_test.go @@ -23,7 +23,7 @@ func TestTasks_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -109,13 +109,13 @@ func TestTasks_Clone(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) t.Run("validation: valid identifier for [opts.sourceTask]", func(t *testing.T) { opts := defaultOpts() - opts.sourceTask = NewSchemaObjectIdentifier("", "", "") + opts.sourceTask = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -150,7 +150,7 @@ func TestTasks_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -334,7 +334,7 @@ func TestTasks_Drop(t *testing.T) { 
t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -393,7 +393,7 @@ func TestTasks_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -420,7 +420,7 @@ func TestTasks_Execute(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/test_ids_test.go b/pkg/sdk/test_ids_test.go deleted file mode 100644 index 7a67683a27..0000000000 --- a/pkg/sdk/test_ids_test.go +++ /dev/null @@ -1,4 +0,0 @@ -package sdk - -// TODO: Add to the generator -var emptyAccountObjectIdentifier = NewAccountObjectIdentifier("") diff --git a/pkg/sdk/testint/alerts_integration_test.go b/pkg/sdk/testint/alerts_integration_test.go index afaeb8a82f..770e34cdb4 100644 --- a/pkg/sdk/testint/alerts_integration_test.go +++ b/pkg/sdk/testint/alerts_integration_test.go @@ -238,8 +238,7 @@ func TestInt_AlertDescribe(t *testing.T) { }) t.Run("when alert does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - _, err := client.Alerts.Describe(ctx, id) + _, err := client.Alerts.Describe(ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } @@ -374,8 +373,7 @@ func TestInt_AlertDrop(t *testing.T) { }) t.Run("when alert does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, 
testSchema(t).Name, "does_not_exist") - err := client.Alerts.Drop(ctx, id, &sdk.DropAlertOptions{}) + err := client.Alerts.Drop(ctx, NonExistingSchemaObjectIdentifier, &sdk.DropAlertOptions{}) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/testint/database_role_integration_test.go b/pkg/sdk/testint/database_role_integration_test.go index 12774c96ee..519ab7674f 100644 --- a/pkg/sdk/testint/database_role_integration_test.go +++ b/pkg/sdk/testint/database_role_integration_test.go @@ -88,7 +88,7 @@ func TestInt_DatabaseRoles(t *testing.T) { }) t.Run("drop database_role: non-existing", func(t *testing.T) { - id := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, "does_not_exist") + id := NonExistingDatabaseObjectIdentifier err := client.DatabaseRoles.Drop(ctx, sdk.NewDropDatabaseRoleRequest(id)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) @@ -199,9 +199,9 @@ func TestInt_DatabaseRoles(t *testing.T) { t.Run("grant and revoke database_role: to database role", func(t *testing.T) { role1 := createDatabaseRole(t) - id1 := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role1.Name) + id1 := testClientHelper().Ids.NewDatabaseObjectIdentifier(role1.Name) role2 := createDatabaseRole(t) - id2 := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role2.Name) + id2 := testClientHelper().Ids.NewDatabaseObjectIdentifier(role2.Name) grantRequest := sdk.NewGrantDatabaseRoleRequest(id1).WithDatabaseRole(id2) err := client.DatabaseRoles.Grant(ctx, grantRequest) @@ -226,7 +226,7 @@ func TestInt_DatabaseRoles(t *testing.T) { t.Run("grant and revoke database_role: to account role", func(t *testing.T) { role := createDatabaseRole(t) - roleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role.Name) + roleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(role.Name) accountRole, accountRoleCleanup := testClientHelper().Role.CreateRole(t) t.Cleanup(accountRoleCleanup) @@ -248,7 +248,7 @@ func TestInt_DatabaseRoles(t *testing.T) { 
t.Run("grant and revoke database_role: to share", func(t *testing.T) { role := createDatabaseRole(t) - roleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role.Name) + roleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(role.Name) share, shareCleanup := testClientHelper().Share.CreateShare(t) t.Cleanup(shareCleanup) diff --git a/pkg/sdk/testint/databases_integration_test.go b/pkg/sdk/testint/databases_integration_test.go index 1e52fce35c..5342c01f72 100644 --- a/pkg/sdk/testint/databases_integration_test.go +++ b/pkg/sdk/testint/databases_integration_test.go @@ -80,7 +80,9 @@ func TestInt_DatabasesCreate(t *testing.T) { MaxDataExtensionTimeInDays: sdk.Int(1), ExternalVolume: &externalVolume, Catalog: &catalog, + ReplaceInvalidCharacters: sdk.Bool(true), DefaultDDLCollation: sdk.String("en_US"), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyCompatible), LogLevel: sdk.Pointer(sdk.LogLevelInfo), TraceLevel: sdk.Pointer(sdk.TraceLevelOnEvent), Comment: sdk.String(comment), @@ -124,6 +126,14 @@ func TestInt_DatabasesCreate(t *testing.T) { assert.NoError(t, err) assert.Equal(t, string(sdk.TraceLevelOnEvent), traceLevelParam.Value) + ignoreInvalidCharactersParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterReplaceInvalidCharacters, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseID}) + assert.NoError(t, err) + assert.Equal(t, "true", ignoreInvalidCharactersParam.Value) + + serializationPolicyParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterStorageSerializationPolicy, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseID}) + assert.NoError(t, err) + assert.Equal(t, string(sdk.StorageSerializationPolicyCompatible), serializationPolicyParam.Value) + tag1Value, err := client.SystemFunctions.GetTag(ctx, tagTest.ID(), database.ID(), sdk.ObjectTypeDatabase) require.NoError(t, err) assert.Equal(t, "v1", tag1Value) @@ -139,13 +149,7 @@ func 
TestInt_DatabasesCreateShared(t *testing.T) { secondaryClient := testSecondaryClient(t) ctx := testContext(t) - databaseTest, databaseCleanup := testClientHelper().Database.CreateDatabase(t) - t.Cleanup(databaseCleanup) - - schemaTest, schemaCleanup := testClientHelper().Schema.CreateSchemaInDatabase(t, databaseTest.ID()) - t.Cleanup(schemaCleanup) - - testTag, testTagCleanup := testClientHelper().Tag.CreateTagInSchema(t, schemaTest.ID()) + testTag, testTagCleanup := testClientHelper().Tag.CreateTag(t) t.Cleanup(testTagCleanup) externalVolume, externalVolumeCleanup := testClientHelper().ExternalVolume.Create(t) @@ -161,6 +165,8 @@ func TestInt_DatabasesCreateShared(t *testing.T) { sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) t.Cleanup(sharedDatabaseCleanup) + databaseId := sharedDatabase.ID() + err := secondaryClient.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ Database: sharedDatabase.ID(), }, shareTest.ID()) @@ -183,15 +189,17 @@ func TestInt_DatabasesCreateShared(t *testing.T) { require.NoError(t, err) comment := random.Comment() - err = client.Databases.CreateShared(ctx, sharedDatabase.ID(), shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{ - Transient: sdk.Bool(true), - IfNotExists: sdk.Bool(true), - ExternalVolume: &externalVolume, - Catalog: &catalog, - DefaultDDLCollation: sdk.String("en_US"), - LogLevel: sdk.Pointer(sdk.LogLevelDebug), - TraceLevel: sdk.Pointer(sdk.TraceLevelAlways), - Comment: sdk.String(comment), + err = client.Databases.CreateShared(ctx, databaseId, shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{ + Transient: sdk.Bool(true), + IfNotExists: sdk.Bool(true), + ExternalVolume: &externalVolume, + Catalog: &catalog, + ReplaceInvalidCharacters: sdk.Bool(true), + DefaultDDLCollation: sdk.String("en_US"), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyOptimized), + LogLevel: 
sdk.Pointer(sdk.LogLevelDebug), + TraceLevel: sdk.Pointer(sdk.TraceLevelAlways), + Comment: sdk.String(comment), Tag: []sdk.TagAssociation{ { Name: testTag.ID(), @@ -200,30 +208,38 @@ func TestInt_DatabasesCreateShared(t *testing.T) { }, }) require.NoError(t, err) - t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID())) + t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, databaseId)) - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + database, err := client.Databases.ShowByID(ctx, databaseId) require.NoError(t, err) - assert.Equal(t, sharedDatabase.ID().Name(), database.Name) + assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, comment, database.Comment) - externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, externalVolume.Name(), externalVolumeParam.Value) - catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, catalog.Name(), catalogParam.Value) - logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.LogLevelDebug), 
logLevelParam.Value) - traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.TraceLevelAlways), traceLevelParam.Value) + ignoreInvalidCharactersParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterReplaceInvalidCharacters, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, "true", ignoreInvalidCharactersParam.Value) + + serializationPolicyParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterStorageSerializationPolicy, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, string(sdk.StorageSerializationPolicyOptimized), serializationPolicyParam.Value) + tag1Value, err := client.SystemFunctions.GetTag(ctx, testTag.ID(), database.ID(), sdk.ObjectTypeDatabase) require.NoError(t, err) assert.Equal(t, "v1", tag1Value) @@ -237,6 +253,8 @@ func TestInt_DatabasesCreateSecondary(t *testing.T) { sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) t.Cleanup(sharedDatabaseCleanup) + databaseId := sharedDatabase.ID() + err := secondaryClient.Databases.AlterReplication(ctx, sharedDatabase.ID(), &sdk.AlterDatabaseReplicationOptions{ EnableReplication: &sdk.EnableReplication{ ToAccounts: []sdk.AccountIdentifier{ @@ -253,48 +271,59 @@ func TestInt_DatabasesCreateSecondary(t *testing.T) { catalog, catalogCleanup := testClientHelper().CatalogIntegration.Create(t) t.Cleanup(catalogCleanup) - externalDatabaseId := sdk.NewExternalObjectIdentifier(secondaryTestClientHelper().Ids.AccountIdentifierWithLocator(), sharedDatabase.ID()) + externalDatabaseId := 
sdk.NewExternalObjectIdentifier(secondaryTestClientHelper().Account.GetAccountIdentifier(t), sharedDatabase.ID()) + comment := random.Comment() - err = client.Databases.CreateSecondary(ctx, sharedDatabase.ID(), externalDatabaseId, &sdk.CreateSecondaryDatabaseOptions{ + err = client.Databases.CreateSecondary(ctx, databaseId, externalDatabaseId, &sdk.CreateSecondaryDatabaseOptions{ IfNotExists: sdk.Bool(true), DataRetentionTimeInDays: sdk.Int(1), MaxDataExtensionTimeInDays: sdk.Int(10), ExternalVolume: &externalVolume, Catalog: &catalog, + ReplaceInvalidCharacters: sdk.Bool(true), DefaultDDLCollation: sdk.String("en_US"), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyOptimized), LogLevel: sdk.Pointer(sdk.LogLevelDebug), TraceLevel: sdk.Pointer(sdk.TraceLevelAlways), Comment: sdk.String(comment), }) require.NoError(t, err) - t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID())) + t.Cleanup(testClientHelper().Database.DropDatabaseFunc(t, databaseId)) - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + database, err := client.Databases.ShowByID(ctx, databaseId) require.NoError(t, err) - assert.Equal(t, sharedDatabase.ID().Name(), database.Name) + assert.Equal(t, databaseId.Name(), database.Name) assert.Equal(t, 1, database.RetentionTime) assert.Equal(t, comment, database.Comment) - param, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterMaxDataExtensionTimeInDays, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + param, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterMaxDataExtensionTimeInDays, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, "10", param.Value) - externalVolumeParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + externalVolumeParam, err := 
client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterExternalVolume, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, externalVolume.Name(), externalVolumeParam.Value) - catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + catalogParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterCatalog, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, catalog.Name(), catalogParam.Value) - logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + logLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterLogLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.LogLevelDebug), logLevelParam.Value) - traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: sharedDatabase.ID()}) + traceLevelParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterTraceLevel, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) assert.NoError(t, err) assert.Equal(t, string(sdk.TraceLevelAlways), traceLevelParam.Value) + + ignoreInvalidCharactersParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterReplaceInvalidCharacters, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + assert.Equal(t, "true", ignoreInvalidCharactersParam.Value) + + serializationPolicyParam, err := client.Parameters.ShowObjectParameter(ctx, sdk.ObjectParameterStorageSerializationPolicy, sdk.Object{ObjectType: sdk.ObjectTypeDatabase, Name: databaseId}) + assert.NoError(t, err) + 
assert.Equal(t, string(sdk.StorageSerializationPolicyOptimized), serializationPolicyParam.Value) } func TestInt_DatabasesAlter(t *testing.T) { @@ -302,14 +331,19 @@ func TestInt_DatabasesAlter(t *testing.T) { secondaryClient := testSecondaryClient(t) ctx := testContext(t) - queryParameterValueForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier, parameter sdk.ObjectParameter) string { + queryParameterForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier, parameter sdk.ObjectParameter) *sdk.Parameter { t.Helper() value, err := client.Parameters.ShowObjectParameter(ctx, parameter, sdk.Object{ ObjectType: sdk.ObjectTypeDatabase, Name: id, }) require.NoError(t, err) - return value.Value + return value + } + + queryParameterValueForDatabase := func(t *testing.T, id sdk.AccountObjectIdentifier, parameter sdk.ObjectParameter) string { + t.Helper() + return queryParameterForDatabase(t, id, parameter).Value } testCases := []struct { @@ -334,6 +368,8 @@ func TestInt_DatabasesAlter(t *testing.T) { sharedDatabase, sharedDatabaseCleanup := secondaryTestClientHelper().Database.CreateDatabase(t) t.Cleanup(sharedDatabaseCleanup) + databaseId := sharedDatabase.ID() + err := secondaryClient.Grants.GrantPrivilegeToShare(ctx, []sdk.ObjectPrivilege{sdk.ObjectPrivilegeUsage}, &sdk.ShareGrantOn{ Database: sharedDatabase.ID(), }, shareTest.ID()) @@ -355,13 +391,13 @@ func TestInt_DatabasesAlter(t *testing.T) { }) require.NoError(t, err) - err = client.Databases.CreateShared(ctx, sharedDatabase.ID(), shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{}) + err = client.Databases.CreateShared(ctx, databaseId, shareTest.ExternalID(), &sdk.CreateSharedDatabaseOptions{}) require.NoError(t, err) - database, err := client.Databases.ShowByID(ctx, sharedDatabase.ID()) + database, err := client.Databases.ShowByID(ctx, databaseId) require.NoError(t, err) - return database, testClientHelper().Database.DropDatabaseFunc(t, sharedDatabase.ID()) + return database, 
testClientHelper().Database.DropDatabaseFunc(t, database.ID()) }, }, { @@ -442,6 +478,40 @@ func TestInt_DatabasesAlter(t *testing.T) { require.Equal(t, string(sdk.TraceLevelOff), queryParameterValueForDatabase(t, databaseTest.ID(), sdk.ObjectParameterTraceLevel)) }) + t.Run(fmt.Sprintf("Database: %s - setting and unsetting replace_invalid_characters and storage_serialization_policy", testCase.DatabaseType), func(t *testing.T) { + if testCase.DatabaseType == "From Share" { + t.Skipf("Skipping database test because from share is not supported") + } + + databaseTest, databaseTestCleanup := testCase.CreateFn(t) + t.Cleanup(databaseTestCleanup) + + err := client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ + Set: &sdk.DatabaseSet{ + ReplaceInvalidCharacters: sdk.Bool(true), + StorageSerializationPolicy: sdk.Pointer(sdk.StorageSerializationPolicyCompatible), + }, + }) + require.NoError(t, err) + + require.Equal(t, "true", queryParameterValueForDatabase(t, databaseTest.ID(), sdk.ObjectParameterReplaceInvalidCharacters)) + require.Equal(t, string(sdk.StorageSerializationPolicyCompatible), queryParameterValueForDatabase(t, databaseTest.ID(), sdk.ObjectParameterStorageSerializationPolicy)) + + err = client.Databases.Alter(ctx, databaseTest.ID(), &sdk.AlterDatabaseOptions{ + Unset: &sdk.DatabaseUnset{ + ReplaceInvalidCharacters: sdk.Bool(true), + StorageSerializationPolicy: sdk.Bool(true), + }, + }) + require.NoError(t, err) + + replaceInvalidCharactersParam := queryParameterForDatabase(t, databaseTest.ID(), sdk.ObjectParameterReplaceInvalidCharacters) + storageSerializationPolicyParam := queryParameterForDatabase(t, databaseTest.ID(), sdk.ObjectParameterStorageSerializationPolicy) + + require.Equal(t, replaceInvalidCharactersParam.Default, replaceInvalidCharactersParam.Value) + require.Equal(t, storageSerializationPolicyParam.Default, storageSerializationPolicyParam.Value) + }) + t.Run(fmt.Sprintf("Database: %s - setting and unsetting external volume 
and catalog", testCase.DatabaseType), func(t *testing.T) { if testCase.DatabaseType == "From Share" { t.Skipf("Skipping database test because from share is not supported") diff --git a/pkg/sdk/testint/dynamic_table_integration_test.go b/pkg/sdk/testint/dynamic_table_integration_test.go index 3facd1a081..163133ab59 100644 --- a/pkg/sdk/testint/dynamic_table_integration_test.go +++ b/pkg/sdk/testint/dynamic_table_integration_test.go @@ -120,8 +120,7 @@ func TestInt_DynamicTableDescribe(t *testing.T) { }) t.Run("when dynamic table does not exist", func(t *testing.T) { - name := sdk.NewSchemaObjectIdentifier("my_db", "my_schema", "does_not_exist") - _, err := client.DynamicTables.Describe(ctx, sdk.NewDescribeDynamicTableRequest(name)) + _, err := client.DynamicTables.Describe(ctx, sdk.NewDescribeDynamicTableRequest(NonExistingSchemaObjectIdentifier)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/testint/event_tables_integration_test.go b/pkg/sdk/testint/event_tables_integration_test.go index 3db615c31c..ec43a812a9 100644 --- a/pkg/sdk/testint/event_tables_integration_test.go +++ b/pkg/sdk/testint/event_tables_integration_test.go @@ -218,10 +218,9 @@ func TestInt_EventTables(t *testing.T) { table, tableCleanup := testClientHelper().Table.CreateTable(t) t.Cleanup(tableCleanup) - id := sdk.NewSchemaObjectIdentifier(table.DatabaseName, table.SchemaName, table.Name) // add policy - alterRequest := sdk.NewAlterEventTableRequest(id).WithAddRowAccessPolicy(sdk.NewEventTableAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []string{"id"})) + alterRequest := sdk.NewAlterEventTableRequest(table.ID()).WithAddRowAccessPolicy(sdk.NewEventTableAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []string{"id"})) err := client.EventTables.Alter(ctx, alterRequest) require.NoError(t, err) @@ -234,7 +233,7 @@ func TestInt_EventTables(t *testing.T) { assert.Equal(t, "ACTIVE", e.PolicyStatus) // remove policy - alterRequest = 
sdk.NewAlterEventTableRequest(id).WithDropRowAccessPolicy(sdk.NewEventTableDropRowAccessPolicyRequest(rowAccessPolicy.ID())) + alterRequest = sdk.NewAlterEventTableRequest(table.ID()).WithDropRowAccessPolicy(sdk.NewEventTableDropRowAccessPolicyRequest(rowAccessPolicy.ID())) err = client.EventTables.Alter(ctx, alterRequest) require.NoError(t, err) @@ -242,7 +241,7 @@ func TestInt_EventTables(t *testing.T) { require.Error(t, err, "no rows in result set") // add policy again - alterRequest = sdk.NewAlterEventTableRequest(id).WithAddRowAccessPolicy(sdk.NewEventTableAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []string{"id"})) + alterRequest = sdk.NewAlterEventTableRequest(table.ID()).WithAddRowAccessPolicy(sdk.NewEventTableAddRowAccessPolicyRequest(rowAccessPolicy.ID(), []string{"id"})) err = client.EventTables.Alter(ctx, alterRequest) require.NoError(t, err) @@ -251,7 +250,7 @@ func TestInt_EventTables(t *testing.T) { assert.Equal(t, rowAccessPolicy.ID().Name(), e.PolicyName) // drop and add other policy simultaneously - alterRequest = sdk.NewAlterEventTableRequest(id).WithDropAndAddRowAccessPolicy(sdk.NewEventTableDropAndAddRowAccessPolicyRequest( + alterRequest = sdk.NewAlterEventTableRequest(table.ID()).WithDropAndAddRowAccessPolicy(sdk.NewEventTableDropAndAddRowAccessPolicyRequest( *sdk.NewEventTableDropRowAccessPolicyRequest(rowAccessPolicy.ID()), *sdk.NewEventTableAddRowAccessPolicyRequest(rowAccessPolicy2.ID(), []string{"id"}), )) @@ -263,7 +262,7 @@ func TestInt_EventTables(t *testing.T) { assert.Equal(t, rowAccessPolicy2.ID().Name(), e.PolicyName) // drop all policies - alterRequest = sdk.NewAlterEventTableRequest(id).WithDropAllRowAccessPolicies(sdk.Bool(true)) + alterRequest = sdk.NewAlterEventTableRequest(table.ID()).WithDropAllRowAccessPolicies(sdk.Bool(true)) err = client.EventTables.Alter(ctx, alterRequest) require.NoError(t, err) diff --git a/pkg/sdk/testint/external_functions_integration_test.go 
b/pkg/sdk/testint/external_functions_integration_test.go index a36b76fb93..9787dd7f2e 100644 --- a/pkg/sdk/testint/external_functions_integration_test.go +++ b/pkg/sdk/testint/external_functions_integration_test.go @@ -15,8 +15,6 @@ func TestInt_ExternalFunctions(t *testing.T) { defaultDataTypes := []sdk.DataType{sdk.DataTypeVARCHAR} - databaseTest, schemaTest := testDb(t), testSchema(t) - integration, integrationCleanup := testClientHelper().ApiIntegration.CreateApiIntegration(t) t.Cleanup(integrationCleanup) @@ -27,7 +25,8 @@ func TestInt_ExternalFunctions(t *testing.T) { } } - createExternalFunction := func(t *testing.T) *sdk.ExternalFunction { + // TODO [SNOW-999049]: id returned on purpose; address during identifiers rework + createExternalFunction := func(t *testing.T) (*sdk.ExternalFunction, sdk.SchemaObjectIdentifier) { t.Helper() id := testClientHelper().Ids.RandomSchemaObjectIdentifierWithArguments(defaultDataTypes) argument := sdk.NewExternalFunctionArgumentRequest("x", defaultDataTypes[0]) @@ -42,7 +41,7 @@ func TestInt_ExternalFunctions(t *testing.T) { e, err := client.ExternalFunctions.ShowByID(ctx, id) require.NoError(t, err) - return e + return e, id } assertExternalFunction := func(t *testing.T, id sdk.SchemaObjectIdentifier, secure bool) { @@ -123,9 +122,7 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("alter external function: set api integration", func(t *testing.T) { - e := createExternalFunction(t) - e.ID() - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) + _, id := createExternalFunction(t) set := sdk.NewExternalFunctionSetRequest(). 
WithApiIntegration(sdk.Pointer(integration.ID())) request := sdk.NewAlterExternalFunctionRequest(id, defaultDataTypes).WithSet(set) @@ -136,9 +133,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("alter external function: set headers", func(t *testing.T) { - e := createExternalFunction(t) + _, id := createExternalFunction(t) - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) headers := []sdk.ExternalFunctionHeaderRequest{ { Name: "measure", @@ -153,9 +149,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("alter external function: set context headers", func(t *testing.T) { - e := createExternalFunction(t) + _, id := createExternalFunction(t) - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) ch := []sdk.ExternalFunctionContextHeaderRequest{ { ContextFunction: "CURRENT_DATE", @@ -172,9 +167,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("alter external function: set compression", func(t *testing.T) { - e := createExternalFunction(t) + _, id := createExternalFunction(t) - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) set := sdk.NewExternalFunctionSetRequest().WithCompression(sdk.String("AUTO")) request := sdk.NewAlterExternalFunctionRequest(id, defaultDataTypes).WithSet(set) err := client.ExternalFunctions.Alter(ctx, request) @@ -183,9 +177,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("alter external function: set max batch rows", func(t *testing.T) { - e := createExternalFunction(t) + _, id := createExternalFunction(t) - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) set := sdk.NewExternalFunctionSetRequest().WithMaxBatchRows(sdk.Int(20)) request := sdk.NewAlterExternalFunctionRequest(id, defaultDataTypes).WithSet(set) err := client.ExternalFunctions.Alter(ctx, 
request) @@ -194,9 +187,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("alter external function: unset", func(t *testing.T) { - e := createExternalFunction(t) + _, id := createExternalFunction(t) - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) unset := sdk.NewExternalFunctionUnsetRequest(). WithComment(sdk.Bool(true)). WithHeaders(sdk.Bool(true)) @@ -208,8 +200,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("show external function: with like", func(t *testing.T) { - e1 := createExternalFunction(t) - e2 := createExternalFunction(t) + e1, _ := createExternalFunction(t) + e2, _ := createExternalFunction(t) es, err := client.ExternalFunctions.Show(ctx, sdk.NewShowExternalFunctionRequest().WithLike(&sdk.Like{Pattern: sdk.String(e1.Name)})) require.NoError(t, err) @@ -223,7 +215,7 @@ func TestInt_ExternalFunctions(t *testing.T) { otherDb, otherDbCleanup := testClientHelper().Database.CreateDatabase(t) t.Cleanup(otherDbCleanup) - e1 := createExternalFunction(t) + e1, _ := createExternalFunction(t) es, err := client.ExternalFunctions.Show(ctx, sdk.NewShowExternalFunctionRequest().WithIn(&sdk.In{Schema: e1.ID().SchemaId()})) require.NoError(t, err) @@ -248,9 +240,8 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("show external function by id", func(t *testing.T) { - e := createExternalFunction(t) + e, id := createExternalFunction(t) - id := sdk.NewSchemaObjectIdentifierWithArguments(databaseTest.Name, schemaTest.Name, e.Name, defaultDataTypes) es, err := client.ExternalFunctions.ShowByID(ctx, id) require.NoError(t, err) require.Equal(t, *e, *es) @@ -260,10 +251,9 @@ func TestInt_ExternalFunctions(t *testing.T) { }) t.Run("describe external function", func(t *testing.T) { - e := createExternalFunction(t) - id := e.ID() + e, _ := createExternalFunction(t) - request := sdk.NewDescribeExternalFunctionRequest(id, []sdk.DataType{sdk.DataTypeVARCHAR}) + request := 
sdk.NewDescribeExternalFunctionRequest(e.ID(), []sdk.DataType{sdk.DataTypeVARCHAR}) details, err := client.ExternalFunctions.Describe(ctx, request) require.NoError(t, err) pairs := make(map[string]string) diff --git a/pkg/sdk/testint/external_tables_integration_test.go b/pkg/sdk/testint/external_tables_integration_test.go index 62e832eba8..801cc804ae 100644 --- a/pkg/sdk/testint/external_tables_integration_test.go +++ b/pkg/sdk/testint/external_tables_integration_test.go @@ -15,7 +15,7 @@ func TestInt_ExternalTables(t *testing.T) { client := testClient(t) ctx := testContext(t) - stageID := sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, "EXTERNAL_TABLE_STAGE") + stageID := testClientHelper().Ids.RandomSchemaObjectIdentifier() stageLocation := fmt.Sprintf("@%s", stageID.FullyQualifiedName()) _, stageCleanup := testClientHelper().Stage.CreateStageWithURL(t, stageID) t.Cleanup(stageCleanup) @@ -38,16 +38,16 @@ func TestInt_ExternalTables(t *testing.T) { sdk.NewExternalTableColumnRequest("part_date", sdk.DataTypeDate, "parse_json(metadata$external_table_partition):weather_date::date"), }...) - minimalCreateExternalTableReq := func(name string) *sdk.CreateExternalTableRequest { + minimalCreateExternalTableReq := func(id sdk.SchemaObjectIdentifier) *sdk.CreateExternalTableRequest { return sdk.NewCreateExternalTableRequest( - sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, name), + id, stageLocation, ).WithFileFormat(*sdk.NewExternalTableFileFormatRequest().WithFileFormatType(sdk.ExternalTableFileFormatTypeJSON)) } - createExternalTableWithManualPartitioningReq := func(name string) *sdk.CreateWithManualPartitioningExternalTableRequest { + createExternalTableWithManualPartitioningReq := func(id sdk.SchemaObjectIdentifier) *sdk.CreateWithManualPartitioningExternalTableRequest { return sdk.NewCreateWithManualPartitioningExternalTableRequest( - sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, name), + id, stageLocation, ). 
WithFileFormat(*sdk.NewExternalTableFileFormatRequest().WithFileFormatType(sdk.ExternalTableFileFormatTypeJSON)). @@ -61,24 +61,22 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Create: minimal", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(name)) + err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) require.NoError(t, err) externalTable, err := client.ExternalTables.ShowByID(ctx, externalTableID) require.NoError(t, err) - assert.Equal(t, name, externalTable.Name) + assert.Equal(t, externalTableID.Name(), externalTable.Name) }) t.Run("Create: with raw file format", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, sdk.NewCreateExternalTableRequest(sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, name), stageLocation).WithRawFileFormat("TYPE = JSON")) + err := client.ExternalTables.Create(ctx, sdk.NewCreateExternalTableRequest(externalTableID, stageLocation).WithRawFileFormat("TYPE = JSON")) require.NoError(t, err) externalTable, err := client.ExternalTables.ShowByID(ctx, externalTableID) require.NoError(t, err) - assert.Equal(t, name, externalTable.Name) + assert.Equal(t, externalTableID.Name(), externalTable.Name) }) t.Run("Create: complete", func(t *testing.T) { @@ -135,7 +133,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Create with manual partitioning: complete", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() name := externalTableID.Name() - err := client.ExternalTables.CreateWithManualPartitioning(ctx, createExternalTableWithManualPartitioningReq(name)) + err := client.ExternalTables.CreateWithManualPartitioning(ctx, 
createExternalTableWithManualPartitioningReq(externalTableID)) require.NoError(t, err) externalTable, err := client.ExternalTables.ShowByID(ctx, externalTableID) @@ -171,8 +169,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Alter: refresh", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(name)) + err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) require.NoError(t, err) err = client.ExternalTables.Alter( @@ -186,10 +183,9 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Alter: add files", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() err := client.ExternalTables.Create( ctx, - minimalCreateExternalTableReq(name). + minimalCreateExternalTableReq(externalTableID). WithPattern("weather-nyc/weather_2_3_0.json.gz"), ) require.NoError(t, err) @@ -205,10 +201,9 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Alter: remove files", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() err := client.ExternalTables.Create( ctx, - minimalCreateExternalTableReq(name). + minimalCreateExternalTableReq(externalTableID). 
WithPattern("weather-nyc/weather_2_3_0.json.gz"), ) require.NoError(t, err) @@ -232,8 +227,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Alter: set auto refresh", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(name)) + err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) require.NoError(t, err) err = client.ExternalTables.Alter( @@ -292,8 +286,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Alter: add partitions", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.CreateWithManualPartitioning(ctx, createExternalTableWithManualPartitioningReq(name)) + err := client.ExternalTables.CreateWithManualPartitioning(ctx, createExternalTableWithManualPartitioningReq(externalTableID)) require.NoError(t, err) err = client.ExternalTables.AlterPartitions( @@ -308,8 +301,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Alter: drop partitions", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.CreateWithManualPartitioning(ctx, createExternalTableWithManualPartitioningReq(name)) + err := client.ExternalTables.CreateWithManualPartitioning(ctx, createExternalTableWithManualPartitioningReq(externalTableID)) require.NoError(t, err) err = client.ExternalTables.AlterPartitions( @@ -333,8 +325,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Drop", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(name)) + err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) require.NoError(t, 
err) err = client.ExternalTables.Drop( @@ -352,7 +343,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Show", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(name)) + err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) require.NoError(t, err) et, err := client.ExternalTables.Show( @@ -371,8 +362,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Describe: columns", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - req := minimalCreateExternalTableReq(name) + req := minimalCreateExternalTableReq(externalTableID) err := client.ExternalTables.Create(ctx, req) require.NoError(t, err) @@ -397,8 +387,7 @@ func TestInt_ExternalTables(t *testing.T) { t.Run("Describe: stage", func(t *testing.T) { externalTableID := testClientHelper().Ids.RandomSchemaObjectIdentifier() - name := externalTableID.Name() - err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(name)) + err := client.ExternalTables.Create(ctx, minimalCreateExternalTableReq(externalTableID)) require.NoError(t, err) d, err := client.ExternalTables.DescribeStage(ctx, sdk.NewDescribeExternalTableStageRequest(externalTableID)) diff --git a/pkg/sdk/testint/grants_integration_test.go b/pkg/sdk/testint/grants_integration_test.go index cdf84c8402..b352e452c9 100644 --- a/pkg/sdk/testint/grants_integration_test.go +++ b/pkg/sdk/testint/grants_integration_test.go @@ -234,12 +234,10 @@ func TestInt_GrantAndRevokePrivilegesToAccountRole(t *testing.T) { }) t.Run("grant and revoke on all pipes", func(t *testing.T) { - schema := testSchema(t) - - table, tableCleanup := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) + table, tableCleanup := testClientHelper().Table.CreateTable(t) t.Cleanup(tableCleanup) - stage, stageCleanup 
:= testClientHelper().Stage.CreateStageInSchema(t, sdk.NewDatabaseObjectIdentifier(testDb(t).Name, schema.Name)) + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) t.Cleanup(stageCleanup) pipe, pipeCleanup := testClientHelper().Pipe.CreatePipe(t, createPipeCopyStatement(t, table, stage)) @@ -260,7 +258,7 @@ func TestInt_GrantAndRevokePrivilegesToAccountRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, @@ -280,7 +278,7 @@ func TestInt_GrantAndRevokePrivilegesToAccountRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, @@ -293,12 +291,10 @@ func TestInt_GrantAndRevokePrivilegesToAccountRole(t *testing.T) { }) t.Run("grant and revoke on all pipes with multiple errors", func(t *testing.T) { - schema := testSchema(t) - - table, tableCleanup := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) + table, tableCleanup := testClientHelper().Table.CreateTable(t) t.Cleanup(tableCleanup) - stage, stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, sdk.NewDatabaseObjectIdentifier(testDb(t).Name, schema.Name)) + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) t.Cleanup(stageCleanup) _, pipeCleanup := testClientHelper().Pipe.CreatePipe(t, createPipeCopyStatement(t, table, stage)) @@ -319,7 +315,7 @@ func TestInt_GrantAndRevokePrivilegesToAccountRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, @@ -340,7 +336,7 @@ func 
TestInt_GrantAndRevokePrivilegesToAccountRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, @@ -388,7 +384,7 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { databaseRole, databaseRoleCleanup := testClientHelper().DatabaseRole.CreateDatabaseRole(t) t.Cleanup(databaseRoleCleanup) - databaseRoleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, databaseRole.Name) + databaseRoleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(databaseRole.Name) privileges := &sdk.DatabaseRoleGrantPrivileges{ DatabasePrivileges: []sdk.AccountObjectPrivilege{sdk.AccountObjectPrivilegeCreateSchema}, @@ -436,7 +432,7 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { databaseRole, databaseRoleCleanup := testClientHelper().DatabaseRole.CreateDatabaseRole(t) t.Cleanup(databaseRoleCleanup) - databaseRoleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, databaseRole.Name) + databaseRoleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(databaseRole.Name) privileges := &sdk.DatabaseRoleGrantPrivileges{ SchemaPrivileges: []sdk.SchemaPrivilege{sdk.SchemaPrivilegeCreateAlert}, @@ -486,7 +482,7 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { databaseRole, databaseRoleCleanup := testClientHelper().DatabaseRole.CreateDatabaseRole(t) t.Cleanup(databaseRoleCleanup) - databaseRoleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, databaseRole.Name) + databaseRoleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(databaseRole.Name) table, _ := testClientHelper().Table.CreateTable(t) privileges := &sdk.DatabaseRoleGrantPrivileges{ @@ -540,7 +536,7 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { databaseRole, databaseRoleCleanup := testClientHelper().DatabaseRole.CreateDatabaseRole(t) 
t.Cleanup(databaseRoleCleanup) - databaseRoleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, databaseRole.Name) + databaseRoleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(databaseRole.Name) privileges := &sdk.DatabaseRoleGrantPrivileges{ SchemaObjectPrivileges: []sdk.SchemaObjectPrivilege{sdk.SchemaObjectPrivilegeSelect}, @@ -584,12 +580,10 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { }) t.Run("grant and revoke on all pipes", func(t *testing.T) { - schema := testSchema(t) - - table, tableCleanup := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) + table, tableCleanup := testClientHelper().Table.CreateTable(t) t.Cleanup(tableCleanup) - stage, stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, sdk.NewDatabaseObjectIdentifier(testDb(t).Name, schema.Name)) + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) t.Cleanup(stageCleanup) pipe, pipeCleanup := testClientHelper().Pipe.CreatePipe(t, createPipeCopyStatement(t, table, stage)) @@ -610,11 +604,11 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, - sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role.Name), + testClientHelper().Ids.NewDatabaseObjectIdentifier(role.Name), &sdk.GrantPrivilegesToDatabaseRoleOptions{}, ) require.NoError(t, err) @@ -630,11 +624,11 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, - sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role.Name), + testClientHelper().Ids.NewDatabaseObjectIdentifier(role.Name), 
&sdk.RevokePrivilegesFromDatabaseRoleOptions{}, ) require.NoError(t, err) @@ -643,12 +637,10 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { }) t.Run("grant and revoke on all pipes with multiple errors", func(t *testing.T) { - schema := testSchema(t) - - table, tableCleanup := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) + table, tableCleanup := testClientHelper().Table.CreateTable(t) t.Cleanup(tableCleanup) - stage, stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, sdk.NewDatabaseObjectIdentifier(testDb(t).Name, schema.Name)) + stage, stageCleanup := testClientHelper().Stage.CreateStage(t) t.Cleanup(stageCleanup) _, pipeCleanup := testClientHelper().Pipe.CreatePipe(t, createPipeCopyStatement(t, table, stage)) @@ -669,11 +661,11 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, - sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role.Name), + testClientHelper().Ids.NewDatabaseObjectIdentifier(role.Name), &sdk.GrantPrivilegesToDatabaseRoleOptions{}, ) require.Error(t, err) @@ -690,11 +682,11 @@ func TestInt_GrantAndRevokePrivilegesToDatabaseRole(t *testing.T) { SchemaObject: &sdk.GrantOnSchemaObject{ All: &sdk.GrantOnSchemaObjectIn{ PluralObjectType: sdk.PluralObjectTypePipes, - InSchema: sdk.Pointer(schema.ID()), + InSchema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, }, - sdk.NewDatabaseObjectIdentifier(testDb(t).Name, role.Name), + testClientHelper().Ids.NewDatabaseObjectIdentifier(role.Name), &sdk.RevokePrivilegesFromDatabaseRoleOptions{}, ) require.Error(t, err) @@ -890,7 +882,7 @@ func TestInt_GrantOwnership(t *testing.T) { }, &sdk.AccountRoleGrantOn{ Schema: &sdk.GrantOnSchema{ - Schema: sdk.Pointer(sdk.NewDatabaseObjectIdentifier(TestDatabaseName, 
TestSchemaName)), + Schema: sdk.Pointer(testClientHelper().Ids.SchemaId()), }, }, roleId, @@ -1018,7 +1010,7 @@ func TestInt_GrantOwnership(t *testing.T) { databaseRole, databaseRoleCleanup := testClientHelper().DatabaseRole.CreateDatabaseRole(t) t.Cleanup(databaseRoleCleanup) - databaseRoleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, databaseRole.Name) + databaseRoleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(databaseRole.Name) table, _ := testClientHelper().Table.CreateTable(t) on := sdk.OwnershipGrantOn{ diff --git a/pkg/sdk/testint/masking_policy_integration_test.go b/pkg/sdk/testint/masking_policy_integration_test.go index 82d608964c..75c9d705ee 100644 --- a/pkg/sdk/testint/masking_policy_integration_test.go +++ b/pkg/sdk/testint/masking_policy_integration_test.go @@ -253,8 +253,7 @@ func TestInt_MaskingPolicyDescribe(t *testing.T) { }) t.Run("when masking policy does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - _, err := client.MaskingPolicies.Describe(ctx, id) + _, err := client.MaskingPolicies.Describe(ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } @@ -379,8 +378,7 @@ func TestInt_MaskingPolicyDrop(t *testing.T) { }) t.Run("when masking policy does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - err := client.MaskingPolicies.Drop(ctx, id, nil) + err := client.MaskingPolicies.Drop(ctx, NonExistingSchemaObjectIdentifier, nil) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/testint/materialized_views_gen_integration_test.go b/pkg/sdk/testint/materialized_views_gen_integration_test.go index 428c4496f5..61b06c18e1 100644 --- a/pkg/sdk/testint/materialized_views_gen_integration_test.go +++ b/pkg/sdk/testint/materialized_views_gen_integration_test.go @@ -160,9 +160,7 @@ func 
TestInt_MaterializedViews(t *testing.T) { }) t.Run("drop view: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - err := client.MaterializedViews.Drop(ctx, sdk.NewDropMaterializedViewRequest(id)) + err := client.MaterializedViews.Drop(ctx, sdk.NewDropMaterializedViewRequest(NonExistingSchemaObjectIdentifier)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) @@ -360,8 +358,7 @@ func TestInt_MaterializedViews(t *testing.T) { t.Run("show materialized view: no existing view", func(t *testing.T) { showRequest := sdk.NewShowMaterializedViewRequest(). - WithIn(&sdk.In{Schema: testClientHelper().Ids.SchemaId()}). - WithLike(&sdk.Like{Pattern: sdk.Pointer("non-existing")}) + WithIn(&sdk.In{Schema: testClientHelper().Ids.SchemaId()}) returnedViews, err := client.MaterializedViews.Show(ctx, showRequest) require.NoError(t, err) @@ -370,7 +367,7 @@ func TestInt_MaterializedViews(t *testing.T) { t.Run("show materialized view: schema not existing", func(t *testing.T) { showRequest := sdk.NewShowMaterializedViewRequest(). - WithIn(&sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(testDb(t).Name, "made-up-name")}) + WithIn(&sdk.In{Schema: NonExistingDatabaseObjectIdentifier}) _, err := client.MaterializedViews.Show(ctx, showRequest) require.Error(t, err) }) @@ -381,7 +378,7 @@ func TestInt_MaterializedViews(t *testing.T) { showRequest := sdk.NewShowMaterializedViewRequest(). WithLike(&sdk.Like{Pattern: &view1.Name}). 
- WithIn(&sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(testDb(t).Name, testSchema(t).Name)}) + WithIn(&sdk.In{Schema: testClientHelper().Ids.SchemaId()}) returnedViews, err := client.MaterializedViews.Show(ctx, showRequest) require.NoError(t, err) @@ -401,9 +398,7 @@ func TestInt_MaterializedViews(t *testing.T) { }) t.Run("describe materialized view: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - _, err := client.MaterializedViews.Describe(ctx, id) + _, err := client.MaterializedViews.Describe(ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/testint/network_rule_gen_integration_test.go b/pkg/sdk/testint/network_rule_gen_integration_test.go index 939208d142..705b29824b 100644 --- a/pkg/sdk/testint/network_rule_gen_integration_test.go +++ b/pkg/sdk/testint/network_rule_gen_integration_test.go @@ -93,7 +93,7 @@ func TestInt_NetworkRules(t *testing.T) { }) networkRules, err := client.NetworkRules.Show(ctx, sdk.NewShowNetworkRuleRequest().WithIn(&sdk.In{ - Schema: sdk.NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName()), + Schema: id.SchemaId(), }).WithLike(&sdk.Like{ Pattern: sdk.String(id.Name()), })) diff --git a/pkg/sdk/testint/password_policy_integration_test.go b/pkg/sdk/testint/password_policy_integration_test.go index b8cbdc56ea..eb21eb8617 100644 --- a/pkg/sdk/testint/password_policy_integration_test.go +++ b/pkg/sdk/testint/password_policy_integration_test.go @@ -172,8 +172,7 @@ func TestInt_PasswordPolicyDescribe(t *testing.T) { }) t.Run("when password policy does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - _, err := client.PasswordPolicies.Describe(ctx, id) + _, err := client.PasswordPolicies.Describe(ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } @@ 
-290,8 +289,7 @@ func TestInt_PasswordPolicyDrop(t *testing.T) { }) t.Run("when password policy does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - err := client.PasswordPolicies.Drop(ctx, id, nil) + err := client.PasswordPolicies.Drop(ctx, NonExistingSchemaObjectIdentifier, nil) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) diff --git a/pkg/sdk/testint/pipes_integration_test.go b/pkg/sdk/testint/pipes_integration_test.go index 9e30eb6e18..145ff5e099 100644 --- a/pkg/sdk/testint/pipes_integration_test.go +++ b/pkg/sdk/testint/pipes_integration_test.go @@ -22,7 +22,7 @@ func createPipeCopyStatement(t *testing.T, table *sdk.Table, stage *sdk.Stage) s // TestInt_CreatePipeWithStrangeSchemaName documented previous bad behavior. It changed with Snowflake 8.3.1 release. // We leave the test for future reference. func TestInt_CreatePipeWithStrangeSchemaName(t *testing.T) { - schemaIdentifier := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, "tcK1>AJ+") + schemaIdentifier := testClientHelper().Ids.NewDatabaseObjectIdentifier("tcK1>AJ+") // creating a new schema on purpose schema, schemaCleanup := testClientHelper().Schema.CreateSchemaWithName(t, schemaIdentifier.Name()) @@ -139,9 +139,7 @@ func TestInt_PipesShowAndDescribe(t *testing.T) { }) t.Run("describe: non-existing pipe", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testDb(t).Name, "does_not_exist") - - _, err := itc.client.Pipes.Describe(itc.ctx, id) + _, err := itc.client.Pipes.Describe(itc.ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } @@ -233,9 +231,7 @@ func TestInt_PipeDrop(t *testing.T) { }) t.Run("pipe does not exist", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testDb(t).Name, "does_not_exist") - - err := itc.client.Pipes.Drop(itc.ctx, id, &sdk.DropPipeOptions{}) + err := 
itc.client.Pipes.Drop(itc.ctx, NonExistingSchemaObjectIdentifier, &sdk.DropPipeOptions{}) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/testint/procedures_integration_test.go b/pkg/sdk/testint/procedures_integration_test.go index 7cd641794d..ea4df83a82 100644 --- a/pkg/sdk/testint/procedures_integration_test.go +++ b/pkg/sdk/testint/procedures_integration_test.go @@ -622,15 +622,13 @@ func TestInt_CallProcedure(t *testing.T) { t.Run("call procedure for SQL: argument positions", func(t *testing.T) { f := createProcedureForSQLHandle(t, true) - id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, f.Name) - err := client.Procedures.Call(ctx, sdk.NewCallProcedureRequest(id).WithCallArguments([]string{"'hi'"})) + err := client.Procedures.Call(ctx, sdk.NewCallProcedureRequest(f.ID()).WithCallArguments([]string{"'hi'"})) require.NoError(t, err) }) t.Run("call procedure for SQL: argument names", func(t *testing.T) { f := createProcedureForSQLHandle(t, true) - id := sdk.NewSchemaObjectIdentifier(databaseTest.Name, schemaTest.Name, f.Name) - err := client.Procedures.Call(ctx, sdk.NewCallProcedureRequest(id).WithCallArguments([]string{"message => 'hi'"})) + err := client.Procedures.Call(ctx, sdk.NewCallProcedureRequest(f.ID()).WithCallArguments([]string{"message => 'hi'"})) require.NoError(t, err) }) diff --git a/pkg/sdk/testint/row_access_policies_gen_integration_test.go b/pkg/sdk/testint/row_access_policies_gen_integration_test.go index f08e3aedee..040706b09b 100644 --- a/pkg/sdk/testint/row_access_policies_gen_integration_test.go +++ b/pkg/sdk/testint/row_access_policies_gen_integration_test.go @@ -117,9 +117,7 @@ func TestInt_RowAccessPolicies(t *testing.T) { }) t.Run("drop row access policy: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - err := client.RowAccessPolicies.Drop(ctx, sdk.NewDropRowAccessPolicyRequest(id)) + err := 
client.RowAccessPolicies.Drop(ctx, sdk.NewDropRowAccessPolicyRequest(NonExistingSchemaObjectIdentifier)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) @@ -250,7 +248,7 @@ func TestInt_RowAccessPolicies(t *testing.T) { showRequest := sdk.NewShowRowAccessPolicyRequest(). WithLike(&sdk.Like{Pattern: &rowAccessPolicy1.Name}). - WithIn(&sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(testDb(t).Name, testSchema(t).Name)}) + WithIn(&sdk.In{Schema: testClientHelper().Ids.SchemaId()}) returnedRowAccessPolicies, err := client.RowAccessPolicies.Show(ctx, showRequest) require.NoError(t, err) @@ -305,9 +303,7 @@ func TestInt_RowAccessPolicies(t *testing.T) { }) t.Run("describe row access policy: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - _, err := client.RowAccessPolicies.Describe(ctx, id) + _, err := client.RowAccessPolicies.Describe(ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/testint/schemas_integration_test.go b/pkg/sdk/testint/schemas_integration_test.go index 4ab3379351..ef789bf30e 100644 --- a/pkg/sdk/testint/schemas_integration_test.go +++ b/pkg/sdk/testint/schemas_integration_test.go @@ -49,13 +49,13 @@ func TestInt_SchemasCreate(t *testing.T) { t.Run("clone", func(t *testing.T) { comment := "some_comment" - schemaID := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, testClientHelper().Ids.RandomAccountObjectIdentifier().Name()) + schemaID := testClientHelper().Ids.RandomDatabaseObjectIdentifier() err := client.Schemas.Create(ctx, schemaID, &sdk.CreateSchemaOptions{ Comment: sdk.String(comment), }) require.NoError(t, err) - clonedSchemaID := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, testClientHelper().Ids.RandomAccountObjectIdentifier().Name()) + clonedSchemaID := testClientHelper().Ids.RandomDatabaseObjectIdentifier() err = client.Schemas.Create(ctx, clonedSchemaID, 
&sdk.CreateSchemaOptions{ Comment: sdk.String(comment), Clone: &sdk.Clone{ @@ -139,7 +139,7 @@ func TestInt_SchemasAlter(t *testing.T) { table, _ := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) t.Cleanup(func() { - newId := sdk.NewSchemaObjectIdentifier(testDb(t).Name, swapSchema.Name, table.Name) + newId := sdk.NewSchemaObjectIdentifierInSchema(swapSchema.ID(), table.Name) err := client.Tables.Drop(ctx, sdk.NewDropTableRequest(newId)) require.NoError(t, err) }) diff --git a/pkg/sdk/testint/session_policies_gen_integration_test.go b/pkg/sdk/testint/session_policies_gen_integration_test.go index 063304edc7..8f4b35db4f 100644 --- a/pkg/sdk/testint/session_policies_gen_integration_test.go +++ b/pkg/sdk/testint/session_policies_gen_integration_test.go @@ -110,9 +110,7 @@ func TestInt_SessionPolicies(t *testing.T) { }) t.Run("drop session_policy: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - err := client.SessionPolicies.Drop(ctx, sdk.NewDropSessionPolicyRequest(id)) + err := client.SessionPolicies.Drop(ctx, sdk.NewDropSessionPolicyRequest(NonExistingSchemaObjectIdentifier)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) diff --git a/pkg/sdk/testint/setup_test.go b/pkg/sdk/testint/setup_test.go index c08be27994..e913e60781 100644 --- a/pkg/sdk/testint/setup_test.go +++ b/pkg/sdk/testint/setup_test.go @@ -22,7 +22,9 @@ var ( TestDatabaseName = "int_test_db_" + random.IntegrationTestsSuffix TestSchemaName = "int_test_sc_" + random.IntegrationTestsSuffix - NonExistingAccountObjectIdentifier = sdk.NewAccountObjectIdentifier("does_not_exist") + NonExistingAccountObjectIdentifier = sdk.NewAccountObjectIdentifier("does_not_exist") + NonExistingDatabaseObjectIdentifier = sdk.NewDatabaseObjectIdentifier(TestDatabaseName, "does_not_exist") + NonExistingSchemaObjectIdentifier = sdk.NewSchemaObjectIdentifier(TestDatabaseName, TestSchemaName, "does_not_exist") ) var 
itc integrationTestContext diff --git a/pkg/sdk/testint/streamlits_integration_test.go b/pkg/sdk/testint/streamlits_integration_test.go index 45d2119789..ad7ba7c68d 100644 --- a/pkg/sdk/testint/streamlits_integration_test.go +++ b/pkg/sdk/testint/streamlits_integration_test.go @@ -143,7 +143,7 @@ func TestInt_Streamlits(t *testing.T) { databaseRole, databaseRoleCleanup := testClientHelper().DatabaseRole.CreateDatabaseRole(t) t.Cleanup(databaseRoleCleanup) - databaseRoleId := sdk.NewDatabaseObjectIdentifier(testDb(t).Name, databaseRole.Name) + databaseRoleId := testClientHelper().Ids.NewDatabaseObjectIdentifier(databaseRole.Name) comment := random.Comment() id := testClientHelper().Ids.RandomSchemaObjectIdentifier() @@ -281,6 +281,7 @@ func TestInt_Streamlits(t *testing.T) { require.Equal(t, e.Name, detail.Name) require.Equal(t, e.UrlId, detail.UrlId) require.Equal(t, mainFile, detail.MainFile) + // TODO [SNOW-999049]: make nicer during the identifiers rework require.Equal(t, stage.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(detail.RootLocation[1:]).FullyQualifiedName()) require.Empty(t, detail.Title) require.Empty(t, detail.QueryWarehouse) diff --git a/pkg/sdk/testint/streams_gen_integration_test.go b/pkg/sdk/testint/streams_gen_integration_test.go index 6d8d33db0a..6b5548c0ba 100644 --- a/pkg/sdk/testint/streams_gen_integration_test.go +++ b/pkg/sdk/testint/streams_gen_integration_test.go @@ -46,6 +46,7 @@ func TestInt_Streams(t *testing.T) { s, err := client.Streams.ShowByID(ctx, id) require.NoError(t, err) + // TODO [SNOW-999049]: make nicer during the identifiers rework assert.Equal(t, table.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*s.TableName).FullyQualifiedName()) assertStream(t, s, id, "Table", "DEFAULT") }) @@ -76,17 +77,17 @@ func TestInt_Streams(t *testing.T) { s, err := client.Streams.ShowByID(ctx, id) require.NoError(t, err) + // TODO [SNOW-999049]: make nicer during the 
identifiers rework assert.Equal(t, externalTableId.FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*s.TableName).FullyQualifiedName()) assertStream(t, s, id, "External Table", "INSERT_ONLY") }) t.Run("CreateOnDirectoryTable", func(t *testing.T) { stage, cleanupStage := testClientHelper().Stage.CreateStageWithDirectory(t) - stageId := sdk.NewSchemaObjectIdentifier(db.Name, schema.Name, stage.Name) t.Cleanup(cleanupStage) id := testClientHelper().Ids.RandomSchemaObjectIdentifier() - req := sdk.NewCreateStreamOnDirectoryTableRequest(id, stageId).WithComment(sdk.String("some comment")) + req := sdk.NewCreateStreamOnDirectoryTableRequest(id, stage.ID()).WithComment(sdk.String("some comment")) err := client.Streams.CreateOnDirectoryTable(ctx, req) require.NoError(t, err) t.Cleanup(func() { @@ -102,10 +103,9 @@ func TestInt_Streams(t *testing.T) { t.Run("CreateOnView", func(t *testing.T) { table, cleanupTable := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) - tableId := sdk.NewSchemaObjectIdentifier(db.Name, schema.Name, table.Name) t.Cleanup(cleanupTable) - view, cleanupView := testClientHelper().View.CreateView(t, fmt.Sprintf("SELECT id FROM %s", tableId.FullyQualifiedName())) + view, cleanupView := testClientHelper().View.CreateView(t, fmt.Sprintf("SELECT id FROM %s", table.ID().FullyQualifiedName())) t.Cleanup(cleanupView) id := testClientHelper().Ids.RandomSchemaObjectIdentifier() @@ -148,6 +148,7 @@ func TestInt_Streams(t *testing.T) { require.NoError(t, err) assertStream(t, s, cloneId, "Table", "DEFAULT") + // TODO [SNOW-999049]: make nicer during the identifiers rework assert.Equal(t, table.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*s.TableName).FullyQualifiedName()) }) @@ -291,7 +292,7 @@ func TestInt_Streams(t *testing.T) { s, err := client.Streams.Show(ctx, sdk.NewShowStreamRequest(). WithTerse(sdk.Bool(false)). 
WithIn(&sdk.In{ - Schema: sdk.NewDatabaseObjectIdentifier(db.Name, schema.Name), + Schema: schema.ID(), }). WithLike(&sdk.Like{ Pattern: sdk.String(id.Name()), @@ -372,7 +373,7 @@ func TestInt_Streams(t *testing.T) { s, err := client.Streams.Show(ctx, sdk.NewShowStreamRequest(). WithTerse(sdk.Bool(false)). WithIn(&sdk.In{ - Schema: sdk.NewDatabaseObjectIdentifier(db.Name, schema.Name), + Schema: schema.ID(), }). WithStartsWith(sdk.String(idPrefix)). WithLimit(&sdk.LimitFrom{ @@ -413,6 +414,7 @@ func TestInt_Streams(t *testing.T) { assert.Equal(t, schema.Name, s.SchemaName) assert.Nil(t, s.TableOn) assert.Equal(t, "some comment", *s.Comment) + // TODO [SNOW-999049]: make nicer during the identifiers rework assert.Equal(t, table.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*s.TableName).FullyQualifiedName()) assert.Equal(t, "Table", *s.SourceType) assert.Equal(t, "DEFAULT", *s.Mode) diff --git a/pkg/sdk/testint/system_functions_integration_test.go b/pkg/sdk/testint/system_functions_integration_test.go index 851f1d476e..247c70e2d6 100644 --- a/pkg/sdk/testint/system_functions_integration_test.go +++ b/pkg/sdk/testint/system_functions_integration_test.go @@ -56,7 +56,7 @@ func TestInt_PipeStatus(t *testing.T) { table, tableCleanup := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) t.Cleanup(tableCleanup) - stage, stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, sdk.NewDatabaseObjectIdentifier(testDb(t).Name, schema.Name)) + stage, stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, schema.ID()) t.Cleanup(stageCleanup) copyStatement := createPipeCopyStatement(t, table, stage) @@ -105,7 +105,7 @@ func TestInt_PipeForceResume(t *testing.T) { table, tableCleanup := testClientHelper().Table.CreateTableInSchema(t, schema.ID()) t.Cleanup(tableCleanup) - stage, stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, sdk.NewDatabaseObjectIdentifier(testDb(t).Name, schema.Name)) + stage, 
stageCleanup := testClientHelper().Stage.CreateStageInSchema(t, schema.ID()) t.Cleanup(stageCleanup) copyStatement := createPipeCopyStatement(t, table, stage) diff --git a/pkg/sdk/testint/tables_integration_test.go b/pkg/sdk/testint/tables_integration_test.go index f2c4aa35b3..f6a6f08ce9 100644 --- a/pkg/sdk/testint/tables_integration_test.go +++ b/pkg/sdk/testint/tables_integration_test.go @@ -525,6 +525,7 @@ func TestInt_Table(t *testing.T) { require.NoError(t, err) require.Equal(t, 2, len(tableDetails)) + // TODO [SNOW-999049]: make nicer during the identifiers rework assert.Equal(t, maskingPolicy.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*tableDetails[0].PolicyName).FullyQualifiedName()) alterRequest := sdk.NewAlterTableRequest(id). diff --git a/pkg/sdk/testint/tasks_gen_integration_test.go b/pkg/sdk/testint/tasks_gen_integration_test.go index 4b3c4c4599..759faa0e1d 100644 --- a/pkg/sdk/testint/tasks_gen_integration_test.go +++ b/pkg/sdk/testint/tasks_gen_integration_test.go @@ -369,9 +369,7 @@ func TestInt_Tasks(t *testing.T) { }) t.Run("drop task: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - err := client.Tasks.Drop(ctx, sdk.NewDropTaskRequest(id)) + err := client.Tasks.Drop(ctx, sdk.NewDropTaskRequest(NonExistingSchemaObjectIdentifier)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) @@ -556,7 +554,7 @@ func TestInt_Tasks(t *testing.T) { showRequest := sdk.NewShowTaskRequest(). WithLike(&sdk.Like{Pattern: &task1.Name}). - WithIn(&sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(testDb(t).Name, testSchema(t).Name)}). + WithIn(&sdk.In{Schema: testClientHelper().Ids.SchemaId()}). 
WithLimit(&sdk.LimitFrom{Rows: sdk.Int(5)}) returnedTasks, err := client.Tasks.Show(ctx, showRequest) diff --git a/pkg/sdk/testint/views_gen_integration_test.go b/pkg/sdk/testint/views_gen_integration_test.go index 0f01b2720e..707b7292df 100644 --- a/pkg/sdk/testint/views_gen_integration_test.go +++ b/pkg/sdk/testint/views_gen_integration_test.go @@ -183,9 +183,7 @@ func TestInt_Views(t *testing.T) { }) t.Run("drop view: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - err := client.Views.Drop(ctx, sdk.NewDropViewRequest(id)) + err := client.Views.Drop(ctx, sdk.NewDropViewRequest(NonExistingSchemaObjectIdentifier)) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) @@ -330,6 +328,7 @@ func TestInt_Views(t *testing.T) { require.NoError(t, err) assert.Equal(t, 1, len(alteredViewDetails)) + // TODO [SNOW-999049]: make nicer during the identifiers rework assert.Equal(t, maskingPolicy.ID().FullyQualifiedName(), sdk.NewSchemaObjectIdentifierFromFullyQualifiedName(*alteredViewDetails[0].PolicyName).FullyQualifiedName()) alterRequest = sdk.NewAlterViewRequest(id).WithUnsetMaskingPolicyOnColumn( @@ -475,7 +474,7 @@ func TestInt_Views(t *testing.T) { showRequest := sdk.NewShowViewRequest(). WithLike(&sdk.Like{Pattern: &view1.Name}). - WithIn(&sdk.In{Schema: sdk.NewDatabaseObjectIdentifier(testDb(t).Name, testSchema(t).Name)}). + WithIn(&sdk.In{Schema: testClientHelper().Ids.SchemaId()}). 
WithLimit(&sdk.LimitFrom{Rows: sdk.Int(5)}) returnedViews, err := client.Views.Show(ctx, showRequest) @@ -520,9 +519,7 @@ func TestInt_Views(t *testing.T) { }) t.Run("describe view: non-existing", func(t *testing.T) { - id := sdk.NewSchemaObjectIdentifier(testDb(t).Name, testSchema(t).Name, "does_not_exist") - - _, err := client.Views.Describe(ctx, id) + _, err := client.Views.Describe(ctx, NonExistingSchemaObjectIdentifier) assert.ErrorIs(t, err, sdk.ErrObjectNotExistOrAuthorized) }) } diff --git a/pkg/sdk/users_test.go b/pkg/sdk/users_test.go index dbc1659fd7..ba2179e215 100644 --- a/pkg/sdk/users_test.go +++ b/pkg/sdk/users_test.go @@ -15,9 +15,10 @@ func TestUserCreate(t *testing.T) { }) t.Run("with complete options", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() tags := []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "tag1"), + Name: tagId, Value: "v1", }, } @@ -43,7 +44,7 @@ func TestUserCreate(t *testing.T) { Tags: tags, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE USER IF NOT EXISTS %s PASSWORD = '%s' LOGIN_NAME = '%s' DEFAULT_ROLE = foo ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR = true AUTOCOMMIT = true WITH TAG ("db"."schema"."tag1" = 'v1')`, id.FullyQualifiedName(), password, loginName) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE USER IF NOT EXISTS %s PASSWORD = '%s' LOGIN_NAME = '%s' DEFAULT_ROLE = foo ENABLE_UNREDACTED_QUERY_SYNTAX_ERROR = true AUTOCOMMIT = true WITH TAG (%s = 'v1')`, id.FullyQualifiedName(), password, loginName, tagId.FullyQualifiedName()) }) } @@ -63,7 +64,7 @@ func TestUserAlter(t *testing.T) { }) t.Run("with setting a policy", func(t *testing.T) { - passwordPolicy := NewSchemaObjectIdentifier("db", "schema", "PASSWORD_POLICY1") + passwordPolicy := randomSchemaObjectIdentifier() opts := &AlterUserOptions{ name: id, Set: &UserSet{ @@ -74,13 +75,15 @@ func TestUserAlter(t *testing.T) { }) t.Run("with setting tags", func(t *testing.T) { + tagId1 := 
randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) tags := []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "tag2"), + Name: tagId2, Value: "v2", }, } @@ -88,7 +91,7 @@ func TestUserAlter(t *testing.T) { name: id, SetTag: tags, } - assertOptsValidAndSQLEquals(t, opts, `ALTER USER %s SET TAG "db"."schema"."tag1" = 'v1', "db"."schema"."tag2" = 'v2'`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER USER %s SET TAG %s = 'v1', %s = 'v2'`, id.FullyQualifiedName(), tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("with setting properties and parameters", func(t *testing.T) { @@ -170,8 +173,8 @@ func TestUserAlter(t *testing.T) { }) t.Run("with unsetting tags", func(t *testing.T) { - tag1 := NewSchemaObjectIdentifier("db", "schema", "USER_TAG1") - tag2 := NewSchemaObjectIdentifier("db", "schema", "USER_TAG2") + tag1 := randomSchemaObjectIdentifier() + tag2 := randomSchemaObjectIdentifier() opts := &AlterUserOptions{ name: id, UnsetTag: []ObjectIdentifier{tag1, tag2}, diff --git a/pkg/sdk/validations_test.go b/pkg/sdk/validations_test.go index 5ed4d4044c..4a854fca2c 100644 --- a/pkg/sdk/validations_test.go +++ b/pkg/sdk/validations_test.go @@ -46,8 +46,8 @@ func TestValidObjectIdentifier(t *testing.T) { assert.Equal(t, ok, false) }) - t.Run("with 255 charcters in each of db, schema and name", func(t *testing.T) { - ok := ValidObjectIdentifier(invalidSchemaObjectIdentifier) + t.Run("with 255 characters in each of db, schema and name", func(t *testing.T) { + ok := ValidObjectIdentifier(longSchemaObjectIdentifier) assert.Equal(t, ok, true) }) } diff --git a/pkg/sdk/views_gen_test.go b/pkg/sdk/views_gen_test.go index 665ef2bb89..80ec68535c 100644 --- a/pkg/sdk/views_gen_test.go +++ b/pkg/sdk/views_gen_test.go @@ -23,7 +23,7 @@ func TestViews_Create(t *testing.T) { 
t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -37,7 +37,7 @@ func TestViews_Create(t *testing.T) { t.Run("validation: valid identifier for [opts.RowAccessPolicy.RowAccessPolicy]", func(t *testing.T) { opts := defaultOpts() opts.RowAccessPolicy = &ViewRowAccessPolicy{ - RowAccessPolicy: NewSchemaObjectIdentifier("", "", ""), + RowAccessPolicy: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -110,7 +110,7 @@ func TestViews_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -129,7 +129,7 @@ func TestViews_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.DropRowAccessPolicy.RowAccessPolicy]", func(t *testing.T) { opts := defaultOpts() opts.DropRowAccessPolicy = &ViewDropRowAccessPolicy{ - RowAccessPolicy: NewSchemaObjectIdentifier("", "", ""), + RowAccessPolicy: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -137,7 +137,7 @@ func TestViews_Alter(t *testing.T) { t.Run("validation: valid identifier for [opts.AddRowAccessPolicy.RowAccessPolicy]", func(t *testing.T) { opts := defaultOpts() opts.AddRowAccessPolicy = &ViewAddRowAccessPolicy{ - RowAccessPolicy: NewSchemaObjectIdentifier("", "", ""), + RowAccessPolicy: emptySchemaObjectIdentifier, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -155,7 +155,7 @@ func TestViews_Alter(t *testing.T) { opts := defaultOpts() opts.DropAndAddRowAccessPolicy = &ViewDropAndAddRowAccessPolicy{ Drop: ViewDropRowAccessPolicy{ - 
RowAccessPolicy: NewSchemaObjectIdentifier("", "", ""), + RowAccessPolicy: emptySchemaObjectIdentifier, }, Add: ViewAddRowAccessPolicy{ RowAccessPolicy: randomSchemaObjectIdentifier(), @@ -171,7 +171,7 @@ func TestViews_Alter(t *testing.T) { RowAccessPolicy: randomSchemaObjectIdentifier(), }, Add: ViewAddRowAccessPolicy{ - RowAccessPolicy: NewSchemaObjectIdentifier("", "", ""), + RowAccessPolicy: emptySchemaObjectIdentifier, }, } assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) @@ -372,7 +372,7 @@ func TestViews_Drop(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) @@ -430,7 +430,7 @@ func TestViews_Describe(t *testing.T) { t.Run("validation: valid identifier for [opts.name]", func(t *testing.T) { opts := defaultOpts() - opts.name = NewSchemaObjectIdentifier("", "", "") + opts.name = emptySchemaObjectIdentifier assertOptsInvalidJoinedErrors(t, opts, ErrInvalidObjectIdentifier) }) diff --git a/pkg/sdk/views_impl_gen.go b/pkg/sdk/views_impl_gen.go index 4acd186b9b..e41fb0bd63 100644 --- a/pkg/sdk/views_impl_gen.go +++ b/pkg/sdk/views_impl_gen.go @@ -38,7 +38,7 @@ func (v *views) Show(ctx context.Context, request *ShowViewRequest) ([]View, err } func (v *views) ShowByID(ctx context.Context, id SchemaObjectIdentifier) (*View, error) { - request := NewShowViewRequest().WithIn(&In{Schema: NewDatabaseObjectIdentifier(id.DatabaseName(), id.SchemaName())}).WithLike(&Like{String(id.Name())}) + request := NewShowViewRequest().WithIn(&In{Schema: id.SchemaId()}).WithLike(&Like{String(id.Name())}) views, err := v.Show(ctx, request) if err != nil { return nil, err diff --git a/pkg/sdk/warehouses_test.go b/pkg/sdk/warehouses_test.go index e3ed0f8033..cdb5e1fe2c 100644 --- a/pkg/sdk/warehouses_test.go +++ b/pkg/sdk/warehouses_test.go @@ 
-18,6 +18,8 @@ func TestWarehouseCreate(t *testing.T) { }) t.Run("with complete options", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &CreateWarehouseOptions{ OrReplace: Bool(true), name: NewAccountObjectIdentifier("completewarehouse"), @@ -41,16 +43,16 @@ func TestWarehouseCreate(t *testing.T) { StatementTimeoutInSeconds: Int(89), Tag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag2"), + Name: tagId2, Value: "v2", }, }, } - assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE WAREHOUSE IF NOT EXISTS "completewarehouse" WAREHOUSE_TYPE = 'STANDARD' WAREHOUSE_SIZE = 'X4LARGE' MAX_CLUSTER_COUNT = 8 MIN_CLUSTER_COUNT = 3 SCALING_POLICY = 'ECONOMY' AUTO_SUSPEND = 1000 AUTO_RESUME = true INITIALLY_SUSPENDED = false RESOURCE_MONITOR = "myresmon" COMMENT = 'hello' ENABLE_QUERY_ACCELERATION = true QUERY_ACCELERATION_MAX_SCALE_FACTOR = 62 MAX_CONCURRENCY_LEVEL = 7 STATEMENT_QUEUED_TIMEOUT_IN_SECONDS = 29 STATEMENT_TIMEOUT_IN_SECONDS = 89 TAG ("db1"."schema1"."tag1" = 'v1', "db1"."schema1"."tag2" = 'v2')`) + assertOptsValidAndSQLEquals(t, opts, `CREATE OR REPLACE WAREHOUSE IF NOT EXISTS "completewarehouse" WAREHOUSE_TYPE = 'STANDARD' WAREHOUSE_SIZE = 'X4LARGE' MAX_CLUSTER_COUNT = 8 MIN_CLUSTER_COUNT = 3 SCALING_POLICY = 'ECONOMY' AUTO_SUSPEND = 1000 AUTO_RESUME = true INITIALLY_SUSPENDED = false RESOURCE_MONITOR = "myresmon" COMMENT = 'hello' ENABLE_QUERY_ACCELERATION = true QUERY_ACCELERATION_MAX_SCALE_FACTOR = 62 MAX_CONCURRENCY_LEVEL = 7 STATEMENT_QUEUED_TIMEOUT_IN_SECONDS = 29 STATEMENT_TIMEOUT_IN_SECONDS = 89 TAG (%s = 'v1', %s = 'v2')`, tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) } @@ -120,30 +122,33 @@ func TestWarehouseAlter(t *testing.T) { }) t.Run("with set tag", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + 
tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &AlterWarehouseOptions{ name: NewAccountObjectIdentifier("mywarehouse"), SetTag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db", "schema", "tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db", "schema", "tag2"), + Name: tagId2, Value: "v2", }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" SET TAG "db"."schema"."tag1" = 'v1', "db"."schema"."tag2" = 'v2'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" SET TAG %s = 'v1', %s = 'v2'`, tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) t.Run("with unset tag", func(t *testing.T) { + tagId := randomSchemaObjectIdentifier() opts := &AlterWarehouseOptions{ name: NewAccountObjectIdentifier("mywarehouse"), UnsetTag: []ObjectIdentifier{ - NewSchemaObjectIdentifier("db", "schema", "tag1"), + tagId, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" UNSET TAG "db"."schema"."tag1"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" UNSET TAG %s`, tagId.FullyQualifiedName()) }) t.Run("with unset params", func(t *testing.T) { @@ -192,31 +197,35 @@ func TestWarehouseAlter(t *testing.T) { }) t.Run("with set tag", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &AlterWarehouseOptions{ name: NewAccountObjectIdentifier("mywarehouse"), SetTag: []TagAssociation{ { - Name: NewSchemaObjectIdentifier("db1", "schema1", "tag1"), + Name: tagId1, Value: "v1", }, { - Name: NewSchemaObjectIdentifier("db2", "schema2", "tag2"), + Name: tagId2, Value: "v2", }, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" SET TAG "db1"."schema1"."tag1" = 'v1', "db2"."schema2"."tag2" = 'v2'`) + assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" SET TAG %s = 'v1', %s = 'v2'`, tagId1.FullyQualifiedName(), 
tagId2.FullyQualifiedName()) }) t.Run("with unset tag", func(t *testing.T) { + tagId1 := randomSchemaObjectIdentifier() + tagId2 := randomSchemaObjectIdentifierInSchema(tagId1.SchemaId()) opts := &AlterWarehouseOptions{ name: NewAccountObjectIdentifier("mywarehouse"), UnsetTag: []ObjectIdentifier{ - NewSchemaObjectIdentifier("db1", "schema1", "tag1"), - NewSchemaObjectIdentifier("db2", "schema2", "tag2"), + tagId1, + tagId2, }, } - assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" UNSET TAG "db1"."schema1"."tag1", "db2"."schema2"."tag2"`) + assertOptsValidAndSQLEquals(t, opts, `ALTER WAREHOUSE "mywarehouse" UNSET TAG %s, %s`, tagId1.FullyQualifiedName(), tagId2.FullyQualifiedName()) }) } diff --git a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD index c40eabcb0d..fa47b0f928 100644 --- a/v1-preparations/ESSENTIAL_GA_OBJECTS.MD +++ b/v1-preparations/ESSENTIAL_GA_OBJECTS.MD @@ -10,27 +10,27 @@ Status is one of: Known issues lists open issues touching the given object. Note that some of these issues may be already fixed in the newer provider versions. We will address these while working on the given object. 
-| Object Type | Status | Known issues | -|--------------------------|:------:|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| ACCOUNT | ❌ | 
[#2030](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2030), [#2015](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2015), [#1891](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1891), [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679), [#1671](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1671), [#1501](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1501), [#1062](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1062) | -| DATABASE | ❌ | [#2590](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2590), [#2321](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2321), [#2277](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2277), [#1833](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1833), [#1770](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1770), [#1453](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1453), [#1371](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1371), [#1367](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1367), [#1045](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1045), [#506](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/506) | -| DATABASE ROLE | ❌ | - | -| NETWORK POLICY | ❌ | - | -| RESOURCE MONITOR | ❌ | [#1990](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1990), [#1832](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1832), [#1821](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1821), [#1754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1754), [#1716](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1716), 
[#1714](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1714), [#1624](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1624), [#1500](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1500), [#1175](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1175) | -| ROLE | ❌ | - | -| SECURITY INTEGRATION | ❌ | [#2719](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2719), [#2568](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2568), [#2177](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2177), [#1851](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1851), [#1773](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1773), [#1741](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1741), [#1637](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1637), [#1503](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1503), [#1498](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1498), [#1421](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1421), [#1224](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1224) | -| USER | ❌ | [#2817](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2817), [#2662](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2662), [#1572](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1572), [#1535](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1535), [#1155](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1155) | -| WAREHOUSE | ❌ | [#1844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1844), [#1104](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1104) | -| FUNCTION | ❌ | 
[#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2426](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2426), [#1479](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1479), [#1393](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1393), [#1208](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1208), [#1079](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1079) | -| MASKING POLICY | ❌ | [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1656](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1656), [#1444](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1444), [#1422](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1422), [#1097](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1097) | -| PROCEDURE | ❌ | [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2623](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2623), [#2257](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2257), [#2146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2146), [#1855](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1855), [#1695](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1695), [#1640](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1640), [#1195](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1195), [#1189](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1189), 
[#1178](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1178), [#1050](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1050) | -| ROW ACCESS POLICY | ❌ | [#2053](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2053), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1151](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1151) | -| SCHEMA | ❌ | [#2826](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2826), [#2211](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2211), [#1243](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1243), [#506](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/506) | -| STAGE | ❌ | [#2818](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2818), [#2505](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2505), [#1911](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1911), [#1903](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1903), [#1795](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1795), [#1705](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1705), [#1544](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1544), [#1491](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1491), [#1087](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1087), [#265](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/265) | -| STREAM | ❌ | [#2413](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2413), [#2201](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2201), [#1150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1150) | -| STREAMLIT | ❌ | 
[#1933](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1933) | -| TABLE | ❌ | [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), [#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), [#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), [#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | -| TAG | ❌ | 
[#2598](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2598), [#1910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1910), [#1909](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1909), [#1862](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1862), [#1806](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1806), [#1657](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1657), [#1496](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1496), [#1443](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1443), [#1394](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1394), [#1372](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1372), [#1074](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1074) | -| TASK | ❌ | [#1419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1419), [#1250](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1250), [#1194](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1194), [#1088](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1088) | -| VIEW | ❌ | [#2430](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2430), [#2085](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2085), [#2055](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2055), [#2031](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2031), [#1526](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1526), [#1253](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1253), [#1049](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1049) | -| snowflake_unsafe_execute | ❌ | - | \ No newline at end of file +| Object Type | Status | Known 
issues | +|--------------------------|:------:|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
| +| ACCOUNT | ❌ | [#2030](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2030), [#2015](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2015), [#1891](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1891), [#1679](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1679), [#1671](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1671), [#1501](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1501), [#1062](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1062) | +| DATABASE | 👨‍💻 | [#2590](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2590), [#2321](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2321), [#2277](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2277), [#1833](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1833), [#1770](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1770), [#1453](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1453), [#1371](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1371), [#1367](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1367), [#1045](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1045), [#506](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/506) | +| DATABASE ROLE | ❌ | - | +| NETWORK POLICY | ❌ | - | +| RESOURCE MONITOR | ❌ | [#1990](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1990), [#1832](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1832), [#1821](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1821), [#1754](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1754), [#1716](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1716), 
[#1714](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1714), [#1624](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1624), [#1500](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1500), [#1175](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1175) | +| ROLE | ❌ | - | +| SECURITY INTEGRATION | 👨‍💻 | [#2719](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2719), [#2568](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2568), [#2177](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2177), [#1851](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1851), [#1773](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1773), [#1741](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1741), [#1637](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1637), [#1503](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1503), [#1498](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1498), [#1421](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1421), [#1224](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1224) | +| USER | ❌ | [#2817](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2817), [#2662](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2662), [#1572](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1572), [#1535](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1535), [#1155](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1155) | +| WAREHOUSE | ❌ | [#1844](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1844), [#1104](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1104) | +| FUNCTION | ❌ | 
[#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2426](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2426), [#1479](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1479), [#1393](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1393), [#1208](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1208), [#1079](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1079) | +| MASKING POLICY | ❌ | [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1656](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1656), [#1444](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1444), [#1422](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1422), [#1097](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1097) | +| PROCEDURE | ❌ | [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2623](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2623), [#2257](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2257), [#2146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2146), [#1855](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1855), [#1695](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1695), [#1640](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1640), [#1195](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1195), [#1189](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1189), 
[#1178](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1178), [#1050](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1050) | +| ROW ACCESS POLICY | ❌ | [#2053](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2053), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1151](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1151) | +| SCHEMA | ❌ | [#2826](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2826), [#2211](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2211), [#1243](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1243), [#506](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/506) | +| STAGE | ❌ | [#2818](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2818), [#2505](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2505), [#1911](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1911), [#1903](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1903), [#1795](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1795), [#1705](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1705), [#1544](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1544), [#1491](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1491), [#1087](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1087), [#265](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/265) | +| STREAM | ❌ | [#2413](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2413), [#2201](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2201), [#1150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1150) | +| STREAMLIT | ❌ | 
[#1933](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1933) | +| TABLE | ❌ | [#2839](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2839), [#2735](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2735), [#2733](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2733), [#2683](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2683), [#2676](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2676), [#2674](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2674), [#2629](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2629), [#2418](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2418), [#2415](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2415), [#2406](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2406), [#2236](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2236), [#2035](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2035), [#1799](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1799), [#1764](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1764), [#1600](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1600), [#1387](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1387), [#1272](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1272), [#1271](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1271), [#1248](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1248), [#1241](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1241), [#1146](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1146), [#1032](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1032), 
[#420](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/420) | +| TAG | ❌ | [#2598](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2598), [#1910](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1910), [#1909](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1909), [#1862](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1862), [#1806](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1806), [#1657](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1657), [#1496](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1496), [#1443](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1443), [#1394](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1394), [#1372](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1372), [#1074](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1074) | +| TASK | ❌ | [#1419](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1419), [#1250](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1250), [#1194](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1194), [#1088](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1088) | +| VIEW | ❌ | [#2430](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2430), [#2085](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2085), [#2055](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2055), [#2031](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2031), [#1526](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1526), [#1253](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1253), [#1049](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/1049) | 
+| snowflake_unsafe_execute | ❌ | - | \ No newline at end of file