From c60db80f44d949258f0a692baafdc22b886c3010 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Wed, 29 Nov 2023 09:56:36 +0100 Subject: [PATCH 1/5] feat: Introduce simple arch tests (#2210) * Add naive file getting * Test naively (resources) * Split into files * Add assertion for non-acceptance tests * Reorganize * Extract files * Extract directory * Extract method * Rename to archtest * Split lib and our usage into 2 different packages * Introduce datasources tests * Start testing archtest * Parametrize directories test * Change API * Rename to architest * Add nicer assertions * Add file to method * Rename to exported methods * Test file * Test file filtering * Add tests for package assertions * Test rest of the assertions * Test invocation on every file and method * Test creation of directory and file * Add tests for sdk integration tests * Add tests integration tests matchers * Fix lint and stuff * Add sample usage * Fix test setup * Fix after review * Introduce FilesProvider * Add recipe --- Makefile | 9 +- pkg/architest/architest_test.go | 357 ++++++++++++++++++ pkg/architest/assertions.go | 38 ++ pkg/architest/directory.go | 41 ++ pkg/architest/file.go | 51 +++ pkg/architest/file_filter_providers.go | 35 ++ pkg/architest/files.go | 25 ++ pkg/architest/method.go | 21 ++ pkg/architest/methods.go | 13 + pkg/architest/regex.go | 12 + pkg/architest/testdata/dir1/different1.go | 1 + pkg/architest/testdata/dir1/sample1.go | 1 + pkg/architest/testdata/dir1/sample2.go | 5 + pkg/architest/testdata/dir2/sample1.go | 1 + pkg/architest/testdata/dir2/sample1_test.go | 7 + pkg/architest/testdata/dir3/sample1.go | 1 + .../testdata/dir3/sample1_acceptance_test.go | 7 + pkg/architest/testdata/dir4/sample1.go | 1 + .../testdata/dir4/sample1_integration_test.go | 7 + .../datasources_acceptance_tests_arch_test.go | 47 +++ .../resources_acceptance_tests_arch_test.go | 48 +++ .../sdk_integration_tests_arch_test.go | 50 +++ .../database_grant_acceptance_test.go | 2 +- pkg/resources/role_grants_acceptance_test.go | 6 +- pkg/sdk/testint/parsers.go | 2 +- .../resource_monitors_integration_test.go | 4 +- ...etup_integration_test.go => setup_test.go} | 0 27 files changed, 782 insertions(+), 10 deletions(-) create mode 100644 pkg/architest/architest_test.go create mode 100644 pkg/architest/assertions.go create mode 100644 pkg/architest/directory.go create mode 100644 pkg/architest/file.go create mode 100644 pkg/architest/file_filter_providers.go create mode 100644 pkg/architest/files.go create mode 100644 pkg/architest/method.go create mode 100644 pkg/architest/methods.go create mode 100644 pkg/architest/regex.go create mode 100644 pkg/architest/testdata/dir1/different1.go create mode 100644 pkg/architest/testdata/dir1/sample1.go create mode 100644 pkg/architest/testdata/dir1/sample2.go create mode 100644 pkg/architest/testdata/dir2/sample1.go create mode 100644 pkg/architest/testdata/dir2/sample1_test.go create mode 100644 pkg/architest/testdata/dir3/sample1.go create mode 100644 pkg/architest/testdata/dir3/sample1_acceptance_test.go create mode 100644 pkg/architest/testdata/dir4/sample1.go create mode 100644 pkg/architest/testdata/dir4/sample1_integration_test.go create mode 100644 pkg/architests/datasources_acceptance_tests_arch_test.go create mode 100644 pkg/architests/resources_acceptance_tests_arch_test.go create mode 100644 pkg/architests/sdk_integration_tests_arch_test.go rename pkg/sdk/testint/{setup_integration_test.go => setup_test.go} (100%) diff --git a/Makefile b/Makefile index 
0b2a2fc7d9..0dd62fbd35 100644 --- a/Makefile +++ b/Makefile @@ -38,10 +38,10 @@ install: ## install the binary go install -v ./... lint: # Run static code analysis, check formatting. See https://golangci-lint.run/ - golangci-lint run ./... -v + ./bin/golangci-lint run ./... -v lint-fix: ## Run static code analysis, check formatting and try to fix findings - golangci-lint run ./... -v --fix + ./bin/golangci-lint run ./... -v --fix mod: ## add missing and remove unused modules go mod tidy -compat=1.20 @@ -49,7 +49,7 @@ mod: ## add missing and remove unused modules mod-check: mod ## check if there are any missing/unused modules git diff --exit-code -- go.mod go.sum -pre-push: fmt docs mod lint ## Run a few checks before pushing a change (docs, fmt, mod, etc.) +pre-push: fmt docs mod lint test-architecture ## Run a few checks before pushing a change (docs, fmt, mod, etc.) pre-push-check: fmt-check docs-check lint-check mod-check ## Run a few checks before pushing a change (docs, fmt, mod, etc.) @@ -68,6 +68,9 @@ test: ## run unit and integration tests test-acceptance: ## run acceptance tests TF_ACC=1 go test -run "^TestAcc_" -v -cover -timeout=30m ./... +test-architecture: ## check architecture constraints between packages + go test ./pkg/architests/... -v + build-local: ## build the binary locally go build -o $(BASE_BINARY_NAME) . diff --git a/pkg/architest/architest_test.go b/pkg/architest/architest_test.go new file mode 100644 index 0000000000..419f78c503 --- /dev/null +++ b/pkg/architest/architest_test.go @@ -0,0 +1,357 @@ +package architest_test + +import ( + "fmt" + "regexp" + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/architest" + "github.com/stretchr/testify/assert" +) + +func Test_Directory(t *testing.T) { + t.Run("fail to use non-existing directory", func(t *testing.T) { + assert.Panics(t, func() { + architest.Directory("testdata/non_existing") + }) + }) + + t.Run("use directory", func(t *testing.T) { + assert.NotPanics(t, func() { + architest.Directory("testdata/dir1") + }) + }) + + tests1 := []struct { + directory string + expectedFileNames []string + expectedPackageNames []string + }{ + {directory: "testdata/dir1", expectedFileNames: []string{"testdata/dir1/sample1.go", "testdata/dir1/sample2.go", "testdata/dir1/different1.go"}, expectedPackageNames: []string{"dir1"}}, + {directory: "testdata/dir2", expectedFileNames: []string{"testdata/dir2/sample1.go", "testdata/dir2/sample1_test.go"}, expectedPackageNames: []string{"dir2", "dir2_test"}}, + {directory: "testdata/dir3", expectedFileNames: []string{"testdata/dir3/sample1.go", "testdata/dir3/sample1_acceptance_test.go"}, expectedPackageNames: []string{"dir3", "dir3_test"}}, + {directory: "testdata/dir4", expectedFileNames: []string{"testdata/dir4/sample1.go", "testdata/dir4/sample1_integration_test.go"}, expectedPackageNames: []string{"dir4", "dir4_test"}}, + } + for _, tt := range tests1 { + t.Run("list all files in the given directory", func(t *testing.T) { + dir := architest.Directory(tt.directory) + + allFiles := dir.AllFiles() + assert.Len(t, allFiles, len(tt.expectedFileNames)) + + fileNames := make([]string, 0, len(allFiles)) + for _, f := range allFiles { + fileNames = append(fileNames, f.Name()) + } + assert.ElementsMatch(t, fileNames, tt.expectedFileNames) + + packageNames := make(map[string]bool) + for _, f := range allFiles { + packageNames[f.PackageName()] = true + } + assert.Len(t, packageNames, len(tt.expectedPackageNames)) + for _, name := range tt.expectedPackageNames { + 
assert.Contains(t, packageNames, name) + } + }) + } + + tests2 := []struct { + directory string + filter architest.FileFilter + expectedFileNames []string + }{ + {directory: "testdata/dir1", filter: architest.FileNameFilterProvider("sample"), expectedFileNames: []string{"testdata/dir1/sample1.go", "testdata/dir1/sample2.go"}}, + {directory: "testdata/dir1", filter: architest.FileNameRegexFilterProvider(regexp.MustCompile("sample")), expectedFileNames: []string{"testdata/dir1/sample1.go", "testdata/dir1/sample2.go"}}, + {directory: "testdata/dir1", filter: architest.FileNameFilterWithExclusionsProvider(regexp.MustCompile("sample"), regexp.MustCompile("sample1")), expectedFileNames: []string{"testdata/dir1/sample2.go"}}, + {directory: "testdata/dir2", filter: architest.PackageFilterProvider("dir2"), expectedFileNames: []string{"testdata/dir2/sample1.go"}}, + {directory: "testdata/dir2", filter: architest.PackageFilterProvider("dir2_test"), expectedFileNames: []string{"testdata/dir2/sample1_test.go"}}, + {directory: "testdata/dir2", filter: architest.FileNameRegexFilterProvider(architest.AcceptanceTestFileRegex), expectedFileNames: []string{}}, + {directory: "testdata/dir3", filter: architest.FileNameRegexFilterProvider(architest.AcceptanceTestFileRegex), expectedFileNames: []string{"testdata/dir3/sample1_acceptance_test.go"}}, + {directory: "testdata/dir4", filter: architest.FileNameRegexFilterProvider(architest.AcceptanceTestFileRegex), expectedFileNames: []string{}}, + {directory: "testdata/dir2", filter: architest.FileNameRegexFilterProvider(architest.IntegrationTestFileRegex), expectedFileNames: []string{}}, + {directory: "testdata/dir3", filter: architest.FileNameRegexFilterProvider(architest.IntegrationTestFileRegex), expectedFileNames: []string{}}, + {directory: "testdata/dir4", filter: architest.FileNameRegexFilterProvider(architest.IntegrationTestFileRegex), expectedFileNames: []string{"testdata/dir4/sample1_integration_test.go"}}, + {directory: "testdata/dir2", filter: architest.FileNameRegexFilterProvider(architest.TestFileRegex), expectedFileNames: []string{"testdata/dir2/sample1_test.go"}}, + {directory: "testdata/dir3", filter: architest.FileNameRegexFilterProvider(architest.TestFileRegex), expectedFileNames: []string{"testdata/dir3/sample1_acceptance_test.go"}}, + {directory: "testdata/dir4", filter: architest.FileNameRegexFilterProvider(architest.TestFileRegex), expectedFileNames: []string{"testdata/dir4/sample1_integration_test.go"}}, + } + for _, tt := range tests2 { + t.Run("list only files matching filter in the given directory", func(t *testing.T) { + dir := architest.Directory(tt.directory) + + filteredFiles := dir.Files(tt.filter) + assert.Len(t, filteredFiles, len(tt.expectedFileNames)) + + fileNames := make([]string, 0, len(filteredFiles)) + for _, f := range filteredFiles { + fileNames = append(fileNames, f.Name()) + } + assert.ElementsMatch(t, fileNames, tt.expectedFileNames) + + // now exactly the same but indirectly + filteredFiles = dir.AllFiles().Filter(tt.filter) + assert.Len(t, filteredFiles, len(tt.expectedFileNames)) + + fileNames = make([]string, 0, len(filteredFiles)) + for _, f := range filteredFiles { + fileNames = append(fileNames, f.Name()) + } + assert.ElementsMatch(t, fileNames, tt.expectedFileNames) + }) + } +} + +func Test_Files(t *testing.T) { + t.Run("fail to use non-existing file", func(t *testing.T) { + assert.Panics(t, func() { + architest.NewFileFromPath("testdata/dir1/non_existing.go") + }) + }) + + t.Run("use file", func(t *testing.T) { + 
assert.NotPanics(t, func() { + architest.NewFileFromPath("testdata/dir1/sample1.go") + }) + }) + + tests1 := []struct { + filePath string + expectedMethodNames []string + }{ + {filePath: "testdata/dir1/sample1.go", expectedMethodNames: []string{}}, + {filePath: "testdata/dir1/sample2.go", expectedMethodNames: []string{"A"}}, + } + for _, tt := range tests1 { + t.Run("list all methods in file", func(t *testing.T) { + file := architest.NewFileFromPath(tt.filePath) + + exportedMethods := file.ExportedMethods() + assert.Len(t, exportedMethods, len(tt.expectedMethodNames)) + + methodNames := make([]string, 0, len(exportedMethods)) + for _, m := range exportedMethods { + methodNames = append(methodNames, m.Name()) + } + assert.ElementsMatch(t, methodNames, tt.expectedMethodNames) + }) + } + + tests2 := []struct { + fileNames []string + }{ + {fileNames: []string{}}, + {fileNames: []string{"a"}}, + {fileNames: []string{"a", "A"}}, + {fileNames: []string{"A", "a"}}, + {fileNames: []string{"A", "B", "C"}}, + } + for _, tt := range tests2 { + t.Run("receiver invoked for every file", func(t *testing.T) { + files := make(architest.Files, 0, len(tt.fileNames)) + for _, f := range tt.fileNames { + files = append(files, *architest.NewFile("package", f, nil)) + } + invokedFiles := make([]string, 0) + receiver := func(f *architest.File) { + invokedFiles = append(invokedFiles, f.Name()) + } + + files.All(receiver) + + assert.ElementsMatch(t, tt.fileNames, invokedFiles) + }) + } +} + +func Test_Methods(t *testing.T) { + tests := []struct { + methodNames []string + }{ + {methodNames: []string{}}, + {methodNames: []string{"a"}}, + {methodNames: []string{"a", "A"}}, + {methodNames: []string{"A", "a"}}, + {methodNames: []string{"A", "B", "C"}}, + } + for _, tt := range tests { + t.Run("receiver invoked for every method", func(t *testing.T) { + methods := make(architest.Methods, 0, len(tt.methodNames)) + for _, m := range tt.methodNames { + methods = append(methods, *architest.NewMethod(m, nil)) + } + invokedMethods := make([]string, 0) + receiver := func(m *architest.Method) { + invokedMethods = append(invokedMethods, m.Name()) + } + + methods.All(receiver) + + assert.ElementsMatch(t, tt.methodNames, invokedMethods) + }) + } +} + +func Test_Assertions(t *testing.T) { + tests1 := []struct { + filePath string + expectedPackage string + }{ + {filePath: "testdata/dir1/sample1.go", expectedPackage: "dir1"}, + {filePath: "testdata/dir2/sample1.go", expectedPackage: "dir2"}, + {filePath: "testdata/dir2/sample1_test.go", expectedPackage: "dir2_test"}, + } + for _, tt := range tests1 { + t.Run("file package assertions", func(t *testing.T) { + file := architest.NewFileFromPath(tt.filePath) + tut1 := &testing.T{} + tut2 := &testing.T{} + + file.AssertHasPackage(tut1, tt.expectedPackage) + file.AssertHasPackage(tut2, "some_other_package") + + assert.Equal(t, false, tut1.Failed()) + assert.Equal(t, true, tut2.Failed()) + }) + } + + tests2 := []struct { + methodName string + correct bool + }{ + {methodName: "TestAcc_abc", correct: true}, + {methodName: "TestAcc_TestAcc_Test", correct: true}, + {methodName: "TestAcc_", correct: false}, + {methodName: "ATestAcc_", correct: false}, + {methodName: "TestAcc", correct: false}, + {methodName: "Test_", correct: false}, + {methodName: "Test", correct: false}, + {methodName: "Test_asdf", correct: false}, + {methodName: "TestAccc_", correct: false}, + {methodName: "TestInt_Abc", correct: false}, + } + for _, tt := range tests2 { + t.Run(fmt.Sprintf("acceptance test name assertions for 
method %s", tt.methodName), func(t *testing.T) { + file := architest.NewFileFromPath("testdata/dir1/sample1.go") + method := architest.NewMethod(tt.methodName, file) + tut := &testing.T{} + + method.AssertAcceptanceTestNamedCorrectly(tut) + + assert.Equal(t, !tt.correct, tut.Failed()) + }) + } + + tests3 := []struct { + methodName string + regexRaw string + correct bool + }{ + {methodName: "sample1", regexRaw: "sample", correct: true}, + {methodName: "Sample1", regexRaw: "sample", correct: false}, + {methodName: "Sample1", regexRaw: "Sample", correct: true}, + } + for _, tt := range tests3 { + t.Run(fmt.Sprintf("matching and not matching method name assertions for %s", tt.methodName), func(t *testing.T) { + file := architest.NewFileFromPath("testdata/dir1/sample1.go") + method := architest.NewMethod(tt.methodName, file) + tut1 := &testing.T{} + tut2 := &testing.T{} + + method.AssertNameMatches(tut1, regexp.MustCompile(tt.regexRaw)) + method.AssertNameDoesNotMatch(tut2, regexp.MustCompile(tt.regexRaw)) + + assert.Equal(t, !tt.correct, tut1.Failed()) + assert.Equal(t, tt.correct, tut2.Failed()) + }) + } + + tests4 := []struct { + methodName string + correct bool + }{ + {methodName: "Test", correct: true}, + {methodName: "aTest", correct: false}, + {methodName: "Test_", correct: true}, + {methodName: "Test_adsfadf", correct: true}, + } + for _, tt := range tests4 { + t.Run(fmt.Sprintf("test name assertions for method %s", tt.methodName), func(t *testing.T) { + file := architest.NewFileFromPath("testdata/dir1/sample1.go") + method := architest.NewMethod(tt.methodName, file) + tut := &testing.T{} + + method.AssertTestNamedCorrectly(tut) + + assert.Equal(t, !tt.correct, tut.Failed()) + }) + } + + tests5 := []struct { + methodName string + correct bool + }{ + {methodName: "TestInt_abc", correct: true}, + {methodName: "TestInt_TestInt_Test", correct: true}, + {methodName: "TestInt_", correct: false}, + {methodName: "ATestInt_", correct: false}, + {methodName: "TestInt", correct: false}, + {methodName: "Test_", correct: false}, + {methodName: "Test", correct: false}, + {methodName: "Test_asdf", correct: false}, + {methodName: "TestIntt_", correct: false}, + {methodName: "TestAcc_Abc", correct: false}, + } + for _, tt := range tests5 { + t.Run(fmt.Sprintf("integration test name assertions for method %s", tt.methodName), func(t *testing.T) { + file := architest.NewFileFromPath("testdata/dir1/sample1.go") + method := architest.NewMethod(tt.methodName, file) + tut := &testing.T{} + + method.AssertIntegrationTestNamedCorrectly(tut) + + assert.Equal(t, !tt.correct, tut.Failed()) + }) + } +} + +func Test_SampleArchiTestUsage(t *testing.T) { + t.Run("acceptance tests", func(t *testing.T) { + acceptanceTestFiles := architest.Directory("testdata/dir3/"). + AllFiles(). + Filter(architest.FileNameRegexFilterProvider(architest.AcceptanceTestFileRegex)) + + acceptanceTestFiles.All(func(file *architest.File) { + file.AssertHasPackage(t, "dir3_test") + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertAcceptanceTestNamedCorrectly(t) + }) + }) + }) + + t.Run("integration tests", func(t *testing.T) { + integrationTestFiles := architest.Directory("testdata/dir4/"). + AllFiles().
+ Filter(architest.FileNameRegexFilterProvider(architest.IntegrationTestFileRegex)) + + integrationTestFiles.All(func(file *architest.File) { + file.AssertHasPackage(t, "dir4_test") + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertIntegrationTestNamedCorrectly(t) + }) + }) + }) + + t.Run("tests", func(t *testing.T) { + testFiles := architest.Directory("testdata/dir2/"). + AllFiles(). + Filter(architest.FileNameRegexFilterProvider(architest.TestNameRegex)) + + testFiles.All(func(file *architest.File) { + file.AssertHasPackage(t, "dir2_test") + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertTestNamedCorrectly(t) + }) + }) + }) +} diff --git a/pkg/architest/assertions.go b/pkg/architest/assertions.go new file mode 100644 index 0000000000..41859482de --- /dev/null +++ b/pkg/architest/assertions.go @@ -0,0 +1,38 @@ +package architest + +import ( + "regexp" + "testing" + + "github.com/stretchr/testify/assert" +) + +func (f *File) AssertHasPackage(t *testing.T, expectedPackage string) { + t.Helper() + assert.Equalf(t, expectedPackage, f.packageName, "file %s has package %s, expected package %s", f.Name(), f.PackageName(), expectedPackage) +} + +func (method *Method) AssertAcceptanceTestNamedCorrectly(t *testing.T) { + t.Helper() + method.AssertNameMatches(t, AcceptanceTestNameRegex) +} + +func (method *Method) AssertIntegrationTestNamedCorrectly(t *testing.T) { + t.Helper() + method.AssertNameMatches(t, IntegrationTestNameRegex) +} + +func (method *Method) AssertTestNamedCorrectly(t *testing.T) { + t.Helper() + method.AssertNameMatches(t, TestNameRegex) +} + +func (method *Method) AssertNameMatches(t *testing.T, regex *regexp.Regexp) { + t.Helper() + assert.Truef(t, regex.MatchString(method.Name()), "file %s contains exported method %s which does not match %s", method.FileName(), method.Name(), regex.String()) +} + +func (method *Method) AssertNameDoesNotMatch(t *testing.T, regex *regexp.Regexp) { + t.Helper() + assert.Falsef(t, regex.MatchString(method.Name()), "file %s contains exported method %s which matches %s", method.FileName(), method.Name(), regex.String()) +} diff --git a/pkg/architest/directory.go b/pkg/architest/directory.go new file mode 100644 index 0000000000..d289765632 --- /dev/null +++ b/pkg/architest/directory.go @@ -0,0 +1,41 @@ +package architest + +import ( + "go/parser" + "go/token" +) + +type directory struct { + path string + files Files +} + +type FilesProvider interface { + AllFiles() Files + Files(filter FileFilter) Files +} + +func Directory(path string) FilesProvider { + packagesDict, err := parser.ParseDir(token.NewFileSet(), path, nil, 0) + if err != nil { + panic(err) + } + files := make(Files, 0) + for packageName, astPackage := range packagesDict { + for fileName, fileSrc := range astPackage.Files { + files = append(files, *NewFile(packageName, fileName, fileSrc)) + } + } + return &directory{ + path: path, + files: files, + } +} + +func (d *directory) AllFiles() Files { + return d.files +} + +func (d *directory) Files(filter FileFilter) Files { + return d.AllFiles().Filter(filter) +} diff --git a/pkg/architest/file.go b/pkg/architest/file.go new file mode 100644 index 0000000000..a2d781a719 --- /dev/null +++ b/pkg/architest/file.go @@ -0,0 +1,51 @@ +package architest + +import ( + "go/ast" + "go/parser" + "go/token" + "path/filepath" +) + +type File struct { + packageName string + fileName string + fileSrc *ast.File +} + +func NewFile(packageName string, fileName string, fileSrc *ast.File) *File { + return 
&File{ + packageName: packageName, + fileName: fileName, + fileSrc: fileSrc, + } +} + +func NewFileFromPath(path string) *File { + file, err := parser.ParseFile(token.NewFileSet(), path, nil, 0) + if err != nil { + panic(err) + } + return NewFile(file.Name.Name, filepath.Base(path), file) +} + +func (f *File) PackageName() string { + return f.packageName +} + +func (f *File) Name() string { + return f.fileName +} + +func (f *File) ExportedMethods() Methods { + allExportedMethods := make(Methods, 0) + for _, d := range f.fileSrc.Decls { + if v, ok := d.(*ast.FuncDecl); ok { + name := v.Name.Name + if ast.IsExported(name) { + allExportedMethods = append(allExportedMethods, *NewMethod(name, f)) + } + } + } + return allExportedMethods +} diff --git a/pkg/architest/file_filter_providers.go b/pkg/architest/file_filter_providers.go new file mode 100644 index 0000000000..4f59d07dc0 --- /dev/null +++ b/pkg/architest/file_filter_providers.go @@ -0,0 +1,35 @@ +package architest + +import ( + "fmt" + "regexp" +) + +func FileNameFilterProvider(text string) FileFilter { + regex := regexp.MustCompile(fmt.Sprintf(`^.*%s.*$`, text)) + return func(f *File) bool { + return regex.Match([]byte(f.fileName)) + } +} + +func FileNameRegexFilterProvider(regex *regexp.Regexp) FileFilter { + return func(f *File) bool { + return regex.Match([]byte(f.fileName)) + } +} + +func FileNameFilterWithExclusionsProvider(regex *regexp.Regexp, exclusionRegex ...*regexp.Regexp) FileFilter { + return func(f *File) bool { + matches := regex.MatchString(f.fileName) + for _, e := range exclusionRegex { + matches = matches && !e.MatchString(f.fileName) + } + return matches + } +} + +func PackageFilterProvider(packageName string) FileFilter { + return func(f *File) bool { + return f.packageName == packageName + } +} diff --git a/pkg/architest/files.go b/pkg/architest/files.go new file mode 100644 index 0000000000..6b73d2b039 --- /dev/null +++ b/pkg/architest/files.go @@ -0,0 +1,25 @@ +package architest + +type ( + FileFilter = func(*File) bool + FileReceiver = func(*File) + Files []File +) + +func (files Files) Filter(filter FileFilter) Files { + filteredFiles := make([]File, 0) + for _, f := range files { + f := f + if filter(&f) { + filteredFiles = append(filteredFiles, f) + } + } + return filteredFiles +} + +func (files Files) All(receiver FileReceiver) { + for _, file := range files { + file := file + receiver(&file) + } +} diff --git a/pkg/architest/method.go b/pkg/architest/method.go new file mode 100644 index 0000000000..17bbe597c3 --- /dev/null +++ b/pkg/architest/method.go @@ -0,0 +1,21 @@ +package architest + +type Method struct { + name string + file *File +} + +func (method *Method) Name() string { + return method.name +} + +func (method *Method) FileName() string { + return method.file.Name() +} + +func NewMethod(name string, file *File) *Method { + return &Method{ + name: name, + file: file, + } +} diff --git a/pkg/architest/methods.go b/pkg/architest/methods.go new file mode 100644 index 0000000000..b94f8a8d75 --- /dev/null +++ b/pkg/architest/methods.go @@ -0,0 +1,13 @@ +package architest + +type ( + MethodReceiver = func(method *Method) + Methods []Method +) + +func (methods Methods) All(receiver MethodReceiver) { + for _, method := range methods { + method := method + receiver(&method) + } +} diff --git a/pkg/architest/regex.go b/pkg/architest/regex.go new file mode 100644 index 0000000000..2876dfed79 --- /dev/null +++ b/pkg/architest/regex.go @@ -0,0 +1,12 @@ +package architest + +import "regexp" + +var ( + 
AcceptanceTestFileRegex = regexp.MustCompile("^.*_acceptance_test.go$") + AcceptanceTestNameRegex = regexp.MustCompile("^TestAcc_.+$") + IntegrationTestFileRegex = regexp.MustCompile("^.*_integration_test.go$") + IntegrationTestNameRegex = regexp.MustCompile("^TestInt_.+$") + TestFileRegex = regexp.MustCompile("^.*_test.go$") + TestNameRegex = regexp.MustCompile("^Test.*$") +) diff --git a/pkg/architest/testdata/dir1/different1.go b/pkg/architest/testdata/dir1/different1.go new file mode 100644 index 0000000000..b719eadc09 --- /dev/null +++ b/pkg/architest/testdata/dir1/different1.go @@ -0,0 +1 @@ +package dir1 diff --git a/pkg/architest/testdata/dir1/sample1.go b/pkg/architest/testdata/dir1/sample1.go new file mode 100644 index 0000000000..b719eadc09 --- /dev/null +++ b/pkg/architest/testdata/dir1/sample1.go @@ -0,0 +1 @@ +package dir1 diff --git a/pkg/architest/testdata/dir1/sample2.go b/pkg/architest/testdata/dir1/sample2.go new file mode 100644 index 0000000000..7ba18f8743 --- /dev/null +++ b/pkg/architest/testdata/dir1/sample2.go @@ -0,0 +1,5 @@ +package dir1 + +func A() {} + +func a() {} diff --git a/pkg/architest/testdata/dir2/sample1.go b/pkg/architest/testdata/dir2/sample1.go new file mode 100644 index 0000000000..6fe35e9e59 --- /dev/null +++ b/pkg/architest/testdata/dir2/sample1.go @@ -0,0 +1 @@ +package dir2 diff --git a/pkg/architest/testdata/dir2/sample1_test.go b/pkg/architest/testdata/dir2/sample1_test.go new file mode 100644 index 0000000000..f263d411f3 --- /dev/null +++ b/pkg/architest/testdata/dir2/sample1_test.go @@ -0,0 +1,7 @@ +package dir2_test + +import "testing" + +func Test(t *testing.T) { + t.Skip("Test for example purposes") +} diff --git a/pkg/architest/testdata/dir3/sample1.go b/pkg/architest/testdata/dir3/sample1.go new file mode 100644 index 0000000000..6ea203550b --- /dev/null +++ b/pkg/architest/testdata/dir3/sample1.go @@ -0,0 +1 @@ +package dir3 diff --git a/pkg/architest/testdata/dir3/sample1_acceptance_test.go b/pkg/architest/testdata/dir3/sample1_acceptance_test.go new file mode 100644 index 0000000000..0eab424337 --- /dev/null +++ b/pkg/architest/testdata/dir3/sample1_acceptance_test.go @@ -0,0 +1,7 @@ +package dir3_test + +import "testing" + +func TestAcc_Abc(t *testing.T) { + t.Skip("Test for example purposes") +} diff --git a/pkg/architest/testdata/dir4/sample1.go b/pkg/architest/testdata/dir4/sample1.go new file mode 100644 index 0000000000..5f6819e3fe --- /dev/null +++ b/pkg/architest/testdata/dir4/sample1.go @@ -0,0 +1 @@ +package dir4 diff --git a/pkg/architest/testdata/dir4/sample1_integration_test.go b/pkg/architest/testdata/dir4/sample1_integration_test.go new file mode 100644 index 0000000000..33cedbbf4e --- /dev/null +++ b/pkg/architest/testdata/dir4/sample1_integration_test.go @@ -0,0 +1,7 @@ +package dir4_test + +import "testing" + +func TestInt_Abc(t *testing.T) { + t.Skip("Test for example purposes") +} diff --git a/pkg/architests/datasources_acceptance_tests_arch_test.go b/pkg/architests/datasources_acceptance_tests_arch_test.go new file mode 100644 index 0000000000..8131db1b8c --- /dev/null +++ b/pkg/architests/datasources_acceptance_tests_arch_test.go @@ -0,0 +1,47 @@ +package architests + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/architest" +) + +func TestArchCheck_AcceptanceTests_DataSources(t *testing.T) { + datasourcesFiles := architest.Directory("../datasources/").AllFiles() + acceptanceTestFiles := 
datasourcesFiles.Filter(architest.FileNameRegexFilterProvider(architest.AcceptanceTestFileRegex)) + + t.Run("acceptance tests files have the right package", func(t *testing.T) { + acceptanceTestFiles.All(func(file *architest.File) { + file.AssertHasPackage(t, "datasources_test") + }) + }) + + t.Run("acceptance tests are named correctly", func(t *testing.T) { + acceptanceTestFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertAcceptanceTestNamedCorrectly(t) + }) + }) + }) + + t.Run("there are no acceptance tests in other test files in the directory", func(t *testing.T) { + otherTestFiles := datasourcesFiles.Filter(architest.FileNameFilterWithExclusionsProvider(architest.TestFileRegex, architest.AcceptanceTestFileRegex)) + + otherTestFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertNameDoesNotMatch(t, architest.AcceptanceTestNameRegex) + method.AssertTestNamedCorrectly(t) + }) + }) + }) + + t.Run("there are only acceptance tests in package datasources_test", func(t *testing.T) { + packageFiles := datasourcesFiles.Filter(architest.PackageFilterProvider("datasources_test")) + + packageFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertAcceptanceTestNamedCorrectly(t) + }) + }) + }) +} diff --git a/pkg/architests/resources_acceptance_tests_arch_test.go b/pkg/architests/resources_acceptance_tests_arch_test.go new file mode 100644 index 0000000000..008df3df01 --- /dev/null +++ b/pkg/architests/resources_acceptance_tests_arch_test.go @@ -0,0 +1,48 @@ +package architests + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/architest" +) + +func TestArchCheck_AcceptanceTests_Resources(t *testing.T) { + resourcesFiles := architest.Directory("../resources/").AllFiles() + acceptanceTestFiles := resourcesFiles.Filter(architest.FileNameRegexFilterProvider(architest.AcceptanceTestFileRegex)) + + t.Run("acceptance tests files have the right package", func(t *testing.T) { + acceptanceTestFiles.All(func(file *architest.File) { + file.AssertHasPackage(t, "resources_test") + }) + }) + + t.Run("acceptance tests are named correctly", func(t *testing.T) { + acceptanceTestFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertAcceptanceTestNamedCorrectly(t) + }) + }) + }) + + t.Run("there are no acceptance tests in other test files in the directory", func(t *testing.T) { + otherTestFiles := resourcesFiles.Filter(architest.FileNameFilterWithExclusionsProvider(architest.TestFileRegex, architest.AcceptanceTestFileRegex)) + + otherTestFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertNameDoesNotMatch(t, architest.AcceptanceTestNameRegex) + method.AssertTestNamedCorrectly(t) + }) + }) + }) + + t.Run("there are only acceptance tests in package resources_test", func(t *testing.T) { + t.Skipf("Currently there are non-acceptance tests in resources_test package") + packageFiles := resourcesFiles.Filter(architest.PackageFilterProvider("resources_test")) + + packageFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertAcceptanceTestNamedCorrectly(t) + }) + }) + }) +} diff --git a/pkg/architests/sdk_integration_tests_arch_test.go b/pkg/architests/sdk_integration_tests_arch_test.go new file mode 100644 index 
0000000000..2afb157dfe --- /dev/null +++ b/pkg/architests/sdk_integration_tests_arch_test.go @@ -0,0 +1,50 @@ +package architests + +import ( + "testing" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/architest" +) + +func TestArchCheck_IntegrationTests_Sdk(t *testing.T) { + sdkIntegrationTestFiles := architest.Directory("../sdk/testint/").AllFiles() + integrationTestFiles := sdkIntegrationTestFiles.Filter(architest.FileNameRegexFilterProvider(architest.IntegrationTestFileRegex)) + + t.Run("integration tests files have the right package", func(t *testing.T) { + integrationTestFiles.All(func(file *architest.File) { + file.AssertHasPackage(t, "testint") + }) + }) + + t.Run("integration tests are named correctly", func(t *testing.T) { + integrationTestFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertIntegrationTestNamedCorrectly(t) + }) + }) + }) + + t.Run("there are no integration tests in other test files in the directory", func(t *testing.T) { + otherTestFiles := sdkIntegrationTestFiles.Filter(architest.FileNameFilterWithExclusionsProvider(architest.TestFileRegex, architest.IntegrationTestFileRegex)) + + otherTestFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + method.AssertNameDoesNotMatch(t, architest.IntegrationTestNameRegex) + method.AssertTestNamedCorrectly(t) + }) + }) + }) + + t.Run("there are only integration tests in package testint", func(t *testing.T) { + packageFiles := sdkIntegrationTestFiles.Filter(architest.PackageFilterProvider("testint")) + + packageFiles.All(func(file *architest.File) { + file.ExportedMethods().All(func(method *architest.Method) { + // our integration tests have TestMain, let's filter it out now (maybe later we can support it in architest) + if method.Name() != "TestMain" { + method.AssertIntegrationTestNamedCorrectly(t) + } + }) + }) + }) +} diff --git a/pkg/resources/database_grant_acceptance_test.go b/pkg/resources/database_grant_acceptance_test.go index 1bb7e9e040..55de7bf4c3 100644 --- a/pkg/resources/database_grant_acceptance_test.go +++ b/pkg/resources/database_grant_acceptance_test.go @@ -16,7 +16,7 @@ func testRolesAndShares(t *testing.T, path string, roles []string) func(*terrafo return func(state *terraform.State) error { is := state.RootModule().Resources[path].Primary - if c, ok := is.Attributes["roles.#"]; !ok || MustParseInt(t, c) != int64(len(roles)) { + if c, ok := is.Attributes["roles.#"]; !ok || mustParseInt(t, c) != int64(len(roles)) { return fmt.Errorf("expected roles.# to equal %d but got %s", len(roles), c) } r, err := extractList(is.Attributes, "roles") diff --git a/pkg/resources/role_grants_acceptance_test.go b/pkg/resources/role_grants_acceptance_test.go index 8bb7f0f403..0c143abd6a 100644 --- a/pkg/resources/role_grants_acceptance_test.go +++ b/pkg/resources/role_grants_acceptance_test.go @@ -15,7 +15,7 @@ import ( "github.com/hashicorp/terraform-plugin-testing/terraform" ) -func MustParseInt(t *testing.T, input string) int64 { +func mustParseInt(t *testing.T, input string) int64 { t.Helper() i, err := strconv.ParseInt(input, 10, 64) if err != nil { @@ -61,7 +61,7 @@ func testCheckRolesAndUsers(t *testing.T, path string, roles, users []string) fu t.Helper() return func(state *terraform.State) error { is := state.RootModule().Resources[path].Primary - if c, ok := is.Attributes["roles.#"]; !ok ||
mustParseInt(t, c) != int64(len(roles)) { return fmt.Errorf("expected roles.# to equal %d but got %s", len(roles), c) } r, err := extractList(is.Attributes, "roles") @@ -74,7 +74,7 @@ func testCheckRolesAndUsers(t *testing.T, path string, roles, users []string) fu return fmt.Errorf("expected roles %#v but got %#v", roles, r) } - if c, ok := is.Attributes["users.#"]; !ok || MustParseInt(t, c) != int64(len(users)) { + if c, ok := is.Attributes["users.#"]; !ok || mustParseInt(t, c) != int64(len(users)) { return fmt.Errorf("expected users.# to equal %d but got %s", len(users), c) } u, err := extractList(is.Attributes, "users") diff --git a/pkg/sdk/testint/parsers.go b/pkg/sdk/testint/parsers.go index 86d8a90fef..99114fb599 100644 --- a/pkg/sdk/testint/parsers.go +++ b/pkg/sdk/testint/parsers.go @@ -2,7 +2,7 @@ package testint import "time" -func ParseTimestampWithOffset(s string) (*time.Time, error) { +func parseTimestampWithOffset(s string) (*time.Time, error) { t, err := time.Parse("2006-01-02T15:04:05-07:00", s) if err != nil { return nil, err diff --git a/pkg/sdk/testint/resource_monitors_integration_test.go b/pkg/sdk/testint/resource_monitors_integration_test.go index 86e19aa5fc..904a198c8d 100644 --- a/pkg/sdk/testint/resource_monitors_integration_test.go +++ b/pkg/sdk/testint/resource_monitors_integration_test.go @@ -235,9 +235,9 @@ func TestInt_ResourceMonitorAlter(t *testing.T) { assert.Equal(t, 1, len(resourceMonitors)) resourceMonitor = &resourceMonitors[0] assert.Equal(t, *frequency, resourceMonitor.Frequency) - startTime, err := ParseTimestampWithOffset(resourceMonitor.StartTime) + startTime, err := parseTimestampWithOffset(resourceMonitor.StartTime) require.NoError(t, err) - endTime, err := ParseTimestampWithOffset(resourceMonitor.EndTime) + endTime, err := parseTimestampWithOffset(resourceMonitor.EndTime) require.NoError(t, err) assert.Equal(t, startTimeStamp, startTime.Format("2006-01-01 15:04")) assert.Equal(t, endTimeStamp, endTime.Format("2006-01-01 15:04")) diff --git a/pkg/sdk/testint/setup_integration_test.go b/pkg/sdk/testint/setup_test.go similarity index 100% rename from pkg/sdk/testint/setup_integration_test.go rename to pkg/sdk/testint/setup_test.go From 196134cbf91996eabc50bdc586a657fe7ac71900 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Thu, 30 Nov 2023 13:56:55 +0100 Subject: [PATCH 2/5] feat: Add unsafe_execute resource (#2225) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * add unsafe exec resource * Rename resource and make the basic tests pass * Simplify tests setup * Rename resource args * Fix checking db existence * Add random part to id * Rename migration * Remove unnecessary config param * Remove duplicated checks * Extract function to generate capitalized name * Test escaped identifier * Test unsafe execute with grants * Add TODO * Add test with HCL for_each * Fix before pushing * Update docs with warning * Update example usage in docs * Add negative tests * Add read (WIP) * Pass test without query * Add test for query removed * Move unsafe client and implementation to SDK * Fix query unsafe * Update docs and change query behavior * Update docs * Update logs * Fix test --------- Co-authored-by: Jan Cieślak --- docs/resources/unsafe_execute.md | 143 ++++ .../snowflake_unsafe_execute/resource.tf | 104 +++ pkg/acceptance/testing.go | 8 + pkg/provider/provider.go | 1 + .../TestAcc_UnsafeExecute_commonSetup/test.tf | 4 + .../variables.tf | 7 + .../test.tf | 5 + .../variables.tf | 7 + 
.../TestAcc_UnsafeExecute_withRead/test.tf | 5 + .../variables.tf | 11 + pkg/resources/unsafe_execute.go | 156 ++++ .../unsafe_execute_acceptance_test.go | 746 ++++++++++++++++++ pkg/sdk/client_extensions_unsafe.go | 65 ++ ...ient_unsafe_extensions_integration_test.go | 54 ++ templates/resources/unsafe_execute.md.tmpl | 33 + 15 files changed, 1349 insertions(+) create mode 100644 docs/resources/unsafe_execute.md create mode 100644 examples/resources/snowflake_unsafe_execute/resource.tf create mode 100644 pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf create mode 100644 pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/variables.tf create mode 100644 pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf create mode 100644 pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/variables.tf create mode 100644 pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf create mode 100644 pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/variables.tf create mode 100644 pkg/resources/unsafe_execute.go create mode 100644 pkg/resources/unsafe_execute_acceptance_test.go create mode 100644 pkg/sdk/client_extensions_unsafe.go create mode 100644 pkg/sdk/testint/client_unsafe_extensions_integration_test.go create mode 100644 templates/resources/unsafe_execute.md.tmpl diff --git a/docs/resources/unsafe_execute.md b/docs/resources/unsafe_execute.md new file mode 100644 index 0000000000..e43ad8a456 --- /dev/null +++ b/docs/resources/unsafe_execute.md @@ -0,0 +1,143 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "snowflake_unsafe_execute Resource - terraform-provider-snowflake" +subcategory: "" +description: |- + Experimental resource used for testing purposes only. Allows to execute ANY SQL statement. +--- + +# snowflake_unsafe_execute (Resource) + +!> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly when combined with other resources. Will be deleted in the upcoming versions. Use at your own risk. + +~> **Note** It can theoretically be used to manage resources that are not supported by the provider. This is risky and may break other resources if used incorrectly. + +~> **Note** Use the `query` parameter with caution. It will fetch **ALL** the results returned by the provided query. Try to limit the number of results by writing a query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. + +Experimental resource used for testing purposes only. Allows to execute ANY SQL statement.
+ +## Example Usage + +```terraform +################################## +### simple use cases +################################## + +# create and destroy resource +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# create and destroy resource using qualified name +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE \"abc\"" + revert = "DROP DATABASE \"abc\"" +} + +# with query +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} + +################################## +### grants example +################################## + +# grant and revoke privilege USAGE to ROLE on database +resource "snowflake_unsafe_execute" "test" { + execute = "GRANT USAGE ON DATABASE ABC TO ROLE XYZ" + revert = "REVOKE USAGE ON DATABASE ABC FROM ROLE XYZ" +} + +# grant and revoke with for_each +variable "database_grants" { + type = list(object({ + database_name = string + role_id = string + privileges = list(string) + })) +} + +resource "snowflake_unsafe_execute" "test" { + for_each = { for index, db_grant in var.database_grants : index => db_grant } + execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" + revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" +} + +################################## +### fixing bad configuration +################################## + +# bad revert - simple +# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "SELECT 1" +} + +# 2 - fix the revert first; resource won't be recreated +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# bad revert - complex (we assume that the problem is spotted after trying to change the execute) +# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "SELECT 1" +} + +# 2 - try to create different database; it will fail on bad destroy +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE XYZ" + revert = "SELECT 1" +} + +# 3 - fix the revert first +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# 4 - create different database updating revert also +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE XYZ" + revert = "DROP DATABASE XYZ" +} + +# bad query +# 1 - resource will be created; query_results will be empty +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "bad query" +} + +# 2 - fix the query; query_results will be calculated; resource won't be recreated +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} +``` + + +## Schema + +### Required + +- `execute` (String) SQL statement to execute. Forces recreation of resource on change. +- `revert` (String) SQL statement to revert the execute statement. Invoked when resource is being destroyed. 
+ +### Optional + +- `query` (String) Optional SQL statement to do a read. Invoked after creation and every time it is changed. + +### Read-Only + +- `id` (String) The ID of this resource. +- `query_results` (List of Map of String) List of key-value maps (text to text) retrieved after executing read query. Will be empty if the query results in an error. diff --git a/examples/resources/snowflake_unsafe_execute/resource.tf b/examples/resources/snowflake_unsafe_execute/resource.tf new file mode 100644 index 0000000000..efca62f160 --- /dev/null +++ b/examples/resources/snowflake_unsafe_execute/resource.tf @@ -0,0 +1,104 @@ +################################## +### simple use cases +################################## + +# create and destroy resource +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# create and destroy resource using qualified name +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE \"abc\"" + revert = "DROP DATABASE \"abc\"" +} + +# with query +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} + +################################## +### grants example +################################## + +# grant and revoke privilege USAGE to ROLE on database +resource "snowflake_unsafe_execute" "test" { + execute = "GRANT USAGE ON DATABASE ABC TO ROLE XYZ" + revert = "REVOKE USAGE ON DATABASE ABC FROM ROLE XYZ" +} + +# grant and revoke with for_each +variable "database_grants" { + type = list(object({ + database_name = string + role_id = string + privileges = list(string) + })) +} + +resource "snowflake_unsafe_execute" "test" { + for_each = { for index, db_grant in var.database_grants : index => db_grant } + execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" + revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" +} + +################################## +### fixing bad configuration +################################## + +# bad revert - simple +# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "SELECT 1" +} + +# 2 - fix the revert first; resource won't be recreated +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# bad revert - complex (we assume that the problem is spotted after trying to change the execute) +# 1 - resource created with a bad revert; it is constructed, revert is not validated before destroy happens +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "SELECT 1" +} + +# 2 - try to create different database; it will fail on bad destroy +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE XYZ" + revert = "SELECT 1" +} + +# 3 - fix the revert first +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" +} + +# 4 - create different database updating revert also +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE XYZ" + revert = "DROP DATABASE XYZ" +} + +# bad query +# 1 - resource will be created; query_results will be empty +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE 
ABC" + revert = "DROP DATABASE ABC" + query = "bad query" +} + +# 2 - fix the query; query_results will be calculated; resource won't be recreated +resource "snowflake_unsafe_execute" "test" { + execute = "CREATE DATABASE ABC" + revert = "DROP DATABASE ABC" + query = "SHOW DATABASES LIKE '%ABC%'" +} diff --git a/pkg/acceptance/testing.go b/pkg/acceptance/testing.go index a9601b1e5b..0cd29e41f4 100644 --- a/pkg/acceptance/testing.go +++ b/pkg/acceptance/testing.go @@ -83,3 +83,11 @@ func ConfigurationSameAsStepN(step int) func(config.TestStepConfigRequest) strin return filepath.Join("testdata", req.TestName, strconv.Itoa(step)) } } + +// ConfigurationDirectory should be used to obtain configuration if the same can be shared between multiple tests to avoid duplication of configuration and var files. +// Based on config.TestNameDirectory. Similar to config.StaticDirectory but prefixed provided directory with `testdata`. +func ConfigurationDirectory(directory string) func(config.TestStepConfigRequest) string { + return func(req config.TestStepConfigRequest) string { + return filepath.Join("testdata", directory) + } +} diff --git a/pkg/provider/provider.go b/pkg/provider/provider.go index e2270cf62c..f51cb5e706 100644 --- a/pkg/provider/provider.go +++ b/pkg/provider/provider.go @@ -476,6 +476,7 @@ func getResources() map[string]*schema.Resource { "snowflake_tag_association": resources.TagAssociation(), "snowflake_tag_masking_policy_association": resources.TagMaskingPolicyAssociation(), "snowflake_task": resources.Task(), + "snowflake_unsafe_execute": resources.UnsafeExecute(), "snowflake_user": resources.User(), "snowflake_user_ownership_grant": resources.UserOwnershipGrant(), "snowflake_user_public_keys": resources.UserPublicKeys(), diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf b/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf new file mode 100644 index 0000000000..a71f75afd3 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/test.tf @@ -0,0 +1,4 @@ +resource "snowflake_unsafe_execute" "test" { + execute = var.execute + revert = var.revert +} diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/variables.tf b/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/variables.tf new file mode 100644 index 0000000000..e1cdc1640a --- /dev/null +++ b/pkg/resources/testdata/TestAcc_UnsafeExecute_commonSetup/variables.tf @@ -0,0 +1,7 @@ +variable "execute" { + type = string +} + +variable "revert" { + type = string +} diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf b/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf new file mode 100644 index 0000000000..96c70bae50 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_unsafe_execute" "test" { + for_each = { for index, db_grant in var.database_grants : index => db_grant } + execute = "GRANT ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} TO ROLE ${each.value.role_id}" + revert = "REVOKE ${join(",", each.value.privileges)} ON DATABASE ${each.value.database_name} FROM ROLE ${each.value.role_id}" +} diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/variables.tf b/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/variables.tf new file mode 100644 index 0000000000..3a024e5399 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_UnsafeExecute_grantsComplex/variables.tf @@ -0,0 +1,7 @@ 
+variable "database_grants" { + type = list(object({ + database_name = string + role_id = string + privileges = list(string) + })) +} diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf b/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf new file mode 100644 index 0000000000..ff23b05f4e --- /dev/null +++ b/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/test.tf @@ -0,0 +1,5 @@ +resource "snowflake_unsafe_execute" "test" { + execute = var.execute + revert = var.revert + query = var.query +} diff --git a/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/variables.tf b/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/variables.tf new file mode 100644 index 0000000000..769ca5a105 --- /dev/null +++ b/pkg/resources/testdata/TestAcc_UnsafeExecute_withRead/variables.tf @@ -0,0 +1,11 @@ +variable "execute" { + type = string +} + +variable "revert" { + type = string +} + +variable "query" { + type = string +} diff --git a/pkg/resources/unsafe_execute.go b/pkg/resources/unsafe_execute.go new file mode 100644 index 0000000000..af58617587 --- /dev/null +++ b/pkg/resources/unsafe_execute.go @@ -0,0 +1,156 @@ +package resources + +import ( + "context" + "database/sql" + "fmt" + "log" + + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/go-uuid" + "github.com/hashicorp/terraform-plugin-sdk/v2/helper/schema" +) + +var unsafeExecuteSchema = map[string]*schema.Schema{ + "execute": { + Type: schema.TypeString, + Required: true, + ForceNew: true, + Description: "SQL statement to execute. Forces recreation of resource on change.", + }, + "revert": { + Type: schema.TypeString, + Required: true, + Description: "SQL statement to revert the execute statement. Invoked when resource is being destroyed.", + }, + "query": { + Type: schema.TypeString, + Optional: true, + Description: "Optional SQL statement to do a read. Invoked after creation and every time it is changed.", + }, + "query_results": { + Type: schema.TypeList, + Computed: true, + Description: "List of key-value maps (text to text) retrieved after executing read query. Will be empty if the query results in an error.", + Elem: &schema.Schema{ + Type: schema.TypeMap, + Elem: &schema.Schema{ + Type: schema.TypeString, + Optional: true, + }, + }, + }, +} + +func UnsafeExecute() *schema.Resource { + return &schema.Resource{ + Create: CreateUnsafeExecute, + Read: ReadUnsafeExecute, + Delete: DeleteUnsafeExecute, + Update: UpdateUnsafeExecute, + + Schema: unsafeExecuteSchema, + + DeprecationMessage: "Experimental resource. Will be deleted in the upcoming versions. Use at your own risk.", + Description: "Experimental resource used for testing purposes only. 
Allows to execute ANY SQL statement.", + } +} + +func ReadUnsafeExecute(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + ctx := context.Background() + client := sdk.NewClientFromDB(db) + + readStatement := d.Get("query").(string) + + setNilResults := func() error { + log.Printf(`[DEBUG] Clearing query_results`) + err := d.Set("query_results", nil) + if err != nil { + return err + } + return nil + } + + if readStatement == "" { + return setNilResults() + } else { + rows, err := client.QueryUnsafe(ctx, readStatement) + if err != nil { + log.Printf(`[WARN] SQL query "%s" failed with err %v`, readStatement, err) + return setNilResults() + } + log.Printf(`[INFO] SQL query "%s" executed successfully, returned rows count: %d`, readStatement, len(rows)) + rowsTransformed := make([]map[string]any, len(rows)) + for i, row := range rows { + t := make(map[string]any) + for k, v := range row { + if *v == nil { + t[k] = nil + } else { + switch (*v).(type) { + case fmt.Stringer: + t[k] = fmt.Sprintf("%v", *v) + case string: + t[k] = *v + default: + return fmt.Errorf("currently only objects convertible to String are supported by query; got %v", *v) + } + } + } + rowsTransformed[i] = t + } + err = d.Set("query_results", rowsTransformed) + if err != nil { + return err + } + } + + return nil +} + +func CreateUnsafeExecute(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + ctx := context.Background() + client := sdk.NewClientFromDB(db) + + id, err := uuid.GenerateUUID() + if err != nil { + return err + } + + executeStatement := d.Get("execute").(string) + _, err = client.ExecUnsafe(ctx, executeStatement) + if err != nil { + return err + } + + d.SetId(id) + log.Printf(`[INFO] SQL "%s" applied successfully\n`, executeStatement) + + return ReadUnsafeExecute(d, meta) +} + +func DeleteUnsafeExecute(d *schema.ResourceData, meta interface{}) error { + db := meta.(*sql.DB) + ctx := context.Background() + client := sdk.NewClientFromDB(db) + + revertStatement := d.Get("revert").(string) + _, err := client.ExecUnsafe(ctx, revertStatement) + if err != nil { + return err + } + + d.SetId("") + log.Printf(`[INFO] SQL "%s" applied successfully\n`, revertStatement) + + return nil +} + +func UpdateUnsafeExecute(d *schema.ResourceData, meta interface{}) error { + if d.HasChange("query") { + return ReadUnsafeExecute(d, meta) + } + return nil +} diff --git a/pkg/resources/unsafe_execute_acceptance_test.go b/pkg/resources/unsafe_execute_acceptance_test.go new file mode 100644 index 0000000000..3a35b04caa --- /dev/null +++ b/pkg/resources/unsafe_execute_acceptance_test.go @@ -0,0 +1,746 @@ +package resources_test + +import ( + "context" + "crypto/rand" + "errors" + "fmt" + "math/big" + "regexp" + "strings" + "testing" + + acc "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/acceptance" + "github.com/Snowflake-Labs/terraform-provider-snowflake/pkg/sdk" + "github.com/hashicorp/terraform-plugin-testing/config" + "github.com/hashicorp/terraform-plugin-testing/helper/resource" + "github.com/hashicorp/terraform-plugin-testing/plancheck" + "github.com/hashicorp/terraform-plugin-testing/terraform" + "github.com/hashicorp/terraform-plugin-testing/tfversion" + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestAcc_UnsafeExecute_basic(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + idLowerCase := strings.ToLower(generateUnsafeExecuteTestDatabaseName(t)) + idLowerCaseEscaped := fmt.Sprintf(`"%s"`, idLowerCase) + 
createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } + dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } + + resourceName := "snowflake_unsafe_execute.test" + createConfigVariables := func(id string) map[string]config.Variable { + return map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckDatabaseExistence(t, id, false), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(id), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "revert", dropDatabaseStatement(id)), + resource.TestCheckNoResourceAttr(resourceName, "query"), + resource.TestCheckNoResourceAttr(resourceName, "query_results.#"), + resource.TestCheckResourceAttrSet(resourceName, "id"), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + }, + }) + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckDatabaseExistence(t, idLowerCase, false), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(idLowerCaseEscaped), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(idLowerCaseEscaped)), + resource.TestCheckResourceAttr(resourceName, "revert", dropDatabaseStatement(idLowerCaseEscaped)), + resource.TestCheckNoResourceAttr(resourceName, "query"), + resource.TestCheckNoResourceAttr(resourceName, "query_results.#"), + resource.TestCheckResourceAttrSet(resourceName, "id"), + testAccCheckDatabaseExistence(t, idLowerCase, true), + ), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_withRead(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } + dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } + showDatabaseStatement := func(id string) string { return fmt.Sprintf("show databases like '%%%s%%'", id) } + + resourceName := "snowflake_unsafe_execute.test" + createConfigVariables := func(id string) map[string]config.Variable { + return map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + "query": config.StringVariable(showDatabaseStatement(id)), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: 
acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckDatabaseExistence(t, id, false), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigVariables: createConfigVariables(id), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "revert", dropDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "query", showDatabaseStatement(id)), + resource.TestCheckResourceAttrSet(resourceName, "id"), + testAccCheckDatabaseExistence(t, id, true), + resource.TestCheckResourceAttrSet(resourceName, "query_results.#"), + resource.TestCheckResourceAttr(resourceName, "query_results.0.name", id), + resource.TestCheckResourceAttrSet(resourceName, "query_results.0.created_on"), + resource.TestCheckResourceAttr(resourceName, "query_results.0.budget", ""), + resource.TestCheckResourceAttr(resourceName, "query_results.0.comment", ""), + ), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_readRemoved(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } + dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } + showDatabaseStatement := func(id string) string { return fmt.Sprintf("show databases like '%%%s%%'", id) } + resourceName := "snowflake_unsafe_execute.test" + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckDatabaseExistence(t, id, false), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + "query": config.StringVariable(showDatabaseStatement(id)), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "query", showDatabaseStatement(id)), + resource.TestCheckResourceAttrSet(resourceName, "query_results.#"), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + "query": config.StringVariable(""), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "query", ""), + resource.TestCheckNoResourceAttr(resourceName, "query_results.#"), + ), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_badQuery(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + 
createDatabaseStatement := func(id string) string { return fmt.Sprintf("create database %s", id) } + dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } + showDatabaseStatement := func(id string) string { return fmt.Sprintf("show databases like '%%%s%%'", id) } + resourceName := "snowflake_unsafe_execute.test" + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckDatabaseExistence(t, id, false), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + "query": config.StringVariable("bad query"), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "revert", dropDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "query", "bad query"), + resource.TestCheckNoResourceAttr(resourceName, "query_results.#"), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_withRead"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + "query": config.StringVariable(showDatabaseStatement(id)), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "query", showDatabaseStatement(id)), + resource.TestCheckResourceAttrSet(resourceName, "query_results.#"), + resource.TestCheckResourceAttr(resourceName, "query_results.0.name", id), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_invalidExecuteStatement(t *testing.T) { + invalidCreateStatement := "create database" + invalidDropStatement := "drop database" + + createConfigVariables := func() map[string]config.Variable { + return map[string]config.Variable{ + "execute": config.StringVariable(invalidCreateStatement), + "revert": config.StringVariable(invalidDropStatement), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + ExpectError: regexp.MustCompile("SQL compilation error"), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_invalidRevertStatement(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + updatedId := generateUnsafeExecuteTestDatabaseName(t) + createDatabaseStatement := 
func(id string) string { return fmt.Sprintf("create database %s", id) } + dropDatabaseStatement := func(id string) string { return fmt.Sprintf("drop database %s", id) } + invalidDropStatement := "drop database" + + resourceName := "snowflake_unsafe_execute.test" + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: func(state *terraform.State) error { + err := testAccCheckDatabaseExistence(t, id, false)(state) + if err != nil { + return err + } + err = testAccCheckDatabaseExistence(t, updatedId, false)(state) + if err != nil { + return err + } + return nil + }, + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(invalidDropStatement), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "revert", invalidDropStatement), + resource.TestCheckResourceAttrSet(resourceName, "id"), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(updatedId)), + "revert": config.StringVariable(invalidDropStatement), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + ExpectError: regexp.MustCompile("SQL compilation error"), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(id)), + "revert": config.StringVariable(dropDatabaseStatement(id)), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(id)), + resource.TestCheckResourceAttr(resourceName, "revert", dropDatabaseStatement(id)), + resource.TestCheckResourceAttrSet(resourceName, "id"), + testAccCheckDatabaseExistence(t, id, true), + testAccCheckDatabaseExistence(t, updatedId, false), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: map[string]config.Variable{ + "execute": config.StringVariable(createDatabaseStatement(updatedId)), + "revert": config.StringVariable(dropDatabaseStatement(updatedId)), + }, + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", createDatabaseStatement(updatedId)), + resource.TestCheckResourceAttr(resourceName, "revert", dropDatabaseStatement(updatedId)), + resource.TestCheckResourceAttrSet(resourceName, "id"), + testAccCheckDatabaseExistence(t, id, false), + testAccCheckDatabaseExistence(t, updatedId, true), + ), + }, + }, + }) 
+} + +func TestAcc_UnsafeExecute_revertUpdated(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + execute := fmt.Sprintf("create database %s", id) + revert := fmt.Sprintf("drop database %s", id) + notMatchingRevert := "select 1" + var savedId string + + resourceName := "snowflake_unsafe_execute.test" + createConfigVariables := func(execute string, revert string) map[string]config.Variable { + return map[string]config.Variable{ + "execute": config.StringVariable(execute), + "revert": config.StringVariable(revert), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: testAccCheckDatabaseExistence(t, id, false), + Steps: []resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(execute, notMatchingRevert), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", execute), + resource.TestCheckResourceAttr(resourceName, "revert", notMatchingRevert), + resource.TestCheckResourceAttrSet(resourceName, "id"), + resource.TestCheckResourceAttrWith(resourceName, "id", func(value string) error { + savedId = value + return nil + }), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(execute, revert), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", execute), + resource.TestCheckResourceAttr(resourceName, "revert", revert), + resource.TestCheckResourceAttrSet(resourceName, "id"), + resource.TestCheckResourceAttrWith(resourceName, "id", func(value string) error { + if savedId != value { + return errors.New("different id after revert update") + } + return nil + }), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_executeUpdated(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + execute := fmt.Sprintf("create database %s", id) + revert := fmt.Sprintf("drop database %s", id) + + newId := fmt.Sprintf("%s_2", id) + newExecute := fmt.Sprintf("create database %s", newId) + newRevert := fmt.Sprintf("drop database %s", newId) + + var savedId string + + resourceName := "snowflake_unsafe_execute.test" + createConfigVariables := func(execute string, revert string) map[string]config.Variable { + return map[string]config.Variable{ + "execute": config.StringVariable(execute), + "revert": config.StringVariable(revert), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: func(state *terraform.State) error { + err := testAccCheckDatabaseExistence(t, id, false)(state) + if err != nil { + return err + } + err = testAccCheckDatabaseExistence(t, newId, false)(state) + if err != nil { + return err + } + return nil + }, + Steps: 
[]resource.TestStep{ + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(execute, revert), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", execute), + resource.TestCheckResourceAttr(resourceName, "revert", revert), + resource.TestCheckResourceAttrSet(resourceName, "id"), + resource.TestCheckResourceAttrWith(resourceName, "id", func(value string) error { + savedId = value + return nil + }), + testAccCheckDatabaseExistence(t, id, true), + ), + }, + { + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(newExecute, newRevert), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", newExecute), + resource.TestCheckResourceAttr(resourceName, "revert", newRevert), + resource.TestCheckResourceAttrSet(resourceName, "id"), + resource.TestCheckResourceAttrWith(resourceName, "id", func(value string) error { + if savedId == value { + return errors.New("same id after execute update") + } + return nil + }), + testAccCheckDatabaseExistence(t, id, false), + testAccCheckDatabaseExistence(t, newId, true), + ), + }, + }, + }) +} + +func TestAcc_UnsafeExecute_grants(t *testing.T) { + id := generateUnsafeExecuteTestDatabaseName(t) + roleId := generateUnsafeExecuteTestRoleName(t) + privilege := sdk.AccountObjectPrivilegeCreateSchema + execute := fmt.Sprintf("GRANT %s ON DATABASE %s TO ROLE %s", privilege, id, roleId) + revert := fmt.Sprintf("REVOKE %s ON DATABASE %s FROM ROLE %s", privilege, id, roleId) + + resourceName := "snowflake_unsafe_execute.test" + createConfigVariables := func(execute string, revert string) map[string]config.Variable { + return map[string]config.Variable{ + "execute": config.StringVariable(execute), + "revert": config.StringVariable(revert), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: func(state *terraform.State) error { + err := verifyGrantExists(t, roleId, privilege, false)(state) + dropResourcesForUnsafeExecuteTestCaseForGrants(t, id, roleId) + return err + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { createResourcesForExecuteUnsafeTestCaseForGrants(t, id, roleId) }, + ConfigDirectory: acc.ConfigurationDirectory("TestAcc_UnsafeExecute_commonSetup"), + ConfigVariables: createConfigVariables(execute, revert), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + resource.TestCheckResourceAttr(resourceName, "execute", execute), + resource.TestCheckResourceAttr(resourceName, "revert", revert), + resource.TestCheckResourceAttrSet(resourceName, "id"), + verifyGrantExists(t, roleId, privilege, true), + ), + }, + }, + }) +} + +// TestAcc_UnsafeExecute_grantsComplex test fails with: +// +// testing_new_config.go:156: unexpected index type (string) for "snowflake_unsafe_execute.test[\"0\"]", for_each is not supported +// testing_new.go:68: unexpected index 
type (string) for "snowflake_unsafe_execute.test[\"0\"]", for_each is not supported +// +// Quick search unveiled this issue: https://github.com/hashicorp/terraform-plugin-sdk/issues/536. +// +// It also seems that it is working correctly underneath; with TF_LOG set to DEBUG we have: +// +// 2023/11/26 17:16:03 [DEBUG] SQL "GRANT CREATE SCHEMA,MODIFY ON DATABASE UNSAFE_EXECUTE_TEST_DATABASE_4397 TO ROLE UNSAFE_EXECUTE_TEST_ROLE_1145" applied successfully +// 2023/11/26 17:16:03 [DEBUG] SQL "GRANT MODIFY,USAGE ON DATABASE UNSAFE_EXECUTE_TEST_DATABASE_3740 TO ROLE UNSAFE_EXECUTE_TEST_ROLE_3008" applied successfully +func TestAcc_UnsafeExecute_grantsComplex(t *testing.T) { + t.Skip("Skipping TestAcc_UnsafeExecute_grantsComplex because of https://github.com/hashicorp/terraform-plugin-sdk/issues/536 issue") + + dbId1 := generateUnsafeExecuteTestDatabaseName(t) + dbId2 := generateUnsafeExecuteTestDatabaseName(t) + roleId1 := generateUnsafeExecuteTestRoleName(t) + roleId2 := generateUnsafeExecuteTestRoleName(t) + privilege1 := sdk.AccountObjectPrivilegeCreateSchema + privilege2 := sdk.AccountObjectPrivilegeModify + privilege3 := sdk.AccountObjectPrivilegeUsage + + // resourceName1 := "snowflake_unsafe_execute.test.0" + // resourceName2 := "snowflake_unsafe_execute.test.1" + createConfigVariables := func() map[string]config.Variable { + return map[string]config.Variable{ + "database_grants": config.ListVariable(config.ObjectVariable(map[string]config.Variable{ + "database_name": config.StringVariable(dbId1), + "role_id": config.StringVariable(roleId1), + "privileges": config.ListVariable(config.StringVariable(privilege1.String()), config.StringVariable(privilege2.String())), + }), config.ObjectVariable(map[string]config.Variable{ + "database_name": config.StringVariable(dbId2), + "role_id": config.StringVariable(roleId2), + "privileges": config.ListVariable(config.StringVariable(privilege2.String()), config.StringVariable(privilege3.String())), + })), + } + } + + resource.Test(t, resource.TestCase{ + ProtoV6ProviderFactories: acc.TestAccProtoV6ProviderFactories, + PreCheck: func() { acc.TestAccPreCheck(t) }, + TerraformVersionChecks: []tfversion.TerraformVersionCheck{ + tfversion.RequireAbove(tfversion.Version1_5_0), + }, + CheckDestroy: func(state *terraform.State) error { + err := verifyGrantExists(t, roleId1, privilege1, false)(state) + if err != nil { + return err + } + err = verifyGrantExists(t, roleId1, privilege2, false)(state) + if err != nil { + return err + } + err = verifyGrantExists(t, roleId1, privilege3, false)(state) + if err != nil { + return err + } + err = verifyGrantExists(t, roleId2, privilege1, false)(state) + if err != nil { + return err + } + err = verifyGrantExists(t, roleId2, privilege2, false)(state) + if err != nil { + return err + } + err = verifyGrantExists(t, roleId2, privilege3, false)(state) + if err != nil { + return err + } + dropResourcesForUnsafeExecuteTestCaseForGrants(t, dbId1, roleId1) + dropResourcesForUnsafeExecuteTestCaseForGrants(t, dbId2, roleId2) + return err + }, + Steps: []resource.TestStep{ + { + PreConfig: func() { + createResourcesForExecuteUnsafeTestCaseForGrants(t, dbId1, roleId1) + createResourcesForExecuteUnsafeTestCaseForGrants(t, dbId2, roleId2) + }, + ConfigDirectory: config.TestNameDirectory(), + ConfigVariables: createConfigVariables(), + ConfigPlanChecks: resource.ConfigPlanChecks{ + PreApply: []plancheck.PlanCheck{plancheck.ExpectNonEmptyPlan()}, + }, + Check: resource.ComposeTestCheckFunc( + // 
resource.TestCheckResourceAttrSet(resourceName1, "id"), + // resource.TestCheckResourceAttrSet(resourceName2, "id"), + verifyGrantExists(t, roleId1, privilege1, true), + verifyGrantExists(t, roleId1, privilege2, true), + verifyGrantExists(t, roleId1, privilege3, false), + verifyGrantExists(t, roleId2, privilege1, false), + verifyGrantExists(t, roleId2, privilege2, true), + verifyGrantExists(t, roleId2, privilege3, true), + ), + }, + }, + }) +} + +// generateUnsafeExecuteTestDatabaseName returns capitalized name on purpose. +// Using small caps without escaping creates problem with later using sdk client which uses identifier that is escaped by default. +func generateUnsafeExecuteTestDatabaseName(t *testing.T) string { + t.Helper() + id, err := rand.Int(rand.Reader, big.NewInt(10000)) + if err != nil { + t.Fatalf("Failed to generate database id: %v", err) + } + return fmt.Sprintf("UNSAFE_EXECUTE_TEST_DATABASE_%d", id) +} + +// generateUnsafeExecuteTestRoleName returns capitalized name on purpose. +// Using small caps without escaping creates problem with later using sdk client which uses identifier that is escaped by default. +func generateUnsafeExecuteTestRoleName(t *testing.T) string { + t.Helper() + id, err := rand.Int(rand.Reader, big.NewInt(10000)) + if err != nil { + t.Fatalf("Failed to generate role id: %v", err) + } + return fmt.Sprintf("UNSAFE_EXECUTE_TEST_ROLE_%d", id) +} + +func testAccCheckDatabaseExistence(t *testing.T, id string, shouldExist bool) func(state *terraform.State) error { + t.Helper() + return func(state *terraform.State) error { + client, err := sdk.NewDefaultClient() + require.NoError(t, err) + ctx := context.Background() + + _, err = client.Databases.ShowByID(ctx, sdk.NewAccountObjectIdentifier(id)) + if shouldExist { + if err != nil { + return fmt.Errorf("error while retrieving database %s, err = %w", id, err) + } + } else { + if err == nil { + return fmt.Errorf("database %v still exists", id) + } + } + return nil + } +} + +func createResourcesForExecuteUnsafeTestCaseForGrants(t *testing.T, dbId string, roleId string) { + t.Helper() + + client, err := sdk.NewDefaultClient() + require.NoError(t, err) + ctx := context.Background() + + err = client.Databases.Create(ctx, sdk.NewAccountObjectIdentifier(dbId), &sdk.CreateDatabaseOptions{}) + require.NoError(t, err) + + err = client.Roles.Create(ctx, sdk.NewCreateRoleRequest(sdk.NewAccountObjectIdentifier(roleId))) + require.NoError(t, err) +} + +func dropResourcesForUnsafeExecuteTestCaseForGrants(t *testing.T, dbId string, roleId string) { + t.Helper() + + client, err := sdk.NewDefaultClient() + require.NoError(t, err) + ctx := context.Background() + + err = client.Databases.Drop(ctx, sdk.NewAccountObjectIdentifier(dbId), &sdk.DropDatabaseOptions{}) + assert.NoError(t, err) + + err = client.Roles.Drop(ctx, sdk.NewDropRoleRequest(sdk.NewAccountObjectIdentifier(roleId))) + assert.NoError(t, err) +} + +func verifyGrantExists(t *testing.T, roleId string, privilege sdk.AccountObjectPrivilege, shouldExist bool) func(state *terraform.State) error { + t.Helper() + return func(state *terraform.State) error { + client, err := sdk.NewDefaultClient() + require.NoError(t, err) + ctx := context.Background() + + grants, err := client.Grants.Show(ctx, &sdk.ShowGrantOptions{ + To: &sdk.ShowGrantsTo{ + Role: sdk.NewAccountObjectIdentifier(roleId), + }, + }) + require.NoError(t, err) + + if shouldExist { + require.Equal(t, 1, len(grants)) + assert.Equal(t, privilege.String(), grants[0].Privilege) + assert.Equal(t, 
sdk.ObjectTypeDatabase, grants[0].GrantedOn) + assert.Equal(t, sdk.ObjectTypeRole, grants[0].GrantedTo) + assert.Equal(t, sdk.NewAccountObjectIdentifier(roleId).FullyQualifiedName(), grants[0].GranteeName.FullyQualifiedName()) + } else { + require.Equal(t, 0, len(grants)) + } + + // it does not matter what we return, because we have assertions above + return nil + } +} diff --git a/pkg/sdk/client_extensions_unsafe.go b/pkg/sdk/client_extensions_unsafe.go new file mode 100644 index 0000000000..fe0afd7302 --- /dev/null +++ b/pkg/sdk/client_extensions_unsafe.go @@ -0,0 +1,65 @@ +package sdk + +import ( + "context" + "database/sql" +) + +func (c *Client) ExecUnsafe(ctx context.Context, sql string) (sql.Result, error) { + return c.exec(ctx, sql) +} + +// QueryUnsafe for now only supports single query. For more queries we will have to adjust the behavior. From the gosnowflake driver docs: +// +// (...) while using the multi-statement feature, pass a Context that specifies the number of statements in the string. +// When multiple queries are executed by a single call to QueryContext(), multiple result sets are returned. After you process the first result set, get the next result set (for the next SQL statement) by calling NextResultSet(). +// +// Therefore, only single resultSet is processed. +func (c *Client) QueryUnsafe(ctx context.Context, sql string) ([]map[string]*any, error) { + rows, err := c.db.QueryContext(ctx, sql) + if err != nil { + return nil, err + } + allRows, err := unsafeExecuteProcessRows(rows) + if err != nil { + return nil, err + } + return allRows, nil +} + +func unsafeExecuteProcessRows(rows *sql.Rows) ([]map[string]*any, error) { + defer rows.Close() + + columnNames, err := rows.Columns() + if err != nil { + return nil, err + } + processedRows := make([]map[string]*any, 0) + for rows.Next() { + row, err := unsafeExecuteProcessRow(rows, columnNames) + if err != nil { + return nil, err + } + processedRows = append(processedRows, row) + } + + return processedRows, nil +} + +func unsafeExecuteProcessRow(rows *sql.Rows, columnNames []string) (map[string]*any, error) { + values := make([]any, len(columnNames)) + for i := range values { + values[i] = new(any) + } + + err := rows.Scan(values...) 
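+ // Scan fills the *any placeholders allocated above; each value is mapped back to its column name below.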
+ if err != nil { + return nil, err + } + + row := make(map[string]*any) + for i, col := range columnNames { + row[col] = values[i].(*any) + } + return row, nil +} diff --git a/pkg/sdk/testint/client_unsafe_extensions_integration_test.go b/pkg/sdk/testint/client_unsafe_extensions_integration_test.go new file mode 100644 index 0000000000..9070a1315d --- /dev/null +++ b/pkg/sdk/testint/client_unsafe_extensions_integration_test.go @@ -0,0 +1,54 @@ +package testint + +import ( + "fmt" + "testing" + + "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" +) + +func TestInt_Client_UnsafeQuery(t *testing.T) { + client := testClient(t) + ctx := testContext(t) + + t.Run("test show databases", func(t *testing.T) { + sql := fmt.Sprintf("SHOW DATABASES LIKE '%%%s%%'", testDb(t).Name) + results, err := client.QueryUnsafe(ctx, sql) + require.NoError(t, err) + + assert.Len(t, results, 1) + row := results[0] + assert.Equal(t, testDb(t).Name, *row["name"]) + assert.NotEmpty(t, *row["created_on"]) + assert.Equal(t, "STANDARD", *row["kind"]) + assert.Equal(t, "ACCOUNTADMIN", *row["owner"]) + assert.Equal(t, "", *row["options"]) + assert.Equal(t, "", *row["comment"]) + assert.Equal(t, "N", *row["is_default"]) + assert.Nil(t, *row["budget"]) + }) + + t.Run("test more results", func(t *testing.T) { + db1, db1Cleanup := createDatabase(t, client) + t.Cleanup(db1Cleanup) + db2, db2Cleanup := createDatabase(t, client) + t.Cleanup(db2Cleanup) + db3, db3Cleanup := createDatabase(t, client) + t.Cleanup(db3Cleanup) + + sql := "SHOW DATABASES" + results, err := client.QueryUnsafe(ctx, sql) + require.NoError(t, err) + + require.GreaterOrEqual(t, len(results), 4) + names := make([]any, len(results)) + for i, r := range results { + names[i] = *r["name"] + } + assert.Contains(t, names, testDb(t).Name) + assert.Contains(t, names, db1.Name) + assert.Contains(t, names, db2.Name) + assert.Contains(t, names, db3.Name) + }) +} diff --git a/templates/resources/unsafe_execute.md.tmpl b/templates/resources/unsafe_execute.md.tmpl new file mode 100644 index 0000000000..ab72c4bb07 --- /dev/null +++ b/templates/resources/unsafe_execute.md.tmpl @@ -0,0 +1,33 @@ +--- +# generated by https://github.com/hashicorp/terraform-plugin-docs +page_title: "{{.Name}} {{.Type}} - {{.ProviderName}}" +subcategory: "" +description: |- +{{ .Description | plainmarkdown | trimspace | prefixlines " " }} +--- + +# {{.Name}} ({{.Type}}) + +!> **Warning** This is a dangerous resource that allows executing **ANY** SQL statement. It may destroy resources if used incorrectly. It may behave incorrectly combined with other resources. Will be deleted in the upcoming versions. Use at your own risk. + +~> **Note** It can be theoretically used to manage resource that are not supported by the provider. This is risky and may brake other resources if used incorrectly. + +~> **Note** Use `query` parameter with caution. It will fetch **ALL** the results returned by the query provided. Try to limit the number of results by writing query with filters. Query failure does not stop resource creation; it simply results in `query_results` being empty. 
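+
+For illustration only, a minimal configuration that uses `query` could look like the following (the `EXAMPLE_DB` name is a placeholder; it mirrors the statements used in the acceptance tests):
+
+```terraform
+resource "snowflake_unsafe_execute" "example" {
+  execute = "CREATE DATABASE EXAMPLE_DB"
+  revert  = "DROP DATABASE EXAMPLE_DB"
+  query   = "SHOW DATABASES LIKE '%EXAMPLE_DB%'"
+}
+```
+
+Rows returned by `query` are exposed in the computed `query_results` attribute.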
+ +{{ .Description | trimspace }} + +{{ if .HasExample -}} +## Example Usage + +{{ tffile (printf "examples/resources/%s/resource.tf" .Name)}} +{{- end }} + +{{ .SchemaMarkdown | trimspace }} +{{- if .HasImport }} + +## Import + +Import is supported using the following syntax: + +{{ printf "{{codefile \"shell\" %q}}" .ImportFile }} +{{- end }} From 8fbc5cf3005f33c7cfb27815d6e4a1909ffbac80 Mon Sep 17 00:00:00 2001 From: "snowflake-release-please[bot]" <105954990+snowflake-release-please[bot]@users.noreply.github.com> Date: Thu, 30 Nov 2023 14:25:25 +0100 Subject: [PATCH 3/5] chore(main): release 0.77.0 (#2204) Co-authored-by: snowflake-release-please[bot] <105954990+snowflake-release-please[bot]@users.noreply.github.com> --- CHANGELOG.md | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 34bce3c4e1..d73efa754e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,20 @@ # Changelog +## [0.77.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.76.0...v0.77.0) (2023-11-30) + + +### 🎉 **What's new:** + +* Add unsafe_execute resource ([#2225](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2225)) ([196134c](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/196134cbf91996eabc50bdc586a657fe7ac71900)) +* Introduce simple arch tests ([#2210](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2210)) ([c60db80](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/c60db80f44d949258f0a692baafdc22b886c3010)) + + +### 🐛 **Bug fixes:** + +* cleanup workflows and makefile ([#2150](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2150)) ([64335e7](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/64335e72e480393437dff9f88122a256a2ac0814)) +* documentation for role ownership grant ([#2203](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2203)) ([e3d405c](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/e3d405c91b494413d432e1aef9ff1da1f9ede4a7)) +* Fix workflows ([#2206](https://github.com/Snowflake-Labs/terraform-provider-snowflake/issues/2206)) ([6d7f833](https://github.com/Snowflake-Labs/terraform-provider-snowflake/commit/6d7f8336897dee17c102d69a517e2525c1bb4d91)) + ## [0.76.0](https://github.com/Snowflake-Labs/terraform-provider-snowflake/compare/v0.75.0...v0.76.0) (2023-11-15) From 0b388bf79346cd4ddedaac99d4651390f1f93358 Mon Sep 17 00:00:00 2001 From: sonya huang Date: Fri, 1 Dec 2023 06:33:44 -0500 Subject: [PATCH 4/5] fix: snowpipe error integration (#2227) --- pkg/resources/pipe.go | 9 +++++---- pkg/sdk/pipes.go | 1 + pkg/sdk/pipes_test.go | 5 +++-- pkg/sdk/pipes_validations.go | 4 ++-- 4 files changed, 11 insertions(+), 8 deletions(-) diff --git a/pkg/resources/pipe.go b/pkg/resources/pipe.go index d47ea05736..1676d31d56 100644 --- a/pkg/resources/pipe.go +++ b/pkg/resources/pipe.go @@ -200,8 +200,9 @@ func ReadPipe(d *schema.ResourceData, meta interface{}) error { } if strings.Contains(pipe.NotificationChannel, "arn:aws:sns:") { - err = d.Set("aws_sns_topic_arn", pipe.NotificationChannel) - return err + if err := d.Set("aws_sns_topic_arn", pipe.NotificationChannel); err != nil { + return err + } } if err := d.Set("error_integration", pipe.ErrorIntegration); err != nil { @@ -236,10 +237,10 @@ func UpdatePipe(d *schema.ResourceData, meta interface{}) error { if d.HasChange("error_integration") { if errorIntegration, ok := d.GetOk("error_integration"); ok { 
runSetStatement = true - pipeSet.Comment = sdk.String(errorIntegration.(string)) + pipeSet.ErrorIntegration = sdk.String(errorIntegration.(string)) } else { runUnsetStatement = true - pipeUnset.Comment = sdk.Bool(true) + pipeUnset.ErrorIntegration = sdk.Bool(true) } } diff --git a/pkg/sdk/pipes.go b/pkg/sdk/pipes.go index 928d01992a..6306b3f3db 100644 --- a/pkg/sdk/pipes.go +++ b/pkg/sdk/pipes.go @@ -56,6 +56,7 @@ type PipeSet struct { } type PipeUnset struct { + ErrorIntegration *bool `ddl:"keyword" sql:"ERROR_INTEGRATION"` PipeExecutionPaused *bool `ddl:"keyword" sql:"PIPE_EXECUTION_PAUSED"` Comment *bool `ddl:"keyword" sql:"COMMENT"` } diff --git a/pkg/sdk/pipes_test.go b/pkg/sdk/pipes_test.go index f6ccd7fdeb..489f2b8023 100644 --- a/pkg/sdk/pipes_test.go +++ b/pkg/sdk/pipes_test.go @@ -93,7 +93,7 @@ func TestPipesAlter(t *testing.T) { t.Run("validation: no property to unset", func(t *testing.T) { opts := defaultOpts() opts.Unset = &PipeUnset{} - assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterPipeOptions.Unset", "PipeExecutionPaused", "Comment")) + assertOptsInvalidJoinedErrors(t, opts, errAtLeastOneOf("AlterPipeOptions.Unset", "ErrorIntegration", "PipeExecutionPaused", "Comment")) }) t.Run("set tag: single", func(t *testing.T) { @@ -154,10 +154,11 @@ func TestPipesAlter(t *testing.T) { opts := defaultOpts() opts.IfExists = Bool(true) opts.Unset = &PipeUnset{ + ErrorIntegration: Bool(true), PipeExecutionPaused: Bool(true), Comment: Bool(true), } - assertOptsValidAndSQLEquals(t, opts, `ALTER PIPE IF EXISTS %s UNSET PIPE_EXECUTION_PAUSED, COMMENT`, id.FullyQualifiedName()) + assertOptsValidAndSQLEquals(t, opts, `ALTER PIPE IF EXISTS %s UNSET ERROR_INTEGRATION, PIPE_EXECUTION_PAUSED, COMMENT`, id.FullyQualifiedName()) }) t.Run("refresh", func(t *testing.T) { diff --git a/pkg/sdk/pipes_validations.go b/pkg/sdk/pipes_validations.go index f87272f466..fe40944e94 100644 --- a/pkg/sdk/pipes_validations.go +++ b/pkg/sdk/pipes_validations.go @@ -49,8 +49,8 @@ func (opts *AlterPipeOptions) validate() error { } } if unset := opts.Unset; valueSet(unset) { - if !anyValueSet(unset.PipeExecutionPaused, unset.Comment) { - errs = append(errs, errAtLeastOneOf("AlterPipeOptions.Unset", "PipeExecutionPaused", "Comment")) + if !anyValueSet(unset.ErrorIntegration, unset.PipeExecutionPaused, unset.Comment) { + errs = append(errs, errAtLeastOneOf("AlterPipeOptions.Unset", "ErrorIntegration", "PipeExecutionPaused", "Comment")) } } return errors.Join(errs...) From 1fcd3935f3cd3357ffb4cc3052b85f886c53cc95 Mon Sep 17 00:00:00 2001 From: Artur Sawicki Date: Fri, 1 Dec 2023 12:34:30 +0100 Subject: [PATCH 5/5] Disable static check temporarily (#2229) --- .github/workflows/reviewdog-staticcheck.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/reviewdog-staticcheck.yml b/.github/workflows/reviewdog-staticcheck.yml index d8878242d2..1bdaf022df 100644 --- a/.github/workflows/reviewdog-staticcheck.yml +++ b/.github/workflows/reviewdog-staticcheck.yml @@ -2,6 +2,8 @@ name: staticcheck on: [pull_request] jobs: reviewdog: + # disable the job temporarily (until we configure linters appropriately) + if: false name: reviewdog runs-on: ubuntu-latest steps: