diff --git a/esti/lakectl_local_test.go b/esti/lakectl_local_test.go
index 21a8861d48b..6a9bc18b5c4 100644
--- a/esti/lakectl_local_test.go
+++ b/esti/lakectl_local_test.go
@@ -9,6 +9,7 @@ import (
 	"path/filepath"
 	"strings"
 	"testing"
+	"time"
 
 	"github.com/stretchr/testify/require"
 	"github.com/treeverse/lakefs/pkg/block"
@@ -486,6 +487,80 @@ func TestLakectlLocal_pull(t *testing.T) {
 	}
 }
 
+func TestLakectlLocal_commitProtectedBranch(t *testing.T) {
+	repoName := generateUniqueRepositoryName()
+	storage := generateUniqueStorageNamespace(repoName)
+	tmpDir := t.TempDir()
+	fd, err := os.CreateTemp(tmpDir, "")
+	require.NoError(t, err)
+	require.NoError(t, fd.Close())
+	dataDir, err := os.MkdirTemp(tmpDir, "")
+	require.NoError(t, err)
+	file := "test.txt"
+
+	vars := map[string]string{
+		"REPO":      repoName,
+		"STORAGE":   storage,
+		"BRANCH":    mainBranch,
+		"REF":       mainBranch,
+		"LOCAL_DIR": dataDir,
+		"FILE":      file,
+	}
+	runCmd(t, Lakectl()+" repo create lakefs://"+vars["REPO"]+" "+vars["STORAGE"], false, false, vars)
+	runCmd(t, Lakectl()+" branch-protect add lakefs://"+vars["REPO"]+"/ '*'", false, false, vars)
+	// BranchUpdateMaxInterval - sleep in order to overcome branch update caching
+	time.Sleep(branchProtectTimeout)
+	// Cloning local dir
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local clone lakefs://"+vars["REPO"]+"/"+vars["BRANCH"]+"/ "+vars["LOCAL_DIR"], false, "Successfully cloned lakefs://${REPO}/${REF}/ to ${LOCAL_DIR}.", vars)
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local status "+vars["LOCAL_DIR"], false, "No diff found", vars)
+	// Adding file to local dir
+	fd, err = os.Create(filepath.Join(vars["LOCAL_DIR"], vars["FILE"]))
+	require.NoError(t, err)
+	require.NoError(t, fd.Close())
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local status "+vars["LOCAL_DIR"], false, "local ║ added ║ test.txt", vars)
+	// Try to commit local dir, expect failure
+	RunCmdAndVerifyFailureContainsText(t, Lakectl()+" local commit -m test "+vars["LOCAL_DIR"], false, "cannot write to protected branch", vars)
+}
+
+func TestLakectlLocal_RmCommitProtectedBranch(t *testing.T) {
+	repoName := generateUniqueRepositoryName()
+	storage := generateUniqueStorageNamespace(repoName)
+	tmpDir := t.TempDir()
+	fd, err := os.CreateTemp(tmpDir, "")
+	require.NoError(t, err)
+	require.NoError(t, fd.Close())
+	dataDir, err := os.MkdirTemp(tmpDir, "")
+	require.NoError(t, err)
+	file := "ro_1k.0"
+
+	vars := map[string]string{
+		"REPO":      repoName,
+		"STORAGE":   storage,
+		"BRANCH":    mainBranch,
+		"REF":       mainBranch,
+		"LOCAL_DIR": dataDir,
+		"FILE_PATH": file,
+	}
+	runCmd(t, Lakectl()+" repo create lakefs://"+vars["REPO"]+" "+vars["STORAGE"], false, false, vars)
+
+	// Cloning local dir
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local clone lakefs://"+vars["REPO"]+"/"+vars["BRANCH"]+"/ "+vars["LOCAL_DIR"], false, "Successfully cloned lakefs://${REPO}/${REF}/ to ${LOCAL_DIR}.", vars)
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local status "+vars["LOCAL_DIR"], false, "No diff found", vars)
+
+	// locally add a file and commit
+	fd, err = os.Create(filepath.Join(vars["LOCAL_DIR"], vars["FILE_PATH"]))
+	require.NoError(t, err)
+	require.NoError(t, fd.Close())
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local commit "+vars["LOCAL_DIR"]+" -m test", false, "Commit for branch \""+vars["BRANCH"]+"\" completed.", vars)
+	runCmd(t, Lakectl()+" branch-protect add lakefs://"+vars["REPO"]+"/ '*'", false, false, vars)
+	// BranchUpdateMaxInterval - sleep in order to overcome branch update caching
+	time.Sleep(branchProtectTimeout)
+	// Try to delete a file from the local dir and then commit
+	require.NoError(t, os.Remove(filepath.Join(vars["LOCAL_DIR"], vars["FILE_PATH"])))
+	RunCmdAndVerifyContainsText(t, Lakectl()+" local status "+vars["LOCAL_DIR"], false, "local ║ removed ║ "+vars["FILE_PATH"], vars)
+	RunCmdAndVerifyFailureContainsText(t, Lakectl()+" local commit -m test "+vars["LOCAL_DIR"], false, "cannot write to protected branch", vars)
+}
+
 func TestLakectlLocal_commit(t *testing.T) {
 	tmpDir := t.TempDir()
 	fd, err := os.CreateTemp(tmpDir, "")
diff --git a/esti/lakectl_test.go b/esti/lakectl_test.go
index 5195bb267bf..d81ab66db7c 100644
--- a/esti/lakectl_test.go
+++ b/esti/lakectl_test.go
@@ -10,10 +10,13 @@ import (
 
 	"github.com/stretchr/testify/require"
 	"github.com/treeverse/lakefs/pkg/api/apigen"
+	"github.com/treeverse/lakefs/pkg/graveler"
 )
 
 var emptyVars = make(map[string]string)
 
+const branchProtectTimeout = graveler.BranchUpdateMaxInterval + time.Second
+
 func TestLakectlHelp(t *testing.T) {
 	RunCmdAndVerifySuccessWithFile(t, Lakectl(), false, "lakectl_help", emptyVars)
 	RunCmdAndVerifySuccessWithFile(t, Lakectl()+" --help", false, "lakectl_help", emptyVars)
@@ -727,6 +730,43 @@ func getStorageConfig(t *testing.T) *apigen.StorageConfig {
 	return storageResp.JSON200
 }
 
+func TestLakectlFsUpload_protectedBranch(t *testing.T) {
+	repoName := generateUniqueRepositoryName()
+	storage := generateUniqueStorageNamespace(repoName)
+	vars := map[string]string{
+		"REPO":    repoName,
+		"STORAGE": storage,
+		"BRANCH":  mainBranch,
+	}
+	RunCmdAndVerifySuccessWithFile(t, Lakectl()+" repo create lakefs://"+vars["REPO"]+" "+vars["STORAGE"], false, "lakectl_repo_create", vars)
+	runCmd(t, Lakectl()+" branch-protect add lakefs://"+vars["REPO"]+"/ '*'", false, false, vars)
+	RunCmdAndVerifyContainsText(t, Lakectl()+" branch-protect list lakefs://"+vars["REPO"]+"/ ", false, "*", vars)
+	// BranchUpdateMaxInterval - sleep in order to overcome branch update caching
+	time.Sleep(branchProtectTimeout)
+	vars["FILE_PATH"] = "ro_1k.0"
+	RunCmdAndVerifyFailure(t, Lakectl()+" fs upload lakefs://"+vars["REPO"]+"/"+vars["BRANCH"]+"/"+vars["FILE_PATH"]+" -s files/ro_1k", false, "cannot write to protected branch\n403 Forbidden\n", vars)
+}
+
+func TestLakectlFsRm_protectedBranch(t *testing.T) {
+	repoName := generateUniqueRepositoryName()
+	storage := generateUniqueStorageNamespace(repoName)
+	vars := map[string]string{
+		"REPO":    repoName,
+		"STORAGE": storage,
+		"BRANCH":  mainBranch,
+	}
+
+	RunCmdAndVerifySuccessWithFile(t, Lakectl()+" repo create lakefs://"+vars["REPO"]+" "+vars["STORAGE"], false, "lakectl_repo_create", vars)
+	vars["FILE_PATH"] = "ro_1k.0"
+	runCmd(t, Lakectl()+" fs upload lakefs://"+vars["REPO"]+"/"+vars["BRANCH"]+"/"+vars["FILE_PATH"]+" -s files/ro_1k", false, false, vars)
+	runCmd(t, Lakectl()+" commit lakefs://"+vars["REPO"]+"/"+vars["BRANCH"]+" --allow-empty-message -m \" \"", false, false, vars)
+	runCmd(t, Lakectl()+" branch-protect add lakefs://"+vars["REPO"]+"/ '*'", false, false, vars)
+	// BranchUpdateMaxInterval - sleep in order to overcome branch update caching
+	time.Sleep(branchProtectTimeout)
+	RunCmdAndVerifyContainsText(t, Lakectl()+" branch-protect list lakefs://"+vars["REPO"]+"/ ", false, "*", vars)
+	RunCmdAndVerifyFailure(t, Lakectl()+" fs rm lakefs://"+vars["REPO"]+"/"+vars["BRANCH"]+"/"+vars["FILE_PATH"], false, "cannot write to protected branch\n403 Forbidden\n", vars)
+}
+
 func TestLakectlFsPresign(t *testing.T) {
 	config := getStorageConfig(t)
 	if !config.PreSignSupport {
diff --git a/pkg/api/helpers/upload.go b/pkg/api/helpers/upload.go
index 27e09027a6f..061c5433f11 100644
--- a/pkg/api/helpers/upload.go
+++ b/pkg/api/helpers/upload.go
@@ -389,7 +389,7 @@ func (u *presignUpload) uploadObject(ctx context.Context) (*apigen.ObjectStats,
 	if linkResp.JSON409 != nil {
 		return nil, ErrConflict
 	}
-	return nil, fmt.Errorf("link object to backing store: %w (%s)", ErrRequestFailed, linkResp.Status())
+	return nil, fmt.Errorf("link object to backing store: %w (%s)", ResponseAsError(linkResp), linkResp.Status())
 }
 
 func (u *presignUpload) Upload(ctx context.Context) (*apigen.ObjectStats, error) {
diff --git a/pkg/local/sync.go b/pkg/local/sync.go
index fa82508334d..e28ec46d5d9 100644
--- a/pkg/local/sync.go
+++ b/pkg/local/sync.go
@@ -388,7 +388,7 @@ func (s *SyncManager) deleteRemote(ctx context.Context, remote *uri.URI, change
 		return
 	}
 	if resp.StatusCode() != http.StatusNoContent {
-		return fmt.Errorf("could not delete object: HTTP %d: %w", resp.StatusCode(), helpers.ErrRequestFailed)
+		return fmt.Errorf("could not delete object: HTTP %d: %w", resp.StatusCode(), helpers.ResponseAsError(resp))
 	}
 	return
 }
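The last two hunks are what the new esti tests depend on: instead of wrapping the static ErrRequestFailed sentinel, the error is now built from the API response, so the server's message ("cannot write to protected branch") reaches the lakectl user rather than a generic request failure. Below is a minimal, self-contained sketch of that pattern, assuming the API returns a JSON error payload with a "message" field; responseAsError, apiError, and errRequestFailed are illustrative stand-ins, not the actual helpers.ResponseAsError implementation.

package main

// Illustrative sketch only - not the lakeFS helpers package.
import (
	"encoding/json"
	"errors"
	"fmt"
	"net/http"
)

// apiError mirrors a typical JSON error payload, e.g. {"message":"cannot write to protected branch"}.
type apiError struct {
	Message string `json:"message"`
}

var errRequestFailed = errors.New("request failed")

// responseAsError prefers the message the server returned over a generic sentinel,
// while still wrapping the sentinel so callers can match it with errors.Is.
func responseAsError(statusCode int, body []byte) error {
	var e apiError
	if err := json.Unmarshal(body, &e); err == nil && e.Message != "" {
		return fmt.Errorf("%w: %s", errRequestFailed, e.Message)
	}
	return fmt.Errorf("%w: %s", errRequestFailed, http.StatusText(statusCode))
}

func main() {
	// Simulate the 403 a protected branch returns for uploads and deletes.
	body := []byte(`{"message":"cannot write to protected branch"}`)
	err := fmt.Errorf("could not delete object: HTTP %d: %w",
		http.StatusForbidden, responseAsError(http.StatusForbidden, body))

	fmt.Println(err)                              // could not delete object: HTTP 403: request failed: cannot write to protected branch
	fmt.Println(errors.Is(err, errRequestFailed)) // true
}

Because the sentinel is still wrapped with %w, errors.Is keeps working for callers that match on it, while the formatted message now carries the branch-protection reason that the fs upload, fs rm, and local commit tests assert on.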