fix: auto_merge not working properly with orchestrator backend #1871

Open
wants to merge 2 commits into develop
31 changes: 16 additions & 15 deletions backend/controllers/github.go
@@ -6,6 +6,18 @@ import (
"encoding/json"
"errors"
"fmt"
"log"
"math/rand"
"net/http"
"net/url"
"os"
"path/filepath"
"reflect"
"runtime/debug"
"slices"
"strconv"
"strings"

"github.com/davecgh/go-spew/spew"
"github.com/diggerhq/digger/backend/ci_backends"
config2 "github.com/diggerhq/digger/backend/config"
@@ -29,17 +41,6 @@ import (
"github.com/samber/lo"
"golang.org/x/oauth2"
"gorm.io/gorm"
"log"
"math/rand"
"net/http"
"net/url"
"os"
"path/filepath"
"reflect"
"runtime/debug"
"slices"
"strconv"
"strings"
)

type IssueCommentHook func(gh utils.GithubClientProvider, payload *github.IssueCommentEvent, ciBackendProvider ci_backends.CiBackendProvider) error
@@ -397,7 +398,7 @@ func handlePullRequestEvent(gh utils.GithubClientProvider, payload *github.PullR
return fmt.Errorf("error processing event")
}

jobsForImpactedProjects, _, err := dg_github.ConvertGithubPullRequestEventToJobs(payload, impactedProjects, nil, *config, false)
jobsForImpactedProjects, coverAllImpactedProjects, err := dg_github.ConvertGithubPullRequestEventToJobs(payload, impactedProjects, nil, *config, false)
if err != nil {
log.Printf("Error converting event to jobsForImpactedProjects: %v", err)
commentReporterManager.UpdateComment(fmt.Sprintf(":x: Error converting event to jobsForImpactedProjects: %v", err))
@@ -522,7 +523,7 @@ func handlePullRequestEvent(gh utils.GithubClientProvider, payload *github.PullR
aiSummaryCommentId = aiSummaryComment.Id
}

batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, models.DiggerVCSGithub, organisationId, impactedJobsMap, impactedProjectsMap, projectsGraph, installationId, branch, prNumber, repoOwner, repoName, repoFullName, commitSha, commentId, diggerYmlStr, 0, aiSummaryCommentId, config.ReportTerraformOutputs)
batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, models.DiggerVCSGithub, organisationId, impactedJobsMap, impactedProjectsMap, projectsGraph, installationId, branch, prNumber, repoOwner, repoName, repoFullName, commitSha, commentId, diggerYmlStr, 0, aiSummaryCommentId, config.ReportTerraformOutputs, coverAllImpactedProjects)
if err != nil {
log.Printf("ConvertJobsToDiggerJobs error: %v", err)
commentReporterManager.UpdateComment(fmt.Sprintf(":x: ConvertJobsToDiggerJobs error: %v", err))
@@ -827,7 +828,7 @@ func handleIssueCommentEvent(gh utils.GithubClientProvider, payload *github.Issu
}
log.Printf("GitHub IssueComment event processed successfully\n")

jobs, _, err := generic.ConvertIssueCommentEventToJobs(repoFullName, actor, issueNumber, commentBody, impactedProjects, requestedProject, config.Workflows, prBranchName, defaultBranch)
jobs, coverAllImpactedProjects, err := generic.ConvertIssueCommentEventToJobs(repoFullName, actor, issueNumber, commentBody, impactedProjects, requestedProject, config.Workflows, prBranchName, defaultBranch)
if err != nil {
log.Printf("Error converting event to jobs: %v", err)
commentReporterManager.UpdateComment(fmt.Sprintf(":x: Error converting event to jobs: %v", err))
@@ -930,7 +931,7 @@ func handleIssueCommentEvent(gh utils.GithubClientProvider, payload *github.Issu
aiSummaryCommentId = aiSummaryComment.Id
}

batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, "github", orgId, impactedProjectsJobMap, impactedProjectsMap, projectsGraph, installationId, *branch, issueNumber, repoOwner, repoName, repoFullName, *commitSha, reporterCommentId, diggerYmlStr, 0, aiSummaryCommentId, config.ReportTerraformOutputs)
batchId, _, err := utils.ConvertJobsToDiggerJobs(*diggerCommand, "github", orgId, impactedProjectsJobMap, impactedProjectsMap, projectsGraph, installationId, *branch, issueNumber, repoOwner, repoName, repoFullName, *commitSha, reporterCommentId, diggerYmlStr, 0, aiSummaryCommentId, config.ReportTerraformOutputs, coverAllImpactedProjects)
if err != nil {
log.Printf("ConvertJobsToDiggerJobs error: %v", err)
commentReporterManager.UpdateComment(fmt.Sprintf(":x: ConvertJobsToDiggerJobs error: %v", err))
11 changes: 6 additions & 5 deletions backend/controllers/github_test.go
@@ -2,12 +2,13 @@ package controllers

import (
"encoding/json"
orchestrator "github.com/diggerhq/digger/libs/scheduler"
"log"
"os"
"strings"
"testing"

orchestrator "github.com/diggerhq/digger/libs/scheduler"

"github.com/diggerhq/digger/backend/models"
"github.com/diggerhq/digger/backend/utils"
configuration "github.com/diggerhq/digger/libs/digger_config"
@@ -731,7 +732,7 @@ func TestJobsTreeWithOneJobsAndTwoProjects(t *testing.T) {
graph, err := configuration.CreateProjectDependencyGraph(projects)
assert.NoError(t, err)

_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 41584295, "", 2, "diggerhq", "parallel_jobs_demo", "diggerhq/parallel_jobs_demo", "", 123, "test", 0, "", false)
_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 41584295, "", 2, "diggerhq", "parallel_jobs_demo", "diggerhq/parallel_jobs_demo", "", 123, "test", 0, "", false, true)
assert.NoError(t, err)
assert.Equal(t, 1, len(result))
parentLinks, err := models.DB.GetDiggerJobParentLinksChildId(&result["dev"].DiggerJobID)
@@ -760,7 +761,7 @@ func TestJobsTreeWithTwoDependantJobs(t *testing.T) {
projectMap["dev"] = project1
projectMap["prod"] = project2

_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 123, "", 2, "", "", "test", "", 123, "test", 0, "", false)
_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 123, "", 2, "", "", "test", "", 123, "test", 0, "", false, true)
assert.NoError(t, err)
assert.Equal(t, 2, len(result))

@@ -793,7 +794,7 @@ func TestJobsTreeWithTwoIndependentJobs(t *testing.T) {
projectMap["dev"] = project1
projectMap["prod"] = project2

_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 123, "", 2, "", "", "test", "", 123, "test", 0, "", false)
_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 123, "", 2, "", "", "test", "", 123, "test", 0, "", false, true)
assert.NoError(t, err)
assert.Equal(t, 2, len(result))
parentLinks, err := models.DB.GetDiggerJobParentLinksChildId(&result["dev"].DiggerJobID)
@@ -838,7 +839,7 @@ func TestJobsTreeWithThreeLevels(t *testing.T) {
projectMap["555"] = project5
projectMap["666"] = project6

_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 123, "", 2, "", "", "test", "", 123, "test", 0, "", false)
_, result, err := utils.ConvertJobsToDiggerJobs("", "github", 1, jobs, projectMap, graph, 123, "", 2, "", "", "test", "", 123, "test", 0, "", false, true)
assert.NoError(t, err)
assert.Equal(t, 6, len(result))
parentLinks, err := models.DB.GetDiggerJobParentLinksChildId(&result["111"].DiggerJobID)
15 changes: 8 additions & 7 deletions backend/controllers/projects.go
@@ -4,6 +4,13 @@ import (
"encoding/json"
"errors"
"fmt"
"log"
"net/http"
"os"
"strconv"
"strings"
"time"

"github.com/diggerhq/digger/backend/middleware"
"github.com/diggerhq/digger/backend/models"
"github.com/diggerhq/digger/backend/services"
@@ -15,12 +22,6 @@
orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler"
"github.com/gin-gonic/gin"
"gorm.io/gorm"
"log"
"net/http"
"os"
"strconv"
"strings"
"time"
)

func ListProjects(c *gin.Context) {
@@ -730,7 +731,7 @@ func AutomergePRforBatchIfEnabled(gh utils.GithubClientProvider, batch *models.D
} else {
automerge = false
}
if batch.Status == orchestrator_scheduler.BatchJobSucceeded && batch.BatchType == orchestrator_scheduler.DiggerCommandApply && automerge == true {
if batch.Status == orchestrator_scheduler.BatchJobSucceeded && batch.BatchType == orchestrator_scheduler.DiggerCommandApply && batch.CoverAllImpactedProjects == true && automerge == true {
prService, err := GetPrServiceFromBatch(batch, gh)
if err != nil {
log.Printf("Error getting github service: %v", err)
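For readers skimming the diff: the hunk above is the core of the fix. Auto-merge now requires that the batch covers every impacted project, so a targeted apply on a subset of projects no longer merges the PR. A rough paraphrase of the updated condition as a standalone predicate — illustrative only, not code from this PR, which keeps the check inline in AutomergePRforBatchIfEnabled:

// Sketch only; names mirror the types used in the diff above.
func shouldAutoMerge(batch *models.DiggerBatch, automerge bool) bool {
	return batch.Status == orchestrator_scheduler.BatchJobSucceeded &&
		batch.BatchType == orchestrator_scheduler.DiggerCommandApply &&
		batch.CoverAllImpactedProjects && // new guard: partial/targeted applies do not auto-merge
		automerge
}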
29 changes: 21 additions & 8 deletions backend/controllers/projects_test.go
@@ -1,15 +1,16 @@
package controllers

import (
"net/http"
"testing"

"github.com/diggerhq/digger/backend/models"
"github.com/diggerhq/digger/backend/utils"
orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler"
"github.com/google/go-github/v61/github"
"github.com/google/uuid"
"github.com/migueleliasweb/go-github-mock/src/mock"
"github.com/stretchr/testify/assert"
"net/http"
"testing"
)

func TestAutomergeWhenBatchIsSuccessfulStatus(t *testing.T) {
@@ -68,11 +69,12 @@ func TestAutomergeWhenBatchIsSuccessfulStatus(t *testing.T) {
" - name: dev\n" +
" dir: dev\n" +
"auto_merge: false",
GithubInstallationId: int64(41584295),
RepoFullName: "diggerhq/github-job-scheduler",
RepoOwner: "diggerhq",
RepoName: "github-job-scheduler",
BatchType: orchestrator_scheduler.DiggerCommandApply,
GithubInstallationId: int64(41584295),
RepoFullName: "diggerhq/github-job-scheduler",
RepoOwner: "diggerhq",
RepoName: "github-job-scheduler",
BatchType: orchestrator_scheduler.DiggerCommandApply,
CoverAllImpactedProjects: true,
}
err := AutomergePRforBatchIfEnabled(gh, &batch)
assert.NoError(t, err)
@@ -94,8 +96,19 @@ func TestAutomergeWhenBatchIsSuccessfulStatus(t *testing.T) {
" dir: dev\n" +
"auto_merge: true"
batch.BatchType = orchestrator_scheduler.DiggerCommandApply
batch.CoverAllImpactedProjects = false
err = AutomergePRforBatchIfEnabled(gh, &batch)
assert.NoError(t, err)
assert.True(t, isMergeCalled)
assert.False(t, isMergeCalled)

batch.DiggerConfig = "" +
"projects:\n" +
" - name: dev\n" +
" dir: dev\n" +
"auto_merge: true"
batch.BatchType = orchestrator_scheduler.DiggerCommandApply
batch.CoverAllImpactedProjects = true
err = AutomergePRforBatchIfEnabled(gh, &batch)
assert.NoError(t, err)
assert.True(t, isMergeCalled)
}
2 changes: 2 additions & 0 deletions backend/migrations/20250115155205.sql
@@ -0,0 +1,2 @@
-- Modify "digger_batches" table
ALTER TABLE "public"."digger_batches" ADD COLUMN "cover_all_impacted_projects" boolean NULL;
3 changes: 2 additions & 1 deletion backend/migrations/atlas.sum
@@ -1,4 +1,4 @@
h1:kHWmqYJMe9ZbdcztvM02SVEs5lyw/RFbKzZmqgbmSIk=
h1:KSPayCJw5aDjgANi5Nb0yhR8pvu1f0fOrSNRQ1U/VJU=
20231227132525.sql h1:43xn7XC0GoJsCnXIMczGXWis9d504FAWi4F1gViTIcw=
20240115170600.sql h1:IW8fF/8vc40+eWqP/xDK+R4K9jHJ9QBSGO6rN9LtfSA=
20240116123649.sql h1:R1JlUIgxxF6Cyob9HdtMqiKmx/BfnsctTl5rvOqssQw=
@@ -35,3 +35,4 @@ h1:kHWmqYJMe9ZbdcztvM02SVEs5lyw/RFbKzZmqgbmSIk=
20241107172343.sql h1:E1j+7R5TZlyCKEpyYmH1mJ2zh+y5hVbtQ/PuEMJR7us=
20241114202249.sql h1:P2DhJK8MLe8gSAAz+Y5KNmsvKVw8KfLQPCncynYXEfM=
20241229112312.sql h1:Fr06uwt7LcQoLh6bjGzKB+uy9i8+uk8m6jfi+OBBbP4=
20250115155205.sql h1:zXoB4jz7dc6OpzNcEdA7Ln1j/FMTyXtwyES1Y2DjvRM=
36 changes: 19 additions & 17 deletions backend/models/scheduler.go
@@ -3,11 +3,12 @@ package models
import (
"encoding/json"
"fmt"
"log"
"time"

orchestrator_scheduler "github.com/diggerhq/digger/libs/scheduler"
"github.com/google/uuid"
"gorm.io/gorm"
"log"
"time"
)

type DiggerJobParentLink struct {
@@ -22,21 +23,22 @@ const DiggerVCSGithub DiggerVCSType = "github"
const DiggerVCSGitlab DiggerVCSType = "gitlab"

type DiggerBatch struct {
ID uuid.UUID `gorm:"primary_key"`
VCS DiggerVCSType
PrNumber int
CommentId *int64
AiSummaryCommentId string
Status orchestrator_scheduler.DiggerBatchStatus
BranchName string
DiggerConfig string
GithubInstallationId int64
GitlabProjectId int
RepoFullName string
RepoOwner string
RepoName string
BatchType orchestrator_scheduler.DiggerCommand
ReportTerraformOutputs bool
ID uuid.UUID `gorm:"primary_key"`
VCS DiggerVCSType
PrNumber int
CommentId *int64
AiSummaryCommentId string
Status orchestrator_scheduler.DiggerBatchStatus
BranchName string
DiggerConfig string
GithubInstallationId int64
GitlabProjectId int
RepoFullName string
RepoOwner string
RepoName string
BatchType orchestrator_scheduler.DiggerCommand
ReportTerraformOutputs bool
CoverAllImpactedProjects bool

💡 Codebase verification

CoverAllImpactedProjects is not included in serialization

The MapToJsonStruct method in DiggerBatch does not include the CoverAllImpactedProjects field in its serialization output, which could lead to data loss. The field needs to be added to the SerializedBatch struct returned by the method.

🔗 Analysis chain

Verify serialization of CoverAllImpactedProjects.

Ensure that the new field CoverAllImpactedProjects in the DiggerBatch struct is correctly handled during serialization, especially in the MapToJsonStruct method, to prevent data inconsistencies.

🏁 Scripts executed

The following scripts were executed for the analysis:

Script:

#!/bin/bash
# Description: Search for serialization of 'CoverAllImpactedProjects' in methods.

# Look for occurrences where DiggerBatch is converted without including 'CoverAllImpactedProjects'.
ast-grep --pattern $'func ($_ *DiggerBatch) MapToJsonStruct() {...}' --json | jq '.captures[] | select(.value | test("CoverAllImpactedProjects")) | .value'

Length of output: 224


Script:

#!/bin/bash
# Find the MapToJsonStruct method implementation and surrounding context
rg -U "func.*MapToJsonStruct.*DiggerBatch" -A 20

# Search for struct definition and JSON tags
rg "CoverAllImpactedProjects.*\`json:" 

# Look for test files that might verify serialization
rg "TestDiggerBatch.*MapToJsonStruct" -l

Length of output: 133


Script:

#!/bin/bash
# Show the content of scheduler.go
cat backend/models/scheduler.go

# Find any JSON-related code in the models directory
rg "json" backend/models/ -A 5 -B 5

# Look for test files in the models directory
fd ".*_test.go" backend/models/

Length of output: 12644

// used for module source grouping comments
SourceDetails []byte
}
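Regarding the review comment above: if MapToJsonStruct really does omit the new field, carrying it through might look roughly like the sketch below. Both the SerializedBatch shape and the method signature are assumptions for illustration — only fields visible in this diff are included — not the actual code in backend/models/scheduler.go:

// Illustrative sketch, not the repository's implementation.
type SerializedBatch struct {
	ID                       string                                   `json:"id"`
	PrNumber                 int                                      `json:"pr_number"`
	Status                   orchestrator_scheduler.DiggerBatchStatus `json:"status"`
	BranchName               string                                   `json:"branch_name"`
	RepoFullName             string                                   `json:"repo_full_name"`
	BatchType                orchestrator_scheduler.DiggerCommand     `json:"batch_type"`
	CoverAllImpactedProjects bool                                     `json:"cover_all_impacted_projects"` // surface the new flag
}

func (b *DiggerBatch) MapToJsonStruct() interface{} {
	return SerializedBatch{
		ID:                       b.ID.String(),
		PrNumber:                 b.PrNumber,
		Status:                   b.Status,
		BranchName:               b.BranchName,
		RepoFullName:             b.RepoFullName,
		BatchType:                b.BatchType,
		CoverAllImpactedProjects: b.CoverAllImpactedProjects, // without this, the value is dropped when the batch is serialized
	}
}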
40 changes: 21 additions & 19 deletions backend/models/storage.go
@@ -4,16 +4,17 @@ import (
"encoding/json"
"errors"
"fmt"
"log"
"net/http"
"time"

"github.com/dchest/uniuri"
configuration "github.com/diggerhq/digger/libs/digger_config"
scheduler "github.com/diggerhq/digger/libs/scheduler"
"github.com/gin-gonic/gin"
"github.com/google/uuid"
"github.com/samber/lo"
"gorm.io/gorm"
"log"
"net/http"
"time"
)

func (db *Database) GetProjectsFromContext(c *gin.Context, orgIdKey string) ([]Project, bool) {
@@ -617,24 +618,25 @@ func (db *Database) GetDiggerBatch(batchId *uuid.UUID) (*DiggerBatch, error) {
return batch, nil
}

func (db *Database) CreateDiggerBatch(vcsType DiggerVCSType, githubInstallationId int64, repoOwner string, repoName string, repoFullname string, PRNumber int, diggerConfig string, branchName string, batchType scheduler.DiggerCommand, commentId *int64, gitlabProjectId int, aiSummaryCommentId string, reportTerraformOutputs bool) (*DiggerBatch, error) {
func (db *Database) CreateDiggerBatch(vcsType DiggerVCSType, githubInstallationId int64, repoOwner string, repoName string, repoFullname string, PRNumber int, diggerConfig string, branchName string, batchType scheduler.DiggerCommand, commentId *int64, gitlabProjectId int, aiSummaryCommentId string, reportTerraformOutputs bool, coverAllImpactedProjects bool) (*DiggerBatch, error) {
uid := uuid.New()
batch := &DiggerBatch{
ID: uid,
VCS: vcsType,
GithubInstallationId: githubInstallationId,
RepoOwner: repoOwner,
RepoName: repoName,
RepoFullName: repoFullname,
PrNumber: PRNumber,
CommentId: commentId,
Status: scheduler.BatchJobCreated,
BranchName: branchName,
DiggerConfig: diggerConfig,
BatchType: batchType,
GitlabProjectId: gitlabProjectId,
AiSummaryCommentId: aiSummaryCommentId,
ReportTerraformOutputs: reportTerraformOutputs,
ID: uid,
VCS: vcsType,
GithubInstallationId: githubInstallationId,
RepoOwner: repoOwner,
RepoName: repoName,
RepoFullName: repoFullname,
PrNumber: PRNumber,
CommentId: commentId,
Status: scheduler.BatchJobCreated,
BranchName: branchName,
DiggerConfig: diggerConfig,
BatchType: batchType,
GitlabProjectId: gitlabProjectId,
AiSummaryCommentId: aiSummaryCommentId,
ReportTerraformOutputs: reportTerraformOutputs,
CoverAllImpactedProjects: coverAllImpactedProjects,
}
result := db.GormDB.Save(batch)
if result.Error != nil {
11 changes: 6 additions & 5 deletions backend/models/storage_test.go
@@ -1,15 +1,16 @@
package models

import (
"log"
"os"
"strings"
"testing"

"github.com/diggerhq/digger/libs/scheduler"
"github.com/stretchr/testify/assert"
"gorm.io/driver/sqlite"
"gorm.io/gorm"
"gorm.io/gorm/logger"
"log"
"os"
"strings"
"testing"
)

func setupSuite(tb testing.TB) (func(tb testing.TB), *Database, *Organisation) {
@@ -151,7 +152,7 @@ func TestGetDiggerJobsForBatchPreloadsSummary(t *testing.T) {
resourcesUpdated := uint(2)
resourcesDeleted := uint(3)

batch, err := DB.CreateDiggerBatch(DiggerVCSGithub, 123, repoOwner, repoName, repoFullName, prNumber, diggerconfig, branchName, batchType, &commentId, 0, "", false)
batch, err := DB.CreateDiggerBatch(DiggerVCSGithub, 123, repoOwner, repoName, repoFullName, prNumber, diggerconfig, branchName, batchType, &commentId, 0, "", false, true)
assert.NoError(t, err)

job, err := DB.CreateDiggerJob(batch.ID, []byte(jobSpec), "workflow_file.yml")