From 4d89a78a1f89165fd917d9c24a2f74158dc8071a Mon Sep 17 00:00:00 2001 From: Finn Arne Gangstad Date: Wed, 27 Sep 2023 14:52:24 +0200 Subject: [PATCH] refactor: split Clone into Clone and MergeAgain Clone is now a NOP if the PR has not changed, and loses its second return value, the MergedAgain flag. MergeAgain must be called explicitly in the only location that cares about this flag, just before planning. This cleans up the code for Clone and re-merging a bit. Also regenerated mocks --- .../mocks/mock_template_writer.go | 2 +- .../mocks/mock_pull_approved_checker.go | 29 +++--- server/events/github_app_working_dir.go | 2 +- server/events/github_app_working_dir_test.go | 8 +- server/events/mock_workingdir_test.go | 67 +++++++++---- server/events/mocks/mock_event_parsing.go | 2 +- server/events/mocks/mock_working_dir.go | 67 +++++++++---- .../post_workflow_hooks_command_runner.go | 2 +- ...post_workflow_hooks_command_runner_test.go | 20 ++-- .../pre_workflow_hooks_command_runner.go | 2 +- .../pre_workflow_hooks_command_runner_test.go | 20 ++-- server/events/project_command_builder.go | 6 +- .../project_command_builder_internal_test.go | 10 +- server/events/project_command_builder_test.go | 20 ++-- server/events/project_command_runner.go | 17 +++- server/events/project_command_runner_test.go | 7 +- .../mocks/mock_github_pull_request_getter.go | 29 +++--- server/events/working_dir.go | 96 ++++++++++--------- server/events/working_dir_test.go | 51 +++++----- 19 files changed, 268 insertions(+), 189 deletions(-) diff --git a/server/controllers/web_templates/mocks/mock_template_writer.go b/server/controllers/web_templates/mocks/mock_template_writer.go index e3fafa580c..5d3e33a2ef 100644 --- a/server/controllers/web_templates/mocks/mock_template_writer.go +++ b/server/controllers/web_templates/mocks/mock_template_writer.go @@ -1,5 +1,5 @@ // Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/controllers/templates (interfaces: TemplateWriter) +// Source: github.com/runatlantis/atlantis/server/controllers/web_templates (interfaces: TemplateWriter) package mocks diff --git a/server/core/runtime/mocks/mock_pull_approved_checker.go b/server/core/runtime/mocks/mock_pull_approved_checker.go index 13e1a3a834..fc43172cee 100644 --- a/server/core/runtime/mocks/mock_pull_approved_checker.go +++ b/server/core/runtime/mocks/mock_pull_approved_checker.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockPullApprovedChecker(options ...pegomock.Option) *MockPullApprovedChe func (mock *MockPullApprovedChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockPullApprovedChecker) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { +func (mock *MockPullApprovedChecker) PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockPullApprovedChecker().") } - params := []pegomock.Param{baseRepo, pull} + params := []pegomock.Param{logger, baseRepo, pull} result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 models.ApprovalStatus var ret1 error @@ -81,8 +82,8 @@ type VerifierMockPullApprovedChecker struct { timeout time.Duration } -func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { - params := []pegomock.Param{baseRepo, pull} +func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(logger logging.SimpleLogging, baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { + params := []pegomock.Param{logger, baseRepo, pull} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) return &MockPullApprovedChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -92,21 +93,25 @@ type MockPullApprovedChecker_PullIsApproved_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - baseRepo, pull := c.GetAllCapturedArguments() - return baseRepo[len(baseRepo)-1], pull[len(pull)-1] +func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, baseRepo, pull := c.GetAllCapturedArguments() + return logger[len(logger)-1], baseRepo[len(baseRepo)-1], pull[len(pull)-1] } -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go index a06599efe0..5852a94e9c 100644 --- a/server/events/github_app_working_dir.go +++ b/server/events/github_app_working_dir.go @@ -20,7 +20,7 @@ type GithubAppWorkingDir struct { } // Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo 
models.Repo, p models.PullRequest, workspace string) (string, error) { baseRepo := &p.BaseRepo // Realistically, this is a super brittle way of supporting clones using gh app installation tokens diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index 78e64d4e0b..900b1f6193 100644 --- a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -45,7 +45,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { GithubHostname: testServer, } - cloneDir, _, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -90,11 +90,11 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { When(credentials.GetToken()).ThenReturn("token", nil) When(workingDir.Clone(Any[logging.SimpleLogging](), Eq(modifiedBaseRepo), Eq(models.PullRequest{BaseRepo: modifiedBaseRepo}), - Eq("default"))).ThenReturn("", true, nil) + Eq("default"))).ThenReturn("", nil) - _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") + _, err := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") workingDir.VerifyWasCalledOnce().Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default") - Assert(t, success == true, "clone url mutation error") + Ok(t, err) } diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index c2e070cfed..ee69d79b73 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -26,27 +26,23 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{logger, headRepo, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string - var ret1 bool - var ret2 error + var ret1 error if len(result) != 0 { if result[0] != nil { ret0 = result[0].(string) } if result[1] != nil { - ret1 = result[1].(bool) - } - if result[2] != nil { - ret2 = result[2].(error) + ret1 = result[1].(error) } } - return ret0, ret1, ret2 + return ret0, ret1 } func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { @@ -166,12 +162,23 @@ func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir s return ret0 } -func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { +func (mock *MockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) + params := []pegomock.Param{logger, headRepo, p, workspace} + result := pegomock.GetGenericMockFrom(mock).Invoke("MergeAgain", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 bool + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(bool) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -507,19 +514,41 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) - return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_MergeAgain_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MergeAgain", params, verifier.timeout) + return &MockWorkingDir_MergeAgain_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { +type MockWorkingDir_MergeAgain_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := 
c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]models.Repo, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + } + return } diff --git a/server/events/mocks/mock_event_parsing.go b/server/events/mocks/mock_event_parsing.go index ad7a75c252..505fadb5fa 100644 --- a/server/events/mocks/mock_event_parsing.go +++ b/server/events/mocks/mock_event_parsing.go @@ -5,11 +5,11 @@ package mocks import ( gitea "code.gitea.io/sdk/gitea" - gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea" github "github.com/google/go-github/v59/github" azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea" logging "github.com/runatlantis/atlantis/server/logging" go_gitlab "github.com/xanzy/go-gitlab" "reflect" diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 9c162fc4a2..63b00805ee 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -26,27 +26,23 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{logger, headRepo, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string - var ret1 bool - var ret2 error + var ret1 error if len(result) != 0 { if result[0] != nil { ret0 = result[0].(string) } if result[1] != nil { - ret1 = result[1].(bool) - } - if result[2] != nil { - ret2 = result[2].(error) + ret1 = result[1].(error) } } - return ret0, ret1, ret2 + return ret0, ret1 } func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { @@ -166,12 +162,23 @@ func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir s return ret0 } -func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { +func (mock *MockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) + params := []pegomock.Param{logger, headRepo, p, workspace} + result := pegomock.GetGenericMockFrom(mock).Invoke("MergeAgain", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 bool + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(bool) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -507,19 +514,41 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) - return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_MergeAgain_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MergeAgain", params, verifier.timeout) + return &MockWorkingDir_MergeAgain_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { +type MockWorkingDir_MergeAgain_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := 
c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]models.Repo, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + } + return } diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go index e1f7f10a9d..ddaa7d1525 100644 --- a/server/events/post_workflow_hooks_command_runner.go +++ b/server/events/post_workflow_hooks_command_runner.go @@ -59,7 +59,7 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + repoDir, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } diff --git a/server/events/post_workflow_hooks_command_runner_test.go b/server/events/post_workflow_hooks_command_runner_test.go index 29996d8028..7fdb68c8f9 100644 --- a/server/events/post_workflow_hooks_command_runner_test.go +++ b/server/events/post_workflow_hooks_command_runner_test.go @@ -143,7 +143,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -237,7 +237,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, errors.New("some error")) err := postWh.RunPostHooks(ctx, planCmd) @@ -272,7 +272,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + 
Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -314,7 +314,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -350,7 +350,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -386,7 +386,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -422,7 +422,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -459,7 +459,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -495,7 +495,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, 
events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -531,7 +531,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go index 0e81f15ab9..0f0ec08593 100644 --- a/server/events/pre_workflow_hooks_command_runner.go +++ b/server/events/pre_workflow_hooks_command_runner.go @@ -59,7 +59,7 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + repoDir, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go index 191a8c27dc..29912576da 100644 --- a/server/events/pre_workflow_hooks_command_runner_test.go +++ b/server/events/pre_workflow_hooks_command_runner_test.go @@ -145,7 +145,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -241,7 +241,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, errors.New("some error")) err := preWh.RunPreHooks(ctx, planCmd) @@ -276,7 +276,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + 
Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -318,7 +318,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -354,7 +354,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -390,7 +390,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -426,7 +426,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -463,7 +463,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -499,7 +499,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, 
events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -535,7 +535,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index 9cdaff45d4..f0a0d65c46 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -397,7 +397,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) + repoDir, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -570,7 +570,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont defer unlockFn() ctx.Log.Debug("cloning repository") - _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return pcc, err } @@ -648,7 +648,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont if DefaultWorkspace != workspace { ctx.Log.Debug("cloning repository with workspace %s", workspace) - _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) + _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return pcc, err } diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index 2d45006959..19ae2a99bb 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -631,7 +631,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -846,7 +846,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), 
Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1091,7 +1091,7 @@ workflows: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1246,7 +1246,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1386,7 +1386,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index e1e35eb675..680ad375be 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -242,7 +242,7 @@ terraform { tmpDir := DirStructure(t, c.TestDirStructure) workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(ChangedFiles(c.TestDirStructure, ""), nil) @@ -603,7 +603,7 @@ projects: workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -792,7 +792,7 @@ projects: workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -1122,7 +1122,7 @@ projects: workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := 
vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -1312,7 +1312,7 @@ projects: Ok(t, err) When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ @@ -1401,7 +1401,7 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -1558,7 +1558,7 @@ projects: Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil) workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ @@ -1743,7 +1743,7 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) @@ -1962,7 +1962,7 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) @@ -2074,7 +2074,7 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 2ad783efd1..3377115d48 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -552,15 +552,22 @@ func (p 
*DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model
 	}
 	defer unlockFn()
 
-	p.WorkingDir.SetCheckForUpstreamChanges()
 	// Clone is idempotent so okay to run even if the repo was already cloned.
-	repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+	repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
 	if cloneErr != nil {
 		if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil {
 			ctx.Log.Err("error unlocking state after plan error: %v", unlockErr)
 		}
 		return nil, "", cloneErr
 	}
+	mergedAgain, err := p.WorkingDir.MergeAgain(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+	if err != nil {
+		if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil {
+			ctx.Log.Err("error unlocking state after plan error: %v", unlockErr)
+		}
+		return nil, "", err
+	}
+
 	projAbsPath := filepath.Join(repoDir, ctx.RepoRelDir)
 	if _, err = os.Stat(projAbsPath); os.IsNotExist(err) {
 		return nil, "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir}
@@ -667,7 +674,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver
 
 func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) {
 	// Clone is idempotent so okay to run even if the repo was already cloned.
-	repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+	repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
 	if cloneErr != nil {
 		return nil, "", cloneErr
 	}
@@ -713,7 +720,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out
 
 func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out *models.StateRmSuccess, failure string, err error) {
 	// Clone is idempotent so okay to run even if the repo was already cloned.
- repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 103966ef52..1b86d18b8e 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -64,7 +64,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { repoDir := t.TempDir() When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) @@ -104,6 +104,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { Assert(t, res.PlanSuccess != nil, "exp plan success") Equals(t, "https://lock-key", res.PlanSuccess.LockURL) + t.Logf("output is %s", res.PlanSuccess.TerraformOutput) Equals(t, "run\napply\nplan\ninit", res.PlanSuccess.TerraformOutput) expSteps := []string{"run", "apply", "plan", "init", "env"} for _, step := range expSteps { @@ -549,7 +550,7 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { repoDir := t.TempDir() When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) @@ -691,7 +692,7 @@ func TestDefaultProjectCommandRunner_Import(t *testing.T) { } repoDir := t.TempDir() When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) if c.setup != nil { c.setup(repoDir, ctx, mockLocker, mockInit, mockImport) } diff --git a/server/events/vcs/mocks/mock_github_pull_request_getter.go b/server/events/vcs/mocks/mock_github_pull_request_getter.go index f31a9a7ca9..00b472db75 100644 --- a/server/events/vcs/mocks/mock_github_pull_request_getter.go +++ b/server/events/vcs/mocks/mock_github_pull_request_getter.go @@ -7,6 +7,7 @@ import ( github "github.com/google/go-github/v59/github" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -26,11 +27,11 @@ func NewMockGithubPullRequestGetter(options ...pegomock.Option) *MockGithubPullR func (mock *MockGithubPullRequestGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockGithubPullRequestGetter) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { +func (mock *MockGithubPullRequestGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*github.PullRequest, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockGithubPullRequestGetter().") } - params := []pegomock.Param{repo, pullNum} + params := []pegomock.Param{logger, repo, pullNum} result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 *github.PullRequest var ret1 error @@ -82,8 +83,8 @@ type VerifierMockGithubPullRequestGetter struct { timeout time.Duration } -func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { - params := []pegomock.Param{repo, pullNum} +func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { + params := []pegomock.Param{logger, repo, pullNum} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) return &MockGithubPullRequestGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -93,21 +94,25 @@ type MockGithubPullRequestGetter_GetPullRequest_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - repo, pullNum := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1] +func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, int) { + logger, repo, pullNum := c.GetAllCapturedArguments() + return logger[len(logger)-1], repo[len(repo)-1], pullNum[len(pullNum)-1] } -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { +func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []int) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(models.Repo) + } + _param2 = make([]int, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(int) } } return diff --git a/server/events/working_dir.go b/server/events/working_dir.go index c1aa64a072..a0dd0a749b 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -39,10 +39,11 @@ var recheckRequiredMap sync.Map // WorkingDir handles the workspace on disk for running commands. type WorkingDir interface { // Clone git clones headRepo, checks out the branch and then returns the - // absolute path to the root of the cloned repo. It also returns - // a boolean indicating if we should warn users that the branch we're - // merging into has been updated since we cloned it. 
- Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) + // absolute path to the root of the cloned repo. + Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) + // MergeAgain merges again with upstream if upstream has been modified, returns + // whether it actually did a new merge + MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (bool, error) // GetWorkingDir returns the path to the workspace for this repo and pull. // If workspace does not exist on disk, error will be of type os.IsNotExist. GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) @@ -51,10 +52,6 @@ type WorkingDir interface { // Delete deletes the workspace for this repo and pull. Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error - // Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if - // the upstream branch has been modified. This is only safe after grabbing the project lock - // and before running any plans - SetCheckForUpstreamChanges() // DeletePlan deletes the plan for this repo, pull, workspace path and project name DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error // GetGitUntrackedFiles returns a list of Git untracked files in the working dir. @@ -90,24 +87,13 @@ type FileWorkspace struct { } // Clone git clones headRepo, checks out the branch and then returns the absolute -// path to the root of the cloned repo. It also returns -// a boolean indicating whether we had to merge with upstream again. +// path to the root of the cloned repo. // If the repo already exists and is at // the right commit it does nothing. This is to support running commands in // multiple dirs of the same repo without deleting existing plans. -func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) - // Disable the mustRecheckUpstream flag if we are not using the merge checkout strategy - mustRecheckUpstream := w.CheckForUpstreamChanges && w.CheckoutMerge - - if mustRecheckUpstream { - // We atomically set the recheckRequiredMap flag here before grabbing the clone lock. - // If the flag is cleared after we grab the lock, it means some other thread - // did the necessary work late enough that we do not have to do it again. - recheckRequiredMap.Store(cloneDir, struct{}{}) - } - // Unconditionally wait for the clone lock here, if anyone else is doing any clone // operation in this directory, we wait for it to finish before we check anything. value, _ := cloneLocks.LoadOrStore(cloneDir, new(sync.Mutex)) @@ -115,13 +101,6 @@ func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo mutex.Lock() defer mutex.Unlock() - if mustRecheckUpstream { - if _, exists := recheckRequiredMap.Load(cloneDir); !exists { - mustRecheckUpstream = false - w.Logger.Debug("Skipping upstream check. 
Some other thread has done this for us") - } - } - c := wrappedGitContext{cloneDir, headRepo, p} // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. @@ -142,33 +121,62 @@ func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo outputRevParseCmd, err := revParseCmd.CombinedOutput() if err != nil { logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(logger, c) + return cloneDir, w.forceClone(logger, c) } currCommit := strings.Trim(string(outputRevParseCmd), "\n") // We're prefix matching here because BitBucket doesn't give us the full // commit, only a 12 character prefix. if strings.HasPrefix(currCommit, p.HeadCommit) { - if mustRecheckUpstream { - w.CheckForUpstreamChanges = false - recheckRequiredMap.Delete(cloneDir) - if w.recheckDiverged(p, headRepo, cloneDir) { - w.Logger.Info("base branch has been updated, using merge strategy and will merge again") - return cloneDir, true, w.mergeAgain(logger, c) - } - w.Logger.Debug("repo is at correct commit %q and there are no upstream changes", p.HeadCommit) - } else { - w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) - } - return cloneDir, false, nil - } else { - logger.Debug("repo was already cloned but is not at correct commit, wanted '%s' got '%s'", p.HeadCommit, currCommit) + logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) + return cloneDir, nil } + logger.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit) // We'll fall through to re-clone. } // Otherwise we clone the repo. - return cloneDir, false, w.forceClone(logger, c) + return cloneDir, w.forceClone(logger, c) +} + +// MergeAgain merges again with upstream if we are using the merge checkout strategy, +// and upstream has been modified since we last checked. +// It returns a flag indicating whether we had to merge with upstream again. +func (w *FileWorkspace) MergeAgain( + logger logging.SimpleLogging, + headRepo models.Repo, + p models.PullRequest, + workspace string) (bool, error) { + + if !w.CheckoutMerge { + return false, nil + } + + cloneDir := w.cloneDir(p.BaseRepo, p, workspace) + // We atomically set the recheckRequiredMap flag here before grabbing the clone lock. + // If the flag is cleared after we grab the lock, it means some other thread + // did the necessary work late enough that we do not have to do it again. + recheckRequiredMap.Store(cloneDir, struct{}{}) + + // Unconditionally wait for the clone lock here, if anyone else is doing any clone + // operation in this directory, we wait for it to finish before we check anything. + value, _ := cloneLocks.LoadOrStore(cloneDir, new(sync.Mutex)) + mutex := value.(*sync.Mutex) + mutex.Lock() + defer mutex.Unlock() + + if _, exists := recheckRequiredMap.Load(cloneDir); !exists { + logger.Debug("Skipping upstream check. 
Some other thread has done this for us") + return false, nil + } + recheckRequiredMap.Delete(cloneDir) + + c := wrappedGitContext{cloneDir, headRepo, p} + if w.recheckDiverged(logger, p, headRepo, cloneDir) { + logger.Info("base branch has been updated, using merge strategy and will merge again") + return true, w.mergeAgain(logger, c) + } + return false, nil } // recheckDiverged returns true if the branch we're merging into has diverged diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index e25c420100..26c82823b7 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -45,7 +45,7 @@ func TestClone_NoneExisting(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -97,13 +97,12 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check the commits. actBaseCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD~1") @@ -148,25 +147,23 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { GpgNoSigningEnabled: true, } - _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -200,25 +197,23 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { GpgNoSigningEnabled: true, } - _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. 
_, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -257,7 +252,7 @@ func TestClone_CheckoutMergeConflict(t *testing.T) { GpgNoSigningEnabled: true, } - _, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -316,13 +311,12 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in shallow repo") @@ -346,13 +340,12 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in full repo") @@ -381,12 +374,11 @@ func TestClone_NoReclone(t *testing.T) { TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check that our proof file is still there. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -425,13 +417,12 @@ func TestClone_RecloneWrongCommit(t *testing.T) { TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", HeadCommit: expCommit, }, "default") Ok(t, err) - Equals(t, false, mergedAgain) assert.NoFileExists(t, planFile, "Plan file should have been wiped out by Clone") // Use rev-parse to verify at correct commit. @@ -506,23 +497,28 @@ func TestClone_MasterHasDiverged(t *testing.T) { Assert(t, err == nil, "creating plan file: %v", err) assert.FileExists(t, planFile) - // Run the clone without the checkout merge strategy. It should return + // Run MergeAgain without the checkout merge strategy. 
It should return // false for mergedAgain - _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err = wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, mergedAgain == false, "Clone with CheckoutMerge=false should not merge") - assert.FileExists(t, planFile, "Existing plan file should not be deleted by Clone with merge disabled") + mergedAgain, err := wd.MergeAgain(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ + BaseRepo: models.Repo{CloneURL: repoDir}, + HeadBranch: "second-pr", + BaseBranch: "main", + }, "default") + Ok(t, err) + assert.FileExists(t, planFile, "Existing plan file should not be deleted by merging again") + Assert(t, mergedAgain == false, "MergeAgain with CheckoutMerge=false should not merge") wd.CheckoutMerge = true - wd.SetCheckForUpstreamChanges() // Run the clone twice with the merge strategy, the first run should // return true for mergedAgain, subsequent runs should // return false since the first call is supposed to merge. - _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ + mergedAgain, err = wd.MergeAgain(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", @@ -531,8 +527,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { assert.FileExists(t, planFile, "Existing plan file should not be deleted by merging again") Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged") - wd.SetCheckForUpstreamChanges() - _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ + mergedAgain, err = wd.MergeAgain(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main",
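For reference, a minimal caller-side sketch of the split interface defined above: Clone now only returns the checkout directory (and does nothing when the workspace is already at the requested commit), while MergeAgain reports whether a fresh merge with upstream was actually performed. The helper below is illustrative only; the package, function name and wiring are assumptions, not code from this patch.

package example

import (
	"github.com/runatlantis/atlantis/server/events"
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

// prepareForPlan (hypothetical) shows the intended call order: clone or
// reuse the checkout first, then re-merge with upstream only at the point
// where the caller cares whether a new merge actually happened.
func prepareForPlan(logger logging.SimpleLogging, wd events.WorkingDir,
	headRepo models.Repo, pull models.PullRequest, workspace string) (string, bool, error) {

	// Clone is idempotent: if the workspace already exists at the right
	// commit it does nothing and just returns the path.
	repoDir, err := wd.Clone(logger, headRepo, pull, workspace)
	if err != nil {
		return "", false, err
	}

	// MergeAgain only acts under the merge checkout strategy; it returns
	// true when upstream has diverged and a new merge was performed.
	mergedAgain, err := wd.MergeAgain(logger, headRepo, pull, workspace)
	if err != nil {
		return "", false, err
	}
	return repoDir, mergedAgain, nil
}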