diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go index a06599efe0..5852a94e9c 100644 --- a/server/events/github_app_working_dir.go +++ b/server/events/github_app_working_dir.go @@ -20,7 +20,7 @@ type GithubAppWorkingDir struct { } // Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { baseRepo := &p.BaseRepo // Realistically, this is a super brittle way of supporting clones using gh app installation tokens diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index 78e64d4e0b..900b1f6193 100644 --- a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -45,7 +45,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { GithubHostname: testServer, } - cloneDir, _, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -90,11 +90,11 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { When(credentials.GetToken()).ThenReturn("token", nil) When(workingDir.Clone(Any[logging.SimpleLogging](), Eq(modifiedBaseRepo), Eq(models.PullRequest{BaseRepo: modifiedBaseRepo}), - Eq("default"))).ThenReturn("", true, nil) + Eq("default"))).ThenReturn("", nil) - _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") + _, err := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") workingDir.VerifyWasCalledOnce().Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default") - Assert(t, success == true, "clone url mutation error") + Ok(t, err) } diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index d298b2cee7..f5ead7a509 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -27,27 +27,23 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{logger, headRepo, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string - var ret1 bool - var ret2 error + var ret1 error if len(result) != 0 { if result[0] != nil { ret0 = result[0].(string) } if result[1] != nil { - ret1 = result[1].(bool) - } - if result[2] != nil { - ret2 = result[2].(error) + ret1 = result[1].(error) } } - return ret0, ret1, ret2 + return ret0, ret1 } func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { @@ -167,12 +163,23 @@ func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir s return ret0 } -func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { +func (mock *MockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) + params := []pegomock.Param{logger, headRepo, p, workspace} + result := pegomock.GetGenericMockFrom(mock).Invoke("MergeAgain", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 bool + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(bool) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -508,19 +515,41 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) - return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_MergeAgain_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MergeAgain", params, verifier.timeout) + return &MockWorkingDir_MergeAgain_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { +type MockWorkingDir_MergeAgain_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := 
c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]models.Repo, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + } + return } diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 9c162fc4a2..63b00805ee 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -26,27 +26,23 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{logger, headRepo, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string - var ret1 bool - var ret2 error + var ret1 error if len(result) != 0 { if result[0] != nil { ret0 = result[0].(string) } if result[1] != nil { - ret1 = result[1].(bool) - } - if result[2] != nil { - ret2 = result[2].(error) + ret1 = result[1].(error) } } - return ret0, ret1, ret2 + return ret0, ret1 } func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { @@ -166,12 +162,23 @@ func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir s return ret0 } -func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { +func (mock *MockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (bool, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) + params := []pegomock.Param{logger, headRepo, p, workspace} + result := pegomock.GetGenericMockFrom(mock).Invoke("MergeAgain", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 bool + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(bool) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -507,19 +514,41 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) - return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockWorkingDir) MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_MergeAgain_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MergeAgain", params, verifier.timeout) + return &MockWorkingDir_MergeAgain_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { +type MockWorkingDir_MergeAgain_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_MergeAgain_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]models.Repo, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) + } + } + return } diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go index f5fe0c5245..018350f79f 100644 --- 
a/server/events/post_workflow_hooks_command_runner.go +++ b/server/events/post_workflow_hooks_command_runner.go @@ -59,7 +59,7 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + repoDir, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } diff --git a/server/events/post_workflow_hooks_command_runner_test.go b/server/events/post_workflow_hooks_command_runner_test.go index 29996d8028..7fdb68c8f9 100644 --- a/server/events/post_workflow_hooks_command_runner_test.go +++ b/server/events/post_workflow_hooks_command_runner_test.go @@ -143,7 +143,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -237,7 +237,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, errors.New("some error")) err := postWh.RunPostHooks(ctx, planCmd) @@ -272,7 +272,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -314,7 +314,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -350,7 +350,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + 
Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -386,7 +386,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -422,7 +422,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -459,7 +459,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -495,7 +495,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -531,7 +531,7 @@ func TestRunPostHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go index 70462765a3..8d2442d060 
100644 --- a/server/events/pre_workflow_hooks_command_runner.go +++ b/server/events/pre_workflow_hooks_command_runner.go @@ -59,7 +59,7 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + repoDir, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go index 191a8c27dc..29912576da 100644 --- a/server/events/pre_workflow_hooks_command_runner_test.go +++ b/server/events/pre_workflow_hooks_command_runner_test.go @@ -145,7 +145,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -241,7 +241,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, errors.New("some error")) err := preWh.RunPreHooks(ctx, planCmd) @@ -276,7 +276,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -318,7 +318,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -354,7 +354,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + 
Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -390,7 +390,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -426,7 +426,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -463,7 +463,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -499,7 +499,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -535,7 +535,7 @@ func TestRunPreHooks_Clone(t *testing.T) { When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), - Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index 7d587e4ce3..2f2f24bc19 100644 --- 
a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -397,7 +397,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) + repoDir, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -594,7 +594,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont defer unlockFn() ctx.Log.Debug("cloning repository") - _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return pcc, err } @@ -672,7 +672,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont if DefaultWorkspace != workspace { ctx.Log.Debug("cloning repository with workspace %s", workspace) - _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) + _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return pcc, err } diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index d020871b31..c6e7bc070d 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -631,7 +631,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -846,7 +846,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1091,7 +1091,7 @@ workflows: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1245,7 +1245,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) @@ -1385,7 +1385,7 @@ projects: workingDir := NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmp, false, nil) + Any[string]())).ThenReturn(tmp, nil) vcsClient := vcsmocks.NewMockClient() 
When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 7560b5d6de..039612f617 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -241,7 +241,7 @@ terraform { tmpDir := DirStructure(t, c.TestDirStructure) workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(ChangedFiles(c.TestDirStructure, ""), nil) @@ -602,7 +602,7 @@ projects: workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -790,7 +790,7 @@ projects: workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -1119,7 +1119,7 @@ projects: workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -1307,7 +1307,7 @@ projects: Ok(t, err) When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ @@ -1395,7 +1395,7 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), @@ -1551,7 +1551,7 @@ projects: Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil) workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), 
Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ @@ -1735,7 +1735,7 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) @@ -1952,7 +1952,7 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) @@ -2063,7 +2063,7 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked workingDir := mocks.NewMockWorkingDir() When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, false, nil) + Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 153269c7e2..e2cb01155b 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -555,15 +555,22 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model } defer unlockFn() - p.WorkingDir.SetCheckForUpstreamChanges() // Clone is idempotent so okay to run even if the repo was already cloned. 
- repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) } - return nil, "", cloneErr + return nil, "", err } + mergedAgain, err := p.WorkingDir.MergeAgain(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + if err != nil { + if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { + ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) + } + return nil, "", err + } + projAbsPath := filepath.Join(repoDir, ctx.RepoRelDir) if _, err = os.Stat(projAbsPath); os.IsNotExist(err) { return nil, "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir} @@ -680,7 +687,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } @@ -726,7 +733,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out *models.StateRmSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index d241d44569..d4436ce13c 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -64,7 +64,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { repoDir := t.TempDir() When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) @@ -104,6 +104,7 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { Assert(t, res.PlanSuccess != nil, "exp plan success") Equals(t, "https://lock-key", res.PlanSuccess.LockURL) + t.Logf("output is %s", res.PlanSuccess.TerraformOutput) Equals(t, "run\napply\nplan\ninit", res.PlanSuccess.TerraformOutput) expSteps := []string{"run", "apply", "plan", "init", "env"} for _, step := range expSteps { @@ -571,7 +572,7 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { repoDir := t.TempDir() When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), Any[models.Project](), 
AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) @@ -713,7 +714,7 @@ func TestDefaultProjectCommandRunner_Import(t *testing.T) { } repoDir := t.TempDir() When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, false, nil) + Any[string]())).ThenReturn(repoDir, nil) if c.setup != nil { c.setup(repoDir, ctx, mockLocker, mockInit, mockImport) } diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 392e561aa0..37c087ccf9 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -40,10 +40,11 @@ var recheckRequiredMap sync.Map // WorkingDir handles the workspace on disk for running commands. type WorkingDir interface { // Clone git clones headRepo, checks out the branch and then returns the - // absolute path to the root of the cloned repo. It also returns - // a boolean indicating if we should warn users that the branch we're - // merging into has been updated since we cloned it. - Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) + // absolute path to the root of the cloned repo. + Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) + // MergeAgain merges again with upstream if upstream has been modified, returns + // whether it actually did a new merge + MergeAgain(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (bool, error) // GetWorkingDir returns the path to the workspace for this repo and pull. // If workspace does not exist on disk, error will be of type os.IsNotExist. GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) @@ -52,10 +53,6 @@ type WorkingDir interface { // Delete deletes the workspace for this repo and pull. Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error - // Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if - // the upstream branch has been modified. This is only safe after grabbing the project lock - // and before running any plans - SetCheckForUpstreamChanges() // DeletePlan deletes the plan for this repo, pull, workspace path and project name DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error // GetGitUntrackedFiles returns a list of Git untracked files in the working dir. @@ -91,24 +88,13 @@ type FileWorkspace struct { } // Clone git clones headRepo, checks out the branch and then returns the absolute -// path to the root of the cloned repo. It also returns -// a boolean indicating whether we had to merge with upstream again. +// path to the root of the cloned repo. // If the repo already exists and is at // the right commit it does nothing. This is to support running commands in // multiple dirs of the same repo without deleting existing plans. 
-func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) - // Disable the mustRecheckUpstream flag if we are not using the merge checkout strategy - mustRecheckUpstream := w.CheckForUpstreamChanges && w.CheckoutMerge - - if mustRecheckUpstream { - // We atomically set the recheckRequiredMap flag here before grabbing the clone lock. - // If the flag is cleared after we grab the lock, it means some other thread - // did the necessary work late enough that we do not have to do it again. - recheckRequiredMap.Store(cloneDir, struct{}{}) - } - // Unconditionally wait for the clone lock here, if anyone else is doing any clone // operation in this directory, we wait for it to finish before we check anything. value, _ := cloneLocks.LoadOrStore(cloneDir, new(sync.Mutex)) @@ -116,13 +102,6 @@ func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo mutex.Lock() defer mutex.Unlock() - if mustRecheckUpstream { - if _, exists := recheckRequiredMap.Load(cloneDir); !exists { - mustRecheckUpstream = false - w.Logger.Debug("Skipping upstream check. Some other thread has done this for us") - } - } - c := wrappedGitContext{cloneDir, headRepo, p} // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. @@ -143,33 +122,62 @@ func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo outputRevParseCmd, err := revParseCmd.CombinedOutput() if err != nil { logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(logger, c) + return cloneDir, w.forceClone(logger, c) } currCommit := strings.Trim(string(outputRevParseCmd), "\n") // We're prefix matching here because BitBucket doesn't give us the full // commit, only a 12 character prefix. if strings.HasPrefix(currCommit, p.HeadCommit) { - if mustRecheckUpstream { - w.CheckForUpstreamChanges = false - recheckRequiredMap.Delete(cloneDir) - if w.recheckDiverged(p, headRepo, cloneDir) { - w.Logger.Info("base branch has been updated, using merge strategy and will merge again") - return cloneDir, true, w.mergeAgain(logger, c) - } - w.Logger.Debug("repo is at correct commit %q and there are no upstream changes", p.HeadCommit) - } else { - w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) - } - return cloneDir, false, nil - } else { - logger.Debug("repo was already cloned but is not at correct commit, wanted '%s' got '%s'", p.HeadCommit, currCommit) + logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) + return cloneDir, nil } + logger.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit) // We'll fall through to re-clone. } // Otherwise we clone the repo. - return cloneDir, false, w.forceClone(logger, c) + return cloneDir, w.forceClone(logger, c) +} + +// MergeAgain merges again with upstream if we are using the merge checkout strategy, +// and upstream has been modified since we last checked. +// It returns a flag indicating whether we had to merge with upstream again. 
+func (w *FileWorkspace) MergeAgain( + logger logging.SimpleLogging, + headRepo models.Repo, + p models.PullRequest, + workspace string) (bool, error) { + + if !w.CheckoutMerge { + return false, nil + } + + cloneDir := w.cloneDir(p.BaseRepo, p, workspace) + // We atomically set the recheckRequiredMap flag here before grabbing the clone lock. + // If the flag is cleared after we grab the lock, it means some other thread + // did the necessary work late enough that we do not have to do it again. + recheckRequiredMap.Store(cloneDir, struct{}{}) + + // Unconditionally wait for the clone lock here, if anyone else is doing any clone + // operation in this directory, we wait for it to finish before we check anything. + value, _ := cloneLocks.LoadOrStore(cloneDir, new(sync.Mutex)) + mutex := value.(*sync.Mutex) + mutex.Lock() + defer mutex.Unlock() + + if _, exists := recheckRequiredMap.Load(cloneDir); !exists { + logger.Debug("Skipping upstream check. Some other thread has done this for us") + return false, nil + } + recheckRequiredMap.Delete(cloneDir) + + c := wrappedGitContext{cloneDir, headRepo, p} + if w.recheckDiverged(logger, p, headRepo, cloneDir) { + logger.Info("base branch has been updated, using merge strategy and will merge again") + return true, w.mergeAgain(logger, c) + } + return false, nil } // recheckDiverged returns true if the branch we're merging into has diverged diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index e25c420100..26c82823b7 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -45,7 +45,7 @@ func TestClone_NoneExisting(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -97,13 +97,12 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check the commits. actBaseCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD~1") @@ -148,25 +147,23 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { GpgNoSigningEnabled: true, } - _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. 
_, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -200,25 +197,23 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { GpgNoSigningEnabled: true, } - _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -257,7 +252,7 @@ func TestClone_CheckoutMergeConflict(t *testing.T) { GpgNoSigningEnabled: true, } - _, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", @@ -316,13 +311,12 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in shallow repo") @@ -346,13 +340,12 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in full repo") @@ -381,12 +374,11 @@ func TestClone_NoReclone(t *testing.T) { TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") Ok(t, err) - Equals(t, false, mergedAgain) // Check that our proof file is still there. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -425,13 +417,12 @@ func TestClone_RecloneWrongCommit(t *testing.T) { TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), GpgNoSigningEnabled: true, } - cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + cloneDir, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", HeadCommit: expCommit, }, "default") Ok(t, err) - Equals(t, false, mergedAgain) assert.NoFileExists(t, planFile, "Plan file should have been wiped out by Clone") // Use rev-parse to verify at correct commit. 
@@ -506,23 +497,28 @@ func TestClone_MasterHasDiverged(t *testing.T) { Assert(t, err == nil, "creating plan file: %v", err) assert.FileExists(t, planFile) - // Run the clone without the checkout merge strategy. It should return + // Run MergeAgain without the checkout merge strategy. It should return // false for mergedAgain - _, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{ + _, err = wd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, mergedAgain == false, "Clone with CheckoutMerge=false should not merge") - assert.FileExists(t, planFile, "Existing plan file should not be deleted by Clone with merge disabled") + mergedAgain, err := wd.MergeAgain(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ + BaseRepo: models.Repo{CloneURL: repoDir}, + HeadBranch: "second-pr", + BaseBranch: "main", + }, "default") + Ok(t, err) + assert.FileExists(t, planFile, "Existing plan file should not be deleted by merging again") + Assert(t, mergedAgain == false, "MergeAgain with CheckoutMerge=false should not merge") wd.CheckoutMerge = true - wd.SetCheckForUpstreamChanges() // Run the clone twice with the merge strategy, the first run should // return true for mergedAgain, subsequent runs should // return false since the first call is supposed to merge. - _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ + mergedAgain, err = wd.MergeAgain(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", @@ -531,8 +527,7 @@ func TestClone_MasterHasDiverged(t *testing.T) { assert.FileExists(t, planFile, "Existing plan file should not be deleted by merging again") Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged") - wd.SetCheckForUpstreamChanges() - _, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ + mergedAgain, err = wd.MergeAgain(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main",
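
For context (not part of the patch): a minimal sketch of how a caller is expected to use the split API after this change. `Clone` now returns only the clone path and an error, and the "did we merge with upstream again?" signal moves to the separate `MergeAgain` call, which `doPlan` above invokes right after cloning while still holding the workspace lock. The `workingDir` interface, `fakeWorkingDir`, and `prepareWorkspace` below are simplified stand-ins for illustration, not code from this repository.

```go
// Hypothetical illustration only: stand-in types, not Atlantis source.
package main

import (
	"errors"
	"fmt"
)

// workingDir is a trimmed-down stand-in for events.WorkingDir after this change:
// Clone returns just the clone path and an error, while the "merged with
// upstream again" signal now comes from the separate MergeAgain method.
type workingDir interface {
	Clone(headBranch, workspace string) (string, error)
	MergeAgain(headBranch, workspace string) (bool, error)
}

// fakeWorkingDir simulates a working dir whose base branch has diverged once.
type fakeWorkingDir struct {
	diverged bool
}

func (f *fakeWorkingDir) Clone(headBranch, workspace string) (string, error) {
	if headBranch == "" {
		return "", errors.New("empty head branch")
	}
	return "/tmp/repos/0/" + workspace, nil
}

func (f *fakeWorkingDir) MergeAgain(headBranch, workspace string) (bool, error) {
	// Report a new merge only the first time, loosely mirroring the
	// recheckRequiredMap bookkeeping in working_dir.go.
	if f.diverged {
		f.diverged = false
		return true, nil
	}
	return false, nil
}

// prepareWorkspace mirrors the doPlan flow above: clone first, then, still
// holding the project/workspace lock, ask MergeAgain whether upstream moved
// and a fresh merge was performed.
func prepareWorkspace(wd workingDir, headBranch, workspace string) (string, bool, error) {
	repoDir, err := wd.Clone(headBranch, workspace)
	if err != nil {
		return "", false, fmt.Errorf("cloning %s: %w", headBranch, err)
	}
	mergedAgain, err := wd.MergeAgain(headBranch, workspace)
	if err != nil {
		return "", false, fmt.Errorf("re-merging upstream into %s: %w", headBranch, err)
	}
	return repoDir, mergedAgain, nil
}

func main() {
	wd := &fakeWorkingDir{diverged: true}
	for i := 0; i < 2; i++ {
		dir, merged, err := prepareWorkspace(wd, "second-pr", "default")
		if err != nil {
			panic(err)
		}
		// First run reports mergedAgain=true, second reports false,
		// matching the behaviour exercised in TestClone_MasterHasDiverged.
		fmt.Printf("run %d: dir=%s mergedAgain=%v\n", i+1, dir, merged)
	}
}
```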