diff --git a/server/legacy/controllers/events/errors/errors.go b/server/legacy/controllers/events/errors/errors.go deleted file mode 100644 index 3690acf70..000000000 --- a/server/legacy/controllers/events/errors/errors.go +++ /dev/null @@ -1,45 +0,0 @@ -package errors - -// RequestValidationError is its own unique type -// in order to allow us to type assert and return custom http -// error codes accordingly -type RequestValidationError struct { - Err error -} - -func (e *RequestValidationError) Error() string { - return e.Err.Error() -} - -// WebhookParsingError is its own unique type -// in order to allow us to type assert and return custom http -// error codes accordingly -type WebhookParsingError struct { - Err error -} - -func (e *WebhookParsingError) Error() string { - return e.Err.Error() -} - -// EventParsingError is its own unique type -// in order to allow us to type assert and return custom http -// error codes accordingly -type EventParsingError struct { - Err error -} - -func (e *EventParsingError) Error() string { - return e.Err.Error() -} - -// UnsupportedEventTypeError is its own unique type -// in order to allow us to type assert and return custom http -// error codes accordingly -type UnsupportedEventTypeError struct { - Msg string -} - -func (r *UnsupportedEventTypeError) Error() string { - return r.Msg -} diff --git a/server/legacy/controllers/events/events_controller.go b/server/legacy/controllers/events/events_controller.go deleted file mode 100644 index 1578226a1..000000000 --- a/server/legacy/controllers/events/events_controller.go +++ /dev/null @@ -1,288 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. 
- -package events - -import ( - "context" - "fmt" - "net/http" - - httputils "github.com/runatlantis/atlantis/server/legacy/http" - - requestErrors "github.com/runatlantis/atlantis/server/legacy/controllers/events/errors" - "github.com/runatlantis/atlantis/server/legacy/controllers/events/handlers" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - key "github.com/runatlantis/atlantis/server/neptune/context" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" - github_converter "github.com/runatlantis/atlantis/server/vcs/provider/github/converter" - github_request "github.com/runatlantis/atlantis/server/vcs/provider/github/request" - "github.com/uber-go/tally/v4" -) - -const ( - githubHeader = "X-Github-Event" -) - -type commentEventHandler interface { - Handle(ctx context.Context, request *httputils.BufferedRequest, event event_types.Comment) error -} - -type prEventHandler interface { - Handle(ctx context.Context, request *httputils.BufferedRequest, event event_types.PullRequest) error -} - -type unsupportedPushEventHandler struct{} - -func (h unsupportedPushEventHandler) Handle(ctx context.Context, event event_types.Push) error { - return fmt.Errorf("push events are not supported in this context") -} - -type unsupportedCheckRunEventHandler struct{} - -func (h unsupportedCheckRunEventHandler) Handle(ctx context.Context, event event_types.CheckRun) error { - return fmt.Errorf("check run events are not supported in this context") -} - -type unsupportedCheckSuiteEventHandler struct{} - -func (h unsupportedCheckSuiteEventHandler) Handle(ctx context.Context, event event_types.CheckSuite) error { - return fmt.Errorf("check suite events are not supported in this context") -} - -func NewRequestResolvers( - providerResolverInitializer map[models.VCSHostType]func() RequestResolver, - supportedProviders []models.VCSHostType, -) []RequestResolver { - var resolvers []RequestResolver - for provider, resolverInitializer := range providerResolverInitializer { - for _, supportedProvider := range supportedProviders { - if provider != supportedProvider { - continue - } - - resolvers = append(resolvers, resolverInitializer()) - } - } - - return resolvers -} - -func NewVCSEventsController( - scope tally.Scope, - githubWebhookSecret []byte, - allowDraftPRs bool, - commandRunner events.CommandRunner, - commentParser events.CommentParsing, - eventParser events.EventParsing, - pullCleaner events.PullCleaner, - repoAllowlistChecker *events.RepoAllowlistChecker, - vcsClient vcs.Client, - logger logging.Logger, - applyDisabled bool, - supportedVCSProviders []models.VCSHostType, - repoConverter github_converter.RepoConverter, - pullConverter github_converter.PullConverter, - githubPullGetter github_converter.PullGetter, - pullFetcher github_converter.PullFetcher, -) *VCSEventsController { - prHandler := handlers.NewPullRequestEvent( - repoAllowlistChecker, pullCleaner, logger, commandRunner, - ) - - commentHandler := handlers.NewCommentEvent( - commentParser, - repoAllowlistChecker, - vcsClient, - commandRunner, - logger, - ) - - pullRequestReviewHandler := handlers.NewPullRequestReviewEvent(commandRunner, logger) - - // we don't support push events in the atlantis worker and these should never make it in the queue - // in the first place, so if it happens, let's return an error and fail fast. 
- pushHandler := unsupportedPushEventHandler{} - - // lazy map of resolver providers to their resolver - // laziness ensures we only instantiate the providers we support. - providerResolverInitializer := map[models.VCSHostType]func() RequestResolver{ - models.Github: func() RequestResolver { - return github_request.NewHandler( - logger, - scope, - githubWebhookSecret, - pullFetcher, - commentHandler, - prHandler, - pushHandler, - pullRequestReviewHandler, - unsupportedCheckRunEventHandler{}, - unsupportedCheckSuiteEventHandler{}, - allowDraftPRs, - repoConverter, - pullConverter, - githubPullGetter, - ) - }, - } - - router := &RequestRouter{ - Resolvers: NewRequestResolvers(providerResolverInitializer, supportedVCSProviders), - Logger: logger, - } - - return &VCSEventsController{ - RequestRouter: router, - Logger: logger, - Scope: scope, - Parser: eventParser, - CommentParser: commentParser, - PREventHandler: prHandler, - CommentEventHandler: commentHandler, - ApplyDisabled: applyDisabled, - RepoAllowlistChecker: repoAllowlistChecker, - SupportedVCSHosts: supportedVCSProviders, - VCSClient: vcsClient, - } -} - -type RequestHandler interface { - Handle(request *httputils.BufferedRequest) error -} - -type RequestMatcher interface { - Matches(request *httputils.BufferedRequest) bool -} - -type RequestResolver interface { - RequestHandler - RequestMatcher -} - -// TODO: once VCSEventsController is fully broken down this implementation can just live in there. -type RequestRouter struct { - Resolvers []RequestResolver - Logger logging.Logger -} - -func (p *RequestRouter) Route(w http.ResponseWriter, r *http.Request) { - ctx := r.Context() - // we do this to allow for multiple reads to the request body - request, err := httputils.NewBufferedRequest(r) - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - p.logAndWriteBody(ctx, w, err.Error(), map[string]interface{}{key.ErrKey.String(): err}) - return - } - - for _, resolver := range p.Resolvers { - if !resolver.Matches(request) { - continue - } - - err := resolver.Handle(request) - - if e, ok := err.(*requestErrors.RequestValidationError); ok { - w.WriteHeader(http.StatusForbidden) - p.logAndWriteBody(ctx, w, e.Error(), map[string]interface{}{key.ErrKey.String(): e}) - return - } - - if e, ok := err.(*requestErrors.WebhookParsingError); ok { - w.WriteHeader(http.StatusBadRequest) - p.logAndWriteBody(ctx, w, e.Error(), map[string]interface{}{key.ErrKey.String(): e}) - return - } - - if e, ok := err.(*requestErrors.EventParsingError); ok { - w.WriteHeader(http.StatusBadRequest) - p.logAndWriteBody(ctx, w, e.Error(), map[string]interface{}{key.ErrKey.String(): e}) - return - } - - if e, ok := err.(*requestErrors.UnsupportedEventTypeError); ok { - // historically we've just ignored these so for now let's just do that. - w.WriteHeader(http.StatusOK) - p.logAndWriteBody(ctx, w, e.Error(), map[string]interface{}{key.ErrKey.String(): e}) - return - } - - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - p.logAndWriteBody(ctx, w, err.Error(), map[string]interface{}{key.ErrKey.String(): err}) - return - } - - w.WriteHeader(http.StatusOK) - p.logAndWriteBody(ctx, w, "Processing...") - return - } - - w.WriteHeader(http.StatusInternalServerError) - p.logAndWriteBody(ctx, w, "no resolver configured for request") -} - -func (p *RequestRouter) logAndWriteBody(ctx context.Context, w http.ResponseWriter, msg string, fields ...map[string]interface{}) { - fmt.Fprintln(w, msg) - p.Logger.InfoContext(ctx, msg, fields...) 
-} - -// VCSEventsController handles all webhook requests which signify 'events' in the -// VCS host, ex. GitHub. -// TODO: migrate all provider specific request handling into packaged resolver similar to github -type VCSEventsController struct { - Logger logging.Logger - Scope tally.Scope - CommentParser events.CommentParsing - Parser events.EventParsing - PREventHandler prEventHandler - CommentEventHandler commentEventHandler - RequestRouter *RequestRouter - ApplyDisabled bool - RepoAllowlistChecker *events.RepoAllowlistChecker - // SupportedVCSHosts is which VCS hosts Atlantis was configured upon - // startup to support. - SupportedVCSHosts []models.VCSHostType - VCSClient vcs.Client -} - -// Post handles POST webhook requests. -func (e *VCSEventsController) Post(w http.ResponseWriter, r *http.Request) { - if r.Header.Get(githubHeader) != "" { - e.RequestRouter.Route(w, r) - return - } - e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request") -} - -func (e *VCSEventsController) respond(w http.ResponseWriter, lvl logging.LogLevel, code int, format string, args ...interface{}) { - response := fmt.Sprintf(format, args...) - switch lvl { - case logging.Error: - e.Logger.Error(response) - case logging.Info: - e.Logger.Info(response) - case logging.Warn: - e.Logger.Warn(response) - case logging.Debug: - e.Logger.Debug(response) - default: - e.Logger.Error(response) - } - w.WriteHeader(code) - fmt.Fprintln(w, response) -} diff --git a/server/legacy/controllers/events/events_controller_e2e_test.go b/server/legacy/controllers/events/events_controller_e2e_test.go deleted file mode 100644 index 357ddcacd..000000000 --- a/server/legacy/controllers/events/events_controller_e2e_test.go +++ /dev/null @@ -1,1181 +0,0 @@ -package events_test - -import ( - "bytes" - "context" - "fmt" - "net/http" - "net/http/httptest" - "os" - "os/exec" - "path/filepath" - "regexp" - "strings" - "sync" - "testing" - - "github.com/google/go-github/v45/github" - "github.com/hashicorp/go-getter" - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/config/valid" - server "github.com/runatlantis/atlantis/server/legacy" - events_controllers "github.com/runatlantis/atlantis/server/legacy/controllers/events" - "github.com/runatlantis/atlantis/server/legacy/controllers/events/handlers" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - runtime_models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - "github.com/runatlantis/atlantis/server/legacy/jobs" - lyftCommand "github.com/runatlantis/atlantis/server/legacy/lyft/command" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" - github_converter "github.com/runatlantis/atlantis/server/vcs/provider/github/converter" - "github.com/runatlantis/atlantis/server/vcs/provider/github/request" - ffclient "github.com/thomaspoignant/go-feature-flag" - - "github.com/runatlantis/atlantis/server/legacy/core/runtime/policy" - "github.com/runatlantis/atlantis/server/legacy/core/terraform" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/vcs/markdown" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - lyft_vcs "github.com/runatlantis/atlantis/server/legacy/events/vcs/lyft" - 
"github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - "github.com/runatlantis/atlantis/server/legacy/wrappers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - . "github.com/runatlantis/atlantis/testing" -) - -const ( - ConftestVersion = "0.25.0" - githubHeader = "X-Github-Event" -) - -type noopPushEventHandler struct{} - -func (h noopPushEventHandler) Handle(ctx context.Context, event event_types.Push) error { - return nil -} - -type noopCheckRunEventHandler struct{} - -func (h noopCheckRunEventHandler) Handle(ctx context.Context, event event_types.CheckRun) error { - return nil -} - -type noopCheckSuiteEventHandler struct{} - -func (h noopCheckSuiteEventHandler) Handle(ctx context.Context, event event_types.CheckSuite) error { - return nil -} - -type NoopTFDownloader struct{} - -func (m *NoopTFDownloader) GetFile(dst, src string, opts ...getter.ClientOption) error { - return nil -} - -func (m *NoopTFDownloader) GetAny(dst, src string, opts ...getter.ClientOption) error { - return nil -} - -type LocalConftestCache struct{} - -func (m *LocalConftestCache) Get(key *version.Version) (string, error) { - return exec.LookPath(fmt.Sprintf("conftest%s", ConftestVersion)) -} - -func TestGitHubWorkflowWithPolicyCheck(t *testing.T) { - featureConfig := feature.StringRetriever(`platform-mode: - percentage: 100 - true: true - false: false - default: false - trackEvents: false -legacy-deprecation: - percentage: 100 - true: true - false: false - default: false - trackEvents: false`) - if testing.Short() { - t.SkipNow() - } - // Ensure we have >= TF 0.14 locally. - ensureRunning014(t) - // Ensure we have >= Conftest 0.21 locally. - ensureRunningConftest(t) - - cases := []struct { - Description string - // RepoDir is relative to testfixtures/test-repos. - RepoDir string - // ModifiedFiles are the list of files that have been modified in this - // pull request. - ModifiedFiles []string - // Comments are what our mock user writes to the pull request. - Comments []string - // ExpReplies is a list of files containing the expected replies that - // Atlantis writes to the pull request in order. A reply from a parallel operation - // will be matched using a substring check. 
- ExpReplies [][]string - }{ - { - Description: "1 failing project and 1 passing project ", - RepoDir: "policy-checks-multi-projects", - ModifiedFiles: []string{"dir1/main.tf,", "dir2/main.tf"}, - ExpReplies: [][]string{ - {"exp-output-autoplan.txt"}, - {"exp-output-auto-policy-check.txt"}, - }, - }, - { - Description: "failing policy without policies passing", - RepoDir: "policy-checks", - ModifiedFiles: []string{"main.tf"}, - ExpReplies: [][]string{ - {"exp-output-autoplan.txt"}, - {"exp-output-auto-policy-check.txt"}, - {"exp-output-auto-policy-check.txt"}, - }, - }, - { - Description: "failing policy additional apply requirements specified", - RepoDir: "policy-checks-apply-reqs", - ModifiedFiles: []string{"main.tf"}, - ExpReplies: [][]string{ - {"exp-output-autoplan.txt"}, - {"exp-output-auto-policy-check.txt"}, - {"exp-output-auto-policy-check.txt"}, - }, - }, - { - Description: "failing policy approved by non owner", - RepoDir: "policy-checks-diff-owner", - ModifiedFiles: []string{"main.tf"}, - ExpReplies: [][]string{ - {"exp-output-autoplan.txt"}, - {"exp-output-auto-policy-check.txt"}, - {"exp-output-auto-policy-check.txt"}, - }, - }, - } - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - // reset userConfig - userConfig := &server.UserConfig{} - userConfig.EnablePolicyChecks = true - - ghClient := &testGithubClient{ExpectedModifiedFiles: c.ModifiedFiles} - - headSHA, ctrl := setupE2E(t, c.RepoDir, userConfig, ghClient, e2eOptions{ - featureConfig: featureConfig, - }) - - // Setup test dependencies. - w := httptest.NewRecorder() - - ghClient.ExpectedPull = GitHubPullRequestParsed(headSHA) - ghClient.ExpectedApprovalStatus = models.ApprovalStatus{IsApproved: true} - - // First, send the open pull request event which triggers autoplan. - pullOpenedReq := GitHubPullRequestOpenedEvent(t, headSHA) - ctrl.Post(w, pullOpenedReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - - if c.RepoDir != "policy-checks-multi-projects" { - pullReviewedReq := GitHubPullRequestReviewedEvent(t, headSHA) - w = httptest.NewRecorder() - ctrl.Post(w, pullReviewedReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - } - - // Now send any other comments. - for _, comment := range c.Comments { - commentReq := GitHubCommentEvent(t, comment) - w = httptest.NewRecorder() - ctrl.Post(w, commentReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - } - - // Send the "pull closed" event which would be triggered by the - // a manual merge. - pullClosedReq := GitHubPullRequestClosedEvent(t) - w = httptest.NewRecorder() - ctrl.Post(w, pullClosedReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - - // Verify - actReplies := ghClient.CapturedComments - Assert(t, len(c.ExpReplies) == len(actReplies), "missing expected replies, got %d but expected %d", len(actReplies), len(c.ExpReplies)) - for i, expReply := range c.ExpReplies { - assertCommentEquals(t, expReply, actReplies[i], c.RepoDir, false) - } - }) - } -} - -func TestGitHubWorkflowPullRequestsWorkflows(t *testing.T) { - featureConfig := feature.StringRetriever(`platform-mode: - percentage: 100 - true: true - false: false - default: false - trackEvents: false -legacy-deprecation: - percentage: 100 - true: true - false: false - default: false - trackEvents: false`) - - if testing.Short() { - t.SkipNow() - } - // Ensure we have >= TF 0.14 locally. - ensureRunning014(t) - // Ensure we have >= Conftest 0.21 locally. 
- ensureRunningConftest(t) - - cases := []struct { - Description string - // RepoDir is relative to testfixtures/test-repos. - RepoDir string - // ModifiedFiles are the list of files that have been modified in this - // pull request. - ModifiedFiles []string - // Comments are what our mock user writes to the pull request. - Comments []string - // ExpReplies is a list of files containing the expected replies that - // Atlantis writes to the pull request in order. A reply from a parallel operation - // will be matched using a substring check. - ExpReplies [][]string - }{ - { - Description: "disabled apply", - RepoDir: "platform-mode/disabled-apply", - ModifiedFiles: []string{"staging/main.tf"}, - Comments: []string{ - "atlantis apply", - }, - ExpReplies: [][]string{ - {"exp-output-autoplan.txt"}, - {"exp-output-auto-policy-check.txt"}, - {"exp-output-apply.txt"}, - }, - }, - { - Description: "autoplan and policy check approvals", - RepoDir: "platform-mode/policy-check-approval", - ModifiedFiles: []string{"main.tf"}, - Comments: []string{ - "atlantis apply", - }, - ExpReplies: [][]string{ - {"exp-output-autoplan.txt"}, - {"exp-output-auto-policy-check.txt"}, - {"exp-output-apply.txt"}, - }, - }, - } - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - // Setup test dependencies. - w := httptest.NewRecorder() - // reset userConfig - userConfig := &server.UserConfig{} - userConfig.EnablePolicyChecks = true - - ghClient := &testGithubClient{ExpectedModifiedFiles: c.ModifiedFiles} - - headSHA, ctrl := setupE2E(t, c.RepoDir, userConfig, ghClient, e2eOptions{ - featureConfig: featureConfig, - }) - - ghClient.ExpectedPull = GitHubPullRequestParsed(headSHA) - ghClient.ExpectedApprovalStatus = models.ApprovalStatus{IsApproved: true} - - // First, send the open pull request event which triggers autoplan. - pullOpenedReq := GitHubPullRequestOpenedEvent(t, headSHA) - ctrl.Post(w, pullOpenedReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - - // Now send any other comments. - for _, comment := range c.Comments { - commentReq := GitHubCommentEvent(t, comment) - w = httptest.NewRecorder() - ctrl.Post(w, commentReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - } - - // Send the "pull closed" event which would be triggered by the - // a manual merge. 
- pullClosedReq := GitHubPullRequestClosedEvent(t) - w = httptest.NewRecorder() - ctrl.Post(w, pullClosedReq) - ResponseContains(t, w, http.StatusOK, "Processing...") - - // Verify - actReplies := ghClient.CapturedComments - Assert(t, len(c.ExpReplies) == len(actReplies), "missing expected replies, got %d but expected %d", len(actReplies), len(c.ExpReplies)) - for i, expReply := range c.ExpReplies { - assertCommentEquals(t, expReply, actReplies[i], c.RepoDir, false) - } - }) - } -} - -type e2eOptions struct { - featureConfig ffclient.Retriever -} - -func setupE2E(t *testing.T, repoFixtureDir string, userConfig *server.UserConfig, ghClient vcs.IGithubClient, options ...e2eOptions) (string, events_controllers.VCSEventsController) { - var featureConfig ffclient.Retriever - for _, o := range options { - if o.featureConfig != nil { - featureConfig = o.featureConfig - } - } - - // env vars - // need this to be set or we'll fail the policy check step - os.Setenv(policy.DefaultConftestVersionEnvKey, "0.25.0") - - // First initialize the local repo we'll be working with in a unique test specific dir - repoDir, headSHA := initializeRepo(t, repoFixtureDir) - // Create subdirs for plugin cache, binaries, data - // unclear how these are used in conjunction with the above? - // TODO: investigate unifying this code with the above - dataDir, binDir, cacheDir := mkSubDirs(t) - - // Set up test dependencies, this is where the code path would diverge from the standard server - // initialization for testing purposes - // ! We should try to keep this as minimal as possible - - lockURLGenerator := &testLockURLGenerator{} - webhookSender := &testWebhookSender{} - - conftestCache := &LocalConftestCache{} - downloader := &NoopTFDownloader{} - overrideCloneURL := fmt.Sprintf("file://%s", repoDir) - - // TODO: we should compare this output against what we post on github - projectCmdOutputHandler := &jobs.NoopProjectOutputHandler{} - - ctxLogger := logging.NewNoopCtxLogger(t) - - var featureAllocator *feature.PercentageBasedAllocator - var featureAllocatorErr error - if featureConfig != nil { - featureAllocator, featureAllocatorErr = feature.NewStringSourcedAllocatorWithRetriever(ctxLogger, featureConfig) - } else { - featureAllocator, featureAllocatorErr = feature.NewStringSourcedAllocator(ctxLogger) - } - - Ok(t, featureAllocatorErr) - - t.Cleanup(featureAllocator.Close) - - terraformClient, err := terraform.NewE2ETestClient(binDir, cacheDir, "", "", "", "default-tf-version", "https://releases.hashicorp.com", downloader, false, projectCmdOutputHandler) - Ok(t, err) - - // Set real dependencies here. 
- // TODO: aggregate some of this with that of server.go to minimize duplication - vcsClient := vcs.NewClientProxy(ghClient) - e2eStatusUpdater := &command.VCSStatusUpdater{Client: vcsClient, TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "atlantis"}} - - eventParser := &events.EventParser{ - GithubUser: "github-user", - GithubToken: "github-token", - } - commentParser := &events.CommentParser{ - GithubUser: "github-user", - } - - boltdb, err := db.New(dataDir) - Ok(t, err) - - lockingClient := locking.NewClient(boltdb) - applyLocker := locking.NewApplyClient(boltdb, userConfig.DisableApply) - projectLocker := &events.DefaultProjectLocker{ - Locker: lockingClient, - VCSClient: vcsClient, - } - - globalCfg := valid.NewGlobalCfg(dataDir) - - workingDir := &events.FileWorkspace{ - DataDir: dataDir, - TestingOverrideHeadCloneURL: overrideCloneURL, - GlobalCfg: globalCfg, - } - - defaultTFVersion := terraformClient.DefaultVersion() - locker := events.NewDefaultWorkingDirLocker() - parser := &config.ParserValidator{} - - expCfgPath := filepath.Join(absRepoPath(t, repoFixtureDir), "repos.yaml") - if _, err := os.Stat(expCfgPath); err == nil { - globalCfg, err = parser.ParseGlobalCfg(expCfgPath, globalCfg) - Ok(t, err) - } else { - globalCfg, err = parser.ParseGlobalCfgJSON(`{"repos": [{"id":"/.*/", "allow_custom_workflows": true, "allowed_overrides": ["workflow"], "pre_workflow_hooks":[{"run": "echo 'hello world'"}]}]}`, globalCfg) - Ok(t, err) - } - drainer := &events.Drainer{} - - parallelPoolSize := 1 - - preWorkflowHooksCommandRunner := &events.DefaultPreWorkflowHooksCommandRunner{ - VCSClient: vcsClient, - GlobalCfg: globalCfg, - WorkingDirLocker: locker, - WorkingDir: workingDir, - PreWorkflowHookRunner: runtime.DefaultPreWorkflowHookRunner{}, - } - statsScope, _, err := metrics.NewLoggingScope(ctxLogger, "atlantis") - Ok(t, err) - - projectContextBuilder := wrappers. - WrapProjectContext(events.NewProjectCommandContextBuilder(commentParser)). - WithInstrumentation(statsScope) - - projectContextBuilder = wrappers. - WrapProjectContext(events.NewPlatformModeProjectCommandContextBuilder(commentParser, projectContextBuilder, ctxLogger, featureAllocator)). 
- WithInstrumentation(statsScope) - - if userConfig.EnablePolicyChecks { - projectContextBuilder = projectContextBuilder.EnablePolicyChecks(commentParser) - } - - projectCommandBuilder := events.NewProjectCommandBuilder( - projectContextBuilder, - parser, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - locker, - globalCfg, - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - ctxLogger, - events.InfiniteProjectsPerPR, - ) - - showStepRunner, err := runtime.NewShowStepRunner(terraformClient, defaultTFVersion) - Ok(t, err) - - conftestVersion, err := version.NewVersion(ConftestVersion) - Ok(t, err) - - conftextExec := policy.NewConfTestVersionEnsurer(ctxLogger, binDir, downloader) - - // swapping out version cache to something that always returns local contest - // binary - conftextExec.VersionCache = conftestCache - - reviewFetcher := &mockReviewFetcher{ - approvers: []string{}, - } - teamFetcher := &mockTeamFetcher{ - members: []string{}, - } - reviewDismisser := &mockReviewDismisser{} - policyFilter := events.NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, featureAllocator, globalCfg.PolicySets.PolicySets, ctxLogger) - conftestExecutor := &policy.ConfTestExecutor{ - Exec: runtime_models.LocalExec{}, - PolicyFilter: policyFilter, - } - policyCheckRunner, err := runtime.NewPolicyCheckStepRunner( - conftestVersion, - conftextExec, - conftestExecutor, - ) - Ok(t, err) - initStepRunner := &runtime.InitStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTFVersion, - } - planStepRunner := &runtime.PlanStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTFVersion, - AsyncTFExec: terraformClient, - } - - applyStepRunner := &runtime.ApplyStepRunner{ - TerraformExecutor: terraformClient, - AsyncTFExec: terraformClient, - } - - versionStepRunner := &runtime.VersionStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTFVersion, - } - - runStepRunner := &runtime.RunStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTFVersion, - TerraformBinDir: binDir, - } - - envStepRunner := &runtime.EnvStepRunner{ - RunStepRunner: runStepRunner, - } - - stepsRunner := runtime.NewStepsRunner( - initStepRunner, - planStepRunner, - showStepRunner, - policyCheckRunner, - applyStepRunner, - versionStepRunner, - runStepRunner, - envStepRunner, - ) - - Ok(t, err) - - applyRequirementHandler := &events.AggregateApplyRequirements{ - WorkingDir: workingDir, - } - unwrappedRunner := events.NewProjectCommandRunner( - stepsRunner, - workingDir, - webhookSender, - locker, - applyRequirementHandler, - ) - - legacyPrjCmdRunner := wrappers.WrapProjectRunner( - unwrappedRunner, - ).WithSync(projectLocker, lockURLGenerator) - - platformModePrjCmdRunner := wrappers.WrapProjectRunner( - unwrappedRunner, - ) - - prjCmdRunner := &lyftCommand.PlatformModeProjectRunner{ - PlatformModeRunner: platformModePrjCmdRunner, - PrModeRunner: legacyPrjCmdRunner, - Allocator: featureAllocator, - Logger: ctxLogger, - } - - dbUpdater := &events.DBUpdater{ - DB: boltdb, - } - - pullUpdater := &events.PullOutputUpdater{ - HidePrevPlanComments: false, - VCSClient: vcsClient, - MarkdownRenderer: &markdown.Renderer{}, - } - - deleteLockCommand := &events.DefaultDeleteLockCommand{ - Locker: lockingClient, - Logger: ctxLogger, - WorkingDir: workingDir, - WorkingDirLocker: locker, - DB: boltdb, - } - - policyCheckCommandRunner := events.NewPolicyCheckCommandRunner( - 
dbUpdater, - pullUpdater, - e2eStatusUpdater, - prjCmdRunner, - parallelPoolSize, - ) - - planCommandRunner := events.NewPlanCommandRunner( - vcsClient, - &events.DefaultPendingPlanFinder{}, - workingDir, - e2eStatusUpdater, - projectCommandBuilder, - prjCmdRunner, - dbUpdater, - pullUpdater, - policyCheckCommandRunner, - parallelPoolSize, - ) - - unlockCommandRunner := events.NewUnlockCommandRunner( - deleteLockCommand, - vcsClient, - ) - - versionCommandRunner := events.NewVersionCommandRunner( - pullUpdater, - projectCommandBuilder, - prjCmdRunner, - parallelPoolSize, - ) - - var applyCommandRunner command.Runner - e2ePullReqStatusFetcher := lyft_vcs.NewSQBasedPullStatusFetcher(ghClient, vcs.NewLyftPullMergeabilityChecker("atlantis")) - - applyCommandRunner = events.NewApplyCommandRunner( - vcsClient, - false, - applyLocker, - e2eStatusUpdater, - projectCommandBuilder, - prjCmdRunner, - pullUpdater, - dbUpdater, - parallelPoolSize, - e2ePullReqStatusFetcher, - ) - - commentCommandRunnerByCmd := map[command.Name]command.Runner{ - command.Plan: planCommandRunner, - command.Apply: applyCommandRunner, - command.Unlock: unlockCommandRunner, - command.Version: versionCommandRunner, - } - staleCommandChecker := &testStaleCommandChecker{} - prrPolicyCommandRunner := &events.PRRPolicyCheckCommandRunner{ - PrjCmdBuilder: projectCommandBuilder, - PolicyCheckCommandRunner: policyCheckCommandRunner, - } - commandRunner := &events.DefaultCommandRunner{ - VCSClient: vcsClient, - GlobalCfg: globalCfg, - StatsScope: statsScope, - CommentCommandRunnerByCmd: commentCommandRunnerByCmd, - Drainer: drainer, - PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, - PullStatusFetcher: boltdb, - StaleCommandChecker: staleCommandChecker, - Logger: ctxLogger, - PolicyCommandRunner: prrPolicyCommandRunner, - } - - repoAllowlistChecker, err := events.NewRepoAllowlistChecker("*") - Ok(t, err) - - autoplanner := &handlers.Autoplanner{ - CommandRunner: commandRunner, - } - - pullCleaner := &events.PullClosedExecutor{ - Locker: lockingClient, - VCSClient: vcsClient, - WorkingDir: workingDir, - DB: boltdb, - PullClosedTemplate: &events.PullClosedEventTemplate{}, - LogStreamResourceCleaner: projectCmdOutputHandler, - } - - prHandler := handlers.NewPullRequestEventWithEventTypeHandlers( - repoAllowlistChecker, - - // Use synchronous handlers for testing purposes - autoplanner, autoplanner, - - &handlers.PullCleaner{ - PullCleaner: pullCleaner, - Logger: ctxLogger, - }, - ) - - prrHandler := handlers.PullRequestReviewEventHandler{ - PRReviewCommandRunner: commandRunner, - } - - commentHandler := handlers.NewCommentEventWithCommandHandler( - commentParser, - repoAllowlistChecker, - vcsClient, - - // Use synchronous handler for testing purposes - &handlers.CommandHandler{ - CommandRunner: commandRunner, - }, - ctxLogger, - ) - - repoConverter := github_converter.RepoConverter{ - GithubUser: userConfig.GithubUser, - GithubToken: userConfig.GithubToken, - } - - pullConverter := github_converter.PullConverter{ - RepoConverter: repoConverter, - } - opened := GitHubPullRequestParsed(headSHA) - closed := GitHubPullRequestParsed(headSHA) - closed.State = github.String("closed") - pullFetcher := &testPullFetcher{ - opened: opened, - closed: closed, - } - - requestRouter := &events_controllers.RequestRouter{ - Resolvers: []events_controllers.RequestResolver{ - request.NewHandler( - ctxLogger, - statsScope, - nil, - pullFetcher, - commentHandler, - prHandler, - noopPushEventHandler{}, - prrHandler, - 
noopCheckRunEventHandler{}, - noopCheckSuiteEventHandler{}, - false, - repoConverter, - pullConverter, - ghClient, - ), - }, - Logger: ctxLogger, - } - - ctrl := events_controllers.VCSEventsController{ - RequestRouter: requestRouter, - Logger: ctxLogger, - Scope: statsScope, - Parser: eventParser, - CommentParser: commentParser, - RepoAllowlistChecker: repoAllowlistChecker, - SupportedVCSHosts: []models.VCSHostType{models.Github}, - VCSClient: vcsClient, - } - return headSHA, ctrl -} - -var ( - // if not for these we'd be doing disk reads for each test - readCommentJSON sync.Once - readPullRequestOpenedJSON sync.Once - readPullRequestClosedJSON sync.Once - readPullRequestReviewedJSON sync.Once - - commentJSON string - pullRequestOpenedJSON string - pullRequestClosedJSON string - pullRequestReviewedJSON string -) - -func GitHubCommentEvent(t *testing.T, comment string) *http.Request { - readCommentJSON.Do( - func() { - jsonBytes, err := os.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent.json")) - Ok(t, err) - - commentJSON = string(jsonBytes) - }, - ) - modifiedCommentJSON := []byte(strings.Replace(commentJSON, "###comment body###", comment, 1)) - req, err := http.NewRequest(http.MethodPost, "/events", bytes.NewBuffer(modifiedCommentJSON)) - Ok(t, err) - req.Header.Set("Content-Type", "application/json") - req.Header.Set(githubHeader, "issue_comment") - return req -} - -func GitHubPullRequestOpenedEvent(t *testing.T, headSHA string) *http.Request { - readPullRequestOpenedJSON.Do( - func() { - jsonBytes, err := os.ReadFile(filepath.Join("testfixtures", "githubPullRequestOpenedEvent.json")) - Ok(t, err) - - pullRequestOpenedJSON = string(jsonBytes) - }, - ) - // Replace sha with expected sha. - requestJSONStr := strings.Replace(pullRequestOpenedJSON, "c31fd9ea6f557ad2ea659944c3844a059b83bc5d", headSHA, -1) - req, err := http.NewRequest(http.MethodPost, "/events", bytes.NewBuffer([]byte(requestJSONStr))) - Ok(t, err) - req.Header.Set("Content-Type", "application/json") - req.Header.Set(githubHeader, "pull_request") - return req -} - -func GitHubPullRequestClosedEvent(t *testing.T) *http.Request { - readPullRequestClosedJSON.Do( - func() { - jsonBytes, err := os.ReadFile(filepath.Join("testfixtures", "githubPullRequestClosedEvent.json")) - Ok(t, err) - - pullRequestClosedJSON = string(jsonBytes) - }, - ) - - req, err := http.NewRequest(http.MethodPost, "/events", bytes.NewBuffer([]byte(pullRequestClosedJSON))) - Ok(t, err) - req.Header.Set("Content-Type", "application/json") - req.Header.Set(githubHeader, "pull_request") - return req -} - -func GitHubPullRequestReviewedEvent(t *testing.T, headSHA string) *http.Request { - readPullRequestReviewedJSON.Do( - func() { - jsonBytes, err := os.ReadFile(filepath.Join("testfixtures", "githubPullRequestReviewedEvent.json")) - Ok(t, err) - - pullRequestReviewedJSON = string(jsonBytes) - }, - ) - // Replace sha with expected sha. - requestJSONStr := strings.Replace(pullRequestReviewedJSON, "c31fd9ea6f557ad2ea659944c3844a059b83bc5d", headSHA, -1) - req, err := http.NewRequest(http.MethodPost, "/events", bytes.NewBuffer([]byte(requestJSONStr))) - Ok(t, err) - req.Header.Set("Content-Type", "application/json") - req.Header.Set(githubHeader, "pull_request_review") - return req -} - -func GitHubPullRequestParsed(headSHA string) *github.PullRequest { - // headSHA can't be empty so default if not set. 
- if headSHA == "" { - headSHA = "13940d121be73f656e2132c6d7b4c8e87878ac8d" - } - cleanstate := "clean" - return &github.PullRequest{ - Number: github.Int(2), - State: github.String("open"), - HTMLURL: github.String("htmlurl"), - Head: &github.PullRequestBranch{ - Repo: &github.Repository{ - FullName: github.String("runatlantis/atlantis-tests"), - CloneURL: github.String("https://github.com/runatlantis/atlantis-tests.git"), - }, - SHA: github.String(headSHA), - Ref: github.String("branch"), - }, - Base: &github.PullRequestBranch{ - Repo: &github.Repository{ - FullName: github.String("runatlantis/atlantis-tests"), - CloneURL: github.String("https://github.com/runatlantis/atlantis-tests.git"), - }, - Ref: github.String("master"), - }, - User: &github.User{ - Login: github.String("atlantisbot"), - }, - MergeableState: &cleanstate, - } -} - -// absRepoPath returns the absolute path to the test repo under dir repoDir. -func absRepoPath(t *testing.T, repoDir string) string { - path, err := filepath.Abs(filepath.Join("testfixtures", "test-repos", repoDir)) - Ok(t, err) - return path -} - -// initializeRepo copies the repo data from testfixtures and initializes a new -// git repo in a temp directory. It returns that directory and a function -// to run in a defer that will delete the dir. -// The purpose of this function is to create a real git repository with a branch -// called 'branch' from the files under repoDir. This is so we can check in -// those files normally to this repo without needing a .git directory. -func initializeRepo(t *testing.T, repoDir string) (string, string) { - originRepo := absRepoPath(t, repoDir) - - // Copy the files to the temp dir. - destDir := t.TempDir() - runCmd(t, "", "cp", "-r", fmt.Sprintf("%s/.", originRepo), destDir) - - // Initialize the git repo. - runCmd(t, destDir, "git", "init") - runCmd(t, destDir, "touch", ".gitkeep") - runCmd(t, destDir, "git", "add", ".gitkeep") - runCmd(t, destDir, "git", "config", "--local", "user.email", "atlantisbot@runatlantis.io") - runCmd(t, destDir, "git", "config", "--local", "user.name", "atlantisbot") - runCmd(t, destDir, "git", "commit", "-m", "initial commit") - runCmd(t, destDir, "git", "checkout", "-b", "branch") - runCmd(t, destDir, "git", "add", ".") - runCmd(t, destDir, "git", "commit", "-am", "branch commit") - headSHA := runCmd(t, destDir, "git", "rev-parse", "HEAD") - headSHA = strings.Trim(headSHA, "\n") - - return destDir, headSHA -} - -func runCmd(t *testing.T, dir string, name string, args ...string) string { - cpCmd := exec.Command(name, args...) - cpCmd.Dir = dir - cpOut, err := cpCmd.CombinedOutput() - Assert(t, err == nil, "err running %q: %s", strings.Join(append([]string{name}, args...), " "), cpOut) - return string(cpOut) -} - -func assertCommentEquals(t *testing.T, expReplies []string, act string, repoDir string, parallel bool) { - t.Helper() - - // Replace all 'Creation complete after 0s [id=2135833172528078362]' strings with - // 'Creation complete after *s [id=*******************]' so we can do a comparison. - idRegex := regexp.MustCompile(`Creation complete after [0-9]+s \[id=[0-9]+]`) - act = idRegex.ReplaceAllString(act, "Creation complete after *s [id=*******************]") - - // Replace all null_resource.simple{n}: .* with null_resource.simple: because - // with multiple resources being created the logs are all out of order which - // makes comparison impossible. 
- resourceRegex := regexp.MustCompile(`null_resource\.simple(\[\d])?\d?:.*`) - act = resourceRegex.ReplaceAllString(act, "null_resource.simple:") - - // For parallel plans and applies, do a substring match since output may be out of order - var replyMatchesExpected func(string, string) bool - if parallel { - replyMatchesExpected = func(act string, expStr string) bool { - return strings.Contains(act, expStr) - } - } else { - replyMatchesExpected = func(act string, expStr string) bool { - return expStr == act - } - } - - for _, expFile := range expReplies { - exp, err := os.ReadFile(filepath.Join(absRepoPath(t, repoDir), expFile)) - Ok(t, err) - expStr := string(exp) - // My editor adds a newline to all the files, so if the actual comment - // doesn't end with a newline then strip the last newline from the file's - // contents. - if !strings.HasSuffix(act, "\n") { - expStr = strings.TrimSuffix(expStr, "\n") - } - - if !replyMatchesExpected(act, expStr) { - t.Logf("\nexp:\n %s\n got:\n %s\n", expStr, act) - t.FailNow() - } - } -} - -// returns parent, bindir, cachedir -func mkSubDirs(t *testing.T) (string, string, string) { - tmp := t.TempDir() - binDir := filepath.Join(tmp, "bin") - err := os.MkdirAll(binDir, 0o700) - Ok(t, err) - - cachedir := filepath.Join(tmp, "plugin-cache") - err = os.MkdirAll(cachedir, 0o700) - Ok(t, err) - - return tmp, binDir, cachedir -} - -// Will fail test if conftest isn't in path or isn't version >= 0.25.0 -func ensureRunningConftest(t *testing.T) { - var localPath string - var err error - localPath, err = exec.LookPath(fmt.Sprintf("conftest%s", ConftestVersion)) - if err != nil { - localPath, err = exec.LookPath("conftest") - if err != nil { - t.Logf("error finding conftest binary %s", err) - t.FailNow() - } - } - versionOutBytes, err := exec.Command(localPath, "--version").Output() // #nosec - if err != nil { - t.Logf("error running conftest version: %s", err) - t.FailNow() - } - versionOutput := string(versionOutBytes) - match := versionConftestRegex.FindStringSubmatch(versionOutput) - if len(match) <= 1 { - t.Logf("could not parse conftest version from %s", versionOutput) - t.FailNow() - } - localVersion, err := version.NewVersion(match[1]) - Ok(t, err) - minVersion, err := version.NewVersion(ConftestVersion) - Ok(t, err) - if localVersion.LessThan(minVersion) { - t.Logf("must have conftest version >= %s, you have %s", minVersion, localVersion) - t.FailNow() - } -} - -// Will fail test if terraform isn't in path or isn't version >= 0.14 -func ensureRunning014(t *testing.T) { - localPath, err := exec.LookPath("terraform") - if err != nil { - t.Log("terraform >= 0.14 must be installed to run this test") - t.FailNow() - } - versionOutBytes, err := exec.Command(localPath, "version").Output() // #nosec - if err != nil { - t.Logf("error running terraform version: %s", err) - t.FailNow() - } - versionOutput := string(versionOutBytes) - match := versionRegex.FindStringSubmatch(versionOutput) - if len(match) <= 1 { - t.Logf("could not parse terraform version from %s", versionOutput) - t.FailNow() - } - localVersion, err := version.NewVersion(match[1]) - Ok(t, err) - minVersion, err := version.NewVersion("0.14.0") - Ok(t, err) - if localVersion.LessThan(minVersion) { - t.Logf("must have terraform version >= %s, you have %s", minVersion, localVersion) - t.FailNow() - } -} - -// versionRegex extracts the version from `terraform version` output. 
-// -// Terraform v0.12.0-alpha4 (2c36829d3265661d8edbd5014de8090ea7e2a076) -// => 0.12.0-alpha4 -// -// Terraform v0.11.10 -// => 0.11.10 -var versionRegex = regexp.MustCompile("Terraform v(.*?)(\\s.*)?\n") - -var versionConftestRegex = regexp.MustCompile("Version: (.*?)(\\s.*)?\n") - -type testLockURLGenerator struct{} - -func (m *testLockURLGenerator) GenerateLockURL(lockID string) string { - return "lock-url" -} - -type testWebhookSender struct{} - -func (w *testWebhookSender) Send(log logging.Logger, result webhooks.ApplyResult) error { - return nil -} - -type testStaleCommandChecker struct{} - -func (t *testStaleCommandChecker) CommandIsStale(ctx *command.Context) bool { - return false -} - -type mockReviewDismisser struct { - error error - isCalled bool -} - -func (d *mockReviewDismisser) Dismiss(_ context.Context, _ int64, _ models.Repo, _ int, _ int64) error { - d.isCalled = true - return d.error -} - -type mockReviewFetcher struct { - approvers []string - listUsernamesIsCalled bool - listUsernamesError error - reviews []*github.PullRequestReview - listApprovalsIsCalled bool - listApprovalsError error -} - -func (f *mockReviewFetcher) ListLatestApprovalUsernames(_ context.Context, _ int64, _ models.Repo, _ int) ([]string, error) { - f.listUsernamesIsCalled = true - return f.approvers, f.listUsernamesError -} - -func (f *mockReviewFetcher) ListApprovalReviews(_ context.Context, _ int64, _ models.Repo, _ int) ([]*github.PullRequestReview, error) { - f.listApprovalsIsCalled = true - return f.reviews, f.listApprovalsError -} - -type mockTeamFetcher struct { - members []string - error error - isCalled bool -} - -func (t *mockTeamFetcher) ListTeamMembers(_ context.Context, _ int64, _ string) ([]string, error) { - t.isCalled = true - return t.members, t.error -} - -type testGithubClient struct { - ExpectedModifiedFiles []string - ExpectedPull *github.PullRequest - ExpectedApprovalStatus models.ApprovalStatus - CapturedComments []string -} - -func (t *testGithubClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { - return t.ExpectedModifiedFiles, nil -} - -func (t *testGithubClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - t.CapturedComments = append(t.CapturedComments, comment) - return nil -} - -func (t *testGithubClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { - return nil -} - -func (t *testGithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { - return t.ExpectedApprovalStatus, nil -} - -func (t *testGithubClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { - return false, nil -} - -func (t *testGithubClient) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - return "", nil -} - -func (t *testGithubClient) MarkdownPullLink(pull models.PullRequest) (string, error) { - return "", nil -} - -func (t *testGithubClient) DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error) { - return false, []byte{}, nil -} - -func (t *testGithubClient) SupportsSingleFileDownload(repo models.Repo) bool { - return false -} - -func (t *testGithubClient) GetContents(owner, repo, branch, path string) ([]byte, error) { - return []byte{}, nil -} - -func (t *testGithubClient) GetRepoStatuses(repo models.Repo, pull models.PullRequest) ([]*github.RepoStatus, error) { - return []*github.RepoStatus{}, nil -} - -func (t *testGithubClient) GetRepoChecks(repo 
models.Repo, commitSHA string) ([]*github.CheckRun, error) { - return []*github.CheckRun{}, nil -} - -func (t *testGithubClient) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { - return t.ExpectedPull, nil -} - -func (t *testGithubClient) GetPullRequestFromName(repoName string, repoOwner string, pullNum int) (*github.PullRequest, error) { - return t.ExpectedPull, nil -} - -type testPullFetcher struct { - opened *github.PullRequest - closed *github.PullRequest - sentOpenEvent bool -} - -func (t *testPullFetcher) Fetch(_ context.Context, _ int64, _ string, _ string, _ int) (*github.PullRequest, error) { - if t.sentOpenEvent { - return t.closed, nil - } - t.sentOpenEvent = true - return t.opened, nil -} diff --git a/server/legacy/controllers/events/events_controller_test.go b/server/legacy/controllers/events/events_controller_test.go deleted file mode 100644 index c7e04caa3..000000000 --- a/server/legacy/controllers/events/events_controller_test.go +++ /dev/null @@ -1,96 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "bytes" - "context" - "net/http" - "net/http/httptest" - "reflect" - "testing" - - "github.com/google/go-github/v45/github" - . "github.com/petergtz/pegomock" - events_controllers "github.com/runatlantis/atlantis/server/legacy/controllers/events" - "github.com/runatlantis/atlantis/server/legacy/events" - emocks "github.com/runatlantis/atlantis/server/legacy/events/mocks" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - httputils "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" - . 
"github.com/runatlantis/atlantis/testing" -) - -func AnyRepo() models.Repo { - RegisterMatcher(NewAnyMatcher(reflect.TypeOf(models.Repo{}))) - return models.Repo{} -} - -func AnyStatus() []*github.RepoStatus { - RegisterMatcher(NewAnyMatcher(reflect.TypeOf(github.RepoStatus{}))) - return []*github.RepoStatus{} -} - -func TestPost_NotGitlab(t *testing.T) { - t.Log("when the request is not for gitlab or github a 400 is returned") - e, _, _, _, _ := setup(t) - w := httptest.NewRecorder() - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - e.Post(w, req) - ResponseContains(t, w, http.StatusBadRequest, "Ignoring request") -} - -//nolint:unparam -func setup(t *testing.T) (events_controllers.VCSEventsController, *emocks.MockEventParsing, *emocks.MockPullCleaner, *vcsmocks.MockClient, *emocks.MockCommentParsing) { - RegisterMockTestingT(t) - p := emocks.NewMockEventParsing() - cp := emocks.NewMockCommentParsing() - c := emocks.NewMockPullCleaner() - vcsmock := vcsmocks.NewMockClient() - repoAllowlistChecker, err := events.NewRepoAllowlistChecker("*") - Ok(t, err) - ctxLogger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(ctxLogger, "null") - e := events_controllers.VCSEventsController{ - Logger: ctxLogger, - Scope: scope, - Parser: p, - CommentEventHandler: noopCommentHandler{}, - PREventHandler: noopPRHandler{}, - CommentParser: cp, - SupportedVCSHosts: []models.VCSHostType{}, - RepoAllowlistChecker: repoAllowlistChecker, - VCSClient: vcsmock, - } - return e, p, c, vcsmock, cp -} - -// This struct shouldn't be using these anyways since it should be broken down into separate packages (ie. see github) -// therefore we're just using noop implementations here for now. -// agreed this means we're not verifying any of the arguments passed in, but that's fine since we don't use any of these providers -// atm. 
-type noopPRHandler struct{} - -func (h noopPRHandler) Handle(ctx context.Context, request *httputils.BufferedRequest, event event_types.PullRequest) error { - return nil -} - -type noopCommentHandler struct{} - -func (h noopCommentHandler) Handle(ctx context.Context, request *httputils.BufferedRequest, event event_types.Comment) error { - return nil -} diff --git a/server/legacy/controllers/events/handlers/comment.go b/server/legacy/controllers/events/handlers/comment.go deleted file mode 100644 index a9ac48a3e..000000000 --- a/server/legacy/controllers/events/handlers/comment.go +++ /dev/null @@ -1,144 +0,0 @@ -package handlers - -import ( - "context" - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - contextInternal "github.com/runatlantis/atlantis/server/neptune/context" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" -) - -// commentCreator creates a comment on a pull request for a given repo -type commentCreator interface { - CreateComment(repo models.Repo, pullNum int, comment string, command string) error -} - -// commentParser parsers a vcs pull request comment and returns the result -type commentParser interface { - Parse(comment string, vcsHost models.VCSHostType) events.CommentParseResult -} - -func NewCommentEvent( - commentParser commentParser, - repoAllowlistChecker *events.RepoAllowlistChecker, - commentCreator commentCreator, - commandRunner events.CommandRunner, - logger logging.Logger, -) *CommentEvent { - return &CommentEvent{ - commentParser: commentParser, - commandHandler: &asyncHandler{ - commandHandler: &CommandHandler{ - CommandRunner: commandRunner, - }, - logger: logger, - }, - RepoAllowlistChecker: repoAllowlistChecker, - commentCreator: commentCreator, - logger: logger, - } -} - -func NewCommentEventWithCommandHandler( - commentParser events.CommentParsing, - repoAllowlistChecker *events.RepoAllowlistChecker, - commentCreator commentCreator, - commandHandler commandHandler, - logger logging.Logger, -) *CommentEvent { - return &CommentEvent{ - commentParser: commentParser, - commandHandler: commandHandler, - RepoAllowlistChecker: repoAllowlistChecker, - commentCreator: commentCreator, - logger: logger, - } -} - -// commandHandler is the handler responsible for running a specific command -// after it's been parsed from a comment. -type commandHandler interface { - Handle(ctx context.Context, request *http.BufferedRequest, event event_types.Comment, command *command.Comment) error -} - -type CommandHandler struct { - CommandRunner events.CommandRunner -} - -func (h *CommandHandler) Handle(ctx context.Context, _ *http.BufferedRequest, event event_types.Comment, command *command.Comment) error { - h.CommandRunner.RunCommentCommand( - ctx, - event.BaseRepo, - event.HeadRepo, - event.Pull, - event.User, - event.PullNum, - command, - event.Timestamp, - event.InstallationToken, - ) - return nil -} - -type asyncHandler struct { - commandHandler *CommandHandler - logger logging.Logger -} - -func (h *asyncHandler) Handle(ctx context.Context, request *http.BufferedRequest, event event_types.Comment, command *command.Comment) error { - go func() { - // Passing background context to avoid context cancellation since the parent goroutine does not wait for this goroutine to finish execution. 
- ctx = contextInternal.CopyFields(context.Background(), ctx) - err := h.commandHandler.Handle(ctx, request, event, command) - - if err != nil { - h.logger.ErrorContext(ctx, err.Error()) - } - }() - return nil -} - -type CommentEvent struct { - commentParser events.CommentParsing - commandHandler commandHandler - RepoAllowlistChecker *events.RepoAllowlistChecker - commentCreator commentCreator - logger logging.Logger -} - -func (h *CommentEvent) Handle(ctx context.Context, request *http.BufferedRequest, event event_types.Comment) error { - comment := event.Comment - vcsHost := event.VCSHost - baseRepo := event.BaseRepo - pullNum := event.PullNum - - parseResult := h.commentParser.Parse(comment, vcsHost) - if parseResult.Ignore { - h.logger.WarnContext(ctx, "ignoring comment") - return nil - } - - // At this point we know it's a command we're not supposed to ignore, so now - // we check if this repo is allowed to run commands in the first place. - if !h.RepoAllowlistChecker.IsAllowlisted(baseRepo.FullName, baseRepo.VCSHost.Hostname) { - return fmt.Errorf("comment event from non-allowlisted repo \"%s/%s\"", baseRepo.VCSHost.Hostname, baseRepo.FullName) - } - - // If the command isn't valid or doesn't require processing, ex. - // "atlantis help" then we just comment back immediately. - // We do this here rather than earlier because we need access to the pull - // variable to comment back on the pull request. - if parseResult.CommentResponse != "" { - if err := h.commentCreator.CreateComment(baseRepo, pullNum, parseResult.CommentResponse, ""); err != nil { - h.logger.ErrorContext(ctx, err.Error()) - } - return nil - } - - return h.commandHandler.Handle(ctx, request, event, parseResult.Command) -} diff --git a/server/legacy/controllers/events/handlers/comment_test.go b/server/legacy/controllers/events/handlers/comment_test.go deleted file mode 100644 index ae140ca18..000000000 --- a/server/legacy/controllers/events/handlers/comment_test.go +++ /dev/null @@ -1,104 +0,0 @@ -package handlers_test - -import ( - "bytes" - "context" - "net/http" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/controllers/events/handlers" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - httputils "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" - "github.com/stretchr/testify/assert" -) - -type testCommentParser struct { - returnedResult events.CommentParseResult -} - -type testCommentCreator struct{} - -func (c *testCommentCreator) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - return nil -} - -func (p *testCommentParser) Parse(comment string, vcsHost models.VCSHostType) events.CommentParseResult { - return p.returnedResult -} - -type assertingCommentHandler struct { - expectedEvent event_types.Comment - expectedRequest *httputils.BufferedRequest - expectedCommand *command.Comment - t *testing.T -} - -func (h *assertingCommentHandler) Handle(ctx context.Context, request *httputils.BufferedRequest, event event_types.Comment, command *command.Comment) error { - assert.Equal(h.t, h.expectedRequest, request) - assert.Equal(h.t, h.expectedEvent, event) - assert.Equal(h.t, h.expectedCommand, command) - - return nil -} - -func TestCommentHandler(t *testing.T) { - command := &command.Comment{ - Name: command.Apply, - } 
- - rawRequest, err := http.NewRequestWithContext( - context.Background(), - http.MethodPost, "", - bytes.NewBuffer([]byte("body")), - ) - assert.NoError(t, err) - - request, err := httputils.NewBufferedRequest(rawRequest) - assert.NoError(t, err) - - event := event_types.Comment{} - - repoAllowlistChecker, err := events.NewRepoAllowlistChecker("*") - assert.NoError(t, err) - - t.Run("success", func(t *testing.T) { - commentEventHandler := handlers.NewCommentEventWithCommandHandler( - &testCommentParser{ - returnedResult: events.CommentParseResult{ - Command: command, - }, - }, - repoAllowlistChecker, - &testCommentCreator{}, - &assertingCommentHandler{expectedEvent: event, expectedRequest: request, t: t, expectedCommand: command}, - logging.NewNoopCtxLogger(t), - ) - - err = commentEventHandler.Handle(context.Background(), request, event) - assert.NoError(t, err) - }) - - t.Run("ignore non-atlantis comment", func(t *testing.T) { - commentEventHandler := handlers.NewCommentEventWithCommandHandler( - &testCommentParser{ - returnedResult: events.CommentParseResult{ - Command: command, - Ignore: true, - }, - }, - repoAllowlistChecker, - &testCommentCreator{}, - - // by not passing in any data here, we're basically simulating VerifyNeverCalled - &assertingCommentHandler{}, - logging.NewNoopCtxLogger(t), - ) - - err = commentEventHandler.Handle(context.Background(), request, event) - assert.NoError(t, err) - }) -} diff --git a/server/legacy/controllers/events/handlers/pull_request.go b/server/legacy/controllers/events/handlers/pull_request.go deleted file mode 100644 index d3ffdb38f..000000000 --- a/server/legacy/controllers/events/handlers/pull_request.go +++ /dev/null @@ -1,137 +0,0 @@ -package handlers - -import ( - "context" - "fmt" - - contextInternal "github.com/runatlantis/atlantis/server/neptune/context" - - "github.com/runatlantis/atlantis/server/legacy/controllers/events/errors" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" -) - -type eventTypeHandler interface { - Handle(ctx context.Context, request *http.BufferedRequest, event event_types.PullRequest) error -} - -type Autoplanner struct { - CommandRunner events.CommandRunner -} - -func (p *Autoplanner) Handle(ctx context.Context, _ *http.BufferedRequest, event event_types.PullRequest) error { - p.CommandRunner.RunAutoplanCommand( - ctx, - event.Pull.BaseRepo, - event.Pull.HeadRepo, - event.Pull, - event.User, - event.Timestamp, - event.InstallationToken, - ) - - return nil -} - -type asyncAutoplanner struct { - autoplanner *Autoplanner - logger logging.Logger -} - -func (p *asyncAutoplanner) Handle(ctx context.Context, request *http.BufferedRequest, event event_types.PullRequest) error { - go func() { - // Passing background context to avoid context cancellation since the parent goroutine does not wait for this goroutine to finish execution. 
- ctx = contextInternal.CopyFields(context.Background(), ctx) - err := p.autoplanner.Handle(ctx, request, event) - - if err != nil { - p.logger.ErrorContext(ctx, err.Error()) - } - }() - return nil -} - -type PullCleaner struct { - PullCleaner events.PullCleaner - Logger logging.Logger -} - -func (c *PullCleaner) Handle(ctx context.Context, _ *http.BufferedRequest, event event_types.PullRequest) error { - if err := c.PullCleaner.CleanUpPull(event.Pull.BaseRepo, event.Pull); err != nil { - return err - } - - c.Logger.InfoContext(ctx, "deleted locks and workspace") - - return nil -} - -func NewPullRequestEvent( - repoAllowlistChecker *events.RepoAllowlistChecker, - pullCleaner events.PullCleaner, - logger logging.Logger, - commandRunner events.CommandRunner) *PullRequestEvent { - asyncAutoplanner := &asyncAutoplanner{ - autoplanner: &Autoplanner{ - CommandRunner: commandRunner, - }, - logger: logger, - } - return &PullRequestEvent{ - RepoAllowlistChecker: repoAllowlistChecker, - OpenedPullEventHandler: asyncAutoplanner, - UpdatedPullEventHandler: asyncAutoplanner, - ClosedPullEventHandler: &PullCleaner{ - PullCleaner: pullCleaner, - Logger: logger, - }, - } -} - -func NewPullRequestEventWithEventTypeHandlers( - repoAllowlistChecker *events.RepoAllowlistChecker, - openedPullEventHandler eventTypeHandler, - updatedPullEventHandler eventTypeHandler, - closedPullEventHandler eventTypeHandler, -) *PullRequestEvent { - return &PullRequestEvent{ - RepoAllowlistChecker: repoAllowlistChecker, - OpenedPullEventHandler: openedPullEventHandler, - UpdatedPullEventHandler: updatedPullEventHandler, - ClosedPullEventHandler: closedPullEventHandler, - } -} - -type PullRequestEvent struct { - RepoAllowlistChecker *events.RepoAllowlistChecker - - // Delegate Handlers - OpenedPullEventHandler eventTypeHandler - UpdatedPullEventHandler eventTypeHandler - ClosedPullEventHandler eventTypeHandler -} - -func (h *PullRequestEvent) Handle(ctx context.Context, request *http.BufferedRequest, event event_types.PullRequest) error { - pull := event.Pull - baseRepo := pull.BaseRepo - eventType := event.EventType - - if !h.RepoAllowlistChecker.IsAllowlisted(baseRepo.FullName, baseRepo.VCSHost.Hostname) { - return fmt.Errorf("Pull request event from non-allowlisted repo \"%s/%s\"", baseRepo.VCSHost.Hostname, baseRepo.FullName) - } - - switch eventType { - case models.OpenedPullEvent: - return h.OpenedPullEventHandler.Handle(ctx, request, event) - case models.UpdatedPullEvent: - return h.UpdatedPullEventHandler.Handle(ctx, request, event) - case models.ClosedPullEvent: - return h.ClosedPullEventHandler.Handle(ctx, request, event) - case models.OtherPullEvent: - return &errors.UnsupportedEventTypeError{Msg: "Unsupported event type made it through, this is likely a bug in the code."} - } - return nil -} diff --git a/server/legacy/controllers/events/handlers/pull_request_review.go b/server/legacy/controllers/events/handlers/pull_request_review.go deleted file mode 100644 index fcf9dd9b4..000000000 --- a/server/legacy/controllers/events/handlers/pull_request_review.go +++ /dev/null @@ -1,53 +0,0 @@ -package handlers - -import ( - "context" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/runatlantis/atlantis/server/logging" - contextInternal "github.com/runatlantis/atlantis/server/neptune/context" - "github.com/runatlantis/atlantis/server/neptune/gateway/event" -) - -type PullRequestReviewEventHandler struct { - PRReviewCommandRunner 
events.CommandRunner -} - -func (p PullRequestReviewEventHandler) Handle(ctx context.Context, event event.PullRequestReview, _ *http.BufferedRequest) error { - p.PRReviewCommandRunner.RunPRReviewCommand( - ctx, - event.Repo, - event.Pull, - event.User, - event.Timestamp, - event.InstallationToken, - ) - return nil -} - -type AsyncPullRequestReviewEvent struct { - handler *PullRequestReviewEventHandler - logger logging.Logger -} - -func NewPullRequestReviewEvent(prReviewCommandRunner events.CommandRunner, logger logging.Logger) *AsyncPullRequestReviewEvent { - return &AsyncPullRequestReviewEvent{ - handler: &PullRequestReviewEventHandler{ - PRReviewCommandRunner: prReviewCommandRunner, - }, - logger: logger, - } -} - -func (a AsyncPullRequestReviewEvent) Handle(ctx context.Context, event event.PullRequestReview, req *http.BufferedRequest) error { - go func() { - // Passing background context to avoid context cancellation since the parent goroutine does not wait for this goroutine to finish execution. - ctx = contextInternal.CopyFields(context.Background(), ctx) - err := a.handler.Handle(ctx, event, req) - if err != nil { - a.logger.ErrorContext(ctx, err.Error()) - } - }() - return nil -} diff --git a/server/legacy/controllers/events/handlers/pull_request_test.go b/server/legacy/controllers/events/handlers/pull_request_test.go deleted file mode 100644 index 767ddf618..000000000 --- a/server/legacy/controllers/events/handlers/pull_request_test.go +++ /dev/null @@ -1,62 +0,0 @@ -package handlers_test - -import ( - "bytes" - "context" - "net/http" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/controllers/events/handlers" - "github.com/runatlantis/atlantis/server/legacy/events" - httputils "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/runatlantis/atlantis/server/models" - event_types "github.com/runatlantis/atlantis/server/neptune/gateway/event" - "github.com/stretchr/testify/assert" -) - -type assertingPRHandler struct { - expectedEvent event_types.PullRequest - expectedRequest *httputils.BufferedRequest - t *testing.T -} - -func (h *assertingPRHandler) Handle(ctx context.Context, request *httputils.BufferedRequest, event event_types.PullRequest) error { - assert.Equal(h.t, h.expectedRequest, request) - assert.Equal(h.t, h.expectedEvent, event) - - return nil -} - -func TestPREventHandler(t *testing.T) { - rawRequest, err := http.NewRequestWithContext( - context.Background(), - http.MethodPost, "", - bytes.NewBuffer([]byte("body")), - ) - assert.NoError(t, err) - - request, err := httputils.NewBufferedRequest(rawRequest) - assert.NoError(t, err) - - event := event_types.PullRequest{ - Pull: models.PullRequest{Num: 1}, - } - - repoAllowlistChecker, err := events.NewRepoAllowlistChecker("*") - assert.NoError(t, err) - - openPREventHandler := &assertingPRHandler{expectedEvent: event, expectedRequest: request, t: t} - - t.Run("invokes open event type handler", func(t *testing.T) { - prEventHandler := handlers.NewPullRequestEventWithEventTypeHandlers( - repoAllowlistChecker, - openPREventHandler, - - // keeping these with empty fields in order to simulate VerifyWasNeverCalled - &assertingPRHandler{}, - &assertingPRHandler{}, - ) - err = prEventHandler.Handle(context.Background(), request, event) - assert.NoError(t, err) - }) -} diff --git a/server/legacy/controllers/events/mocks/matchers/http_responsewriter.go b/server/legacy/controllers/events/mocks/matchers/http_responsewriter.go deleted file mode 100644 index 0b5f11074..000000000 --- 
a/server/legacy/controllers/events/mocks/matchers/http_responsewriter.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - http "net/http" -) - -func AnyHTTPResponseWriter() http.ResponseWriter { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(http.ResponseWriter))(nil)).Elem())) - var nullValue http.ResponseWriter - return nullValue -} - -func EqHTTPResponseWriter(value http.ResponseWriter) http.ResponseWriter { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue http.ResponseWriter - return nullValue -} - -func NotEqHTTPResponseWriter(value http.ResponseWriter) http.ResponseWriter { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue http.ResponseWriter - return nullValue -} - -func HTTPResponseWriterThat(matcher pegomock.ArgumentMatcher) http.ResponseWriter { - pegomock.RegisterMatcher(matcher) - var nullValue http.ResponseWriter - return nullValue -} diff --git a/server/legacy/controllers/events/mocks/matchers/models_repo.go b/server/legacy/controllers/events/mocks/matchers/models_repo.go deleted file mode 100644 index b36c3ee7c..000000000 --- a/server/legacy/controllers/events/mocks/matchers/models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func NotEqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func ModelsRepoThat(matcher pegomock.ArgumentMatcher) models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/controllers/events/mocks/matchers/ptr_to_http_request.go b/server/legacy/controllers/events/mocks/matchers/ptr_to_http_request.go deleted file mode 100644 index 9e9927366..000000000 --- a/server/legacy/controllers/events/mocks/matchers/ptr_to_http_request.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - http "net/http" -) - -func AnyPtrToHTTPRequest() *http.Request { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*http.Request))(nil)).Elem())) - var nullValue *http.Request - return nullValue -} - -func EqPtrToHTTPRequest(value *http.Request) *http.Request { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *http.Request - return nullValue -} - -func NotEqPtrToHTTPRequest(value *http.Request) *http.Request { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *http.Request - return nullValue -} - -func PtrToHTTPRequestThat(matcher pegomock.ArgumentMatcher) *http.Request { - pegomock.RegisterMatcher(matcher) - var nullValue *http.Request - return nullValue -} diff --git a/server/legacy/controllers/events/mocks/matchers/slice_of_byte.go b/server/legacy/controllers/events/mocks/matchers/slice_of_byte.go deleted file mode 100644 index 7ff2e45ca..000000000 --- a/server/legacy/controllers/events/mocks/matchers/slice_of_byte.go +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" -) - -func AnySliceOfByte() []byte { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]byte))(nil)).Elem())) - var nullValue []byte - return nullValue -} - -func EqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func NotEqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func SliceOfByteThat(matcher pegomock.ArgumentMatcher) []byte { - pegomock.RegisterMatcher(matcher) - var nullValue []byte - return nullValue -} diff --git a/server/legacy/controllers/events/mocks/mock_github_request_validator.go b/server/legacy/controllers/events/mocks/mock_github_request_validator.go deleted file mode 100644 index 1b7ee2e06..000000000 --- a/server/legacy/controllers/events/mocks/mock_github_request_validator.go +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/controllers/events (interfaces: GithubRequestValidator) - -package mocks - -import ( - http "net/http" - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockGithubRequestValidator struct { - fail func(message string, callerSkip ...int) -} - -func NewMockGithubRequestValidator(options ...pegomock.Option) *MockGithubRequestValidator { - mock := &MockGithubRequestValidator{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockGithubRequestValidator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockGithubRequestValidator) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockGithubRequestValidator) Validate(r *http.Request, secret []byte) ([]byte, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockGithubRequestValidator().") - } - params := []pegomock.Param{r, secret} - result := pegomock.GetGenericMockFrom(mock).Invoke("Validate", params, []reflect.Type{reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []byte - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]byte) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubRequestValidator) VerifyWasCalledOnce() *VerifierMockGithubRequestValidator { - return &VerifierMockGithubRequestValidator{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockGithubRequestValidator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockGithubRequestValidator { - return &VerifierMockGithubRequestValidator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockGithubRequestValidator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockGithubRequestValidator { - return &VerifierMockGithubRequestValidator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockGithubRequestValidator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockGithubRequestValidator { - return &VerifierMockGithubRequestValidator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockGithubRequestValidator struct { - mock *MockGithubRequestValidator - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockGithubRequestValidator) Validate(r *http.Request, secret []byte) *MockGithubRequestValidator_Validate_OngoingVerification { - params := []pegomock.Param{r, secret} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Validate", params, verifier.timeout) - return &MockGithubRequestValidator_Validate_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockGithubRequestValidator_Validate_OngoingVerification struct { - mock *MockGithubRequestValidator - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubRequestValidator_Validate_OngoingVerification) GetCapturedArguments() (*http.Request, []byte) { - r, secret := c.GetAllCapturedArguments() - return r[len(r)-1], secret[len(secret)-1] -} - -func (c *MockGithubRequestValidator_Validate_OngoingVerification) GetAllCapturedArguments() (_param0 []*http.Request, _param1 [][]byte) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*http.Request, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*http.Request) - } - _param1 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.([]byte) - } - } - return -} diff --git a/server/legacy/controllers/events/mocks/mock_vcs_post_handler.go b/server/legacy/controllers/events/mocks/mock_vcs_post_handler.go deleted file mode 100644 index d01e944d6..000000000 --- a/server/legacy/controllers/events/mocks/mock_vcs_post_handler.go +++ /dev/null @@ -1,103 +0,0 @@ -// Code generated by pegomock. 
DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/controllers/events (interfaces: VCSPostHandler) - -package mocks - -import ( - http "net/http" - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockVCSPostHandler struct { - fail func(message string, callerSkip ...int) -} - -func NewMockVCSPostHandler(options ...pegomock.Option) *MockVCSPostHandler { - mock := &MockVCSPostHandler{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockVCSPostHandler) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockVCSPostHandler) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockVCSPostHandler) Post(w http.ResponseWriter, r *http.Request) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockVCSPostHandler().") - } - params := []pegomock.Param{w, r} - pegomock.GetGenericMockFrom(mock).Invoke("Post", params, []reflect.Type{}) -} - -func (mock *MockVCSPostHandler) VerifyWasCalledOnce() *VerifierMockVCSPostHandler { - return &VerifierMockVCSPostHandler{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockVCSPostHandler) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockVCSPostHandler { - return &VerifierMockVCSPostHandler{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockVCSPostHandler) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockVCSPostHandler { - return &VerifierMockVCSPostHandler{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockVCSPostHandler) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockVCSPostHandler { - return &VerifierMockVCSPostHandler{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockVCSPostHandler struct { - mock *MockVCSPostHandler - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockVCSPostHandler) Post(w http.ResponseWriter, r *http.Request) *MockVCSPostHandler_Post_OngoingVerification { - params := []pegomock.Param{w, r} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Post", params, verifier.timeout) - return &MockVCSPostHandler_Post_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockVCSPostHandler_Post_OngoingVerification struct { - mock *MockVCSPostHandler - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockVCSPostHandler_Post_OngoingVerification) GetCapturedArguments() (http.ResponseWriter, *http.Request) { - w, r := c.GetAllCapturedArguments() - return w[len(w)-1], r[len(r)-1] -} - -func (c *MockVCSPostHandler_Post_OngoingVerification) GetAllCapturedArguments() (_param0 []http.ResponseWriter, _param1 []*http.Request) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]http.ResponseWriter, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(http.ResponseWriter) - } - _param1 = make([]*http.Request, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = 
param.(*http.Request) - } - } - return -} diff --git a/server/legacy/controllers/events/testfixtures/bb-server-pull-deleted-event.json b/server/legacy/controllers/events/testfixtures/bb-server-pull-deleted-event.json deleted file mode 100644 index 29f83137b..000000000 --- a/server/legacy/controllers/events/testfixtures/bb-server-pull-deleted-event.json +++ /dev/null @@ -1,101 +0,0 @@ -{ - "eventKey":"pr:deleted", - "date":"2017-09-19T11:16:17+1000", - "actor":{ - "name":"admin", - "emailAddress":"admin@example.com", - "id":1, - "displayName":"Administrator", - "active":true, - "slug":"admin", - "type":"NORMAL" - }, - "pullRequest":{ - "id":10, - "version":3, - "title":"Commit message", - "state":"OPEN", - "open":true, - "closed":false, - "createdDate":1505783668760, - "updatedDate":1505783750704, - "fromRef":{ - "id":"refs/heads/decline-me", - "displayId":"decline-me", - "latestCommit":"2d9fb6b9a46eafb1dcef7b008d1a429d45ca742c", - "repository":{ - "slug":"repository", - "id":84, - "name":"repository", - "scmId":"git", - "state":"AVAILABLE", - "statusMessage":"Available", - "forkable":true, - "project":{ - "key":"PROJ", - "id":84, - "name":"project", - "public":false, - "type":"NORMAL" - }, - "public":false - } - }, - "toRef":{ - "id":"refs/heads/master", - "displayId":"master", - "latestCommit":"7e48f426f0a6e47c5b5e862c31be6ca965f82c9c", - "repository":{ - "slug":"repository", - "id":84, - "name":"repository", - "scmId":"git", - "state":"AVAILABLE", - "statusMessage":"Available", - "forkable":true, - "project":{ - "key":"PROJ", - "id":84, - "name":"project", - "public":false, - "type":"NORMAL" - }, - "public":false - } - }, - "locked":false, - "author":{ - "user":{ - "name":"admin", - "emailAddress":"admin@example.com", - "id":1, - "displayName":"Administrator", - "active":true, - "slug":"admin", - "type":"NORMAL" - }, - "role":"AUTHOR", - "approved":false, - "status":"UNAPPROVED" - }, - "reviewers":[ - { - "user":{ - "name":"user", - "emailAddress":"user@example.com", - "id":2, - "displayName":"User", - "active":true, - "slug":"user", - "type":"NORMAL" - }, - "role":"REVIEWER", - "approved":false, - "status":"UNAPPROVED" - } - ], - "participants":[ - - ] - } -} diff --git a/server/legacy/controllers/events/testfixtures/githubIssueCommentEvent.json b/server/legacy/controllers/events/testfixtures/githubIssueCommentEvent.json deleted file mode 100644 index 207dd4e15..000000000 --- a/server/legacy/controllers/events/testfixtures/githubIssueCommentEvent.json +++ /dev/null @@ -1,210 +0,0 @@ -{ - "action": "created", - "issue": { - "url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1", - "repository_url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1/labels{/name}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1/comments", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1/events", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/1", - "id": 330256251, - "node_id": "MDExOlB1bGxSZXF1ZXN0MTkzMzA4NzA3", - "number": 1, - "title": "Add new project layouts", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": 
"https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "labels": [ - - ], - "state": "open", - "locked": false, - "assignee": null, - "assignees": [ - - ], - "milestone": null, - "comments": 61, - "created_at": "2018-06-07T12:45:41Z", - "updated_at": "2018-06-13T12:53:40Z", - "closed_at": null, - "author_association": "OWNER", - "pull_request": { - "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/1", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/1", - "diff_url": "https://github.com/runatlantis/atlantis-tests/pull/1.diff", - "patch_url": "https://github.com/runatlantis/atlantis-tests/pull/1.patch" - }, - "body": "" - }, - "comment": { - "url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments/396926483", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/1#issuecomment-396926483", - "issue_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1", - "id": 396926483, - "node_id": "MDEyOklzc3VlQ29tbWVudDM5NjkyNjQ4Mw==", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "created_at": "2018-06-13T12:53:40Z", - "updated_at": "2018-06-13T12:53:40Z", - "author_association": "OWNER", - "body": "###comment body###" - }, - "repository": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", 
- "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": 
"https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - "created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:17Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": "git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 8, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 2, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 2, - "watchers": 0, - "default_branch": "master" - }, - "sender": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "installation": { - "id": 12345 - } -} \ No newline at end of file diff --git a/server/legacy/controllers/events/testfixtures/githubIssueCommentEvent_notAllowlisted.json b/server/legacy/controllers/events/testfixtures/githubIssueCommentEvent_notAllowlisted.json deleted file mode 100644 index 0a71381c4..000000000 --- a/server/legacy/controllers/events/testfixtures/githubIssueCommentEvent_notAllowlisted.json +++ /dev/null @@ -1,182 +0,0 @@ -{ - "action": "created", - "issue": { - "url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2", - "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2/labels{/name}", - "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2/comments", - "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2/events", - "html_url": "https://github.com/baxterthehacker/public-repo/issues/2", - "id": 73464126, - "number": 2, - "title": "Spelling error in the README file", - "user": { - "login": "baxterthehacker", - "id": 6752317, - 
"avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", - "gravatar_id": "", - "url": "https://api.github.com/users/baxterthehacker", - "html_url": "https://github.com/baxterthehacker", - "followers_url": "https://api.github.com/users/baxterthehacker/followers", - "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", - "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", - "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", - "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", - "repos_url": "https://api.github.com/users/baxterthehacker/repos", - "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", - "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", - "type": "User", - "site_admin": false - }, - "labels": [ - { - "url": "https://api.github.com/repos/baxterthehacker/public-repo/labels/bug", - "name": "bug", - "color": "fc2929" - } - ], - "state": "open", - "locked": false, - "assignee": null, - "milestone": null, - "comments": 1, - "created_at": "2015-05-05T23:40:28Z", - "updated_at": "2015-05-05T23:40:28Z", - "closed_at": null, - "body": "It looks like you accidentally spelled 'commit' with two 't's." - }, - "comment": { - "url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments/99262140", - "html_url": "https://github.com/baxterthehacker/public-repo/issues/2#issuecomment-99262140", - "issue_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/2", - "id": 99262140, - "user": { - "login": "baxterthehacker", - "id": 6752317, - "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", - "gravatar_id": "", - "url": "https://api.github.com/users/baxterthehacker", - "html_url": "https://github.com/baxterthehacker", - "followers_url": "https://api.github.com/users/baxterthehacker/followers", - "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", - "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", - "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", - "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", - "repos_url": "https://api.github.com/users/baxterthehacker/repos", - "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", - "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", - "type": "User", - "site_admin": false - }, - "created_at": "2015-05-05T23:40:28Z", - "updated_at": "2015-05-05T23:40:28Z", - "body": "atlantis plan" - }, - "repository": { - "id": 35129377, - "name": "public-repo", - "full_name": "baxterthehacker/public-repo", - "owner": { - "login": "baxterthehacker", - "id": 6752317, - "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", - "gravatar_id": "", - "url": "https://api.github.com/users/baxterthehacker", - "html_url": "https://github.com/baxterthehacker", - "followers_url": "https://api.github.com/users/baxterthehacker/followers", - "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", - "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", - "starred_url": 
"https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", - "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", - "repos_url": "https://api.github.com/users/baxterthehacker/repos", - "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", - "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/baxterthehacker/public-repo", - "description": "", - "fork": false, - "url": "https://api.github.com/repos/baxterthehacker/public-repo", - "forks_url": "https://api.github.com/repos/baxterthehacker/public-repo/forks", - "keys_url": "https://api.github.com/repos/baxterthehacker/public-repo/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/baxterthehacker/public-repo/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/baxterthehacker/public-repo/teams", - "hooks_url": "https://api.github.com/repos/baxterthehacker/public-repo/hooks", - "issue_events_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/events{/number}", - "events_url": "https://api.github.com/repos/baxterthehacker/public-repo/events", - "assignees_url": "https://api.github.com/repos/baxterthehacker/public-repo/assignees{/user}", - "branches_url": "https://api.github.com/repos/baxterthehacker/public-repo/branches{/branch}", - "tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/tags", - "blobs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/baxterthehacker/public-repo/statuses/{sha}", - "languages_url": "https://api.github.com/repos/baxterthehacker/public-repo/languages", - "stargazers_url": "https://api.github.com/repos/baxterthehacker/public-repo/stargazers", - "contributors_url": "https://api.github.com/repos/baxterthehacker/public-repo/contributors", - "subscribers_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscribers", - "subscription_url": "https://api.github.com/repos/baxterthehacker/public-repo/subscription", - "commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/baxterthehacker/public-repo/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/baxterthehacker/public-repo/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/baxterthehacker/public-repo/contents/{+path}", - "compare_url": "https://api.github.com/repos/baxterthehacker/public-repo/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/baxterthehacker/public-repo/merges", - "archive_url": "https://api.github.com/repos/baxterthehacker/public-repo/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/baxterthehacker/public-repo/downloads", - "issues_url": "https://api.github.com/repos/baxterthehacker/public-repo/issues{/number}", - "pulls_url": 
"https://api.github.com/repos/baxterthehacker/public-repo/pulls{/number}", - "milestones_url": "https://api.github.com/repos/baxterthehacker/public-repo/milestones{/number}", - "notifications_url": "https://api.github.com/repos/baxterthehacker/public-repo/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/baxterthehacker/public-repo/labels{/name}", - "releases_url": "https://api.github.com/repos/baxterthehacker/public-repo/releases{/id}", - "created_at": "2015-05-05T23:40:12Z", - "updated_at": "2015-05-05T23:40:12Z", - "pushed_at": "2015-05-05T23:40:27Z", - "git_url": "git://github.com/baxterthehacker/public-repo.git", - "ssh_url": "git@github.com:baxterthehacker/public-repo.git", - "clone_url": "https://github.com/baxterthehacker/public-repo.git", - "svn_url": "https://github.com/baxterthehacker/public-repo", - "homepage": null, - "size": 0, - "stargazers_count": 0, - "watchers_count": 0, - "language": null, - "has_issues": true, - "has_downloads": true, - "has_wiki": true, - "has_pages": true, - "forks_count": 0, - "mirror_url": null, - "open_issues_count": 2, - "forks": 0, - "open_issues": 2, - "watchers": 0, - "default_branch": "master" - }, - "sender": { - "login": "baxterthehacker", - "id": 6752317, - "avatar_url": "https://avatars.githubusercontent.com/u/6752317?v=3", - "gravatar_id": "", - "url": "https://api.github.com/users/baxterthehacker", - "html_url": "https://github.com/baxterthehacker", - "followers_url": "https://api.github.com/users/baxterthehacker/followers", - "following_url": "https://api.github.com/users/baxterthehacker/following{/other_user}", - "gists_url": "https://api.github.com/users/baxterthehacker/gists{/gist_id}", - "starred_url": "https://api.github.com/users/baxterthehacker/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/baxterthehacker/subscriptions", - "organizations_url": "https://api.github.com/users/baxterthehacker/orgs", - "repos_url": "https://api.github.com/users/baxterthehacker/repos", - "events_url": "https://api.github.com/users/baxterthehacker/events{/privacy}", - "received_events_url": "https://api.github.com/users/baxterthehacker/received_events", - "type": "User", - "site_admin": false - } -} \ No newline at end of file diff --git a/server/legacy/controllers/events/testfixtures/githubPullRequestClosedEvent.json b/server/legacy/controllers/events/testfixtures/githubPullRequestClosedEvent.json deleted file mode 100644 index cc281a8d7..000000000 --- a/server/legacy/controllers/events/testfixtures/githubPullRequestClosedEvent.json +++ /dev/null @@ -1,468 +0,0 @@ -{ - "action": "closed", - "number": 2, - "pull_request": { - "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2", - "id": 193308707, - "node_id": "MDExOlB1bGxSZXF1ZXN0MTkzMzA4NzA3", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/2", - "diff_url": "https://github.com/runatlantis/atlantis-tests/pull/2.diff", - "patch_url": "https://github.com/runatlantis/atlantis-tests/pull/2.patch", - "issue_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2", - "number": 2, - "state": "closed", - "locked": false, - "title": "Add new project layouts", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": 
"https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "body": "", - "created_at": "2018-06-07T12:45:41Z", - "updated_at": "2018-06-16T16:55:19Z", - "closed_at": "2018-06-16T16:55:19Z", - "merged_at": null, - "merge_commit_sha": "e96e1cea0d79f4ff07845060ade0b21ff1ffe37f", - "assignee": null, - "assignees": [ - - ], - "requested_reviewers": [ - - ], - "requested_teams": [ - - ], - "labels": [ - - ], - "milestone": null, - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits", - "review_comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments", - "review_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/5e2d140b2d74bf61675677f01dc947ae8512e18e", - "head": { - "label": "runatlantis:atlantisyaml", - "ref": "atlantisyaml", - "sha": "5e2d140b2d74bf61675677f01dc947ae8512e18e", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "repo": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": 
"https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": 
"https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - "created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:17Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": "git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 8, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 1, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 1, - "watchers": 0, - "default_branch": "master" - } - }, - "base": { - "label": "runatlantis:master", - "ref": "master", - "sha": "f59a822e83b3cd193142c7624ea635a5d7894388", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "repo": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": 
"https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": 
"https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - "created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:17Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": "git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 8, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 1, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 1, - "watchers": 0, - "default_branch": "master" - } - }, - "_links": { - "self": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2" - }, - "html": { - "href": "https://github.com/runatlantis/atlantis-tests/pull/2" - }, - "issue": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2" - }, - "comments": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments" - }, - "review_comments": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments" - }, - "review_comment": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}" - }, - "commits": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits" - }, - "statuses": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/5e2d140b2d74bf61675677f01dc947ae8512e18e" - } - }, - "author_association": "OWNER", - "merged": false, - "mergeable": true, - "rebaseable": true, - "mergeable_state": "clean", - "merged_by": null, - "comments": 62, - "review_comments": 0, - "maintainer_can_modify": false, - "commits": 3, - "additions": 198, - "deletions": 8, - "changed_files": 24 - }, - "repository": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": 
"https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - 
"created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:17Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": "git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 8, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 1, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 1, - "watchers": 0, - "default_branch": "master" - }, - "sender": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - } -} \ No newline at end of file diff --git a/server/legacy/controllers/events/testfixtures/githubPullRequestOpenedEvent.json b/server/legacy/controllers/events/testfixtures/githubPullRequestOpenedEvent.json deleted file mode 100644 index 1bd1c9733..000000000 --- a/server/legacy/controllers/events/testfixtures/githubPullRequestOpenedEvent.json +++ /dev/null @@ -1,471 +0,0 @@ -{ - "action": "opened", - "number": 2, - "pull_request": { - "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2", - "id": 194034250, - "node_id": "MDExOlB1bGxSZXF1ZXN0MTk0MDM0MjUw", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/2", - "diff_url": "https://github.com/runatlantis/atlantis-tests/pull/2.diff", - "patch_url": "https://github.com/runatlantis/atlantis-tests/pull/2.patch", - "issue_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2", - "number": 2, - "state": "open", - "locked": false, - "title": "branch", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": 
"https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "body": "", - "created_at": "2018-06-11T16:22:16Z", - "updated_at": "2018-06-11T16:22:16Z", - "closed_at": null, - "merged_at": null, - "merge_commit_sha": null, - "assignee": null, - "assignees": [ - - ], - "requested_reviewers": [ - - ], - "requested_teams": [ - - ], - "labels": [ - - ], - "milestone": null, - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits", - "review_comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments", - "review_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/c31fd9ea6f557ad2ea659944c3844a059b83bc5d", - "head": { - "label": "runatlantis:branch", - "ref": "branch", - "sha": "c31fd9ea6f557ad2ea659944c3844a059b83bc5d", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "repo": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - 
"site_admin": false - }, - "private": false, - "html_url": "https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": 
"https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - "created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:09Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": "git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 7, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 2, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 2, - "watchers": 0, - "default_branch": "master" - } - }, - "base": { - "label": "runatlantis:master", - "ref": "master", - "sha": "f59a822e83b3cd193142c7624ea635a5d7894388", - "user": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "repo": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": 
"https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - "created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:09Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": 
"git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 7, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 2, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 2, - "watchers": 0, - "default_branch": "master" - } - }, - "_links": { - "self": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2" - }, - "html": { - "href": "https://github.com/runatlantis/atlantis-tests/pull/2" - }, - "issue": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2" - }, - "comments": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments" - }, - "review_comments": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments" - }, - "review_comment": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}" - }, - "commits": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits" - }, - "statuses": { - "href": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/c31fd9ea6f557ad2ea659944c3844a059b83bc5d" - } - }, - "author_association": "OWNER", - "merged": false, - "mergeable": null, - "rebaseable": null, - "mergeable_state": "unknown", - "merged_by": null, - "comments": 0, - "review_comments": 0, - "maintainer_can_modify": false, - "commits": 5, - "additions": 181, - "deletions": 8, - "changed_files": 23 - }, - "repository": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/runatlantis/atlantis-tests", - "description": "A set of terraform projects that atlantis e2e tests run on.", - "fork": true, - "url": "https://api.github.com/repos/runatlantis/atlantis-tests", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}", - "collaborators_url": 
"https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}", - "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments", - "created_at": "2018-06-07T12:28:23Z", - "updated_at": "2018-06-07T12:28:27Z", - "pushed_at": "2018-06-11T16:22:09Z", - "git_url": "git://github.com/runatlantis/atlantis-tests.git", - "ssh_url": "git@github.com:runatlantis/atlantis-tests.git", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "svn_url": "https://github.com/runatlantis/atlantis-tests", - "homepage": null, - "size": 7, - 
"stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": false, - "has_projects": true, - "has_downloads": true, - "has_wiki": false, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 2, - "license": { - "key": "other", - "name": "Other", - "spdx_id": null, - "url": null, - "node_id": "MDc6TGljZW5zZTA=" - }, - "forks": 0, - "open_issues": 2, - "watchers": 0, - "default_branch": "master" - }, - "sender": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "installation": { - "id": 12345 - } -} \ No newline at end of file diff --git a/server/legacy/controllers/events/testfixtures/githubPullRequestReviewedEvent.json b/server/legacy/controllers/events/testfixtures/githubPullRequestReviewedEvent.json deleted file mode 100644 index ce9ebaaab..000000000 --- a/server/legacy/controllers/events/testfixtures/githubPullRequestReviewedEvent.json +++ /dev/null @@ -1,96 +0,0 @@ -{ - "action": "submitted", - "review": { - "id": 1241209205, - "commit_id": "d1968d00316b3c3f189138dc0e1c7318b2111a87", - "submitted_at": "2020-01-09T21:10:40Z", - "state": "approved", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/2#pullrequestreview-1241209205", - "pull_request_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pull/2", - "author_association": "NONE" - }, - "pull_request": { - "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2", - "html_url": "https://github.com/runatlantis/atlantis-tests/pull/2", - "id": 194034250, - "number": 2, - "state": "open", - "locked": false, - "title": "branch", - "user": { - "login": "runatlantis", - "id": 1034429 - }, - "body": "", - "created_at": "2018-06-11T16:22:16Z", - "updated_at": "2018-06-11T16:22:16Z", - "closed_at": null, - "merged_at": null, - "merge_commit_sha": null, - "assignee": null, - "assignees": [ - - ], - "requested_reviewers": [ - - ], - "requested_teams": [ - - ], - "labels": [ - - ], - "head": { - "label": "runatlantis:branch", - "ref": "branch", - "sha": "c31fd9ea6f557ad2ea659944c3844a059b83bc5d", - "user": { - "login": "runatlantis" - }, - "repo": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis" - } - } - }, - "base": { - "label": "runatlantis:master", - "ref": "master", - "sha": "f59a822e83b3cd193142c7624ea635a5d7894388", - "user": { - "login": "runatlantis" - }, - "repo": { - 
"id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis" - } - } - } - }, - "repository": { - "id": 136474117, - "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=", - "clone_url": "https://github.com/runatlantis/atlantis-tests.git", - "name": "atlantis-tests", - "full_name": "runatlantis/atlantis-tests", - "owner": { - "login": "runatlantis" - } - }, - "sender": { - "login": "runatlantis" - }, - "installation": { - "id": 1 - } -} diff --git a/server/legacy/controllers/events/testfixtures/null_provider_lockfile_old_version b/server/legacy/controllers/events/testfixtures/null_provider_lockfile_old_version deleted file mode 100644 index 09c858af0..000000000 --- a/server/legacy/controllers/events/testfixtures/null_provider_lockfile_old_version +++ /dev/null @@ -1,20 +0,0 @@ -# This file is maintained automatically by "terraform init". -# Manual edits may be lost in future updates. - -provider "registry.terraform.io/hashicorp/null" { - version = "3.0.0" - constraints = "3.0.0" - hashes = [ - "h1:ysHGBhBNkIiJLEpthB/IVCLpA1Qoncp3KbCTFGFZTO0=", - "zh:05fb7eab469324c97e9b73a61d2ece6f91de4e9b493e573bfeda0f2077bc3a4c", - "zh:1688aa91885a395c4ae67636d411475d0b831e422e005dcf02eedacaafac3bb4", - "zh:24a0b1292e3a474f57c483a7a4512d797e041bc9c2fbaac42fe12e86a7fb5a3c", - "zh:2fc951bd0d1b9b23427acc93be09b6909d72871e464088171da60fbee4fdde03", - "zh:6db825759425599a326385a68acc6be2d9ba0d7d6ef587191d0cdc6daef9ac63", - "zh:85985763d02618993c32c294072cc6ec51f1692b803cb506fcfedca9d40eaec9", - "zh:a53186599c57058be1509f904da512342cfdc5d808efdaf02dec15f0f3cb039a", - "zh:c2e07b49b6efa676bdc7b00c06333ea1792a983a5720f9e2233db27323d2707c", - "zh:cdc8fe1096103cf5374751e2e8408ec4abd2eb67d5a1c5151fe2c7ecfd525bef", - "zh:dbdef21df0c012b0d08776f3d4f34eb0f2f229adfde07ff252a119e52c0f65b7", - ] -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/atlantis.yaml b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/atlantis.yaml deleted file mode 100644 index 64bc9cd7c..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/atlantis.yaml +++ /dev/null @@ -1,7 +0,0 @@ -version: 3 -workflow_mode_type: platform -projects: - - dir: staging - workspace: staging - autoplan: - when_modified: ["**/*.tf*"] diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-apply.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-apply.txt deleted file mode 100644 index 97e695aef..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-apply.txt +++ /dev/null @@ -1,5 +0,0 @@ -Ran Apply for dir: `staging` workspace: `staging` - -```diff -atlantis apply is disabled for this project. Please track the deployment when the PR is merged. 
-``` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-auto-policy-check.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-auto-policy-check.txt deleted file mode 100644 index 6f8c0f6b5..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-auto-policy-check.txt +++ /dev/null @@ -1,16 +0,0 @@ -Ran Policy Check for dir: `staging` workspace: `staging` - -**Policy Check Failed** -``` -exit status 1 -Checking plan against the following policies: - test_policy - -test_policy: -FAIL - - main - WARNING: Null Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-autoplan.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-autoplan.txt deleted file mode 100644 index 6b80a84e2..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/exp-output-autoplan.txt +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for dir: `staging` workspace: `staging` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.this will be created -+ resource "null_resource" "this" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "staging" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/policies/policy.rego b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/policies/policy.rego deleted file mode 100644 index 126c2e459..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/policies/policy.rego +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import input as tfplan - -deny[reason] { - num_deletes.null_resource > 0 - reason := "WARNING: Null Resource creation is prohibited." -} - -resource_types = {"null_resource"} - -resources[resource_type] = all { - some resource_type - resource_types[resource_type] - all := [name | - name := tfplan.resource_changes[_] - name.type == resource_type - ] -} - -# number of deletions of resources of a given type -num_deletes[resource_type] = num { - some resource_type - resource_types[resource_type] - all := resources[resource_type] - deletions := [res | res := all[_]; res.change.actions[_] == "create"] - num := count(deletions) -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/repos.yaml b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/repos.yaml deleted file mode 100644 index 22987469b..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/repos.yaml +++ /dev/null @@ -1,8 +0,0 @@ -repos: - - id: /.*/ -policies: - policy_sets: - - name: test_policy - owner: someoneelse - paths: - - ../policies/policy.rego diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/staging/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/staging/main.tf deleted file mode 100644 index 62f1e7796..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/disabled-apply/staging/main.tf +++ /dev/null @@ -1,5 +0,0 @@ -resource "null_resource" "this" { -} -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/atlantis.yaml b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/atlantis.yaml deleted file mode 100644 index 09ae832ae..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/atlantis.yaml +++ /dev/null @@ -1,5 +0,0 @@ -version: 3 -workflow_mode_type: platform -projects: -- dir: . 
- workspace: default diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-apply-failed.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-apply-failed.txt deleted file mode 100644 index 32dede335..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-apply-failed.txt +++ /dev/null @@ -1 +0,0 @@ -**Apply Failed**: Atlantis apply is being deprecated, please merge the PR to apply your changes diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-apply.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-apply.txt deleted file mode 100644 index f78a67fd0..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-apply.txt +++ /dev/null @@ -1,5 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -```diff -atlantis apply is disabled for this project. Please track the deployment when the PR is merged. -``` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-approve-policies.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-approve-policies.txt deleted file mode 100644 index f5e100c23..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-approve-policies.txt +++ /dev/null @@ -1,5 +0,0 @@ -Approved Policies for 1 projects: - -1. dir: `.` workspace: `default` - - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-auto-policy-check.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-auto-policy-check.txt deleted file mode 100644 index 2b9fbeaa3..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-auto-policy-check.txt +++ /dev/null @@ -1,16 +0,0 @@ -Ran Policy Check for dir: `.` workspace: `default` - -**Policy Check Failed** -``` -exit status 1 -Checking plan against the following policies: - test_policy - -test_policy: -FAIL - - main - WARNING: Null Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-autoplan.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-autoplan.txt deleted file mode 100644 index 07b5c943c..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-autoplan.txt +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-merge.txt b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-merge.txt deleted file mode 100644 index 872c5ee40..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/exp-output-merge.txt +++ /dev/null @@ -1,3 +0,0 @@ -Locks and plans deleted for the projects and workspaces modified in this pull request: - -- dir: `.` workspace: `default` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/main.tf deleted file mode 100644 index 582f9ea01..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/main.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "null_resource" "simple" { - count = 1 -} - -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/policies/policy.rego b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/policies/policy.rego deleted file mode 100644 index 126c2e459..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/policies/policy.rego +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import input as tfplan - -deny[reason] { - num_deletes.null_resource > 0 - reason := "WARNING: Null Resource creation is prohibited." -} - -resource_types = {"null_resource"} - -resources[resource_type] = all { - some resource_type - resource_types[resource_type] - all := [name | - name := tfplan.resource_changes[_] - name.type == resource_type - ] -} - -# number of deletions of resources of a given type -num_deletes[resource_type] = num { - some resource_type - resource_types[resource_type] - all := resources[resource_type] - deletions := [res | res := all[_]; res.change.actions[_] == "create"] - num := count(deletions) -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/repos.yaml b/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/repos.yaml deleted file mode 100644 index 6b379fc7f..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/platform-mode/policy-check-approval/repos.yaml +++ /dev/null @@ -1,9 +0,0 @@ -repos: -- id: /.*/ - apply_requirements: [approved] -policies: - policy_sets: - - name: test_policy - owner: runatlantis - paths: - - policies/policy.rego diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/atlantis.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/atlantis.yaml deleted file mode 100644 index 8435733cd..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/atlantis.yaml +++ /dev/null @@ -1,4 +0,0 @@ -version: 3 -projects: -- dir: . 
- workspace: default diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-apply-failed.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-apply-failed.txt deleted file mode 100644 index 7588f2d2f..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-apply-failed.txt +++ /dev/null @@ -1,3 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -**Apply Failed**: All policies must pass for project before running apply diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-apply.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-apply.txt deleted file mode 100644 index 32dede335..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-apply.txt +++ /dev/null @@ -1 +0,0 @@ -**Apply Failed**: Atlantis apply is being deprecated, please merge the PR to apply your changes diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-approve-policies.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-approve-policies.txt deleted file mode 100644 index f5e100c23..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-approve-policies.txt +++ /dev/null @@ -1,5 +0,0 @@ -Approved Policies for 1 projects: - -1. dir: `.` workspace: `default` - - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt deleted file mode 100644 index 2b9fbeaa3..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt +++ /dev/null @@ -1,16 +0,0 @@ -Ran Policy Check for dir: `.` workspace: `default` - -**Policy Check Failed** -``` -exit status 1 -Checking plan against the following policies: - test_policy - -test_policy: -FAIL - - main - WARNING: Null Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt deleted file mode 100644 index 07b5c943c..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-merge.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-merge.txt deleted file mode 100644 index 872c5ee40..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/exp-output-merge.txt +++ /dev/null @@ -1,3 +0,0 @@ -Locks and plans deleted for the projects and workspaces modified in this pull request: - -- dir: `.` workspace: `default` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/main.tf deleted file mode 100644 index 582f9ea01..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/main.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "null_resource" "simple" { - count = 1 -} - -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/policies/policy.rego b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/policies/policy.rego deleted file mode 100644 index 126c2e459..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/policies/policy.rego +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import input as tfplan - -deny[reason] { - num_deletes.null_resource > 0 - reason := "WARNING: Null Resource creation is prohibited." -} - -resource_types = {"null_resource"} - -resources[resource_type] = all { - some resource_type - resource_types[resource_type] - all := [name | - name := tfplan.resource_changes[_] - name.type == resource_type - ] -} - -# number of deletions of resources of a given type -num_deletes[resource_type] = num { - some resource_type - resource_types[resource_type] - all := resources[resource_type] - deletions := [res | res := all[_]; res.change.actions[_] == "create"] - num := count(deletions) -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/repos.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/repos.yaml deleted file mode 100644 index 6b379fc7f..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-apply-reqs/repos.yaml +++ /dev/null @@ -1,9 +0,0 @@ -repos: -- id: /.*/ - apply_requirements: [approved] -policies: - policy_sets: - - name: test_policy - owner: runatlantis - paths: - - policies/policy.rego diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/atlantis.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/atlantis.yaml deleted file mode 100644 index 8435733cd..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/atlantis.yaml +++ /dev/null @@ -1,4 +0,0 @@ -version: 3 -projects: -- dir: . 
- workspace: default diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-apply-failed.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-apply-failed.txt deleted file mode 100644 index 7588f2d2f..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-apply-failed.txt +++ /dev/null @@ -1,3 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -**Apply Failed**: All policies must pass for project before running apply diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt deleted file mode 100644 index 1b72496de..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt +++ /dev/null @@ -1,4 +0,0 @@ -**Approve Policies Error** -``` -contact policy owners to approve failing policies -``` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt deleted file mode 100644 index 2b9fbeaa3..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt +++ /dev/null @@ -1,16 +0,0 @@ -Ran Policy Check for dir: `.` workspace: `default` - -**Policy Check Failed** -``` -exit status 1 -Checking plan against the following policies: - test_policy - -test_policy: -FAIL - - main - WARNING: Null Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt deleted file mode 100644 index 07b5c943c..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-merge.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-merge.txt deleted file mode 100644 index 70df2f251..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/exp-output-merge.txt +++ /dev/null @@ -1,3 +0,0 @@ -Locks and plans deleted for the projects and workspaces modified in this pull request: - -- dir: `.` workspace: `default` \ No newline at end of file diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/main.tf deleted file mode 100644 index 582f9ea01..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/main.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "null_resource" "simple" { - count = 1 -} - -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/policies/policy.rego b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/policies/policy.rego deleted file mode 100644 index 126c2e459..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/policies/policy.rego +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import input as tfplan - -deny[reason] { - num_deletes.null_resource > 0 - reason := "WARNING: Null Resource creation is prohibited." 
-} - -resource_types = {"null_resource"} - -resources[resource_type] = all { - some resource_type - resource_types[resource_type] - all := [name | - name := tfplan.resource_changes[_] - name.type == resource_type - ] -} - -# number of deletions of resources of a given type -num_deletes[resource_type] = num { - some resource_type - resource_types[resource_type] - all := resources[resource_type] - deletions := [res | res := all[_]; res.change.actions[_] == "create"] - num := count(deletions) -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/repos.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/repos.yaml deleted file mode 100644 index 72bf680be..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-diff-owner/repos.yaml +++ /dev/null @@ -1,8 +0,0 @@ -repos: - - id: /.*/ -policies: - policy_sets: - - name: test_policy - owner: someoneelse - paths: - - policies/policy.rego diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/atlantis.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/atlantis.yaml deleted file mode 100644 index 353e26b47..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/atlantis.yaml +++ /dev/null @@ -1,6 +0,0 @@ -version: 3 -projects: -- dir: dir1 - name: dir1 -- dir: dir2 - name: dir2 diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/dir1/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/dir1/main.tf deleted file mode 100644 index 582f9ea01..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/dir1/main.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "null_resource" "simple" { - count = 1 -} - -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/dir2/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/dir2/main.tf deleted file mode 100644 index 8813d4459..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/dir2/main.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "null_resource" "forbidden" { - count = 1 -} - -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-apply.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-apply.txt deleted file mode 100644 index 4e15336a0..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-apply.txt +++ /dev/null @@ -1,24 +0,0 @@ -Ran Apply for 2 projects: - -1. project: `dir1` dir: `dir1` workspace: `default` -1. project: `dir2` dir: `dir2` workspace: `default` - -### 1. project: `dir1` dir: `dir1` workspace: `default` -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "default" - -``` - ---- -### 2. 
project: `dir2` dir: `dir2` workspace: `default` -**Apply Failed**: All policies must pass for project before running apply - ---- - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-approve-policies.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-approve-policies.txt deleted file mode 100644 index f5e100c23..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-approve-policies.txt +++ /dev/null @@ -1,5 +0,0 @@ -Approved Policies for 1 projects: - -1. dir: `.` workspace: `default` - - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt deleted file mode 100644 index 1ab60f90e..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt +++ /dev/null @@ -1,48 +0,0 @@ -Ran Policy Check for 2 projects: - -1. project: `dir1` dir: `dir1` workspace: `default` -1. project: `dir2` dir: `dir2` workspace: `default` - -### 1. project: `dir1` dir: `dir1` workspace: `default` -```diff -Checking plan against the following policies: - test_policy - test_policy_2 - -test_policy: -1 test, 1 passed, 0 warnings, 0 failures, 0 exceptions - -test_policy_2: -1 test, 1 passed, 0 warnings, 0 failures, 0 exceptions - -``` - - ---- -### 2. project: `dir2` dir: `dir2` workspace: `default` -**Policy Check Failed** -``` -exit status 1 -Checking plan against the following policies: - test_policy - test_policy_2 - -test_policy: -FAIL - - main - WARNING: Forbidden Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -test_policy_2: -FAIL - - main - WARNING: Forbidden Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt deleted file mode 100644 index b21d3d8dc..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt +++ /dev/null @@ -1,75 +0,0 @@ -Ran Plan for 2 projects: - -1. project: `dir1` dir: `dir1` workspace: `default` -1. project: `dir2` dir: `dir2` workspace: `default` - -### 1. project: `dir1` dir: `dir1` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p dir1` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -### 2. project: `dir2` dir: `dir2` workspace: `default` -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.forbidden[0] will be created -+ resource "null_resource" "forbidden" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p dir2` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir2` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-merge.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-merge.txt deleted file mode 100644 index 1a1225918..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/exp-output-merge.txt +++ /dev/null @@ -1,4 +0,0 @@ -Locks and plans deleted for the projects and workspaces modified in this pull request: - -- dir: `dir1` workspace: `default` -- dir: `dir2` workspace: `default` \ No newline at end of file diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/policies/policy.rego b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/policies/policy.rego deleted file mode 100644 index 4b9e5254e..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/policies/policy.rego +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import input as tfplan - -deny[reason] { - num_creates[_] > 0 - reason := "WARNING: Forbidden Resource creation is prohibited." -} - -resource_names = {"forbidden"} - -resources[resource_name] = all { - some resource_name - resource_names[resource_name] - all := [res | - res := tfplan.resource_changes[_] - res.name == resource_name - ] -} - -# number of creations of resources of a given name -num_creates[resource_name] = num { - some resource_name - resource_names[resource_name] - all := resources[resource_name] - creations := [res | res := all[_]; res.change.actions[_] == "create"] - num := count(creations) -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/repos.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/repos.yaml deleted file mode 100644 index 145f3a3dd..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks-multi-projects/repos.yaml +++ /dev/null @@ -1,12 +0,0 @@ -repos: - - id: /.*/ -policies: - policy_sets: - - name: test_policy - owner: runatlantis - paths: - - ../policies/policy.rego - - name: test_policy_2 - owner: runatlantis - paths: - - ../policies/policy.rego diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/atlantis.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/atlantis.yaml deleted file mode 100644 index 00e599a5d..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/atlantis.yaml +++ /dev/null @@ -1,5 +0,0 @@ -version: 3 -workflow_mode_type: pr -projects: -- dir: . 
- workspace: default diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-apply-failed.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-apply-failed.txt deleted file mode 100644 index 7588f2d2f..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-apply-failed.txt +++ /dev/null @@ -1,3 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -**Apply Failed**: All policies must pass for project before running apply diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-apply.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-apply.txt deleted file mode 100644 index b8eb00317..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-apply.txt +++ /dev/null @@ -1,13 +0,0 @@ -Ran Apply for dir: `.` workspace: `default` - -```diff -null_resource.simple: -null_resource.simple: - -Apply complete! Resources: 1 added, 0 changed, 0 destroyed. - -Outputs: - -workspace = "default" - -``` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-approve-policies.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-approve-policies.txt deleted file mode 100644 index f5e100c23..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-approve-policies.txt +++ /dev/null @@ -1,5 +0,0 @@ -Approved Policies for 1 projects: - -1. dir: `.` workspace: `default` - - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt deleted file mode 100644 index 2b9fbeaa3..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-auto-policy-check.txt +++ /dev/null @@ -1,16 +0,0 @@ -Ran Policy Check for dir: `.` workspace: `default` - -**Policy Check Failed** -``` -exit status 1 -Checking plan against the following policies: - test_policy - -test_policy: -FAIL - - main - WARNING: Null Resource creation is prohibited. - -1 test, 0 passed, 0 warnings, 1 failure, 0 exceptions - -``` -* :heavy_check_mark: To **approve** failing policies either request an approval from approvers or address the failure by modifying the codebase. - diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-autoplan.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-autoplan.txt deleted file mode 100644 index 07b5c943c..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-autoplan.txt +++ /dev/null @@ -1,38 +0,0 @@ -Ran Plan for dir: `.` workspace: `default` - -
Show Output - -```diff - -Terraform used the selected providers to generate the following execution -plan. Resource actions are indicated with the following symbols: -+ create - -Terraform will perform the following actions: - - # null_resource.simple[0] will be created -+ resource "null_resource" "simple" { - + id = (known after apply) - } - -Plan: 1 to add, 0 to change, 0 to destroy. - -Changes to Outputs: -+ workspace = "default" - -``` - -* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here]() -* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` -
-Plan: 1 to add, 0 to change, 0 to destroy. - - ---- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To delete all plans and locks for the PR, comment: - * `atlantis unlock` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-merge.txt b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-merge.txt deleted file mode 100644 index 872c5ee40..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/exp-output-merge.txt +++ /dev/null @@ -1,3 +0,0 @@ -Locks and plans deleted for the projects and workspaces modified in this pull request: - -- dir: `.` workspace: `default` diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/main.tf b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/main.tf deleted file mode 100644 index 582f9ea01..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/main.tf +++ /dev/null @@ -1,7 +0,0 @@ -resource "null_resource" "simple" { - count = 1 -} - -output "workspace" { - value = terraform.workspace -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/policies/policy.rego b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/policies/policy.rego deleted file mode 100644 index 126c2e459..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/policies/policy.rego +++ /dev/null @@ -1,28 +0,0 @@ -package main - -import input as tfplan - -deny[reason] { - num_deletes.null_resource > 0 - reason := "WARNING: Null Resource creation is prohibited." -} - -resource_types = {"null_resource"} - -resources[resource_type] = all { - some resource_type - resource_types[resource_type] - all := [name | - name := tfplan.resource_changes[_] - name.type == resource_type - ] -} - -# number of deletions of resources of a given type -num_deletes[resource_type] = num { - some resource_type - resource_types[resource_type] - all := resources[resource_type] - deletions := [res | res := all[_]; res.change.actions[_] == "create"] - num := count(deletions) -} diff --git a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/repos.yaml b/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/repos.yaml deleted file mode 100644 index 72bf680be..000000000 --- a/server/legacy/controllers/events/testfixtures/test-repos/policy-checks/repos.yaml +++ /dev/null @@ -1,8 +0,0 @@ -repos: - - id: /.*/ -policies: - policy_sets: - - name: test_policy - owner: someoneelse - paths: - - policies/policy.rego diff --git a/server/legacy/controllers/github_app_controller.go b/server/legacy/controllers/github_app_controller.go deleted file mode 100644 index a3678728a..000000000 --- a/server/legacy/controllers/github_app_controller.go +++ /dev/null @@ -1,158 +0,0 @@ -package controllers - -import ( - "encoding/json" - "fmt" - "net/http" - "net/url" - - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - - "github.com/runatlantis/atlantis/server/legacy/controllers/templates" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" -) - -// GithubAppController handles the creation and setup of a new GitHub app -type GithubAppController struct { - AtlantisURL *url.URL - Logger logging.Logger - GithubSetupComplete bool - GithubHostname string - GithubOrg string - 
GithubStatusName string - Allocator feature.Allocator -} - -type githubWebhook struct { - URL string `json:"url"` - Active bool `json:"active"` -} - -// githubAppRequest contains the query parameters for -// https://developer.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest -type githubAppRequest struct { - Description string `json:"description"` - Events []string `json:"default_events"` - Name string `json:"name"` - Permissions map[string]string `json:"default_permissions"` - Public bool `json:"public"` - RedirectURL string `json:"redirect_url"` - URL string `json:"url"` - Webhook *githubWebhook `json:"hook_attributes"` -} - -// ExchangeCode handles the user coming back from creating their app -// A code query parameter is exchanged for this app's ID, key, and webhook_secret -// Implements https://developer.github.com/apps/building-github-apps/creating-github-apps-from-a-manifest/#implementing-the-github-app-manifest-flow -func (g *GithubAppController) ExchangeCode(w http.ResponseWriter, r *http.Request) { - if g.GithubSetupComplete { - g.respond(w, http.StatusBadRequest, "Atlantis already has GitHub credentials") - return - } - - code := r.URL.Query().Get("code") - if code == "" { - g.respond(w, http.StatusOK, "Ignoring callback, missing code query parameter") - } - - creds := &vcs.GithubAnonymousCredentials{} - - // TODO: unify this in a single inject.go file - mergeabilityChecker := vcs.NewLyftPullMergeabilityChecker(g.GithubStatusName) - client, err := vcs.NewGithubClient(g.GithubHostname, creds, g.Logger, g.Allocator, mergeabilityChecker) - - if err != nil { - g.respond(w, http.StatusInternalServerError, "Failed to exchange code for github app: %s", err) - return - } - - app, err := client.ExchangeCode(code) - if err != nil { - g.respond(w, http.StatusInternalServerError, "Failed to exchange code for github app: %s", err) - return - } - - err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{ - Target: "", - Manifest: "", - ID: app.ID, - Key: app.Key, - WebhookSecret: app.WebhookSecret, - URL: app.URL, - }) - if err != nil { - g.Logger.Error(err.Error()) - } -} - -// New redirects the user to create a new GitHub app -func (g *GithubAppController) New(w http.ResponseWriter, r *http.Request) { - if g.GithubSetupComplete { - g.respond(w, http.StatusBadRequest, "Atlantis already has GitHub credentials") - return - } - - manifest := &githubAppRequest{ - Name: fmt.Sprintf("Atlantis for %s", g.AtlantisURL.Hostname()), - Description: fmt.Sprintf("Terraform Pull Request Automation at %s", g.AtlantisURL), - URL: g.AtlantisURL.String(), - RedirectURL: fmt.Sprintf("%s/github-app/exchange-code", g.AtlantisURL), - Public: false, - Webhook: &githubWebhook{ - Active: true, - URL: fmt.Sprintf("%s/events", g.AtlantisURL), - }, - Events: []string{ - "check_run", - "create", - "delete", - "issue_comment", - "issues", - "pull_request_review_comment", - "pull_request_review", - "pull_request", - "push", - }, - Permissions: map[string]string{ - "checks": "write", - "contents": "write", - "issues": "write", - "pull_requests": "write", - "repository_hooks": "write", - "statuses": "write", - }, - } - - url := &url.URL{ - Scheme: "https", - Host: g.GithubHostname, - Path: "/settings/apps/new", - } - - // https://developer.github.com/apps/building-github-apps/creating-github-apps-using-url-parameters/#about-github-app-url-parameters - if g.GithubOrg != "" { - url.Path = fmt.Sprintf("organizations/%s%s", g.GithubOrg, url.Path) - } - - jsonManifest, err := 
json.MarshalIndent(manifest, "", " ") - if err != nil { - g.respond(w, http.StatusBadRequest, "Failed to serialize manifest: %s", err) - return - } - - err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{ - Target: url.String(), - Manifest: string(jsonManifest), - }) - if err != nil { - g.Logger.Error(err.Error()) - } -} - -func (g *GithubAppController) respond(w http.ResponseWriter, code int, format string, args ...interface{}) { - response := fmt.Sprintf(format, args...) - g.Logger.Error(response) - w.WriteHeader(code) - fmt.Fprintln(w, response) -} diff --git a/server/legacy/controllers/jobs_controller.go b/server/legacy/controllers/jobs_controller.go deleted file mode 100644 index 6a0528ca1..000000000 --- a/server/legacy/controllers/jobs_controller.go +++ /dev/null @@ -1,98 +0,0 @@ -package controllers - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/gorilla/mux" - "github.com/runatlantis/atlantis/server/legacy/controllers/templates" - "github.com/runatlantis/atlantis/server/legacy/controllers/websocket" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/uber-go/tally/v4" -) - -type JobIDKeyGenerator struct{} - -func (g JobIDKeyGenerator) Generate(r *http.Request) (string, error) { - jobID, ok := mux.Vars(r)["job-id"] - if !ok { - return "", fmt.Errorf("internal error: no job-id in route") - } - - return jobID, nil -} - -type JobsController struct { - AtlantisVersion string - AtlantisURL *url.URL - Logger logging.Logger - ProjectJobsTemplate templates.TemplateWriter - ProjectJobsErrorTemplate templates.TemplateWriter - Db *db.BoltDB - WsMux websocket.Multiplexor - StatsScope tally.Scope - KeyGenerator JobIDKeyGenerator -} - -func (j *JobsController) getProjectJobs(w http.ResponseWriter, r *http.Request) error { - jobID, err := j.KeyGenerator.Generate(r) - - if err != nil { - j.respond(w, http.StatusBadRequest, err.Error()) - return err - } - - viewData := templates.ProjectJobData{ - AtlantisVersion: j.AtlantisVersion, - ProjectPath: jobID, - CleanedBasePath: j.AtlantisURL.Path, - } - - if err = j.ProjectJobsTemplate.Execute(w, viewData); err != nil { - j.Logger.Error(err.Error()) - return err - } - - return nil -} - -func (j *JobsController) GetProjectJobs(w http.ResponseWriter, r *http.Request) { - errorCounter := j.StatsScope.SubScope("api").Counter(metrics.ExecutionErrorMetric) - err := j.getProjectJobs(w, r) - if err != nil { - errorCounter.Inc(1) - } -} - -func (j *JobsController) getProjectJobsWS(w http.ResponseWriter, r *http.Request) error { - err := j.WsMux.Handle(w, r) - - if err != nil { - j.respond(w, http.StatusBadRequest, err.Error()) - return err - } - - return nil -} - -func (j *JobsController) GetProjectJobsWS(w http.ResponseWriter, r *http.Request) { - jobsMetric := j.StatsScope.SubScope("api") - errorCounter := jobsMetric.Counter(metrics.ExecutionErrorMetric) - executionTime := jobsMetric.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - err := j.getProjectJobsWS(w, r) - if err != nil { - errorCounter.Inc(1) - } -} - -func (j *JobsController) respond(w http.ResponseWriter, responseCode int, format string, args ...interface{}) { - response := fmt.Sprintf(format, args...) 
- j.Logger.Error(response) - w.WriteHeader(responseCode) - fmt.Fprintln(w, response) -} diff --git a/server/legacy/controllers/locks_controller.go b/server/legacy/controllers/locks_controller.go deleted file mode 100644 index 11e1245d2..000000000 --- a/server/legacy/controllers/locks_controller.go +++ /dev/null @@ -1,172 +0,0 @@ -package controllers - -import ( - "fmt" - "net/http" - "net/url" - - "github.com/runatlantis/atlantis/server/legacy/controllers/templates" - "github.com/runatlantis/atlantis/server/legacy/core/db" - - "github.com/gorilla/mux" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -// LocksController handles all requests relating to Atlantis locks. -type LocksController struct { - AtlantisVersion string - AtlantisURL *url.URL - Locker locking.Locker - Logger logging.Logger - ApplyLocker locking.ApplyLocker - VCSClient vcs.Client - LockDetailTemplate templates.TemplateWriter - WorkingDir events.WorkingDir - WorkingDirLocker events.WorkingDirLocker - DB *db.BoltDB - DeleteLockCommand events.DeleteLockCommand -} - -// LockApply handles creating a global apply lock. -// If Lock already exists it will be a no-op -func (l *LocksController) LockApply(w http.ResponseWriter, r *http.Request) { - lock, err := l.ApplyLocker.LockApply() - if err != nil { - l.respond(w, logging.Error, http.StatusInternalServerError, "creating apply lock failed with: %s", err) - return - } - - l.respond(w, logging.Info, http.StatusOK, "Apply Lock is acquired on %s", lock.Time.Format("2006-01-02 15:04:05")) -} - -// UnlockApply handles releasing a global apply lock. -// If Lock doesn't exists it will be a no-op -func (l *LocksController) UnlockApply(w http.ResponseWriter, r *http.Request) { - err := l.ApplyLocker.UnlockApply() - if err != nil { - l.respond(w, logging.Error, http.StatusInternalServerError, "deleting apply lock failed with: %s", err) - return - } - - l.respond(w, logging.Info, http.StatusOK, "Deleted apply lock") -} - -// GetLock is the GET /locks/{id} route. It renders the lock detail view. -func (l *LocksController) GetLock(w http.ResponseWriter, r *http.Request) { - id, ok := mux.Vars(r)["id"] - if !ok { - l.respond(w, logging.Warn, http.StatusBadRequest, "No lock id in request") - return - } - - idUnencoded, err := url.QueryUnescape(id) - if err != nil { - l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id: %s", err) - return - } - lock, err := l.Locker.GetLock(idUnencoded) - if err != nil { - l.respond(w, logging.Error, http.StatusInternalServerError, "Failed getting lock: %s", err) - return - } - if lock == nil { - l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded) - return - } - - owner, repo := models.SplitRepoFullName(lock.Project.RepoFullName) - viewData := templates.LockDetailData{ - LockKeyEncoded: id, - LockKey: idUnencoded, - PullRequestLink: lock.Pull.URL, - LockedBy: lock.Pull.Author, - Workspace: lock.Workspace, - AtlantisVersion: l.AtlantisVersion, - CleanedBasePath: l.AtlantisURL.Path, - RepoOwner: owner, - RepoName: repo, - } - - err = l.LockDetailTemplate.Execute(w, viewData) - if err != nil { - l.Logger.Error(err.Error()) - } -} - -// DeleteLock handles deleting the lock at id and commenting back on the -// pull request that the lock has been deleted. 
-func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) { - id, ok := mux.Vars(r)["id"] - if !ok || id == "" { - l.respond(w, logging.Warn, http.StatusBadRequest, "No lock id in request") - return - } - - idUnencoded, err := url.PathUnescape(id) - if err != nil { - l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id %q. Failed with error: %s", id, err) - return - } - - lock, err := l.DeleteLockCommand.DeleteLock(idUnencoded) - if err != nil { - l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: %s", err) - return - } - - if lock == nil { - l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded) - return - } - - // NOTE: Because BaseRepo was added to the PullRequest model later, previous - // installations of Atlantis will have locks in their DB that do not have - // this field on PullRequest. We skip commenting in this case. - if lock.Pull.BaseRepo != (models.Repo{}) { - unlock, err := l.WorkingDirLocker.TryLock(lock.Pull.BaseRepo.FullName, lock.Pull.Num, lock.Workspace) - if err != nil { - l.Logger.Error(fmt.Sprintf("unable to obtain working dir lock when trying to delete old plans: %s", err)) - } else { - defer unlock() - // nolint: vetshadow - if err := l.WorkingDir.DeleteForWorkspace(lock.Pull.BaseRepo, lock.Pull, lock.Workspace); err != nil { - l.Logger.Error(fmt.Sprintf("unable to delete workspace: %s", err)) - } - } - if err := l.DB.UpdateProjectStatus(lock.Pull, lock.Workspace, lock.Project.Path, models.DiscardedPlanStatus); err != nil { - l.Logger.Error(fmt.Sprintf("unable to update project status: %s", err)) - } - - // Once the lock has been deleted, comment back on the pull request. - comment := fmt.Sprintf("**Warning**: The plan for dir: `%s` workspace: `%s` was **discarded** via the Atlantis UI.\n\n"+ - "To `apply` this plan you must run `plan` again.", lock.Project.Path, lock.Workspace) - if err = l.VCSClient.CreateComment(lock.Pull.BaseRepo, lock.Pull.Num, comment, ""); err != nil { - l.Logger.Warn(fmt.Sprintf("failed commenting on pull request: %s", err)) - } - } - l.respond(w, logging.Info, http.StatusOK, "Deleted lock id %q", id) -} - -// respond is a helper function to respond and log the response. lvl is the log -// level to log at, code is the HTTP response code. -func (l *LocksController) respond(w http.ResponseWriter, lvl logging.LogLevel, responseCode int, format string, args ...interface{}) { - response := fmt.Sprintf(format, args...) 
- switch lvl { - case logging.Error: - l.Logger.Error(response) - case logging.Info: - l.Logger.Info(response) - case logging.Warn: - l.Logger.Warn(response) - case logging.Debug: - l.Logger.Debug(response) - default: - l.Logger.Error(response) - } - w.WriteHeader(responseCode) - fmt.Fprintln(w, response) -} diff --git a/server/legacy/controllers/locks_controller_test.go b/server/legacy/controllers/locks_controller_test.go deleted file mode 100644 index 8c54e7289..000000000 --- a/server/legacy/controllers/locks_controller_test.go +++ /dev/null @@ -1,413 +0,0 @@ -package controllers_test - -import ( - "bytes" - "errors" - "fmt" - "net/http" - "net/http/httptest" - "net/url" - "reflect" - "testing" - "time" - - "github.com/runatlantis/atlantis/server/legacy/controllers" - "github.com/runatlantis/atlantis/server/legacy/controllers/templates" - tMocks "github.com/runatlantis/atlantis/server/legacy/controllers/templates/mocks" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - - "github.com/gorilla/mux" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events" - - "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - mocks2 "github.com/runatlantis/atlantis/server/legacy/events/mocks" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func AnyRepo() models.Repo { - RegisterMatcher(NewAnyMatcher(reflect.TypeOf(models.Repo{}))) - return models.Repo{} -} - -func TestCreateApplyLock(t *testing.T) { - t.Run("Creates apply lock", func(t *testing.T) { - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - - layout := "2006-01-02T15:04:05.000Z" - strLockTime := "2020-09-01T00:45:26.371Z" - expLockTime := "2020-09-01 00:45:26" - lockTime, _ := time.Parse(layout, strLockTime) - - l := mocks.NewMockApplyLocker() - When(l.LockApply()).ThenReturn(locking.ApplyCommandLock{ - Locked: true, - Time: lockTime, - }, nil) - - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - ApplyLocker: l, - } - lc.LockApply(w, req) - - ResponseContains(t, w, http.StatusOK, fmt.Sprintf("Apply Lock is acquired on %s", expLockTime)) - }) - - t.Run("Apply lock creation fails", func(t *testing.T) { - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - - l := mocks.NewMockApplyLocker() - When(l.LockApply()).ThenReturn(locking.ApplyCommandLock{ - Locked: false, - }, errors.New("failed to acquire lock")) - - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - ApplyLocker: l, - } - lc.LockApply(w, req) - - ResponseContains(t, w, http.StatusInternalServerError, "creating apply lock failed with: failed to acquire lock") - }) -} - -func TestUnlockApply(t *testing.T) { - t.Run("Apply lock deleted successfully", func(t *testing.T) { - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - - l := mocks.NewMockApplyLocker() - When(l.UnlockApply()).ThenReturn(nil) - - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - ApplyLocker: l, - } - lc.UnlockApply(w, req) - - ResponseContains(t, w, http.StatusOK, "Deleted apply lock") - }) - - t.Run("Apply lock deletion failed", func(t 
*testing.T) { - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - - l := mocks.NewMockApplyLocker() - When(l.UnlockApply()).ThenReturn(errors.New("failed to delete lock")) - - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - ApplyLocker: l, - } - lc.UnlockApply(w, req) - - ResponseContains(t, w, http.StatusInternalServerError, "deleting apply lock failed with: failed to delete lock") - }) -} - -func TestGetLockRoute_NoLockID(t *testing.T) { - t.Log("If there is no lock ID in the request then we should get a 400") - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - } - lc.GetLock(w, req) - ResponseContains(t, w, http.StatusBadRequest, "No lock id in request") -} - -func TestGetLock_InvalidLockID(t *testing.T) { - t.Log("If the lock ID is invalid then we should get a 400") - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "%A@"}) - w := httptest.NewRecorder() - lc.GetLock(w, req) - ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id") -} - -func TestGetLock_LockerErrorf(t *testing.T) { - t.Log("If there is an error retrieving the lock, a 500 is returned") - RegisterMockTestingT(t) - l := mocks.NewMockLocker() - When(l.GetLock("id")).ThenReturn(nil, errors.New("err")) - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - Locker: l, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.GetLock(w, req) - ResponseContains(t, w, http.StatusInternalServerError, "err") -} - -func TestGetLock_None(t *testing.T) { - t.Log("If there is no lock at that ID we get a 404") - RegisterMockTestingT(t) - l := mocks.NewMockLocker() - When(l.GetLock("id")).ThenReturn(nil, nil) - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - Locker: l, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.GetLock(w, req) - ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"") -} - -func TestGetLock_Success(t *testing.T) { - t.Log("Should be able to render a lock successfully") - RegisterMockTestingT(t) - l := mocks.NewMockLocker() - When(l.GetLock("id")).ThenReturn(&models.ProjectLock{ - Project: models.Project{RepoFullName: "owner/repo", Path: "path"}, - Pull: models.PullRequest{URL: "url", Author: "lkysow"}, - Workspace: "workspace", - }, nil) - tmpl := tMocks.NewMockTemplateWriter() - atlantisURL, err := url.Parse("https://example.com/basepath") - Ok(t, err) - lc := controllers.LocksController{ - Logger: logging.NewNoopCtxLogger(t), - Locker: l, - LockDetailTemplate: tmpl, - AtlantisVersion: "1300135", - AtlantisURL: atlantisURL, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.GetLock(w, req) - tmpl.VerifyWasCalledOnce().Execute(w, templates.LockDetailData{ - LockKeyEncoded: "id", - LockKey: "id", - RepoOwner: "owner", - RepoName: "repo", - PullRequestLink: "url", - LockedBy: "lkysow", - Workspace: "workspace", - AtlantisVersion: "1300135", - 
CleanedBasePath: "/basepath", - }) - ResponseContains(t, w, http.StatusOK, "") -} - -func TestDeleteLock_NoLockID(t *testing.T) { - t.Log("If there is no lock ID in the request then we should get a 400") - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - lc := controllers.LocksController{Logger: logging.NewNoopCtxLogger(t)} - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusBadRequest, "No lock id in request") -} - -func TestDeleteLock_InvalidLockID(t *testing.T) { - t.Log("If the lock ID is invalid then we should get a 400") - lc := controllers.LocksController{Logger: logging.NewNoopCtxLogger(t)} - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "%A@"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id \"%A@\"") -} - -func TestDeleteLock_LockerErrorf(t *testing.T) { - t.Log("If there is an error retrieving the lock, a 500 is returned") - RegisterMockTestingT(t) - dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(nil, errors.New("err")) - lc := controllers.LocksController{ - DeleteLockCommand: dlc, - Logger: logging.NewNoopCtxLogger(t), - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusInternalServerError, "err") -} - -func TestDeleteLock_None(t *testing.T) { - t.Log("If there is no lock at that ID we get a 404") - RegisterMockTestingT(t) - dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(nil, nil) - lc := controllers.LocksController{ - DeleteLockCommand: dlc, - Logger: logging.NewNoopCtxLogger(t), - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"") -} - -func TestDeleteLock_OldFormat(t *testing.T) { - t.Log("If the lock doesn't have BaseRepo set it is deleted successfully") - RegisterMockTestingT(t) - cp := vcsmocks.NewMockClient() - dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{}, nil) - lc := controllers.LocksController{ - DeleteLockCommand: dlc, - Logger: logging.NewNoopCtxLogger(t), - VCSClient: cp, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") - cp.VerifyWasCalled(Never()).CreateComment(AnyRepo(), AnyInt(), AnyString(), AnyString()) -} - -func TestDeleteLock_UpdateProjectStatus(t *testing.T) { - t.Log("When deleting a lock, pull status has to be updated to reflect discarded plan") - RegisterMockTestingT(t) - - repoName := "owner/repo" - projectPath := "path" - workspaceName := "workspace" - - cp := vcsmocks.NewMockClient() - l := mocks2.NewMockDeleteLockCommand() - workingDir := mocks2.NewMockWorkingDir() - workingDirLocker := events.NewDefaultWorkingDirLocker() - pull := models.PullRequest{ - BaseRepo: models.Repo{FullName: repoName}, - } - When(l.DeleteLock("id")).ThenReturn(&models.ProjectLock{ - Pull: pull, - Workspace: workspaceName, - Project: models.Project{ - Path: projectPath, - 
RepoFullName: repoName, - }, - }, nil) - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - // Seed the DB with a successful plan for that project (that is later discarded). - _, err = db.UpdatePullWithResults(pull, []command.ProjectResult{ - { - Command: command.Plan, - RepoRelDir: projectPath, - Workspace: workspaceName, - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: "tf-output", - LockURL: "lock-url", - }, - }, - }) - Ok(t, err) - lc := controllers.LocksController{ - DeleteLockCommand: l, - Logger: logging.NewNoopCtxLogger(t), - VCSClient: cp, - WorkingDirLocker: workingDirLocker, - WorkingDir: workingDir, - DB: db, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") - status, err := db.GetPullStatus(pull) - Ok(t, err) - Assert(t, status.Projects != nil, "status projects was nil") - Equals(t, []models.ProjectStatus{ - { - Workspace: workspaceName, - RepoRelDir: projectPath, - Status: models.DiscardedPlanStatus, - }, - }, status.Projects) -} - -func TestDeleteLock_CommentFailed(t *testing.T) { - t.Log("If the commenting fails we still return success") - RegisterMockTestingT(t) - dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{ - Pull: models.PullRequest{ - BaseRepo: models.Repo{FullName: "owner/repo"}, - }, - }, nil) - cp := vcsmocks.NewMockClient() - workingDir := mocks2.NewMockWorkingDir() - workingDirLocker := events.NewDefaultWorkingDirLocker() - When(cp.CreateComment(AnyRepo(), AnyInt(), AnyString(), AnyString())).ThenReturn(errors.New("err")) - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - lc := controllers.LocksController{ - DeleteLockCommand: dlc, - Logger: logging.NewNoopCtxLogger(t), - VCSClient: cp, - WorkingDir: workingDir, - WorkingDirLocker: workingDirLocker, - DB: db, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") -} - -func TestDeleteLock_CommentSuccess(t *testing.T) { - t.Log("We should comment back on the pull request if the lock is deleted") - RegisterMockTestingT(t) - cp := vcsmocks.NewMockClient() - dlc := mocks2.NewMockDeleteLockCommand() - workingDir := mocks2.NewMockWorkingDir() - workingDirLocker := events.NewDefaultWorkingDirLocker() - pull := models.PullRequest{ - BaseRepo: models.Repo{FullName: "owner/repo"}, - } - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{ - Pull: pull, - Workspace: "workspace", - Project: models.Project{ - Path: "path", - RepoFullName: "owner/repo", - }, - }, nil) - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - lc := controllers.LocksController{ - DeleteLockCommand: dlc, - Logger: logging.NewNoopCtxLogger(t), - VCSClient: cp, - DB: db, - WorkingDir: workingDir, - WorkingDirLocker: workingDirLocker, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - req = mux.SetURLVars(req, map[string]string{"id": "id"}) - w := httptest.NewRecorder() - lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") - cp.VerifyWasCalled(Once()).CreateComment(pull.BaseRepo, pull.Num, - "**Warning**: The plan for dir: `path` 
workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+ - "To `apply` this plan you must run `plan` again.", "") -} diff --git a/server/legacy/controllers/status_controller.go b/server/legacy/controllers/status_controller.go deleted file mode 100644 index 17125d4f3..000000000 --- a/server/legacy/controllers/status_controller.go +++ /dev/null @@ -1,37 +0,0 @@ -package controllers - -import ( - "encoding/json" - "fmt" - "net/http" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/logging" -) - -// StatusController handles the status of Atlantis. -type StatusController struct { - Logger logging.Logger - Drainer *events.Drainer -} - -type StatusResponse struct { - ShuttingDown bool `json:"shutting_down"` - InProgressOps int `json:"in_progress_operations"` -} - -// Get is the GET /status route. -func (d *StatusController) Get(w http.ResponseWriter, r *http.Request) { - status := d.Drainer.GetStatus() - data, err := json.MarshalIndent(&StatusResponse{ - ShuttingDown: status.ShuttingDown, - InProgressOps: status.InProgressOps, - }, "", " ") - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - fmt.Fprintf(w, "Error creating status json response: %s", err) - return - } - w.Header().Set("Content-Type", "application/json") - w.Write(data) // nolint: errcheck -} diff --git a/server/legacy/controllers/status_controller_test.go b/server/legacy/controllers/status_controller_test.go deleted file mode 100644 index 6c88e8881..000000000 --- a/server/legacy/controllers/status_controller_test.go +++ /dev/null @@ -1,82 +0,0 @@ -package controllers_test - -import ( - "bytes" - "encoding/json" - "io" - "net/http" - "net/http/httptest" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/controllers" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/logging" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestStatusController_Startup(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - r, _ := http.NewRequest(http.MethodGet, "/status", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - dr := &events.Drainer{} - d := &controllers.StatusController{ - Logger: logger, - Drainer: dr, - } - d.Get(w, r) - - var result controllers.StatusResponse - body, err := io.ReadAll(w.Result().Body) - Ok(t, err) - Equals(t, 200, w.Result().StatusCode) - err = json.Unmarshal(body, &result) - Ok(t, err) - Equals(t, false, result.ShuttingDown) - Equals(t, 0, result.InProgressOps) -} - -func TestStatusController_InProgress(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - r, _ := http.NewRequest(http.MethodGet, "/status", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - dr := &events.Drainer{} - dr.StartOp() - - d := &controllers.StatusController{ - Logger: logger, - Drainer: dr, - } - d.Get(w, r) - - var result controllers.StatusResponse - body, err := io.ReadAll(w.Result().Body) - Ok(t, err) - Equals(t, 200, w.Result().StatusCode) - err = json.Unmarshal(body, &result) - Ok(t, err) - Equals(t, false, result.ShuttingDown) - Equals(t, 1, result.InProgressOps) -} - -func TestStatusController_Shutdown(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - r, _ := http.NewRequest(http.MethodGet, "/status", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - dr := &events.Drainer{} - dr.ShutdownBlocking() - - d := &controllers.StatusController{ - Logger: logger, - Drainer: dr, - } - d.Get(w, r) - - var result controllers.StatusResponse - body, err := io.ReadAll(w.Result().Body) - Ok(t, err) - Equals(t, 200, w.Result().StatusCode) - err = json.Unmarshal(body, &result) - Ok(t, err) - Equals(t, true, result.ShuttingDown) - Equals(t, 0, result.InProgressOps) -} diff --git a/server/legacy/controllers/templates/mocks/matchers/io_writer.go b/server/legacy/controllers/templates/mocks/matchers/io_writer.go deleted file mode 100644 index e39a22155..000000000 --- a/server/legacy/controllers/templates/mocks/matchers/io_writer.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - io "io" -) - -func AnyIoWriter() io.Writer { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(io.Writer))(nil)).Elem())) - var nullValue io.Writer - return nullValue -} - -func EqIoWriter(value io.Writer) io.Writer { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue io.Writer - return nullValue -} - -func NotEqIoWriter(value io.Writer) io.Writer { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue io.Writer - return nullValue -} - -func IoWriterThat(matcher pegomock.ArgumentMatcher) io.Writer { - pegomock.RegisterMatcher(matcher) - var nullValue io.Writer - return nullValue -} diff --git a/server/legacy/controllers/templates/mocks/mock_template_writer.go b/server/legacy/controllers/templates/mocks/mock_template_writer.go deleted file mode 100644 index 101f26731..000000000 --- a/server/legacy/controllers/templates/mocks/mock_template_writer.go +++ /dev/null @@ -1,110 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/controllers/templates (interfaces: TemplateWriter) - -package mocks - -import ( - io "io" - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockTemplateWriter struct { - fail func(message string, callerSkip ...int) -} - -func NewMockTemplateWriter(options ...pegomock.Option) *MockTemplateWriter { - mock := &MockTemplateWriter{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockTemplateWriter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockTemplateWriter) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockTemplateWriter) Execute(wr io.Writer, data interface{}) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockTemplateWriter().") - } - params := []pegomock.Param{wr, data} - result := pegomock.GetGenericMockFrom(mock).Invoke("Execute", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockTemplateWriter) VerifyWasCalledOnce() *VerifierMockTemplateWriter { - return &VerifierMockTemplateWriter{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockTemplateWriter) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockTemplateWriter { - return &VerifierMockTemplateWriter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockTemplateWriter) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockTemplateWriter { - return &VerifierMockTemplateWriter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockTemplateWriter) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockTemplateWriter { - return &VerifierMockTemplateWriter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockTemplateWriter struct { - mock *MockTemplateWriter - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockTemplateWriter) Execute(wr io.Writer, data interface{}) *MockTemplateWriter_Execute_OngoingVerification { - params := []pegomock.Param{wr, data} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Execute", params, verifier.timeout) - return &MockTemplateWriter_Execute_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockTemplateWriter_Execute_OngoingVerification struct { - mock *MockTemplateWriter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockTemplateWriter_Execute_OngoingVerification) GetCapturedArguments() (io.Writer, interface{}) { - wr, data := c.GetAllCapturedArguments() - return wr[len(wr)-1], data[len(data)-1] -} - -func (c *MockTemplateWriter_Execute_OngoingVerification) GetAllCapturedArguments() (_param0 []io.Writer, _param1 []interface{}) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]io.Writer, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = 
param.(io.Writer) - } - _param1 = make([]interface{}, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(interface{}) - } - } - return -} diff --git a/server/legacy/controllers/templates/web_templates.go b/server/legacy/controllers/templates/web_templates.go deleted file mode 100644 index b32c91e7d..000000000 --- a/server/legacy/controllers/templates/web_templates.go +++ /dev/null @@ -1,628 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package templates - -import ( - "html/template" - "io" - "time" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_template_writer.go TemplateWriter - -// TemplateWriter is an interface over html/template that's used to enable -// mocking. -type TemplateWriter interface { - // Execute applies a parsed template to the specified data object, - // writing the output to wr. - Execute(wr io.Writer, data interface{}) error -} - -// LockIndexData holds the fields needed to display the index view for locks. -type LockIndexData struct { - LockPath string - RepoFullName string - PullNum int - Path string - Workspace string - Time time.Time - TimeFormatted string -} - -// ApplyLockData holds the fields to display in the index view -type ApplyLockData struct { - Locked bool - Time time.Time - TimeFormatted string -} - -// IndexData holds the data for rendering the index page -type IndexData struct { - Locks []LockIndexData - ApplyLock ApplyLockData - AtlantisVersion string - // CleanedBasePath is the path Atlantis is accessible at externally. If - // not using a path-based proxy, this will be an empty string. Never ends - // in a '/' (hence "cleaned"). - CleanedBasePath string -} - -var IndexTemplate = template.Must(template.New("index.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
[index.html.tmpl body elided: the template's HTML markup is not recoverable from this copy. The surviving text shows an "atlantis" header, a "Plan discarded and unlocked!" flash message, an apply-lock panel that renders either "Apply commands are disabled globally" / "Lock Status: Active" / "Active Since: {{ .ApplyLock.TimeFormatted }}" with an "Enable Apply Commands" action or "Apply commands are enabled" with a "Disable Apply Commands" action, a "Locks" section that ranges over .Locks rendering "{{.RepoFullName}} #{{.PullNum}} {{.Path}} {{.Workspace}}", a "Locked" badge and "{{.TimeFormatted}}" per row (falling back to "No locks found."), and a "v{{ .AtlantisVersion }}" footer.]
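As a rough illustration of how IndexTemplate and the IndexData/LockIndexData/ApplyLockData types defined above are consumed, a handler executes the template against a populated IndexData value. Every concrete value below is invented, and the caller package name is hypothetical.

package web // hypothetical caller

import (
	"net/http"
	"time"

	"github.com/runatlantis/atlantis/server/legacy/controllers/templates"
)

func serveLocksIndex(w http.ResponseWriter, _ *http.Request) {
	data := templates.IndexData{
		Locks: []templates.LockIndexData{
			{
				LockPath:      "/lock?id=owner%2Frepo%2Fpath%2Fdefault", // hypothetical URL
				RepoFullName:  "owner/repo",
				PullNum:       1,
				Path:          "path",
				Workspace:     "default",
				Time:          time.Now(),
				TimeFormatted: time.Now().Format("02-01-2006 15:04:05"),
			},
		},
		ApplyLock:       templates.ApplyLockData{Locked: false},
		AtlantisVersion: "0.0.0",
		CleanedBasePath: "",
	}
	if err := templates.IndexTemplate.Execute(w, data); err != nil {
		http.Error(w, err.Error(), http.StatusInternalServerError)
	}
}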
- - - -`)) - -// LockDetailData holds the fields needed to display the lock detail view. -type LockDetailData struct { - LockKeyEncoded string - LockKey string - RepoOwner string - RepoName string - PullRequestLink string - LockedBy string - Workspace string - Time time.Time - AtlantisVersion string - // CleanedBasePath is the path Atlantis is accessible at externally. If - // not using a path-based proxy, this will be an empty string. Never ends - // in a '/' (hence "cleaned"). - CleanedBasePath string -} - -var LockTemplate = template.Must(template.New("lock.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - -
[lock.html.tmpl body elided: the template's HTML markup is not recoverable from this copy. The surviving text shows an "atlantis" header, a "{{.LockKey}} Locked" heading, detail rows for "Repo Owner: {{.RepoOwner}}", "Repo Name: {{.RepoName}}", "Pull Request Link: {{.PullRequestLink}}", "Locked By: {{.LockedBy}}" and "Workspace: {{.Workspace}}", and a "v{{ .AtlantisVersion }}" footer.]
- - - -`)) - -// ProjectJobData holds the data needed to stream the current PR information -type ProjectJobData struct { - AtlantisVersion string - ProjectPath string - CleanedBasePath string -} - -var ProjectJobsTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
[blank.html.tmpl body (project jobs page) elided: the template's HTML markup is not recoverable from this copy; only an "atlantis" header survives from the page that streams a project's job output.]
- - - - - - - - - - -`)) - -type ProjectJobsError struct { - AtlantisVersion string - ProjectPath string - CleanedBasePath string -} - -var ProjectJobsErrorTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
[blank.html.tmpl body (project jobs error page) elided: the template's HTML markup is not recoverable from this copy; only an "atlantis" header survives.]
- - - - - - - -`)) - -// GithubSetupData holds the data for rendering the github app setup page -type GithubSetupData struct { - Target string - Manifest string - ID int64 - Key string - WebhookSecret string - URL string -} - -var GithubAppSetupTemplate = template.Must(template.New("github-app.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
[github-app.html.tmpl body elided: the template's HTML markup is not recoverable from this copy. The surviving text shows an "atlantis" header and a heading that reads "Create a github app" when .Target is set and "Github app created successfully!" otherwise; the .Target-gated form markup did not survive, and the post-creation branch shows the instructions "Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis:" followed by "gh-app-id: {{ .ID }}", "gh-app-key-file: {{ .Key }}" and "gh-webhook-secret: {{ .WebhookSecret }}".]
- - -`)) diff --git a/server/legacy/controllers/websocket/instrumented_mux.go b/server/legacy/controllers/websocket/instrumented_mux.go deleted file mode 100644 index 66d7c90d0..000000000 --- a/server/legacy/controllers/websocket/instrumented_mux.go +++ /dev/null @@ -1,24 +0,0 @@ -package websocket - -import ( - "github.com/uber-go/tally/v4" - "net/http" -) - -type InstrumentedMultiplexor struct { - Multiplexor - - NumWsConnections tally.Counter -} - -func NewInstrumentedMultiplexor(multiplexor Multiplexor, statsScope tally.Scope) Multiplexor { - return &InstrumentedMultiplexor{ - Multiplexor: multiplexor, - NumWsConnections: statsScope.SubScope("websocket").Counter("connections"), - } -} - -func (i *InstrumentedMultiplexor) Handle(w http.ResponseWriter, r *http.Request) error { - i.NumWsConnections.Inc(1) - return i.Multiplexor.Handle(w, r) -} diff --git a/server/legacy/controllers/websocket/mux.go b/server/legacy/controllers/websocket/mux.go deleted file mode 100644 index 11dd09ce4..000000000 --- a/server/legacy/controllers/websocket/mux.go +++ /dev/null @@ -1,70 +0,0 @@ -package websocket - -import ( - "context" - "net/http" - - "github.com/gorilla/websocket" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" -) - -// PartitionKeyGenerator generates partition keys for the multiplexor -type PartitionKeyGenerator interface { - Generate(r *http.Request) (string, error) -} - -// PartitionRegistry is the registry holding each partition -// and is responsible for registering/deregistering new buffers -type PartitionRegistry interface { - Register(ctx context.Context, key string, buffer chan string) -} - -type Multiplexor interface { - Handle(w http.ResponseWriter, r *http.Request) error -} - -// Multiplexor is responsible for handling the data transfer between the storage layer -// and the registry. Note this is still a WIP as right now the registry is assumed to handle -// everything. -type multiplexor struct { - writer *Writer - keyGenerator PartitionKeyGenerator - registry PartitionRegistry -} - -func NewMultiplexor(log logging.Logger, keyGenerator PartitionKeyGenerator, registry PartitionRegistry) Multiplexor { - //nolint:exhaustruct - upgrader := websocket.Upgrader{} - upgrader.CheckOrigin = func(r *http.Request) bool { return true } - return &multiplexor{ - writer: &Writer{ - upgrader: upgrader, - log: log, - }, - keyGenerator: keyGenerator, - registry: registry, - } -} - -// Handle should be called for a given websocket request. It blocks -// while writing to the websocket until the buffer is closed. -func (m *multiplexor) Handle(w http.ResponseWriter, r *http.Request) error { - key, err := m.keyGenerator.Generate(r) - - if err != nil { - return errors.Wrapf(err, "generating partition key") - } - - // Buffer size set to 1000 to ensure messages get queued. - // TODO: make buffer size configurable - buffer := make(chan string, 1000) - - // Note: Here we register the key without checking if the job exists because - // if the job DNE, the job is marked complete and we close the ws conn immediately - - // spinning up a goroutine for this since we are attempting to block on the read side. 
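To show how the multiplexor pieces above fit together, here is a sketch of a hypothetical key generator and handler wiring. Only PartitionKeyGenerator, PartitionRegistry, NewMultiplexor and logging.Logger come from this codebase; the "job-id" query parameter, the generator type and the handler constructor are invented.

package wsexample // hypothetical caller

import (
	"fmt"
	"net/http"

	"github.com/runatlantis/atlantis/server/legacy/controllers/websocket"
	"github.com/runatlantis/atlantis/server/logging"
)

// queryKeyGenerator derives the partition key from a query parameter.
type queryKeyGenerator struct{}

func (queryKeyGenerator) Generate(r *http.Request) (string, error) {
	jobID := r.URL.Query().Get("job-id")
	if jobID == "" {
		return "", fmt.Errorf("missing job-id query parameter")
	}
	return jobID, nil
}

// newJobsHandler upgrades each request to a websocket and streams whatever the
// registry pushes onto that partition's buffer until the buffer is closed.
func newJobsHandler(log logging.Logger, registry websocket.PartitionRegistry) http.HandlerFunc {
	mux := websocket.NewMultiplexor(log, queryKeyGenerator{}, registry)
	return func(w http.ResponseWriter, r *http.Request) {
		if err := mux.Handle(w, r); err != nil {
			log.Warn(err.Error())
		}
	}
}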
- go m.registry.Register(context.Background(), key, buffer) - - return errors.Wrapf(m.writer.Write(w, r, buffer), "writing to ws %s", key) -} diff --git a/server/legacy/controllers/websocket/writer.go b/server/legacy/controllers/websocket/writer.go deleted file mode 100644 index aab84010a..000000000 --- a/server/legacy/controllers/websocket/writer.go +++ /dev/null @@ -1,48 +0,0 @@ -package websocket - -import ( - "fmt" - "net/http" - - "github.com/gorilla/websocket" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" -) - -func NewWriter(log logging.Logger) *Writer { - upgrader := websocket.Upgrader{} - upgrader.CheckOrigin = func(r *http.Request) bool { return true } - return &Writer{ - upgrader: upgrader, - log: log, - } -} - -type Writer struct { - upgrader websocket.Upgrader - - //TODO: Remove dependency on atlantis logger here if we upstream this. - log logging.Logger -} - -func (w *Writer) Write(rw http.ResponseWriter, r *http.Request, input chan string) error { - conn, err := w.upgrader.Upgrade(rw, r, nil) - - if err != nil { - return errors.Wrap(err, "upgrading websocket connection") - } - - // block on reading our input channel - for msg := range input { - if err := conn.WriteMessage(websocket.BinaryMessage, []byte("\r"+msg+"\n")); err != nil { - w.log.Warn(fmt.Sprintf("Failed to write ws message: %s", err)) - return err - } - } - - // close ws conn after input channel is closed - if err = conn.Close(); err != nil { - w.log.Warn(fmt.Sprintf("Failed to close ws connection: %s", err)) - } - return nil -} diff --git a/server/legacy/core/db/boltdb.go b/server/legacy/core/db/boltdb.go deleted file mode 100644 index 46788a94a..000000000 --- a/server/legacy/core/db/boltdb.go +++ /dev/null @@ -1,492 +0,0 @@ -// Package db handles our database layer. -package db - -import ( - "bytes" - "encoding/json" - "fmt" - "os" - "path" - "strings" - "time" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - bolt "go.etcd.io/bbolt" -) - -// BoltDB is a database using BoltDB -type BoltDB struct { - db *bolt.DB - locksBucketName []byte - pullsBucketName []byte - globalLocksBucketName []byte -} - -const ( - locksBucketName = "runLocks" - pullsBucketName = "pulls" - globalLocksBucketName = "globalLocks" - pullKeySeparator = "::" -) - -// New returns a valid locker. We need to be able to write to dataDir -// since bolt stores its data as a file -func New(dataDir string) (*BoltDB, error) { - if err := os.MkdirAll(dataDir, 0700); err != nil { - return nil, errors.Wrap(err, "creating data dir") - } - db, err := bolt.Open(path.Join(dataDir, "atlantis.db"), 0600, &bolt.Options{Timeout: 1 * time.Second}) - if err != nil { - if err.Error() == "timeout" { - return nil, errors.New("starting BoltDB: timeout (a possible cause is another Atlantis instance already running)") - } - return nil, errors.Wrap(err, "starting BoltDB") - } - - // Create the buckets. 
- err = db.Update(func(tx *bolt.Tx) error { - if _, err = tx.CreateBucketIfNotExists([]byte(locksBucketName)); err != nil { - return errors.Wrapf(err, "creating bucket %q", locksBucketName) - } - if _, err = tx.CreateBucketIfNotExists([]byte(pullsBucketName)); err != nil { - return errors.Wrapf(err, "creating bucket %q", pullsBucketName) - } - if _, err = tx.CreateBucketIfNotExists([]byte(globalLocksBucketName)); err != nil { - return errors.Wrapf(err, "creating bucket %q", globalLocksBucketName) - } - return nil - }) - if err != nil { - return nil, errors.Wrap(err, "starting BoltDB") - } - // todo: close BoltDB when server is sigtermed - return &BoltDB{ - db: db, - locksBucketName: []byte(locksBucketName), - pullsBucketName: []byte(pullsBucketName), - globalLocksBucketName: []byte(globalLocksBucketName), - }, nil -} - -// NewWithDB is used for testing. -func NewWithDB(db *bolt.DB, bucket string, globalBucket string) (*BoltDB, error) { - return &BoltDB{ - db: db, - locksBucketName: []byte(bucket), - pullsBucketName: []byte(pullsBucketName), - globalLocksBucketName: []byte(globalBucket), - }, nil -} - -// TryLock attempts to create a new lock. If the lock is -// acquired, it will return true and the lock returned will be newLock. -// If the lock is not acquired, it will return false and the current -// lock that is preventing this lock from being acquired. -func (b *BoltDB) TryLock(newLock models.ProjectLock) (bool, models.ProjectLock, error) { - var lockAcquired bool - var currLock models.ProjectLock - key := b.lockKey(newLock.Project, newLock.Workspace) - newLockSerialized, _ := json.Marshal(newLock) - transactionErr := b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.locksBucketName) - - // if there is no run at that key then we're free to create the lock - currLockSerialized := bucket.Get([]byte(key)) - if currLockSerialized == nil { - // This will only error on readonly buckets, it's okay to ignore. - bucket.Put([]byte(key), newLockSerialized) // nolint: errcheck - lockAcquired = true - currLock = newLock - return nil - } - - // otherwise the lock fails, return to caller the run that's holding the lock - if err := json.Unmarshal(currLockSerialized, &currLock); err != nil { - return errors.Wrap(err, "failed to deserialize current lock") - } - lockAcquired = false - return nil - }) - - if transactionErr != nil { - return false, currLock, errors.Wrap(transactionErr, "DB transaction failed") - } - - return lockAcquired, currLock, nil -} - -// Unlock attempts to unlock the project and workspace. -// If there is no lock, then it will return a nil pointer. -// If there is a lock, then it will delete it, and then return a pointer -// to the deleted lock. -func (b *BoltDB) Unlock(p models.Project, workspace string) (*models.ProjectLock, error) { - var lock models.ProjectLock - foundLock := false - key := b.lockKey(p, workspace) - err := b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.locksBucketName) - serialized := bucket.Get([]byte(key)) - if serialized != nil { - if err := json.Unmarshal(serialized, &lock); err != nil { - return errors.Wrap(err, "failed to deserialize lock") - } - foundLock = true - } - return bucket.Delete([]byte(key)) - }) - err = errors.Wrap(err, "DB transaction failed") - if foundLock { - return &lock, err - } - return nil, err -} - -// List lists all current locks. 
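A sketch of the project-lock lifecycle built on the TryLock/Unlock methods above; the data directory, repo, path, pull number and username are all invented for illustration.

package main

import (
	"fmt"
	"time"

	"github.com/runatlantis/atlantis/server/legacy/core/db"
	"github.com/runatlantis/atlantis/server/models"
)

func main() {
	// "/tmp/atlantis-data" is a placeholder; db.New creates atlantis.db inside it.
	boltDB, err := db.New("/tmp/atlantis-data")
	if err != nil {
		panic(err)
	}

	project := models.NewProject("owner/repo", "terraform/stack")
	lock := models.ProjectLock{
		Project:   project,
		Workspace: "default",
		Pull:      models.PullRequest{Num: 1},
		User:      models.User{Username: "someone"},
		Time:      time.Now(),
	}

	acquired, holder, err := boltDB.TryLock(lock)
	if err != nil {
		panic(err)
	}
	if !acquired {
		fmt.Printf("already locked by pull #%d\n", holder.Pull.Num)
		return
	}

	// ... do work while holding the project lock ...

	if _, err := boltDB.Unlock(project, "default"); err != nil {
		panic(err)
	}
}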
-func (b *BoltDB) List() ([]models.ProjectLock, error) { - var locks []models.ProjectLock - var locksBytes [][]byte - err := b.db.View(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.locksBucketName) - c := bucket.Cursor() - for k, v := c.First(); k != nil; k, v = c.Next() { - locksBytes = append(locksBytes, v) - } - return nil - }) - if err != nil { - return locks, errors.Wrap(err, "DB transaction failed") - } - - // deserialize bytes into the proper objects - for k, v := range locksBytes { - var lock models.ProjectLock - if err := json.Unmarshal(v, &lock); err != nil { - return locks, errors.Wrap(err, fmt.Sprintf("failed to deserialize lock at key '%d'", k)) - } - locks = append(locks, lock) - } - - return locks, nil -} - -// LockCommand attempts to create a new lock for a CommandName. -// If the lock doesn't exists, it will create a lock and return a pointer to it. -// If the lock already exists, it will return an "lock already exists" error -func (b *BoltDB) LockCommand(cmdName command.Name, lockTime time.Time) (*command.Lock, error) { - lock := command.Lock{ - CommandName: cmdName, - LockMetadata: command.LockMetadata{ - UnixTime: lockTime.Unix(), - }, - } - - newLockSerialized, _ := json.Marshal(lock) - transactionErr := b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.globalLocksBucketName) - - currLockSerialized := bucket.Get([]byte(b.commandLockKey(cmdName))) - if currLockSerialized != nil { - return errors.New("lock already exists") - } - - // This will only error on readonly buckets, it's okay to ignore. - bucket.Put([]byte(b.commandLockKey(cmdName)), newLockSerialized) // nolint: errcheck - return nil - }) - - if transactionErr != nil { - return nil, errors.Wrap(transactionErr, "db transaction failed") - } - - return &lock, nil -} - -// UnlockCommand removes CommandName lock if present. -// If there are no lock it returns an error. -func (b *BoltDB) UnlockCommand(cmdName command.Name) error { - transactionErr := b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.globalLocksBucketName) - - if l := bucket.Get([]byte(b.commandLockKey(cmdName))); l == nil { - return errors.New("no lock exists") - } - - return bucket.Delete([]byte(b.commandLockKey(cmdName))) - }) - - if transactionErr != nil { - return errors.Wrap(transactionErr, "db transaction failed") - } - - return nil -} - -// CheckCommandLock checks if CommandName lock was set. -// If the lock exists return the pointer to the lock object, otherwise return nil -func (b *BoltDB) CheckCommandLock(cmdName command.Name) (*command.Lock, error) { - cmdLock := command.Lock{} - - found := false - - err := b.db.View(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.globalLocksBucketName) - - serializedLock := bucket.Get([]byte(b.commandLockKey(cmdName))) - - if serializedLock != nil { - if err := json.Unmarshal(serializedLock, &cmdLock); err != nil { - return errors.Wrap(err, "failed to deserialize UserConfig") - } - found = true - } - - return nil - }) - - if found { - return &cmdLock, err - } - - return nil, err -} - -// UnlockByPull deletes all locks associated with that pull request and returns them. 
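The same store also backs the global command lock; a sketch of that flow using the LockCommand, CheckCommandLock and UnlockCommand methods above. The *db.BoltDB value is assumed to come from db.New and error handling is abbreviated.

package lockexample // hypothetical caller

import (
	"fmt"
	"time"

	"github.com/runatlantis/atlantis/server/legacy/core/db"
	"github.com/runatlantis/atlantis/server/legacy/events/command"
)

func toggleApplyLock(boltDB *db.BoltDB) error {
	// Acquire the global apply lock; this errors if another caller already holds it.
	if _, err := boltDB.LockCommand(command.Apply, time.Now()); err != nil {
		return err
	}

	// A nil result means no lock is currently held.
	lock, err := boltDB.CheckCommandLock(command.Apply)
	if err != nil {
		return err
	}
	if lock != nil {
		fmt.Println("apply locked since", lock.LockTime())
	}

	// Release it again.
	return boltDB.UnlockCommand(command.Apply)
}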
-func (b *BoltDB) UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) { - var locks []models.ProjectLock - err := b.db.View(func(tx *bolt.Tx) error { - c := tx.Bucket(b.locksBucketName).Cursor() - - // we can use the repoFullName as a prefix search since that's the first part of the key - for k, v := c.Seek([]byte(repoFullName)); k != nil && bytes.HasPrefix(k, []byte(repoFullName)); k, v = c.Next() { - var lock models.ProjectLock - if err := json.Unmarshal(v, &lock); err != nil { - return errors.Wrapf(err, "deserializing lock at key %q", string(k)) - } - if lock.Pull.Num == pullNum { - locks = append(locks, lock) - } - } - return nil - }) - if err != nil { - return locks, err - } - - // delete the locks - for _, lock := range locks { - if _, err = b.Unlock(lock.Project, lock.Workspace); err != nil { - return locks, errors.Wrapf(err, "unlocking repo %s, path %s, workspace %s", lock.Project.RepoFullName, lock.Project.Path, lock.Workspace) - } - } - return locks, nil -} - -// GetLock returns a pointer to the lock for that project and workspace. -// If there is no lock, it returns a nil pointer. -func (b *BoltDB) GetLock(p models.Project, workspace string) (*models.ProjectLock, error) { - key := b.lockKey(p, workspace) - var lockBytes []byte - err := b.db.View(func(tx *bolt.Tx) error { - b := tx.Bucket(b.locksBucketName) - lockBytes = b.Get([]byte(key)) - return nil - }) - if err != nil { - return nil, errors.Wrap(err, "getting lock data") - } - // lockBytes will be nil if there was no data at that key - if lockBytes == nil { - return nil, nil - } - - var lock models.ProjectLock - if err := json.Unmarshal(lockBytes, &lock); err != nil { - return nil, errors.Wrapf(err, "deserializing lock at key %q", key) - } - - // need to set it to Local after deserialization due to https://github.com/golang/go/issues/19486 - lock.Time = lock.Time.Local() - return &lock, nil -} - -// UpdatePullWithResults updates pull's status with the latest project results. -// It returns the new PullStatus object. -func (b *BoltDB) UpdatePullWithResults(pull models.PullRequest, newResults []command.ProjectResult) (models.PullStatus, error) { - key, err := b.pullKey(pull) - if err != nil { - return models.PullStatus{}, err - } - - var newStatus models.PullStatus - err = b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.pullsBucketName) - currStatus, err := b.getPullFromBucket(bucket, key) - if err != nil { - return err - } - - // If there is no pull OR if the pull we have is out of date, we - // just write a new pull. - if currStatus == nil || currStatus.Pull.HeadCommit != pull.HeadCommit { - var statuses []models.ProjectStatus - for _, r := range newResults { - statuses = append(statuses, b.projectResultToProject(r)) - } - newStatus = models.PullStatus{ - Pull: pull, - Projects: statuses, - UpdatedAt: time.Now().Unix(), - } - } else { - // If there's an existing pull at the right commit then we have to - // merge our project results with the existing ones. We do a merge - // because it's possible a user is just applying a single project - // in this command and so we don't want to delete our data about - // other projects that aren't affected by this command. - newStatus = *currStatus - newStatus.UpdatedAt = time.Now().Unix() - for _, res := range newResults { - // First, check if we should update any existing projects. - updatedExisting := false - for i := range newStatus.Projects { - // NOTE: We're using a reference here because we are - // in-place updating its Status field. 
- proj := &newStatus.Projects[i] - if res.Workspace == proj.Workspace && - res.RepoRelDir == proj.RepoRelDir && - res.ProjectName == proj.ProjectName { - proj.Status = res.PlanStatus() - updatedExisting = true - break - } - } - - if !updatedExisting { - // If we didn't update an existing project, then we need to - // add this because it's a new one. - newStatus.Projects = append(newStatus.Projects, b.projectResultToProject(res)) - } - } - } - - // Now, we overwrite the key with our new status. - return b.writePullToBucket(bucket, key, newStatus) - }) - return newStatus, errors.Wrap(err, "DB transaction failed") -} - -// GetPullStatus returns the status for pull. -// If there is no status, returns a nil pointer. -func (b *BoltDB) GetPullStatus(pull models.PullRequest) (*models.PullStatus, error) { - key, err := b.pullKey(pull) - if err != nil { - return nil, err - } - var s *models.PullStatus - err = b.db.View(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.pullsBucketName) - var txErr error - s, txErr = b.getPullFromBucket(bucket, key) - return txErr - }) - return s, errors.Wrap(err, "DB transaction failed") -} - -// DeletePullStatus deletes the status for pull. -func (b *BoltDB) DeletePullStatus(pull models.PullRequest) error { - key, err := b.pullKey(pull) - if err != nil { - return err - } - err = b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.pullsBucketName) - return bucket.Delete(key) - }) - return errors.Wrap(err, "DB transaction failed") -} - -// UpdateProjectStatus updates project status. -func (b *BoltDB) UpdateProjectStatus(pull models.PullRequest, workspace string, repoRelDir string, newStatus models.ProjectPlanStatus) error { - key, err := b.pullKey(pull) - if err != nil { - return err - } - err = b.db.Update(func(tx *bolt.Tx) error { - bucket := tx.Bucket(b.pullsBucketName) - currStatusPtr, err := b.getPullFromBucket(bucket, key) - if err != nil { - return err - } - if currStatusPtr == nil { - return nil - } - currStatus := *currStatusPtr - - // Update the status. - for i := range currStatus.Projects { - // NOTE: We're using a reference here because we are - // in-place updating its Status field. 
- proj := &currStatus.Projects[i] - if proj.Workspace == workspace && proj.RepoRelDir == repoRelDir { - proj.Status = newStatus - break - } - } - return b.writePullToBucket(bucket, key, currStatus) - }) - return errors.Wrap(err, "DB transaction failed") -} - -func (b *BoltDB) pullKey(pull models.PullRequest) ([]byte, error) { - hostname := pull.BaseRepo.VCSHost.Hostname - if strings.Contains(hostname, pullKeySeparator) { - return nil, fmt.Errorf("vcs hostname %q contains illegal string %q", hostname, pullKeySeparator) - } - repo := pull.BaseRepo.FullName - if strings.Contains(repo, pullKeySeparator) { - return nil, fmt.Errorf("repo name %q contains illegal string %q", hostname, pullKeySeparator) - } - - return []byte(fmt.Sprintf("%s::%s::%d", hostname, repo, pull.Num)), - nil -} - -func (b *BoltDB) commandLockKey(cmdName command.Name) string { - return fmt.Sprintf("%s/lock", cmdName) -} - -func (b *BoltDB) lockKey(p models.Project, workspace string) string { - return fmt.Sprintf("%s/%s/%s", p.RepoFullName, p.Path, workspace) -} - -func (b *BoltDB) getPullFromBucket(bucket *bolt.Bucket, key []byte) (*models.PullStatus, error) { - serialized := bucket.Get(key) - if serialized == nil { - return nil, nil - } - - var p models.PullStatus - if err := json.Unmarshal(serialized, &p); err != nil { - return nil, errors.Wrapf(err, "deserializing pull at %q with contents %q", key, serialized) - } - return &p, nil -} - -func (b *BoltDB) writePullToBucket(bucket *bolt.Bucket, key []byte, pull models.PullStatus) error { - serialized, err := json.Marshal(pull) - if err != nil { - return errors.Wrap(err, "serializing") - } - return bucket.Put(key, serialized) -} - -func (b *BoltDB) projectResultToProject(p command.ProjectResult) models.ProjectStatus { - return models.ProjectStatus{ - Workspace: p.Workspace, - RepoRelDir: p.RepoRelDir, - ProjectName: p.ProjectName, - Status: p.PlanStatus(), - } -} diff --git a/server/legacy/core/db/boltdb_test.go b/server/legacy/core/db/boltdb_test.go deleted file mode 100644 index 74ac8b6ee..000000000 --- a/server/legacy/core/db/boltdb_test.go +++ /dev/null @@ -1,821 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package db_test - -import ( - "os" - "testing" - "time" - - "github.com/runatlantis/atlantis/server/legacy/core/db" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" - bolt "go.etcd.io/bbolt" -) - -var lockBucket = "bucket" -var configBucket = "configBucket" -var project = models.NewProject("owner/repo", "parent/child") -var workspace = "default" -var pullNum = 1 -var lock = models.ProjectLock{ - Pull: models.PullRequest{ - Num: pullNum, - }, - User: models.User{ - Username: "lkysow", - }, - Workspace: workspace, - Project: project, - Time: time.Now(), -} - -func TestLockCommandNotSet(t *testing.T) { - t.Log("retrieving apply lock when there are none should return empty LockCommand") - db, b := newTestDB() - defer cleanupDB(db) - exists, err := b.CheckCommandLock(command.Apply) - Ok(t, err) - Assert(t, exists == nil, "exp nil") -} - -func TestLockCommandEnabled(t *testing.T) { - t.Log("setting the apply lock") - db, b := newTestDB() - defer cleanupDB(db) - timeNow := time.Now() - _, err := b.LockCommand(command.Apply, timeNow) - Ok(t, err) - - config, err := b.CheckCommandLock(command.Apply) - Ok(t, err) - Equals(t, true, config.IsLocked()) -} - -func TestLockCommandFail(t *testing.T) { - t.Log("setting the apply lock") - db, b := newTestDB() - defer cleanupDB(db) - timeNow := time.Now() - _, err := b.LockCommand(command.Apply, timeNow) - Ok(t, err) - - _, err = b.LockCommand(command.Apply, timeNow) - ErrEquals(t, "db transaction failed: lock already exists", err) -} - -func TestUnlockCommandDisabled(t *testing.T) { - t.Log("unsetting the apply lock") - db, b := newTestDB() - defer cleanupDB(db) - timeNow := time.Now() - _, err := b.LockCommand(command.Apply, timeNow) - Ok(t, err) - - config, err := b.CheckCommandLock(command.Apply) - Ok(t, err) - Equals(t, true, config.IsLocked()) - - err = b.UnlockCommand(command.Apply) - Ok(t, err) - - config, err = b.CheckCommandLock(command.Apply) - Ok(t, err) - Assert(t, config == nil, "exp nil object") -} - -func TestUnlockCommandFail(t *testing.T) { - t.Log("setting the apply lock") - db, b := newTestDB() - defer cleanupDB(db) - err := b.UnlockCommand(command.Apply) - ErrEquals(t, "db transaction failed: no lock exists", err) -} - -func TestMixedLocksPresent(t *testing.T) { - db, b := newTestDB() - defer cleanupDB(db) - timeNow := time.Now() - _, err := b.LockCommand(command.Apply, timeNow) - Ok(t, err) - - _, _, err = b.TryLock(lock) - Ok(t, err) - ls, err := b.List() - Ok(t, err) - Equals(t, 1, len(ls)) -} - -func TestListNoLocks(t *testing.T) { - t.Log("listing locks when there are none should return an empty list") - db, b := newTestDB() - defer cleanupDB(db) - ls, err := b.List() - Ok(t, err) - Equals(t, 0, len(ls)) -} - -func TestListOneLock(t *testing.T) { - t.Log("listing locks when there is one should return it") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - ls, err := b.List() - Ok(t, err) - Equals(t, 1, len(ls)) -} - -func TestListMultipleLocks(t *testing.T) { - t.Log("listing locks when there are multiple should return them") - db, b := newTestDB() - defer cleanupDB(db) - - // add multiple locks - repos := []string{ - "owner/repo1", - "owner/repo2", - "owner/repo3", - "owner/repo4", - } - - for _, r := range repos { - newLock := lock - newLock.Project = models.NewProject(r, "path") - _, _, err := b.TryLock(newLock) - Ok(t, err) - } - ls, err := b.List() - Ok(t, err) - Equals(t, 4, len(ls)) - for _, r := range repos { - found := false - for _, l := range ls { - if l.Project.RepoFullName == r { - found = true - } - } - Assert(t, found, "expected %s in %v", r, ls) - } -} - -func TestListAddRemove(t *testing.T) { - 
t.Log("listing after adding and removing should return none") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - _, err = b.Unlock(project, workspace) - Ok(t, err) - - ls, err := b.List() - Ok(t, err) - Equals(t, 0, len(ls)) -} - -func TestLockingNoLocks(t *testing.T) { - t.Log("with no locks yet, lock should succeed") - db, b := newTestDB() - defer cleanupDB(db) - acquired, currLock, err := b.TryLock(lock) - Ok(t, err) - Equals(t, true, acquired) - Equals(t, lock, currLock) -} - -func TestLockingExistingLock(t *testing.T) { - t.Log("if there is an existing lock, lock should...") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - - t.Log("...succeed if the new project has a different path") - { - newLock := lock - newLock.Project = models.NewProject(project.RepoFullName, "different/path") - acquired, currLock, err := b.TryLock(newLock) - Ok(t, err) - Equals(t, true, acquired) - Equals(t, pullNum, currLock.Pull.Num) - } - - t.Log("...succeed if the new project has a different workspace") - { - newLock := lock - newLock.Workspace = "different-workspace" - acquired, currLock, err := b.TryLock(newLock) - Ok(t, err) - Equals(t, true, acquired) - Equals(t, newLock, currLock) - } - - t.Log("...succeed if the new project has a different repoName") - { - newLock := lock - newLock.Project = models.NewProject("different/repo", project.Path) - acquired, currLock, err := b.TryLock(newLock) - Ok(t, err) - Equals(t, true, acquired) - Equals(t, newLock, currLock) - } - - t.Log("...not succeed if the new project only has a different pullNum") - { - newLock := lock - newLock.Pull.Num = lock.Pull.Num + 1 - acquired, currLock, err := b.TryLock(newLock) - Ok(t, err) - Equals(t, false, acquired) - Equals(t, currLock.Pull.Num, pullNum) - } -} - -func TestUnlockingNoLocks(t *testing.T) { - t.Log("unlocking with no locks should succeed") - db, b := newTestDB() - defer cleanupDB(db) - _, err := b.Unlock(project, workspace) - - Ok(t, err) -} - -func TestUnlocking(t *testing.T) { - t.Log("unlocking with an existing lock should succeed") - db, b := newTestDB() - defer cleanupDB(db) - - _, _, err := b.TryLock(lock) - Ok(t, err) - _, err = b.Unlock(project, workspace) - Ok(t, err) - - // should be no locks listed - ls, err := b.List() - Ok(t, err) - Equals(t, 0, len(ls)) - - // should be able to re-lock that repo with a new pull num - newLock := lock - newLock.Pull.Num = lock.Pull.Num + 1 - acquired, currLock, err := b.TryLock(newLock) - Ok(t, err) - Equals(t, true, acquired) - Equals(t, newLock, currLock) -} - -func TestUnlockingMultiple(t *testing.T) { - t.Log("unlocking and locking multiple locks should succeed") - db, b := newTestDB() - defer cleanupDB(db) - - _, _, err := b.TryLock(lock) - Ok(t, err) - - new := lock - new.Project.RepoFullName = "new/repo" - _, _, err = b.TryLock(new) - Ok(t, err) - - new2 := lock - new2.Project.Path = "new/path" - _, _, err = b.TryLock(new2) - Ok(t, err) - - new3 := lock - new3.Workspace = "new-workspace" - _, _, err = b.TryLock(new3) - Ok(t, err) - - // now try and unlock them - _, err = b.Unlock(new3.Project, new3.Workspace) - Ok(t, err) - _, err = b.Unlock(new2.Project, workspace) - Ok(t, err) - _, err = b.Unlock(new.Project, workspace) - Ok(t, err) - _, err = b.Unlock(project, workspace) - Ok(t, err) - - // should be none left - ls, err := b.List() - Ok(t, err) - Equals(t, 0, len(ls)) -} - -func TestUnlockByPullNone(t *testing.T) { - t.Log("UnlockByPull should be successful when there are no locks") 
- db, b := newTestDB() - defer cleanupDB(db) - - _, err := b.UnlockByPull("any/repo", 1) - Ok(t, err) -} - -func TestUnlockByPullOne(t *testing.T) { - t.Log("with one lock, UnlockByPull should...") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - - t.Log("...delete nothing when its the same repo but a different pull") - { - _, err := b.UnlockByPull(project.RepoFullName, pullNum+1) - Ok(t, err) - ls, err := b.List() - Ok(t, err) - Equals(t, 1, len(ls)) - } - t.Log("...delete nothing when its the same pull but a different repo") - { - _, err := b.UnlockByPull("different/repo", pullNum) - Ok(t, err) - ls, err := b.List() - Ok(t, err) - Equals(t, 1, len(ls)) - } - t.Log("...delete the lock when its the same repo and pull") - { - _, err := b.UnlockByPull(project.RepoFullName, pullNum) - Ok(t, err) - ls, err := b.List() - Ok(t, err) - Equals(t, 0, len(ls)) - } -} - -func TestUnlockByPullAfterUnlock(t *testing.T) { - t.Log("after locking and unlocking, UnlockByPull should be successful") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - _, err = b.Unlock(project, workspace) - Ok(t, err) - - _, err = b.UnlockByPull(project.RepoFullName, pullNum) - Ok(t, err) - ls, err := b.List() - Ok(t, err) - Equals(t, 0, len(ls)) -} - -func TestUnlockByPullMatching(t *testing.T) { - t.Log("UnlockByPull should delete all locks in that repo and pull num") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - - // add additional locks with the same repo and pull num but different paths/workspaces - new := lock - new.Project.Path = "dif/path" - _, _, err = b.TryLock(new) - Ok(t, err) - new2 := lock - new2.Workspace = "new-workspace" - _, _, err = b.TryLock(new2) - Ok(t, err) - - // there should now be 3 - ls, err := b.List() - Ok(t, err) - Equals(t, 3, len(ls)) - - // should all be unlocked - _, err = b.UnlockByPull(project.RepoFullName, pullNum) - Ok(t, err) - ls, err = b.List() - Ok(t, err) - Equals(t, 0, len(ls)) -} - -func TestGetLockNotThere(t *testing.T) { - t.Log("getting a lock that doesn't exist should return a nil pointer") - db, b := newTestDB() - defer cleanupDB(db) - l, err := b.GetLock(project, workspace) - Ok(t, err) - Equals(t, (*models.ProjectLock)(nil), l) -} - -func TestGetLock(t *testing.T) { - t.Log("getting a lock should return the lock") - db, b := newTestDB() - defer cleanupDB(db) - _, _, err := b.TryLock(lock) - Ok(t, err) - - l, err := b.GetLock(project, workspace) - Ok(t, err) - // can't compare against time so doing each field - Equals(t, lock.Project, l.Project) - Equals(t, lock.Workspace, l.Workspace) - Equals(t, lock.Pull, l.Pull) - Equals(t, lock.User, l.User) -} - -// Test we can create a status and then getCommandLock it. 
-func TestPullStatus_UpdateGet(t *testing.T) { - b, cleanup := newTestDB2(t) - defer cleanup() - - pull := models.PullRequest{ - Num: 1, - HeadCommit: "sha", - URL: "url", - HeadBranch: "head", - BaseBranch: "base", - Author: "lkysow", - State: models.OpenPullState, - BaseRepo: models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "clone-url", - SanitizedCloneURL: "clone-url", - VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - } - status, err := b.UpdatePullWithResults( - pull, - []command.ProjectResult{ - { - Command: command.Plan, - RepoRelDir: ".", - Workspace: "default", - Failure: "failure", - }, - }) - Ok(t, err) - - maybeStatus, err := b.GetPullStatus(pull) - Ok(t, err) - Assert(t, maybeStatus.UpdatedAt != 0, "status should have a new time set") - Equals(t, pull, maybeStatus.Pull) // nolint: staticcheck - Equals(t, []models.ProjectStatus{ - { - Workspace: "default", - RepoRelDir: ".", - ProjectName: "", - Status: models.ErroredPlanStatus, - }, - }, status.Projects) -} - -// Test we can create a status, delete it, and then we shouldn't be able to getCommandLock -// it. -func TestPullStatus_UpdateDeleteGet(t *testing.T) { - b, cleanup := newTestDB2(t) - defer cleanup() - - pull := models.PullRequest{ - Num: 1, - HeadCommit: "sha", - URL: "url", - HeadBranch: "head", - BaseBranch: "base", - Author: "lkysow", - State: models.OpenPullState, - BaseRepo: models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "clone-url", - SanitizedCloneURL: "clone-url", - VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - } - _, err := b.UpdatePullWithResults( - pull, - []command.ProjectResult{ - { - RepoRelDir: ".", - Workspace: "default", - Failure: "failure", - }, - }) - Ok(t, err) - - err = b.DeletePullStatus(pull) - Ok(t, err) - - maybeStatus, err := b.GetPullStatus(pull) - Ok(t, err) - Assert(t, maybeStatus == nil, "exp nil") -} - -// Test we can create a status, update a specific project's status within that -// pull status, and when we getCommandLock all the project statuses, that specific project -// should be updated. 
-func TestPullStatus_UpdateProject(t *testing.T) { - b, cleanup := newTestDB2(t) - defer cleanup() - - pull := models.PullRequest{ - Num: 1, - HeadCommit: "sha", - URL: "url", - HeadBranch: "head", - BaseBranch: "base", - Author: "lkysow", - State: models.OpenPullState, - BaseRepo: models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "clone-url", - SanitizedCloneURL: "clone-url", - VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - } - _, err := b.UpdatePullWithResults( - pull, - []command.ProjectResult{ - { - RepoRelDir: ".", - Workspace: "default", - Failure: "failure", - }, - { - RepoRelDir: ".", - Workspace: "staging", - ApplySuccess: "success!", - }, - }) - Ok(t, err) - - err = b.UpdateProjectStatus(pull, "default", ".", models.DiscardedPlanStatus) - Ok(t, err) - - status, err := b.GetPullStatus(pull) - Ok(t, err) - Assert(t, status.UpdatedAt != 0, "status should have a new time set") - Equals(t, pull, status.Pull) // nolint: staticcheck - Equals(t, []models.ProjectStatus{ - { - Workspace: "default", - RepoRelDir: ".", - ProjectName: "", - Status: models.DiscardedPlanStatus, - }, - { - Workspace: "staging", - RepoRelDir: ".", - ProjectName: "", - Status: models.AppliedPlanStatus, - }, - }, status.Projects) // nolint: staticcheck -} - -// Test that if we update an existing pull status and our new status is for a -// different HeadSHA, that we just overwrite the old status. -func TestPullStatus_UpdateNewCommit(t *testing.T) { - b, cleanup := newTestDB2(t) - defer cleanup() - - pull := models.PullRequest{ - Num: 1, - HeadCommit: "sha", - URL: "url", - HeadBranch: "head", - BaseBranch: "base", - Author: "lkysow", - State: models.OpenPullState, - BaseRepo: models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "clone-url", - SanitizedCloneURL: "clone-url", - VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - } - initialStatus, err := b.UpdatePullWithResults( - pull, - []command.ProjectResult{ - { - RepoRelDir: ".", - Workspace: "default", - Failure: "failure", - }, - }) - Ok(t, err) - initialTimestamp := initialStatus.UpdatedAt - Assert(t, initialTimestamp != 0, "status should have a new time set") - time.Sleep(1 * time.Second) - - pull.HeadCommit = "newsha" - status, err := b.UpdatePullWithResults(pull, - []command.ProjectResult{ - { - RepoRelDir: ".", - Workspace: "staging", - ApplySuccess: "success!", - }, - }) - - Ok(t, err) - Equals(t, 1, len(status.Projects)) - - maybeStatus, err := b.GetPullStatus(pull) - Ok(t, err) - Assert(t, maybeStatus.UpdatedAt > initialTimestamp, "new timestamp %v should be after old %v", maybeStatus.UpdatedAt, initialTimestamp) - Equals(t, pull, maybeStatus.Pull) - Equals(t, []models.ProjectStatus{ - { - Workspace: "staging", - RepoRelDir: ".", - ProjectName: "", - Status: models.AppliedPlanStatus, - }, - }, maybeStatus.Projects) -} - -// Test that if we update an existing pull status and our new status is for a -// the same commit, that we merge the statuses. 
-func TestPullStatus_UpdateMerge(t *testing.T) { - b, cleanup := newTestDB2(t) - defer cleanup() - - pull := models.PullRequest{ - Num: 1, - HeadCommit: "sha", - URL: "url", - HeadBranch: "head", - BaseBranch: "base", - Author: "lkysow", - State: models.OpenPullState, - BaseRepo: models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "clone-url", - SanitizedCloneURL: "clone-url", - VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - } - initialStatus, err := b.UpdatePullWithResults( - pull, - []command.ProjectResult{ - { - Command: command.Plan, - RepoRelDir: "mergeme", - Workspace: "default", - Failure: "failure", - }, - { - Command: command.Plan, - RepoRelDir: "projectname", - Workspace: "default", - ProjectName: "projectname", - Failure: "failure", - }, - { - Command: command.Plan, - RepoRelDir: "staythesame", - Workspace: "default", - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: "tf out", - LockURL: "lock-url", - RePlanCmd: "plan command", - ApplyCmd: "apply command", - }, - }, - }) - Ok(t, err) - initialTimestamp := initialStatus.UpdatedAt - Assert(t, initialTimestamp != 0, "status should have a new time set") - time.Sleep(1 * time.Second) - - updateStatus, err := b.UpdatePullWithResults(pull, - []command.ProjectResult{ - { - Command: command.Apply, - RepoRelDir: "mergeme", - Workspace: "default", - ApplySuccess: "applied!", - }, - { - Command: command.Apply, - RepoRelDir: "projectname", - Workspace: "default", - ProjectName: "projectname", - Error: errors.New("apply error"), - }, - { - Command: command.Apply, - RepoRelDir: "newresult", - Workspace: "default", - ApplySuccess: "success!", - }, - }) - Ok(t, err) - - getStatus, err := b.GetPullStatus(pull) - Ok(t, err) - - // Test both the pull state returned from the update call *and* the getCommandLock - // call. - for _, s := range []models.PullStatus{updateStatus, *getStatus} { - Assert(t, s.UpdatedAt > initialTimestamp, "new timestamp should be after old") - Equals(t, pull, s.Pull) - Equals(t, []models.ProjectStatus{ - { - RepoRelDir: "mergeme", - Workspace: "default", - Status: models.AppliedPlanStatus, - }, - { - RepoRelDir: "projectname", - Workspace: "default", - ProjectName: "projectname", - Status: models.ErroredApplyStatus, - }, - { - RepoRelDir: "staythesame", - Workspace: "default", - Status: models.PlannedPlanStatus, - }, - { - RepoRelDir: "newresult", - Workspace: "default", - Status: models.AppliedPlanStatus, - }, - }, updateStatus.Projects) - } -} - -// newTestDB returns a TestDB using a temporary path. -func newTestDB() (*bolt.DB, *db.BoltDB) { - // Retrieve a temporary path. - f, err := os.CreateTemp("", "") - if err != nil { - panic(errors.Wrap(err, "failed to create temp file")) - } - path := f.Name() - f.Close() // nolint: errcheck - - // Open the database. 
- boltDB, err := bolt.Open(path, 0600, nil) - if err != nil { - panic(errors.Wrap(err, "could not start bolt DB")) - } - if err := boltDB.Update(func(tx *bolt.Tx) error { - if _, err := tx.CreateBucketIfNotExists([]byte(lockBucket)); err != nil { - return errors.Wrap(err, "failed to create bucket") - } - if _, err := tx.CreateBucketIfNotExists([]byte(configBucket)); err != nil { - return errors.Wrap(err, "failed to create bucket") - } - return nil - }); err != nil { - panic(errors.Wrap(err, "could not create bucket")) - } - b, _ := db.NewWithDB(boltDB, lockBucket, configBucket) - return boltDB, b -} - -func newTestDB2(t *testing.T) (*db.BoltDB, func()) { - tmp, cleanup := TempDir(t) - boltDB, err := db.New(tmp) - Ok(t, err) - return boltDB, func() { - cleanup() - } -} - -func cleanupDB(db *bolt.DB) { - os.Remove(db.Path()) // nolint: errcheck - db.Close() // nolint: errcheck -} diff --git a/server/legacy/core/locking/apply_locking.go b/server/legacy/core/locking/apply_locking.go deleted file mode 100644 index 95713e8d6..000000000 --- a/server/legacy/core/locking/apply_locking.go +++ /dev/null @@ -1,113 +0,0 @@ -package locking - -import ( - "errors" - "time" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_apply_lock_checker.go ApplyLockChecker - -// ApplyLockChecker is an implementation of the global apply lock retrieval. -// It returns an object that contains information about apply locks status. -type ApplyLockChecker interface { - CheckApplyLock() (ApplyCommandLock, error) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_apply_locker.go ApplyLocker - -// ApplyLocker interface that manages locks for apply command runner -type ApplyLocker interface { - // LockApply creates a lock for ApplyCommand if lock already exists it will - // return existing lock without any changes - LockApply() (ApplyCommandLock, error) - // UnlockApply deletes apply lock created by LockApply if present, otherwise - // it is a no-op - UnlockApply() error - ApplyLockChecker -} - -// ApplyCommandLock contains information about apply command lock status. -type ApplyCommandLock struct { - // Locked is true is when apply commands are locked - // Either by using DisableApply flag or creating a global ApplyCommandLock - // DisableApply lock take precedence when set - Locked bool - Time time.Time - Failure string -} - -type ApplyClient struct { - backend Backend - disableApplyFlag bool -} - -func NewApplyClient(backend Backend, disableApplyFlag bool) ApplyLocker { - return &ApplyClient{ - backend: backend, - disableApplyFlag: disableApplyFlag, - } -} - -// LockApply acquires global apply lock. -// DisableApplyFlag takes presedence to any existing locks, if it is set to true -// this function returns an error -func (c *ApplyClient) LockApply() (ApplyCommandLock, error) { - response := ApplyCommandLock{} - - if c.disableApplyFlag { - return response, errors.New("DisableApplyFlag is set; Apply commands are locked globally until flag is unset") - } - - applyCmdLock, err := c.backend.LockCommand(command.Apply, time.Now()) - if err != nil { - return response, err - } - - if applyCmdLock != nil { - response.Locked = true - response.Time = applyCmdLock.LockTime() - } - return response, nil -} - -// UnlockApply releases a global apply lock. 
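A sketch of how a caller might drive the ApplyLocker defined above; backend is any locking.Backend implementation (for example the BoltDB store earlier in this diff), and the flag value and package name are illustrative.

package lockexample // hypothetical caller

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/legacy/core/locking"
)

func lockAppliesGlobally(backend locking.Backend) error {
	// disableApplyFlag=false: the lock is managed at runtime rather than by flag.
	locker := locking.NewApplyClient(backend, false)

	lock, err := locker.LockApply()
	if err != nil {
		return err
	}
	fmt.Println("applies locked at", lock.Time)

	status, err := locker.CheckApplyLock()
	if err != nil {
		return err
	}
	fmt.Println("still locked:", status.Locked)

	// Re-enable applies.
	return locker.UnlockApply()
}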
-// DisableApplyFlag takes presedence to any existing locks, if it is set to true -// this function returns an error -func (c *ApplyClient) UnlockApply() error { - if c.disableApplyFlag { - return errors.New("apply commands are disabled until DisableApply flag is unset") - } - - err := c.backend.UnlockCommand(command.Apply) - if err != nil { - return err - } - - return nil -} - -// CheckApplyLock retrieves an apply command lock if present. -// If DisableApplyFlag is set it will always return a lock. -func (c *ApplyClient) CheckApplyLock() (ApplyCommandLock, error) { - response := ApplyCommandLock{} - - if c.disableApplyFlag { - return ApplyCommandLock{ - Locked: true, - }, nil - } - - applyCmdLock, err := c.backend.CheckCommandLock(command.Apply) - if err != nil { - return response, err - } - - if applyCmdLock != nil { - response.Locked = true - response.Time = applyCmdLock.LockTime() - } - - return response, nil -} diff --git a/server/legacy/core/locking/locking.go b/server/legacy/core/locking/locking.go deleted file mode 100644 index 6c01c45e0..000000000 --- a/server/legacy/core/locking/locking.go +++ /dev/null @@ -1,201 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. -// -// Package locking handles locking projects when they have in-progress runs. -package locking - -import ( - "errors" - "fmt" - "regexp" - "time" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_backend.go Backend - -// Backend is an implementation of the locking API we require. -// -//nolint:interfacebloat -type Backend interface { - TryLock(lock models.ProjectLock) (bool, models.ProjectLock, error) - Unlock(project models.Project, workspace string) (*models.ProjectLock, error) - List() ([]models.ProjectLock, error) - GetLock(project models.Project, workspace string) (*models.ProjectLock, error) - UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) - - LockCommand(cmdName command.Name, lockTime time.Time) (*command.Lock, error) - UnlockCommand(cmdName command.Name) error - CheckCommandLock(cmdName command.Name) (*command.Lock, error) -} - -// TryLockResponse results from an attempted lock. -type TryLockResponse struct { - // LockAcquired is true if the lock was acquired from this call. - LockAcquired bool - // CurrLock is what project is currently holding the lock. - CurrLock models.ProjectLock - // LockKey is an identified by which to lookup and delete this lock. - LockKey string -} - -// Client is used to perform locking actions. 
-type Client struct { - backend Backend -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_locker.go Locker - -type Locker interface { - TryLock(p models.Project, workspace string, pull models.PullRequest, user models.User) (TryLockResponse, error) - Unlock(key string) (*models.ProjectLock, error) - List() (map[string]models.ProjectLock, error) - UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) - GetLock(key string) (*models.ProjectLock, error) -} - -// NewClient returns a new locking client. -func NewClient(backend Backend) *Client { - return &Client{ - backend: backend, - } -} - -// keyRegex matches and captures {repoFullName}/{path}/{workspace} where path can have multiple /'s in it. -var keyRegex = regexp.MustCompile(`^(.*?\/.*?)\/(.*)\/(.*)$`) - -// TryLock attempts to acquire a lock to a project and workspace. -func (c *Client) TryLock(p models.Project, workspace string, pull models.PullRequest, user models.User) (TryLockResponse, error) { - lock := models.ProjectLock{ - Workspace: workspace, - Time: time.Now().Local(), - Project: p, - User: user, - Pull: pull, - } - lockAcquired, currLock, err := c.backend.TryLock(lock) - if err != nil { - return TryLockResponse{}, err - } - return TryLockResponse{lockAcquired, currLock, c.key(p, workspace)}, nil -} - -// Unlock attempts to unlock a project and workspace. If successful, -// a pointer to the now deleted lock will be returned. Else, that -// pointer will be nil. An error will only be returned if there was -// an error deleting the lock (i.e. not if there was no lock). -func (c *Client) Unlock(key string) (*models.ProjectLock, error) { - project, workspace, err := c.lockKeyToProjectWorkspace(key) - if err != nil { - return nil, err - } - return c.backend.Unlock(project, workspace) -} - -// List returns a map of all locks with their lock key as the map key. -// The lock key can be used in GetLock() and Unlock(). -func (c *Client) List() (map[string]models.ProjectLock, error) { - m := make(map[string]models.ProjectLock) - locks, err := c.backend.List() - if err != nil { - return m, err - } - for _, lock := range locks { - m[c.key(lock.Project, lock.Workspace)] = lock - } - return m, nil -} - -// UnlockByPull deletes all locks associated with that pull request. -func (c *Client) UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) { - return c.backend.UnlockByPull(repoFullName, pullNum) -} - -// GetLock attempts to get the lock stored at key. If successful, -// a pointer to the lock will be returned. Else, the pointer will be nil. -// An error will only be returned if there was an error getting the lock -// (i.e. not if there was no lock). 
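For reference, the key built by key() and parsed back by keyRegex above has the shape "{repoFullName}/{path}/{workspace}", where the path may itself contain slashes; a small sketch with invented values:

package lockexample // hypothetical caller

import (
	"github.com/runatlantis/atlantis/server/legacy/core/locking"
	"github.com/runatlantis/atlantis/server/models"
)

func exampleLockKey(c *locking.Client, pull models.PullRequest, user models.User) error {
	p := models.Project{RepoFullName: "owner/repo", Path: "terraform/prod/vpc"}

	resp, err := c.TryLock(p, "default", pull, user)
	if err != nil {
		return err
	}
	// resp.LockKey is "owner/repo/terraform/prod/vpc/default"; keyRegex splits it
	// back into repo "owner/repo", path "terraform/prod/vpc", workspace "default".

	_, err = c.Unlock(resp.LockKey)
	return err
}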
-func (c *Client) GetLock(key string) (*models.ProjectLock, error) { - project, workspace, err := c.lockKeyToProjectWorkspace(key) - if err != nil { - return nil, err - } - - projectLock, err := c.backend.GetLock(project, workspace) - if err != nil { - return nil, err - } - - return projectLock, nil -} - -func (c *Client) key(p models.Project, workspace string) string { - return fmt.Sprintf("%s/%s/%s", p.RepoFullName, p.Path, workspace) -} - -func (c *Client) lockKeyToProjectWorkspace(key string) (models.Project, string, error) { - matches := keyRegex.FindStringSubmatch(key) - if len(matches) != 4 { - return models.Project{}, "", errors.New("invalid key format") - } - - return models.Project{RepoFullName: matches[1], Path: matches[2]}, matches[3], nil -} - -type NoOpLocker struct{} - -// NewNoOpLocker returns a new no-op locking client. -func NewNoOpLocker() *NoOpLocker { - return &NoOpLocker{} -} - -// TryLock attempts to acquire a lock to a project and workspace. -func (c *NoOpLocker) TryLock(p models.Project, workspace string, pull models.PullRequest, user models.User) (TryLockResponse, error) { - return TryLockResponse{true, models.ProjectLock{}, c.key(p, workspace)}, nil -} - -// Unlock attempts to unlock a project and workspace. If successful, -// a pointer to the now deleted lock will be returned. Else, that -// pointer will be nil. An error will only be returned if there was -// an error deleting the lock (i.e. not if there was no lock). -func (c *NoOpLocker) Unlock(key string) (*models.ProjectLock, error) { - return &models.ProjectLock{}, nil -} - -// List returns a map of all locks with their lock key as the map key. -// The lock key can be used in GetLock() and Unlock(). -func (c *NoOpLocker) List() (map[string]models.ProjectLock, error) { - m := make(map[string]models.ProjectLock) - return m, nil -} - -// UnlockByPull deletes all locks associated with that pull request. -func (c *NoOpLocker) UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) { - return []models.ProjectLock{}, nil -} - -// GetLock attempts to get the lock stored at key. If successful, -// a pointer to the lock will be returned. Else, the pointer will be nil. -// An error will only be returned if there was an error getting the lock -// (i.e. not if there was no lock). -func (c *NoOpLocker) GetLock(key string) (*models.ProjectLock, error) { - return nil, nil -} - -func (c *NoOpLocker) key(p models.Project, workspace string) string { - return fmt.Sprintf("%s/%s/%s", p.RepoFullName, p.Path, workspace) -} diff --git a/server/legacy/core/locking/locking_test.go b/server/legacy/core/locking/locking_test.go deleted file mode 100644 index feec86e74..000000000 --- a/server/legacy/core/locking/locking_test.go +++ /dev/null @@ -1,287 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. - -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package locking_test - -import ( - "errors" - "testing" - "time" - - "strings" - - . 
"github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks/matchers" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -var project = models.NewProject("owner/repo", "path") -var workspace = "workspace" -var pull = models.PullRequest{} -var user = models.User{} -var errExpected = errors.New("err") -var timeNow = time.Now().Local() -var pl = models.ProjectLock{Project: project, Pull: pull, User: user, Workspace: workspace, Time: timeNow} - -func TestTryLock_Errorf(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.TryLock(matchers.AnyModelsProjectLock())).ThenReturn(false, models.ProjectLock{}, errExpected) - t.Log("when the backend returns an error, TryLock should return that error") - l := locking.NewClient(backend) - _, err := l.TryLock(project, workspace, pull, user) - Equals(t, err, err) -} - -func TestTryLock_Success(t *testing.T) { - RegisterMockTestingT(t) - currLock := models.ProjectLock{} - backend := mocks.NewMockBackend() - When(backend.TryLock(matchers.AnyModelsProjectLock())).ThenReturn(true, currLock, nil) - l := locking.NewClient(backend) - r, err := l.TryLock(project, workspace, pull, user) - Ok(t, err) - Equals(t, locking.TryLockResponse{LockAcquired: true, CurrLock: currLock, LockKey: "owner/repo/path/workspace"}, r) -} - -func TestUnlock_InvalidKey(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - l := locking.NewClient(backend) - - _, err := l.Unlock("invalidkey") - Assert(t, err != nil, "expected err") - Assert(t, strings.Contains(err.Error(), "invalid key format"), "expected err") -} - -func TestUnlock_Errorf(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.Unlock(matchers.AnyModelsProject(), AnyString())).ThenReturn(nil, errExpected) - l := locking.NewClient(backend) - _, err := l.Unlock("owner/repo/path/workspace") - Equals(t, err, err) - backend.VerifyWasCalledOnce().Unlock(project, "workspace") -} - -func TestUnlock(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.Unlock(matchers.AnyModelsProject(), AnyString())).ThenReturn(&pl, nil) - l := locking.NewClient(backend) - lock, err := l.Unlock("owner/repo/path/workspace") - Ok(t, err) - Equals(t, &pl, lock) -} - -func TestList_Errorf(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.List()).ThenReturn(nil, errExpected) - l := locking.NewClient(backend) - _, err := l.List() - Equals(t, errExpected, err) -} - -func TestList(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.List()).ThenReturn([]models.ProjectLock{pl}, nil) - l := locking.NewClient(backend) - list, err := l.List() - Ok(t, err) - Equals(t, map[string]models.ProjectLock{ - "owner/repo/path/workspace": pl, - }, list) -} - -func TestUnlockByPull(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.UnlockByPull("owner/repo", 1)).ThenReturn(nil, errExpected) - l := locking.NewClient(backend) - _, err := l.UnlockByPull("owner/repo", 1) - Equals(t, errExpected, err) -} - -func TestGetLock_BadKey(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - l := 
locking.NewClient(backend) - _, err := l.GetLock("invalidkey") - Assert(t, err != nil, "err should not be nil") - Assert(t, strings.Contains(err.Error(), "invalid key format"), "expected different err") -} - -func TestGetLock_Errorf(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.GetLock(project, workspace)).ThenReturn(nil, errExpected) - l := locking.NewClient(backend) - _, err := l.GetLock("owner/repo/path/workspace") - Equals(t, errExpected, err) -} - -func TestGetLock(t *testing.T) { - RegisterMockTestingT(t) - backend := mocks.NewMockBackend() - When(backend.GetLock(project, workspace)).ThenReturn(&pl, nil) - l := locking.NewClient(backend) - lock, err := l.GetLock("owner/repo/path/workspace") - Ok(t, err) - Equals(t, &pl, lock) -} - -func TestTryLock_NoOpLocker(t *testing.T) { - RegisterMockTestingT(t) - currLock := models.ProjectLock{} - l := locking.NewNoOpLocker() - r, err := l.TryLock(project, workspace, pull, user) - Ok(t, err) - Equals(t, locking.TryLockResponse{LockAcquired: true, CurrLock: currLock, LockKey: "owner/repo/path/workspace"}, r) -} - -func TestUnlock_NoOpLocker(t *testing.T) { - l := locking.NewNoOpLocker() - lock, err := l.Unlock("owner/repo/path/workspace") - Ok(t, err) - Equals(t, &models.ProjectLock{}, lock) -} - -func TestList_NoOpLocker(t *testing.T) { - l := locking.NewNoOpLocker() - list, err := l.List() - Ok(t, err) - Equals(t, map[string]models.ProjectLock{}, list) -} - -func TestUnlockByPull_NoOpLocker(t *testing.T) { - l := locking.NewNoOpLocker() - _, err := l.UnlockByPull("owner/repo", 1) - Ok(t, err) -} - -func TestGetLock_NoOpLocker(t *testing.T) { - l := locking.NewNoOpLocker() - lock, err := l.GetLock("owner/repo/path/workspace") - Ok(t, err) - var expected *models.ProjectLock - Equals(t, expected, lock) -} - -func TestApplyLocker(t *testing.T) { - RegisterMockTestingT(t) - applyLock := &command.Lock{ - CommandName: command.Apply, - LockMetadata: command.LockMetadata{ - UnixTime: time.Now().Unix(), - }, - } - - t.Run("LockApply", func(t *testing.T) { - t.Run("backend errors", func(t *testing.T) { - backend := mocks.NewMockBackend() - - When(backend.LockCommand(matchers.AnyModelsCommandName(), matchers.AnyTimeTime())).ThenReturn(nil, errExpected) - l := locking.NewApplyClient(backend, false) - lock, err := l.LockApply() - Equals(t, errExpected, err) - Assert(t, !lock.Locked, "exp false") - }) - - t.Run("can't lock if userConfig.DisableApply is set", func(t *testing.T) { - backend := mocks.NewMockBackend() - - l := locking.NewApplyClient(backend, true) - _, err := l.LockApply() - ErrEquals(t, "DisableApplyFlag is set; Apply commands are locked globally until flag is unset", err) - - backend.VerifyWasCalled(Never()).LockCommand(matchers.AnyModelsCommandName(), matchers.AnyTimeTime()) - }) - - t.Run("succeeds", func(t *testing.T) { - backend := mocks.NewMockBackend() - - When(backend.LockCommand(matchers.AnyModelsCommandName(), matchers.AnyTimeTime())).ThenReturn(applyLock, nil) - l := locking.NewApplyClient(backend, false) - lock, _ := l.LockApply() - Assert(t, lock.Locked, "exp lock present") - }) - }) - - t.Run("UnlockApply", func(t *testing.T) { - t.Run("backend fails", func(t *testing.T) { - backend := mocks.NewMockBackend() - - When(backend.UnlockCommand(matchers.AnyModelsCommandName())).ThenReturn(errExpected) - l := locking.NewApplyClient(backend, false) - err := l.UnlockApply() - Equals(t, errExpected, err) - }) - - t.Run("can't unlock if userConfig.DisableApply is set", func(t *testing.T) { - 
backend := mocks.NewMockBackend() - - l := locking.NewApplyClient(backend, true) - err := l.UnlockApply() - ErrEquals(t, "apply commands are disabled until DisableApply flag is unset", err) - - backend.VerifyWasCalled(Never()).UnlockCommand(matchers.AnyModelsCommandName()) - }) - - t.Run("succeeds", func(t *testing.T) { - backend := mocks.NewMockBackend() - - When(backend.UnlockCommand(matchers.AnyModelsCommandName())).ThenReturn(nil) - l := locking.NewApplyClient(backend, false) - err := l.UnlockApply() - Equals(t, nil, err) - }) - }) - - t.Run("CheckApplyLock", func(t *testing.T) { - t.Run("fails", func(t *testing.T) { - backend := mocks.NewMockBackend() - - When(backend.CheckCommandLock(matchers.AnyModelsCommandName())).ThenReturn(nil, errExpected) - l := locking.NewApplyClient(backend, false) - lock, err := l.CheckApplyLock() - Equals(t, errExpected, err) - Equals(t, lock.Locked, false) - }) - - t.Run("when DisableApply flag is set always return a lock", func(t *testing.T) { - backend := mocks.NewMockBackend() - - l := locking.NewApplyClient(backend, true) - lock, err := l.CheckApplyLock() - Ok(t, err) - Equals(t, lock.Locked, true) - backend.VerifyWasCalled(Never()).CheckCommandLock(matchers.AnyModelsCommandName()) - }) - - t.Run("UnlockCommand succeeds", func(t *testing.T) { - backend := mocks.NewMockBackend() - - When(backend.CheckCommandLock(matchers.AnyModelsCommandName())).ThenReturn(applyLock, nil) - l := locking.NewApplyClient(backend, false) - lock, err := l.CheckApplyLock() - Equals(t, nil, err) - Assert(t, lock.Locked, "exp lock present") - }) - }) -} diff --git a/server/legacy/core/locking/mocks/matchers/locking_applycommandlock.go b/server/legacy/core/locking/mocks/matchers/locking_applycommandlock.go deleted file mode 100644 index 681d48683..000000000 --- a/server/legacy/core/locking/mocks/matchers/locking_applycommandlock.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - locking "github.com/runatlantis/atlantis/server/legacy/core/locking" -) - -func AnyLockingApplyCommandLock() locking.ApplyCommandLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(locking.ApplyCommandLock))(nil)).Elem())) - var nullValue locking.ApplyCommandLock - return nullValue -} - -func EqLockingApplyCommandLock(value locking.ApplyCommandLock) locking.ApplyCommandLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue locking.ApplyCommandLock - return nullValue -} - -func NotEqLockingApplyCommandLock(value locking.ApplyCommandLock) locking.ApplyCommandLock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue locking.ApplyCommandLock - return nullValue -} - -func LockingApplyCommandLockThat(matcher pegomock.ArgumentMatcher) locking.ApplyCommandLock { - pegomock.RegisterMatcher(matcher) - var nullValue locking.ApplyCommandLock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/locking_applycommandlockresponse.go b/server/legacy/core/locking/mocks/matchers/locking_applycommandlockresponse.go deleted file mode 100644 index e2175e50f..000000000 --- a/server/legacy/core/locking/mocks/matchers/locking_applycommandlockresponse.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - locking "github.com/runatlantis/atlantis/server/legacy/core/locking" -) - -func AnyLockingApplyCommandLockResponse() locking.ApplyCommandLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(locking.ApplyCommandLock))(nil)).Elem())) - var nullValue locking.ApplyCommandLock - return nullValue -} - -func EqLockingApplyCommandLockResponse(value locking.ApplyCommandLock) locking.ApplyCommandLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue locking.ApplyCommandLock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/locking_trylockresponse.go b/server/legacy/core/locking/mocks/matchers/locking_trylockresponse.go deleted file mode 100644 index 2d301b537..000000000 --- a/server/legacy/core/locking/mocks/matchers/locking_trylockresponse.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - locking "github.com/runatlantis/atlantis/server/legacy/core/locking" -) - -func AnyLockingTryLockResponse() locking.TryLockResponse { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(locking.TryLockResponse))(nil)).Elem())) - var nullValue locking.TryLockResponse - return nullValue -} - -func EqLockingTryLockResponse(value locking.TryLockResponse) locking.TryLockResponse { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue locking.TryLockResponse - return nullValue -} - -func NotEqLockingTryLockResponse(value locking.TryLockResponse) locking.TryLockResponse { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue locking.TryLockResponse - return nullValue -} - -func LockingTryLockResponseThat(matcher pegomock.ArgumentMatcher) locking.TryLockResponse { - pegomock.RegisterMatcher(matcher) - var nullValue locking.TryLockResponse - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/map_of_string_to_models_projectlock.go b/server/legacy/core/locking/mocks/matchers/map_of_string_to_models_projectlock.go deleted file mode 100644 index eb8f8290a..000000000 --- a/server/legacy/core/locking/mocks/matchers/map_of_string_to_models_projectlock.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyMapOfStringToModelsProjectLock() map[string]models.ProjectLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(map[string]models.ProjectLock))(nil)).Elem())) - var nullValue map[string]models.ProjectLock - return nullValue -} - -func EqMapOfStringToModelsProjectLock(value map[string]models.ProjectLock) map[string]models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue map[string]models.ProjectLock - return nullValue -} - -func NotEqMapOfStringToModelsProjectLock(value map[string]models.ProjectLock) map[string]models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue map[string]models.ProjectLock - return nullValue -} - -func MapOfStringToModelsProjectLockThat(matcher pegomock.ArgumentMatcher) map[string]models.ProjectLock { - pegomock.RegisterMatcher(matcher) - var nullValue map[string]models.ProjectLock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/models_commandlock.go b/server/legacy/core/locking/mocks/matchers/models_commandlock.go deleted file mode 100644 index df7398d76..000000000 --- a/server/legacy/core/locking/mocks/matchers/models_commandlock.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsCommandLock() command.Lock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Lock))(nil)).Elem())) - var nullValue command.Lock - return nullValue -} - -func EqModelsCommandLock(value command.Lock) command.Lock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.Lock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/models_commandname.go b/server/legacy/core/locking/mocks/matchers/models_commandname.go deleted file mode 100644 index db61aecdc..000000000 --- a/server/legacy/core/locking/mocks/matchers/models_commandname.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsCommandName() command.Name { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Name))(nil)).Elem())) - var nullValue command.Name - return nullValue -} - -func EqModelsCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func NotEqModelsCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func ModelsCommandNameThat(matcher pegomock.ArgumentMatcher) command.Name { - pegomock.RegisterMatcher(matcher) - var nullValue command.Name - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/models_project.go b/server/legacy/core/locking/mocks/matchers/models_project.go deleted file mode 100644 index 8a78aae18..000000000 --- a/server/legacy/core/locking/mocks/matchers/models_project.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsProject() models.Project { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Project))(nil)).Elem())) - var nullValue models.Project - return nullValue -} - -func EqModelsProject(value models.Project) models.Project { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Project - return nullValue -} - -func NotEqModelsProject(value models.Project) models.Project { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Project - return nullValue -} - -func ModelsProjectThat(matcher pegomock.ArgumentMatcher) models.Project { - pegomock.RegisterMatcher(matcher) - var nullValue models.Project - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/models_projectlock.go b/server/legacy/core/locking/mocks/matchers/models_projectlock.go deleted file mode 100644 index fe328bdb5..000000000 --- a/server/legacy/core/locking/mocks/matchers/models_projectlock.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsProjectLock() models.ProjectLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ProjectLock))(nil)).Elem())) - var nullValue models.ProjectLock - return nullValue -} - -func EqModelsProjectLock(value models.ProjectLock) models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.ProjectLock - return nullValue -} - -func NotEqModelsProjectLock(value models.ProjectLock) models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.ProjectLock - return nullValue -} - -func ModelsProjectLockThat(matcher pegomock.ArgumentMatcher) models.ProjectLock { - pegomock.RegisterMatcher(matcher) - var nullValue models.ProjectLock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/models_pullrequest.go b/server/legacy/core/locking/mocks/matchers/models_pullrequest.go deleted file mode 100644 index 94e36a1ab..000000000 --- a/server/legacy/core/locking/mocks/matchers/models_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsPullRequest() models.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequest))(nil)).Elem())) - var nullValue models.PullRequest - return nullValue -} - -func EqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func NotEqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func ModelsPullRequestThat(matcher pegomock.ArgumentMatcher) models.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue models.PullRequest - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/models_user.go b/server/legacy/core/locking/mocks/matchers/models_user.go deleted file mode 100644 index 8e552bb1c..000000000 --- a/server/legacy/core/locking/mocks/matchers/models_user.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsUser() models.User { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.User))(nil)).Elem())) - var nullValue models.User - return nullValue -} - -func EqModelsUser(value models.User) models.User { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.User - return nullValue -} - -func NotEqModelsUser(value models.User) models.User { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.User - return nullValue -} - -func ModelsUserThat(matcher pegomock.ArgumentMatcher) models.User { - pegomock.RegisterMatcher(matcher) - var nullValue models.User - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/ptr_to_models_commandlock.go b/server/legacy/core/locking/mocks/matchers/ptr_to_models_commandlock.go deleted file mode 100644 index acfbc3251..000000000 --- a/server/legacy/core/locking/mocks/matchers/ptr_to_models_commandlock.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyPtrToModelsCommandLock() *command.Lock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Lock))(nil)).Elem())) - var nullValue *command.Lock - return nullValue -} - -func EqPtrToModelsCommandLock(value *command.Lock) *command.Lock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Lock - return nullValue -} - -func NotEqPtrToModelsCommandLock(value *command.Lock) *command.Lock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *command.Lock - return nullValue -} - -func PtrToModelsCommandLockThat(matcher pegomock.ArgumentMatcher) *command.Lock { - pegomock.RegisterMatcher(matcher) - var nullValue *command.Lock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/ptr_to_models_projectlock.go b/server/legacy/core/locking/mocks/matchers/ptr_to_models_projectlock.go deleted file mode 100644 index 092842ecb..000000000 --- a/server/legacy/core/locking/mocks/matchers/ptr_to_models_projectlock.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyPtrToModelsProjectLock() *models.ProjectLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*models.ProjectLock))(nil)).Elem())) - var nullValue *models.ProjectLock - return nullValue -} - -func EqPtrToModelsProjectLock(value *models.ProjectLock) *models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *models.ProjectLock - return nullValue -} - -func NotEqPtrToModelsProjectLock(value *models.ProjectLock) *models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *models.ProjectLock - return nullValue -} - -func PtrToModelsProjectLockThat(matcher pegomock.ArgumentMatcher) *models.ProjectLock { - pegomock.RegisterMatcher(matcher) - var nullValue *models.ProjectLock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/slice_of_models_projectlock.go b/server/legacy/core/locking/mocks/matchers/slice_of_models_projectlock.go deleted file mode 100644 index 70ada801d..000000000 --- a/server/legacy/core/locking/mocks/matchers/slice_of_models_projectlock.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnySliceOfModelsProjectLock() []models.ProjectLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]models.ProjectLock))(nil)).Elem())) - var nullValue []models.ProjectLock - return nullValue -} - -func EqSliceOfModelsProjectLock(value []models.ProjectLock) []models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []models.ProjectLock - return nullValue -} - -func NotEqSliceOfModelsProjectLock(value []models.ProjectLock) []models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []models.ProjectLock - return nullValue -} - -func SliceOfModelsProjectLockThat(matcher pegomock.ArgumentMatcher) []models.ProjectLock { - pegomock.RegisterMatcher(matcher) - var nullValue []models.ProjectLock - return nullValue -} diff --git a/server/legacy/core/locking/mocks/matchers/time_time.go b/server/legacy/core/locking/mocks/matchers/time_time.go deleted file mode 100644 index 755cf1bf8..000000000 --- a/server/legacy/core/locking/mocks/matchers/time_time.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - time "time" -) - -func AnyTimeTime() time.Time { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(time.Time))(nil)).Elem())) - var nullValue time.Time - return nullValue -} - -func EqTimeTime(value time.Time) time.Time { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue time.Time - return nullValue -} - -func NotEqTimeTime(value time.Time) time.Time { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue time.Time - return nullValue -} - -func TimeTimeThat(matcher pegomock.ArgumentMatcher) time.Time { - pegomock.RegisterMatcher(matcher) - var nullValue time.Time - return nullValue -} diff --git a/server/legacy/core/locking/mocks/mock_apply_lock_checker.go b/server/legacy/core/locking/mocks/mock_apply_lock_checker.go deleted file mode 100644 index 6ec2edcdb..000000000 --- a/server/legacy/core/locking/mocks/mock_apply_lock_checker.go +++ /dev/null @@ -1,100 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/locking (interfaces: ApplyLockChecker) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - locking "github.com/runatlantis/atlantis/server/legacy/core/locking" -) - -type MockApplyLockChecker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockApplyLockChecker(options ...pegomock.Option) *MockApplyLockChecker { - mock := &MockApplyLockChecker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockApplyLockChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockApplyLockChecker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockApplyLockChecker) CheckApplyLock() (locking.ApplyCommandLock, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockApplyLockChecker().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("CheckApplyLock", params, []reflect.Type{reflect.TypeOf((*locking.ApplyCommandLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 locking.ApplyCommandLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(locking.ApplyCommandLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockApplyLockChecker) VerifyWasCalledOnce() *VerifierMockApplyLockChecker { - return &VerifierMockApplyLockChecker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockApplyLockChecker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockApplyLockChecker { - return &VerifierMockApplyLockChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockApplyLockChecker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockApplyLockChecker { - return &VerifierMockApplyLockChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockApplyLockChecker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockApplyLockChecker { - return &VerifierMockApplyLockChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockApplyLockChecker struct { - mock *MockApplyLockChecker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockApplyLockChecker) CheckApplyLock() *MockApplyLockChecker_CheckApplyLock_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CheckApplyLock", params, verifier.timeout) - return &MockApplyLockChecker_CheckApplyLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockApplyLockChecker_CheckApplyLock_OngoingVerification struct { - mock *MockApplyLockChecker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyLockChecker_CheckApplyLock_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockApplyLockChecker_CheckApplyLock_OngoingVerification) GetAllCapturedArguments() { -} diff --git a/server/legacy/core/locking/mocks/mock_apply_locker.go b/server/legacy/core/locking/mocks/mock_apply_locker.go deleted file mode 100644 index 0efe2a5c0..000000000 --- a/server/legacy/core/locking/mocks/mock_apply_locker.go +++ /dev/null @@ -1,168 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/core/locking (interfaces: ApplyLocker) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - locking "github.com/runatlantis/atlantis/server/legacy/core/locking" -) - -type MockApplyLocker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockApplyLocker(options ...pegomock.Option) *MockApplyLocker { - mock := &MockApplyLocker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockApplyLocker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockApplyLocker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockApplyLocker) LockApply() (locking.ApplyCommandLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockApplyLocker().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("LockApply", params, []reflect.Type{reflect.TypeOf((*locking.ApplyCommandLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 locking.ApplyCommandLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(locking.ApplyCommandLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockApplyLocker) UnlockApply() error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockApplyLocker().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("UnlockApply", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockApplyLocker) CheckApplyLock() (locking.ApplyCommandLock, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockApplyLocker().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("CheckApplyLock", params, []reflect.Type{reflect.TypeOf((*locking.ApplyCommandLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 locking.ApplyCommandLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(locking.ApplyCommandLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockApplyLocker) VerifyWasCalledOnce() *VerifierMockApplyLocker { - return &VerifierMockApplyLocker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockApplyLocker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockApplyLocker { - return &VerifierMockApplyLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockApplyLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockApplyLocker { - return &VerifierMockApplyLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockApplyLocker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockApplyLocker { - return &VerifierMockApplyLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockApplyLocker struct { - mock *MockApplyLocker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockApplyLocker) LockApply() *MockApplyLocker_LockApply_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "LockApply", params, verifier.timeout) - return &MockApplyLocker_LockApply_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockApplyLocker_LockApply_OngoingVerification struct { - mock *MockApplyLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyLocker_LockApply_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockApplyLocker_LockApply_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockApplyLocker) UnlockApply() *MockApplyLocker_UnlockApply_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UnlockApply", params, verifier.timeout) - return &MockApplyLocker_UnlockApply_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockApplyLocker_UnlockApply_OngoingVerification struct { - mock *MockApplyLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyLocker_UnlockApply_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockApplyLocker_UnlockApply_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockApplyLocker) CheckApplyLock() *MockApplyLocker_CheckApplyLock_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CheckApplyLock", params, verifier.timeout) - return 
&MockApplyLocker_CheckApplyLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockApplyLocker_CheckApplyLock_OngoingVerification struct { - mock *MockApplyLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyLocker_CheckApplyLock_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockApplyLocker_CheckApplyLock_OngoingVerification) GetAllCapturedArguments() { -} diff --git a/server/legacy/core/locking/mocks/mock_backend.go b/server/legacy/core/locking/mocks/mock_backend.go deleted file mode 100644 index 733a9652f..000000000 --- a/server/legacy/core/locking/mocks/mock_backend.go +++ /dev/null @@ -1,439 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/locking (interfaces: Backend) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockBackend struct { - fail func(message string, callerSkip ...int) -} - -func NewMockBackend(options ...pegomock.Option) *MockBackend { - mock := &MockBackend{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockBackend) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockBackend) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockBackend) TryLock(lock models.ProjectLock) (bool, models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockBackend().") - } - params := []pegomock.Param{lock} - result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 models.ProjectLock - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(models.ProjectLock) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockBackend) Unlock(project models.Project, workspace string) (*models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockBackend().") - } - params := []pegomock.Param{project, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("Unlock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockBackend) List() ([]models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockBackend().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("List", params, []reflect.Type{reflect.TypeOf((*[]models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockBackend) GetLock(project models.Project, workspace string) (*models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockBackend().") - } - params := []pegomock.Param{project, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetLock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockBackend) UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockBackend().") - } - params := []pegomock.Param{repoFullName, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("UnlockByPull", params, []reflect.Type{reflect.TypeOf((*[]models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockBackend) LockCommand(cmdName command.Name, lockTime time.Time) (*command.Lock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockBackend().") - } - params := []pegomock.Param{cmdName, lockTime} - result := pegomock.GetGenericMockFrom(mock).Invoke("LockCommand", params, []reflect.Type{reflect.TypeOf((**command.Lock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *command.Lock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*command.Lock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockBackend) UnlockCommand(cmdName command.Name) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockBackend().") - } - params := []pegomock.Param{cmdName} - result := pegomock.GetGenericMockFrom(mock).Invoke("UnlockCommand", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockBackend) CheckCommandLock(cmdName command.Name) (*command.Lock, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockBackend().") - } - params := []pegomock.Param{cmdName} - result := pegomock.GetGenericMockFrom(mock).Invoke("CheckCommandLock", params, []reflect.Type{reflect.TypeOf((**command.Lock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *command.Lock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*command.Lock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockBackend) VerifyWasCalledOnce() *VerifierMockBackend { - return &VerifierMockBackend{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockBackend) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockBackend { - return &VerifierMockBackend{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockBackend) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockBackend { - return &VerifierMockBackend{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockBackend) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockBackend { - return &VerifierMockBackend{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockBackend struct { - mock *MockBackend - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockBackend) TryLock(lock models.ProjectLock) *MockBackend_TryLock_OngoingVerification { - params := []pegomock.Param{lock} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params, verifier.timeout) - return &MockBackend_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_TryLock_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_TryLock_OngoingVerification) GetCapturedArguments() models.ProjectLock { - lock := c.GetAllCapturedArguments() - return lock[len(lock)-1] -} - -func (c *MockBackend_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectLock) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.ProjectLock, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.ProjectLock) - } - } - return -} - -func (verifier *VerifierMockBackend) Unlock(project models.Project, workspace string) *MockBackend_Unlock_OngoingVerification { - params := []pegomock.Param{project, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Unlock", params, verifier.timeout) - return &MockBackend_Unlock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_Unlock_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_Unlock_OngoingVerification) GetCapturedArguments() (models.Project, string) { - project, workspace := c.GetAllCapturedArguments() - return project[len(project)-1], workspace[len(workspace)-1] -} - -func 
(c *MockBackend_Unlock_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Project, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Project, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Project) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockBackend) List() *MockBackend_List_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "List", params, verifier.timeout) - return &MockBackend_List_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_List_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_List_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockBackend_List_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockBackend) GetLock(project models.Project, workspace string) *MockBackend_GetLock_OngoingVerification { - params := []pegomock.Param{project, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetLock", params, verifier.timeout) - return &MockBackend_GetLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_GetLock_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_GetLock_OngoingVerification) GetCapturedArguments() (models.Project, string) { - project, workspace := c.GetAllCapturedArguments() - return project[len(project)-1], workspace[len(workspace)-1] -} - -func (c *MockBackend_GetLock_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Project, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Project, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Project) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockBackend) UnlockByPull(repoFullName string, pullNum int) *MockBackend_UnlockByPull_OngoingVerification { - params := []pegomock.Param{repoFullName, pullNum} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UnlockByPull", params, verifier.timeout) - return &MockBackend_UnlockByPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_UnlockByPull_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_UnlockByPull_OngoingVerification) GetCapturedArguments() (string, int) { - repoFullName, pullNum := c.GetAllCapturedArguments() - return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1] -} - -func (c *MockBackend_UnlockByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - 
if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} - -func (verifier *VerifierMockBackend) LockCommand(cmdName command.Name, lockTime time.Time) *MockBackend_LockCommand_OngoingVerification { - params := []pegomock.Param{cmdName, lockTime} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "LockCommand", params, verifier.timeout) - return &MockBackend_LockCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_LockCommand_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_LockCommand_OngoingVerification) GetCapturedArguments() (command.Name, time.Time) { - cmdName, lockTime := c.GetAllCapturedArguments() - return cmdName[len(cmdName)-1], lockTime[len(lockTime)-1] -} - -func (c *MockBackend_LockCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []command.Name, _param1 []time.Time) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.Name, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.Name) - } - _param1 = make([]time.Time, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(time.Time) - } - } - return -} - -func (verifier *VerifierMockBackend) UnlockCommand(cmdName command.Name) *MockBackend_UnlockCommand_OngoingVerification { - params := []pegomock.Param{cmdName} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UnlockCommand", params, verifier.timeout) - return &MockBackend_UnlockCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_UnlockCommand_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_UnlockCommand_OngoingVerification) GetCapturedArguments() command.Name { - cmdName := c.GetAllCapturedArguments() - return cmdName[len(cmdName)-1] -} - -func (c *MockBackend_UnlockCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []command.Name) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.Name, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.Name) - } - } - return -} - -func (verifier *VerifierMockBackend) CheckCommandLock(cmdName command.Name) *MockBackend_CheckCommandLock_OngoingVerification { - params := []pegomock.Param{cmdName} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CheckCommandLock", params, verifier.timeout) - return &MockBackend_CheckCommandLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockBackend_CheckCommandLock_OngoingVerification struct { - mock *MockBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockBackend_CheckCommandLock_OngoingVerification) GetCapturedArguments() command.Name { - cmdName := c.GetAllCapturedArguments() - return cmdName[len(cmdName)-1] -} - 
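The generated verifier and captured-arguments helpers above are what tests use to assert on mock calls. A short illustrative sketch (the test name and assertions are assumptions; the mock and matcher APIs are the generated ones shown in this diff) of combining MockBackend, the matcher helpers, and GetCapturedArguments:

package locking_test

import (
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/legacy/core/locking"
	"github.com/runatlantis/atlantis/server/legacy/core/locking/mocks"
	"github.com/runatlantis/atlantis/server/legacy/core/locking/mocks/matchers"
)

// Stubs Backend.Unlock, drives it through the Client, then verifies the
// call and inspects the arguments the Client actually passed down.
func TestUnlock_CapturesArguments(t *testing.T) {
	RegisterMockTestingT(t)
	backend := mocks.NewMockBackend()
	When(backend.Unlock(matchers.AnyModelsProject(), AnyString())).ThenReturn(nil, nil)

	client := locking.NewClient(backend)
	if _, err := client.Unlock("owner/repo/path/workspace"); err != nil {
		t.Fatal(err)
	}

	project, workspace := backend.VerifyWasCalledOnce().
		Unlock(matchers.AnyModelsProject(), AnyString()).
		GetCapturedArguments()
	if project.RepoFullName != "owner/repo" || workspace != "workspace" {
		t.Fatalf("unexpected arguments: %+v %q", project, workspace)
	}
}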
-func (c *MockBackend_CheckCommandLock_OngoingVerification) GetAllCapturedArguments() (_param0 []command.Name) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.Name, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.Name) - } - } - return -} diff --git a/server/legacy/core/locking/mocks/mock_locker.go b/server/legacy/core/locking/mocks/mock_locker.go deleted file mode 100644 index e68d7b0c4..000000000 --- a/server/legacy/core/locking/mocks/mock_locker.go +++ /dev/null @@ -1,300 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/locking (interfaces: Locker) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - locking "github.com/runatlantis/atlantis/server/legacy/core/locking" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockLocker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockLocker(options ...pegomock.Option) *MockLocker { - mock := &MockLocker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockLocker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockLocker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockLocker) TryLock(p models.Project, workspace string, pull models.PullRequest, user models.User) (locking.TryLockResponse, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockLocker().") - } - params := []pegomock.Param{p, workspace, pull, user} - result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((*locking.TryLockResponse)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 locking.TryLockResponse - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(locking.TryLockResponse) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockLocker) Unlock(key string) (*models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockLocker().") - } - params := []pegomock.Param{key} - result := pegomock.GetGenericMockFrom(mock).Invoke("Unlock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockLocker) List() (map[string]models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockLocker().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("List", params, []reflect.Type{reflect.TypeOf((*map[string]models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 map[string]models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(map[string]models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockLocker) UnlockByPull(repoFullName string, pullNum int) ([]models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockLocker().") - } - params := []pegomock.Param{repoFullName, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("UnlockByPull", params, []reflect.Type{reflect.TypeOf((*[]models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockLocker) GetLock(key string) (*models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockLocker().") - } - params := []pegomock.Param{key} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetLock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockLocker) VerifyWasCalledOnce() *VerifierMockLocker { - return &VerifierMockLocker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockLocker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockLocker { - return &VerifierMockLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockLocker { - return &VerifierMockLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockLocker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockLocker { - return &VerifierMockLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockLocker struct { - mock *MockLocker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockLocker) TryLock(p models.Project, workspace string, pull models.PullRequest, user models.User) *MockLocker_TryLock_OngoingVerification { - params := []pegomock.Param{p, workspace, pull, user} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params, verifier.timeout) - return &MockLocker_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockLocker_TryLock_OngoingVerification struct { - mock *MockLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockLocker_TryLock_OngoingVerification) GetCapturedArguments() (models.Project, string, models.PullRequest, models.User) { - p, workspace, pull, user := c.GetAllCapturedArguments() - return p[len(p)-1], workspace[len(workspace)-1], pull[len(pull)-1], user[len(user)-1] -} - -func (c *MockLocker_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Project, _param1 []string, _param2 []models.PullRequest, _param3 []models.User) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Project, len(c.methodInvocations)) - for u, param := range params[0] { - 
_param0[u] = param.(models.Project) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.PullRequest) - } - _param3 = make([]models.User, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(models.User) - } - } - return -} - -func (verifier *VerifierMockLocker) Unlock(key string) *MockLocker_Unlock_OngoingVerification { - params := []pegomock.Param{key} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Unlock", params, verifier.timeout) - return &MockLocker_Unlock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockLocker_Unlock_OngoingVerification struct { - mock *MockLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockLocker_Unlock_OngoingVerification) GetCapturedArguments() string { - key := c.GetAllCapturedArguments() - return key[len(key)-1] -} - -func (c *MockLocker_Unlock_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockLocker) List() *MockLocker_List_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "List", params, verifier.timeout) - return &MockLocker_List_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockLocker_List_OngoingVerification struct { - mock *MockLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockLocker_List_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockLocker_List_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockLocker) UnlockByPull(repoFullName string, pullNum int) *MockLocker_UnlockByPull_OngoingVerification { - params := []pegomock.Param{repoFullName, pullNum} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UnlockByPull", params, verifier.timeout) - return &MockLocker_UnlockByPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockLocker_UnlockByPull_OngoingVerification struct { - mock *MockLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockLocker_UnlockByPull_OngoingVerification) GetCapturedArguments() (string, int) { - repoFullName, pullNum := c.GetAllCapturedArguments() - return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1] -} - -func (c *MockLocker_UnlockByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} - -func (verifier *VerifierMockLocker) GetLock(key string) 
*MockLocker_GetLock_OngoingVerification { - params := []pegomock.Param{key} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetLock", params, verifier.timeout) - return &MockLocker_GetLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockLocker_GetLock_OngoingVerification struct { - mock *MockLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockLocker_GetLock_OngoingVerification) GetCapturedArguments() string { - key := c.GetAllCapturedArguments() - return key[len(key)-1] -} - -func (c *MockLocker_GetLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} diff --git a/server/legacy/core/runtime/apply_step_runner.go b/server/legacy/core/runtime/apply_step_runner.go deleted file mode 100644 index fe3dfad98..000000000 --- a/server/legacy/core/runtime/apply_step_runner.go +++ /dev/null @@ -1,249 +0,0 @@ -package runtime - -import ( - "context" - "fmt" - "os" - "path/filepath" - "reflect" - "strings" - - "github.com/pkg/errors" - - version "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -// ApplyStepRunner runs `terraform apply`. -type ApplyStepRunner struct { - TerraformExecutor TerraformExec - VCSStatusUpdater StatusUpdater - AsyncTFExec AsyncTFExec -} - -func (a *ApplyStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - if a.hasTargetFlag(prjCtx, extraArgs) { - return "", errors.New("cannot run apply with -target because we are applying an already generated plan. Instead, run -target with atlantis plan") - } - - planPath := filepath.Join(path, GetPlanFilename(prjCtx.Workspace, prjCtx.ProjectName)) - contents, err := os.ReadFile(planPath) - if os.IsNotExist(err) { - return "", fmt.Errorf("no plan found at path %q and workspace %q–did you run plan?", prjCtx.RepoRelDir, prjCtx.Workspace) - } - if err != nil { - return "", errors.Wrap(err, "unable to read planfile") - } - - prjCtx.Log.InfoContext(prjCtx.RequestCtx, "starting apply") - var out string - - // TODO: Leverage PlanTypeStepRunnerDelegate here - if IsRemotePlan(contents) { - args := append(append([]string{"apply", "-input=false"}, extraArgs...), prjCtx.EscapedCommentArgs...) - out, err = a.runRemoteApply(ctx, prjCtx, args, path, planPath, prjCtx.TerraformVersion, envs) - if err == nil { - out = a.cleanRemoteApplyOutput(out) - } - } else { - // NOTE: we need to quote the plan path because Bitbucket Server can - // have spaces in its repo owner names which is part of the path. - args := append(append(append([]string{"apply", "-input=false"}, extraArgs...), prjCtx.EscapedCommentArgs...), fmt.Sprintf("%q", planPath)) - out, err = a.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, path, args, envs, prjCtx.TerraformVersion, prjCtx.Workspace) - } - - // If the apply was successful, delete the plan. 
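// A failed removal is only logged as a warning below; the successful apply output and nil error are still returned.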
- if err == nil { - prjCtx.Log.InfoContext(prjCtx.RequestCtx, "apply successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { - prjCtx.Log.WarnContext(prjCtx.RequestCtx, fmt.Sprintf("failed to delete planfile after successful apply: %s", removeErr)) - } - } - return out, err -} - -func (a *ApplyStepRunner) hasTargetFlag(prjCtx command.ProjectContext, extraArgs []string) bool { - isTargetFlag := func(s string) bool { - if s == "-target" { - return true - } - split := strings.Split(s, "=") - return split[0] == "-target" - } - - for _, arg := range prjCtx.EscapedCommentArgs { - if isTargetFlag(arg) { - return true - } - } - for _, arg := range extraArgs { - if isTargetFlag(arg) { - return true - } - } - return false -} - -// cleanRemoteApplyOutput removes unneeded output like the refresh and plan -// phases to make the final comment cleaner. -func (a *ApplyStepRunner) cleanRemoteApplyOutput(out string) string { - applyStartText := ` Terraform will perform the actions described above. - Only 'yes' will be accepted to approve. - - Enter a value: -` - applyStartIdx := strings.Index(out, applyStartText) - if applyStartIdx < 0 { - return out - } - return out[applyStartIdx+len(applyStartText):] -} - -// runRemoteApply handles running the apply and performing actions in real-time -// as we get the output from the command. -// Specifically, we set commit statuses with links to Terraform Enterprise's -// UI to view real-time output. -// We also check if the plan that's about to be applied matches the one we -// printed to the pull request. -// We need to do this because remote plan doesn't support -out, so we do a -// manual diff. -// It also writes "yes" or "no" to the process to confirm the apply. -func (a *ApplyStepRunner) runRemoteApply( - ctx context.Context, - prjCtx command.ProjectContext, - applyArgs []string, - path string, - absPlanPath string, - tfVersion *version.Version, - envs map[string]string) (string, error) { - // The planfile contents are needed to ensure that the plan didn't change - // between plan and apply phases. - planfileBytes, err := os.ReadFile(absPlanPath) - if err != nil { - return "", errors.Wrap(err, "reading planfile") - } - - // updateStatusF will update the commit status and log any error. - updateStatusF := func(status models.VCSStatus, url string, statusID string) { - if _, err := a.VCSStatusUpdater.UpdateProject(ctx, prjCtx, command.Apply, status, url, statusID); err != nil { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, fmt.Sprintf("unable to update status: %s", err)) - } - } - - // Start the async command execution. - inCh := make(chan string) - defer close(inCh) - outCh := a.AsyncTFExec.RunCommandAsyncWithInput(ctx, prjCtx, filepath.Clean(path), applyArgs, envs, tfVersion, prjCtx.Workspace, inCh) - var lines []string - nextLineIsRunURL := false - var runURL string - var planChangedErr error - - for line := range outCh { - if line.Err != nil { - err = line.Err - break - } - lines = append(lines, line.Line) - - // Here we're checking for the run url and updating the status - // if found. - if line.Line == lineBeforeRunURL { - nextLineIsRunURL = true - } else if nextLineIsRunURL { - runURL = strings.TrimSpace(line.Line) - updateStatusF(models.PendingVCSStatus, runURL, prjCtx.StatusID) - nextLineIsRunURL = false - } - - // If the plan is complete and it's waiting for us to verify the apply, - // check if the plan is the same and if so, input "yes". - if a.atConfirmApplyPrompt(lines) { - // Check if the plan is as expected. 
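// "As expected" means the plan streamed back by the remote run (everything up to the confirmation prompt) matches the planfile captured at plan time, minus the remoteOpsHeader marker; see remotePlanChanged below. If it does not match, we answer "no" to abort the apply.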
- planChangedErr = a.remotePlanChanged(string(planfileBytes), strings.Join(lines, "\n"), tfVersion) - if planChangedErr != nil { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, "plan generated during apply does not match expected plan, aborting") - inCh <- "no\n" - // Need to continue so we read all the lines, otherwise channel - // sender (in TerraformClient) will block indefinitely waiting - // for us to read. - continue - } - - inCh <- "yes\n" - } - } - - output := strings.Join(lines, "\n") - if planChangedErr != nil { - updateStatusF(models.FailedVCSStatus, runURL, prjCtx.StatusID) - // The output isn't important if the plans don't match so we just - // discard it. - return "", planChangedErr - } - - if err != nil { - updateStatusF(models.FailedVCSStatus, runURL, prjCtx.StatusID) - } else { - updateStatusF(models.SuccessVCSStatus, runURL, prjCtx.StatusID) - } - return output, err -} - -// remotePlanChanged checks if the plan generated during the plan phase matches -// the one we're about to apply in the apply phase. -// If the plans don't match, it returns an error with a diff of the two plans -// that can be printed to the pull request. -func (a *ApplyStepRunner) remotePlanChanged(planfileContents string, applyOut string, tfVersion *version.Version) error { - output := StripRefreshingFromPlanOutput(applyOut, tfVersion) - - // Strip plan output after the prompt to execute the plan. - planEndIdx := strings.Index(output, "Do you want to perform these actions in workspace \"") - if planEndIdx < 0 { - return fmt.Errorf("Couldn't find plan end when parsing apply output:\n%q", applyOut) - } - currPlan := strings.TrimSpace(output[:planEndIdx]) - - // Ensure we strip the remoteOpsHeader from the plan contents so the - // comparison is fair. We add this header in the plan phase so we can - // identify that this planfile came from a remote plan. - expPlan := strings.TrimSpace(planfileContents[len(remoteOpsHeader):]) - - if currPlan != expPlan { - return fmt.Errorf(planChangedErrFmt, expPlan, currPlan) - } - return nil -} - -// atConfirmApplyPrompt returns true if the apply is at the "confirm this apply" step. -// This is determined by looking at the current command output provided by -// applyLines. -func (a *ApplyStepRunner) atConfirmApplyPrompt(applyLines []string) bool { - waitingMatchLines := strings.Split(waitingForConfirmation, "\n") - return len(applyLines) >= len(waitingMatchLines) && reflect.DeepEqual(applyLines[len(applyLines)-len(waitingMatchLines):], waitingMatchLines) -} - -// planChangedErrFmt is the error we print to pull requests when the plan changed -// between remote terraform plan and apply phases. -var planChangedErrFmt = `Plan generated during apply phase did not match plan generated during plan phase. -Aborting apply. - -Expected Plan: - -%s -************************************************** - -Actual Plan: - -%s -************************************************** - -This likely occurred because someone applied a change to this state in-between -your plan and apply commands. -To resolve, re-run plan.` - -// waitingForConfirmation is what is printed during a remote apply when -// terraform is waiting for confirmation to apply the plan. -var waitingForConfirmation = ` Terraform will perform the actions described above. 
- Only 'yes' will be accepted to approve.` diff --git a/server/legacy/core/runtime/apply_step_runner_internal_test.go b/server/legacy/core/runtime/apply_step_runner_internal_test.go deleted file mode 100644 index 2adfaa597..000000000 --- a/server/legacy/core/runtime/apply_step_runner_internal_test.go +++ /dev/null @@ -1,85 +0,0 @@ -package runtime - -import ( - "testing" - - . "github.com/runatlantis/atlantis/testing" -) - -func TestCleanRemoteOpOutput(t *testing.T) { - cases := []struct { - out string - exp string - }{ - { - ` -Running apply in the remote backend. Output will stream here. Pressing Ctrl-C -will cancel the remote apply if its still pending. If the apply started it -will stop streaming the logs, but will not stop the apply running remotely. - -Preparing the remote apply... - -To view this run in a browser, visit: -https://app.terraform.io/app/lkysow-enterprises/atlantis-tfe-test-dir2/runs/run-BCzC79gMDNmGU76T - -Waiting for the plan to start... - -Terraform v0.11.11 - -Configuring remote state backend... -Initializing Terraform configuration... -2019/02/27 21:47:23 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/d161c1b -Refreshing Terraform state in-memory prior to plan... -The refreshed state will be used to calculate this plan, but will not be -persisted to local or remote state storage. - -null_resource.dir2[1]: Refreshing state... (ID: 8554368366766418126) -null_resource.dir2: Refreshing state... (ID: 8492616078576984857) - ------------------------------------------------------------------------- - -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.dir2[1] - - -Plan: 0 to add, 0 to change, 1 to destroy. - -Do you want to perform these actions in workspace "atlantis-tfe-test-dir2"? - Terraform will perform the actions described above. - Only 'yes' will be accepted to approve. - - Enter a value: -2019/02/27 21:47:36 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/d161c1b -null_resource.dir2[1]: Destroying... (ID: 8554368366766418126) -null_resource.dir2[1]: Destruction complete after 0s - -Apply complete! Resources: 0 added, 0 changed, 1 destroyed. -`, - `2019/02/27 21:47:36 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/d161c1b -null_resource.dir2[1]: Destroying... (ID: 8554368366766418126) -null_resource.dir2[1]: Destruction complete after 0s - -Apply complete! Resources: 0 added, 0 changed, 1 destroyed. -`, - }, - { - "nodelim", - "nodelim", - }, - } - - for _, c := range cases { - t.Run(c.exp, func(t *testing.T) { - a := ApplyStepRunner{} - Equals(t, c.exp, a.cleanRemoteApplyOutput(c.out)) - }) - } -} - -// Test: works normally, sends yes, updates run urls -// Test: if plans don't match, sends no diff --git a/server/legacy/core/runtime/apply_step_runner_test.go b/server/legacy/core/runtime/apply_step_runner_test.go deleted file mode 100644 index 25c4ee723..000000000 --- a/server/legacy/core/runtime/apply_step_runner_test.go +++ /dev/null @@ -1,476 +0,0 @@ -package runtime_test - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - "sync" - "testing" - - version "github.com/hashicorp/go-version" - . 
"github.com/petergtz/pegomock" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - matchers2 "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks/matchers" - "github.com/runatlantis/atlantis/server/legacy/events/command" - mocks2 "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - - . "github.com/runatlantis/atlantis/testing" -) - -func TestRun_NoDir(t *testing.T) { - o := runtime.ApplyStepRunner{ - TerraformExecutor: nil, - } - ctx := context.Background() - _, err := o.Run(ctx, command.ProjectContext{ - RepoRelDir: ".", - Workspace: "workspace", - }, nil, "/nonexistent/path", map[string]string(nil)) - ErrEquals(t, "no plan found at path \".\" and workspace \"workspace\"–did you run plan?", err) -} - -func TestRun_NoPlanFile(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - o := runtime.ApplyStepRunner{ - TerraformExecutor: nil, - } - ctx := context.Background() - _, err := o.Run(ctx, command.ProjectContext{ - RepoRelDir: ".", - Workspace: "workspace", - }, nil, tmpDir, map[string]string(nil)) - ErrEquals(t, "no plan found at path \".\" and workspace \"workspace\"–did you run plan?", err) -} - -func TestRun_Success(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, nil, 0600) - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - RequestCtx: context.TODO(), - } - Ok(t, err) - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - o := runtime.ApplyStepRunner{ - TerraformExecutor: terraform, - } - - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := o.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, []string{"apply", "-input=false", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), nil, "workspace") - _, err = os.Stat(planPath) - Assert(t, os.IsNotExist(err), "planfile should be deleted") -} - -func TestRun_AppliesCorrectProjectPlan(t *testing.T) { - // When running for a project, the planfile has a different name. 
- tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "projectname-default.tfplan") - err := os.WriteFile(planPath, nil, 0600) - - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "default", - RepoRelDir: ".", - ProjectName: "projectname", - EscapedCommentArgs: []string{"comment", "args"}, - RequestCtx: context.TODO(), - } - Ok(t, err) - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - o := runtime.ApplyStepRunner{ - TerraformExecutor: terraform, - } - - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := o.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, []string{"apply", "-input=false", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), nil, "default") - _, err = os.Stat(planPath) - Assert(t, os.IsNotExist(err), "planfile should be deleted") -} - -func TestRun_UsesConfiguredTFVersion(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, nil, 0600) - Ok(t, err) - - logger := logging.NewNoopCtxLogger(t) - tfVersion, _ := version.NewVersion("0.11.0") - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: tfVersion, - Log: logger, - RequestCtx: context.TODO(), - } - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - o := runtime.ApplyStepRunner{ - TerraformExecutor: terraform, - } - - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := o.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, []string{"apply", "-input=false", "extra", "args", "comment", "args", fmt.Sprintf("%q", planPath)}, map[string]string(nil), tfVersion, "workspace") - _, err = os.Stat(planPath) - Assert(t, os.IsNotExist(err), "planfile should be deleted") -} - -// Apply ignores the -target flag when used with a planfile so we should give -// an error if it's being used with -target. -func TestRun_UsingTarget(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - cases := []struct { - commentFlags []string - extraArgs []string - expErr bool - }{ - { - commentFlags: []string{"-target", "mytarget"}, - expErr: true, - }, - { - commentFlags: []string{"-target=mytarget"}, - expErr: true, - }, - { - extraArgs: []string{"-target", "mytarget"}, - expErr: true, - }, - { - extraArgs: []string{"-target=mytarget"}, - expErr: true, - }, - { - commentFlags: []string{"-target", "mytarget"}, - extraArgs: []string{"-target=mytarget"}, - expErr: true, - }, - // Test false positives. 
- { - commentFlags: []string{"-targethahagotcha"}, - expErr: false, - }, - { - extraArgs: []string{"-targethahagotcha"}, - expErr: false, - }, - { - commentFlags: []string{"-targeted=weird"}, - expErr: false, - }, - { - extraArgs: []string{"-targeted=weird"}, - expErr: false, - }, - } - - RegisterMockTestingT(t) - - for _, c := range cases { - descrip := fmt.Sprintf("comments flags: %s extra args: %s", - strings.Join(c.commentFlags, ", "), strings.Join(c.extraArgs, ", ")) - t.Run(descrip, func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, nil, 0600) - Ok(t, err) - terraform := mocks.NewMockClient() - step := runtime.ApplyStepRunner{ - TerraformExecutor: terraform, - } - - ctx := context.Background() - output, err := step.Run(ctx, command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: c.commentFlags, - RequestCtx: context.TODO(), - }, c.extraArgs, tmpDir, map[string]string(nil)) - Equals(t, "", output) - if c.expErr { - ErrEquals(t, "cannot run apply with -target because we are applying an already generated plan. Instead, run -target with atlantis plan", err) - } else { - Ok(t, err) - } - }) - } -} - -// Test that apply works for remote applies. -func TestRun_RemoteApply_Success(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - planFileContents := ` -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.` - err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) - Ok(t, err) - - RegisterMockTestingT(t) - tfOut := fmt.Sprintf(preConfirmOutFmt, planFileContents) + postConfirmOut - tfExec := &remoteApplyMock{LinesToSend: tfOut, DoneCh: make(chan bool)} - updater := mocks2.NewMockVCSStatusUpdater() - o := runtime.ApplyStepRunner{ - AsyncTFExec: tfExec, - VCSStatusUpdater: updater, - } - tfVersion, _ := version.NewVersion("0.11.0") - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: tfVersion, - RequestCtx: context.TODO(), - } - output, err := o.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - <-tfExec.DoneCh - - Ok(t, err) - Equals(t, "yes\n", tfExec.PassedInput) - Equals(t, ` -2019/02/27 21:47:36 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/d161c1b -null_resource.dir2[1]: Destroying... (ID: 8554368366766418126) -null_resource.dir2[1]: Destruction complete after 0s - -Apply complete! Resources: 0 added, 0 changed, 1 destroyed. -`, output) - - Equals(t, []string{"apply", "-input=false", "extra", "args", "comment", "args"}, tfExec.CalledArgs) - _, err = os.Stat(planPath) - Assert(t, os.IsNotExist(err), "planfile should be deleted") - - // Check that the status was updated with the run url. 
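// runRemoteApply sets a pending status as soon as the run URL appears in the streamed output, then a success (or failed) status once the apply completes.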
- runURL := "https://app.terraform.io/app/lkysow-enterprises/atlantis-tfe-test-dir2/runs/run-PiDsRYKGcerTttV2" - updater.VerifyWasCalledOnce().UpdateProject(ctx, prjCtx, command.Apply, models.PendingVCSStatus, runURL, "") - updater.VerifyWasCalledOnce().UpdateProject(ctx, prjCtx, command.Apply, models.SuccessVCSStatus, runURL, "") -} - -// Test that if the plan is different, we error out. -func TestRun_RemoteApply_PlanChanged(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - planFileContents := ` -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.` - err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) - Ok(t, err) - - RegisterMockTestingT(t) - tfOut := fmt.Sprintf(preConfirmOutFmt, "not the expected plan!") + noConfirmationOut - tfExec := &remoteApplyMock{ - LinesToSend: tfOut, - Err: errors.New("exit status 1"), - DoneCh: make(chan bool), - } - o := runtime.ApplyStepRunner{ - AsyncTFExec: tfExec, - VCSStatusUpdater: mocks2.NewMockVCSStatusUpdater(), - } - tfVersion, _ := version.NewVersion("0.11.0") - - ctx := context.Background() - output, err := o.Run(ctx, command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: tfVersion, - RequestCtx: context.TODO(), - }, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - <-tfExec.DoneCh - ErrEquals(t, `Plan generated during apply phase did not match plan generated during plan phase. -Aborting apply. - -Expected Plan: - -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy. -************************************************** - -Actual Plan: - -not the expected plan! -************************************************** - -This likely occurred because someone applied a change to this state in-between -your plan and apply commands. -To resolve, re-run plan.`, err) - Equals(t, "", output) - Equals(t, "no\n", tfExec.PassedInput) - - // Planfile should not be deleted. - _, err = os.Stat(planPath) - Ok(t, err) -} - -type remoteApplyMock struct { - // LinesToSend will be sent on the channel. - LinesToSend string - // Err will be sent on the channel after all LinesToSend. - Err error - // CalledArgs is what args we were called with. - CalledArgs []string - // PassedInput is set to the last string passed to our input channel. - PassedInput string - // DoneCh callers should wait on the done channel to ensure we're done. - DoneCh chan bool -} - -func (r *remoteApplyMock) RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) <-chan helpers.Line { - in := make(chan string) - - defer close(in) - - return r.RunCommandAsyncWithInput(ctx, prjCtx, path, args, envs, v, workspace, in) -} - -// RunCommandAsync fakes out running terraform async. 
-func (r *remoteApplyMock) RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string, input <-chan string) <-chan helpers.Line { - r.CalledArgs = args - - out := make(chan helpers.Line) - - // We use a wait group to ensure our sending and receiving routines have - // completed. - wg := new(sync.WaitGroup) - wg.Add(2) - go func() { - wg.Wait() - // When they're done, we signal the done channel. - r.DoneCh <- true - }() - - // Asynchronously process input. - go func() { - inLine := <-input - r.PassedInput = inLine - wg.Done() - }() - - // Asynchronously send the lines we're supposed to. - go func() { - for _, line := range strings.Split(r.LinesToSend, "\n") { - out <- helpers.Line{Line: line} - } - if r.Err != nil { - out <- helpers.Line{Err: r.Err} - } - close(out) - wg.Done() - }() - return out -} - -var preConfirmOutFmt = ` -Running apply in the remote backend. Output will stream here. Pressing Ctrl-C -will cancel the remote apply if its still pending. If the apply started it -will stop streaming the logs, but will not stop the apply running remotely. - -Preparing the remote apply... - -To view this run in a browser, visit: -https://app.terraform.io/app/lkysow-enterprises/atlantis-tfe-test-dir2/runs/run-PiDsRYKGcerTttV2 - -Waiting for the plan to start... - -Terraform v0.11.11 - -Configuring remote state backend... -Initializing Terraform configuration... -2019/02/27 21:50:44 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/d161c1b -Refreshing Terraform state in-memory prior to plan... -The refreshed state will be used to calculate this plan, but will not be -persisted to local or remote state storage. - -null_resource.dir2[0]: Refreshing state... (ID: 8492616078576984857) - ------------------------------------------------------------------------- -%s - -Do you want to perform these actions in workspace "atlantis-tfe-test-dir2"? - Terraform will perform the actions described above. - Only 'yes' will be accepted to approve. - - Enter a value: ` - -var postConfirmOut = ` - -2019/02/27 21:47:36 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/d161c1b -null_resource.dir2[1]: Destroying... (ID: 8554368366766418126) -null_resource.dir2[1]: Destruction complete after 0s - -Apply complete! Resources: 0 added, 0 changed, 1 destroyed. -` - -var noConfirmationOut = ` - -Error: Apply discarded. -` diff --git a/server/legacy/core/runtime/cache/local.go b/server/legacy/core/runtime/cache/local.go deleted file mode 100644 index c4ea653e4..000000000 --- a/server/legacy/core/runtime/cache/local.go +++ /dev/null @@ -1,23 +0,0 @@ -package cache - -import ( - "os/exec" - - "github.com/hashicorp/go-version" -) - -func NewLocalBinaryCache(binaryName string) ExecutionVersionCache { - return &localFS{ - binaryName: binaryName, - } -} - -// LocalFS is a basic implementation that just looks up -// the binary in the path. Primarily used for testing. -type localFS struct { - binaryName string -} - -func (m *localFS) Get(key *version.Version) (string, error) { - return exec.LookPath(m.binaryName) -} diff --git a/server/legacy/core/runtime/cache/mocks/matchers/ptr_to_go_version_version.go b/server/legacy/core/runtime/cache/mocks/matchers/ptr_to_go_version_version.go deleted file mode 100644 index 06fb461ce..000000000 --- a/server/legacy/core/runtime/cache/mocks/matchers/ptr_to_go_version_version.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - go_version "github.com/hashicorp/go-version" - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyPtrToGoVersionVersion() *go_version.Version { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*go_version.Version))(nil)).Elem())) - var nullValue *go_version.Version - return nullValue -} - -func EqPtrToGoVersionVersion(value *go_version.Version) *go_version.Version { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *go_version.Version - return nullValue -} diff --git a/server/legacy/core/runtime/cache/mocks/mock_key_serializer.go b/server/legacy/core/runtime/cache/mocks/mock_key_serializer.go deleted file mode 100644 index dbfb03b88..000000000 --- a/server/legacy/core/runtime/cache/mocks/mock_key_serializer.go +++ /dev/null @@ -1,109 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime/cache (interfaces: KeySerializer) - -package mocks - -import ( - go_version "github.com/hashicorp/go-version" - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockKeySerializer struct { - fail func(message string, callerSkip ...int) -} - -func NewMockKeySerializer(options ...pegomock.Option) *MockKeySerializer { - mock := &MockKeySerializer{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockKeySerializer) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockKeySerializer) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockKeySerializer) Serialize(key *go_version.Version) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockKeySerializer().") - } - params := []pegomock.Param{key} - result := pegomock.GetGenericMockFrom(mock).Invoke("Serialize", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockKeySerializer) VerifyWasCalledOnce() *VerifierMockKeySerializer { - return &VerifierMockKeySerializer{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockKeySerializer) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockKeySerializer { - return &VerifierMockKeySerializer{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockKeySerializer) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockKeySerializer { - return &VerifierMockKeySerializer{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockKeySerializer) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockKeySerializer { - return &VerifierMockKeySerializer{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockKeySerializer struct { - mock *MockKeySerializer - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockKeySerializer) Serialize(key *go_version.Version) *MockKeySerializer_Serialize_OngoingVerification { - params := []pegomock.Param{key} - methodInvocations := 
pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Serialize", params, verifier.timeout) - return &MockKeySerializer_Serialize_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockKeySerializer_Serialize_OngoingVerification struct { - mock *MockKeySerializer - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockKeySerializer_Serialize_OngoingVerification) GetCapturedArguments() *go_version.Version { - key := c.GetAllCapturedArguments() - return key[len(key)-1] -} - -func (c *MockKeySerializer_Serialize_OngoingVerification) GetAllCapturedArguments() (_param0 []*go_version.Version) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*go_version.Version) - } - } - return -} diff --git a/server/legacy/core/runtime/cache/mocks/mock_version_path.go b/server/legacy/core/runtime/cache/mocks/mock_version_path.go deleted file mode 100644 index ddaf381e1..000000000 --- a/server/legacy/core/runtime/cache/mocks/mock_version_path.go +++ /dev/null @@ -1,109 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime/cache (interfaces: ExecutionVersionCache) - -package mocks - -import ( - go_version "github.com/hashicorp/go-version" - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockExecutionVersionCache struct { - fail func(message string, callerSkip ...int) -} - -func NewMockExecutionVersionCache(options ...pegomock.Option) *MockExecutionVersionCache { - mock := &MockExecutionVersionCache{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockExecutionVersionCache) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockExecutionVersionCache) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockExecutionVersionCache) Get(key *go_version.Version) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockExecutionVersionCache().") - } - params := []pegomock.Param{key} - result := pegomock.GetGenericMockFrom(mock).Invoke("Get", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockExecutionVersionCache) VerifyWasCalledOnce() *VerifierMockExecutionVersionCache { - return &VerifierMockExecutionVersionCache{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockExecutionVersionCache) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockExecutionVersionCache { - return &VerifierMockExecutionVersionCache{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockExecutionVersionCache) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockExecutionVersionCache { - return &VerifierMockExecutionVersionCache{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockExecutionVersionCache) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockExecutionVersionCache { - return &VerifierMockExecutionVersionCache{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockExecutionVersionCache struct { - mock *MockExecutionVersionCache - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockExecutionVersionCache) Get(key *go_version.Version) *MockExecutionVersionCache_Get_OngoingVerification { - params := []pegomock.Param{key} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Get", params, verifier.timeout) - return &MockExecutionVersionCache_Get_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockExecutionVersionCache_Get_OngoingVerification struct { - mock *MockExecutionVersionCache - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockExecutionVersionCache_Get_OngoingVerification) GetCapturedArguments() *go_version.Version { - key := c.GetAllCapturedArguments() - return key[len(key)-1] -} - -func (c *MockExecutionVersionCache_Get_OngoingVerification) GetAllCapturedArguments() (_param0 []*go_version.Version) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*go_version.Version) - } - } - return -} diff --git a/server/legacy/core/runtime/cache/version_path.go b/server/legacy/core/runtime/cache/version_path.go deleted file mode 100644 index 4b13b94ef..000000000 --- a/server/legacy/core/runtime/cache/version_path.go +++ /dev/null @@ -1,129 +0,0 @@ -package cache - -import ( - "fmt" - "sync" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_version_path.go ExecutionVersionCache -//go:generate pegomock generate -m 
--use-experimental-model-gen --package mocks -o mocks/mock_key_serializer.go KeySerializer - -type ExecutionVersionCache interface { - Get(key *version.Version) (string, error) -} - -type KeySerializer interface { - Serialize(key *version.Version) (string, error) -} - -type DefaultDiskLookupKeySerializer struct { - binaryName string -} - -func (s *DefaultDiskLookupKeySerializer) Serialize(key *version.Version) (string, error) { - return fmt.Sprintf("%s%s", s.binaryName, key.Original()), nil -} - -// ExecutionVersionDiskLayer is a cache layer which attempts to find the version on disk, -// before calling the configured loading function. -type ExecutionVersionDiskLayer struct { - versionRootDir models.FilePath - exec models.Exec - keySerializer KeySerializer - loader func(v *version.Version, destPath string) (models.FilePath, error) - binaryName string -} - -// Gets a path from cache -func (v *ExecutionVersionDiskLayer) Get(key *version.Version) (string, error) { - binaryVersion, err := v.keySerializer.Serialize(key) - - if err != nil { - return "", errors.Wrapf(err, "serializing key for disk lookup") - } - - // first check for the binary in our path - path, err := v.exec.LookPath(binaryVersion) - - if err == nil { - return path, nil - } - - // if the binary is not in our path, let's look in the version root directory - binaryPath := v.versionRootDir.Join(binaryVersion) - - // if the binary doesn't exist there, we need to load it. - if binaryPath.NotExists() { - // load it into a directory first and then sym link it to the serialized key aka binary version - loaderPath := v.versionRootDir.Join(v.binaryName, "versions", key.Original()) - - loadedBinary, err := v.loader(key, loaderPath.Resolve()) - - if err != nil { - return "", errors.Wrapf(err, "loading %s", loaderPath) - } - - binaryPath, err = loadedBinary.Symlink(binaryPath.Resolve()) - - if err != nil { - return "", errors.Wrapf(err, "linking %s to %s", loaderPath, loadedBinary) - } - } - - return binaryPath.Resolve(), nil -} - -// ExecutionVersionMemoryLayer is an in-memory cache which delegates to a disk layer -// if a version's path doesn't exist yet. 
-type ExecutionVersionMemoryLayer struct { - // RWMutex allows us to have separation between reader locks/writer locks which is great - // since writing of data shouldn't happen too often - lock sync.RWMutex - diskLayer ExecutionVersionCache - cache map[string]string -} - -func (v *ExecutionVersionMemoryLayer) Get(key *version.Version) (string, error) { - // If we need to we can rip this out into a KeySerializer impl, for now this - // seems overkill - serializedKey := key.String() - - v.lock.RLock() - _, ok := v.cache[serializedKey] - v.lock.RUnlock() - - if !ok { - v.lock.Lock() - defer v.lock.Unlock() - value, err := v.diskLayer.Get(key) - - if err != nil { - return "", errors.Wrapf(err, "fetching %s from cache", serializedKey) - } - v.cache[serializedKey] = value - } - return v.cache[serializedKey], nil -} - -func NewExecutionVersionLayeredLoadingCache( - binaryName string, - versionRootDir string, - loader func(v *version.Version, destPath string) (models.FilePath, error), -) ExecutionVersionCache { - diskLayer := &ExecutionVersionDiskLayer{ - exec: models.LocalExec{}, - versionRootDir: models.LocalFilePath(versionRootDir), - keySerializer: &DefaultDiskLookupKeySerializer{binaryName: binaryName}, - loader: loader, - binaryName: binaryName, - } - - return &ExecutionVersionMemoryLayer{ - diskLayer: diskLayer, - cache: make(map[string]string), - } -} diff --git a/server/legacy/core/runtime/cache/version_path_test.go b/server/legacy/core/runtime/cache/version_path_test.go deleted file mode 100644 index 0713e12f6..000000000 --- a/server/legacy/core/runtime/cache/version_path_test.go +++ /dev/null @@ -1,239 +0,0 @@ -package cache - -import ( - "errors" - "path/filepath" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - cache_mocks "github.com/runatlantis/atlantis/server/legacy/core/runtime/cache/mocks" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - models_mocks "github.com/runatlantis/atlantis/server/legacy/core/runtime/models/mocks" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestExecutionVersionDiskLayer(t *testing.T) { - binaryVersion := "bin1.0" - binaryName := "bin" - - expectedPath := "some/path/bin1.0" - versionInput, _ := version.NewVersion("1.0") - - RegisterMockTestingT(t) - - mockFilePath := models_mocks.NewMockFilePath() - mockExec := models_mocks.NewMockExec() - mockSerializer := cache_mocks.NewMockKeySerializer() - - t.Run("serializer error", func(t *testing.T) { - subject := &ExecutionVersionDiskLayer{ - versionRootDir: mockFilePath, - exec: mockExec, - loader: func(v *version.Version, destPath string) (models.FilePath, error) { - if destPath == expectedPath && v == versionInput { - return models.LocalFilePath(filepath.Join(destPath, "bin")), nil - } - - t.Fatalf("unexpected inputs to loader") - - return models.LocalFilePath(""), nil - }, - keySerializer: mockSerializer, - } - - When(mockSerializer.Serialize(versionInput)).ThenReturn("", errors.New("serializer error")) - When(mockExec.LookPath(binaryVersion)).ThenReturn(expectedPath, nil) - - _, err := subject.Get(versionInput) - - Assert(t, err != nil, "err is expected") - - mockFilePath.VerifyWasCalled(Never()).Join(AnyString()) - mockFilePath.VerifyWasCalled(Never()).NotExists() - mockFilePath.VerifyWasCalled(Never()).Resolve() - mockExec.VerifyWasCalled(Never()).LookPath(AnyString()) - }) - - t.Run("finds in path", func(t *testing.T) { - subject := &ExecutionVersionDiskLayer{ - versionRootDir: mockFilePath, - exec: mockExec, - loader: func(v *version.Version, destPath string) (models.FilePath, error) { - t.Fatalf("shouldn't be called") - - return models.LocalFilePath(""), nil - }, - keySerializer: mockSerializer, - } - - When(mockSerializer.Serialize(versionInput)).ThenReturn(binaryVersion, nil) - When(mockExec.LookPath(binaryVersion)).ThenReturn(expectedPath, nil) - - resultPath, err := subject.Get(versionInput) - - Ok(t, err) - - Assert(t, resultPath == expectedPath, "path is expected") - - mockFilePath.VerifyWasCalled(Never()).Join(AnyString()) - mockFilePath.VerifyWasCalled(Never()).Resolve() - mockFilePath.VerifyWasCalled(Never()).NotExists() - }) - - t.Run("finds in version root", func(t *testing.T) { - subject := &ExecutionVersionDiskLayer{ - versionRootDir: mockFilePath, - exec: mockExec, - loader: func(v *version.Version, destPath string) (models.FilePath, error) { - t.Fatalf("shouldn't be called") - - return models.LocalFilePath(""), nil - }, - keySerializer: mockSerializer, - } - - When(mockSerializer.Serialize(versionInput)).ThenReturn(binaryVersion, nil) - When(mockExec.LookPath(binaryVersion)).ThenReturn("", errors.New("error")) - - When(mockFilePath.Join(binaryVersion)).ThenReturn(mockFilePath) - - When(mockFilePath.NotExists()).ThenReturn(false) - When(mockFilePath.Resolve()).ThenReturn(expectedPath) - - resultPath, err := subject.Get(versionInput) - - Ok(t, err) - - Assert(t, resultPath == expectedPath, "path is expected") - }) - - t.Run("loads version", func(t *testing.T) { - mockLoaderPath := models_mocks.NewMockFilePath() - mockSymlinkPath := models_mocks.NewMockFilePath() - mockLoadedBinaryPath := models_mocks.NewMockFilePath() - expectedLoaderPath := "/some/path/to/binary" - expectedBinaryVersionPath := filepath.Join(expectedPath, binaryVersion) - - subject := &ExecutionVersionDiskLayer{ - versionRootDir: mockFilePath, - exec: mockExec, - loader: func(v *version.Version, destPath string) (models.FilePath, error) { - if destPath == expectedLoaderPath && v == versionInput { - return mockLoadedBinaryPath, nil - } - - 
t.Fatalf("unexpected inputs to loader") - - return models.LocalFilePath(""), nil - }, - binaryName: binaryName, - keySerializer: mockSerializer, - } - - When(mockSerializer.Serialize(versionInput)).ThenReturn(binaryVersion, nil) - When(mockExec.LookPath(binaryVersion)).ThenReturn("", errors.New("error")) - - When(mockFilePath.Join(binaryVersion)).ThenReturn(mockFilePath) - When(mockFilePath.Resolve()).ThenReturn(expectedBinaryVersionPath) - - When(mockFilePath.NotExists()).ThenReturn(true) - - When(mockFilePath.Join(binaryName, "versions", versionInput.Original())).ThenReturn(mockLoaderPath) - - When(mockLoaderPath.Resolve()).ThenReturn(expectedLoaderPath) - When(mockLoadedBinaryPath.Symlink(expectedBinaryVersionPath)).ThenReturn(mockSymlinkPath, nil) - - When(mockSymlinkPath.Resolve()).ThenReturn(expectedPath) - - resultPath, err := subject.Get(versionInput) - - Ok(t, err) - - Assert(t, resultPath == expectedPath, "path is expected") - }) - - t.Run("loader error", func(t *testing.T) { - mockLoaderPath := models_mocks.NewMockFilePath() - expectedLoaderPath := "/some/path/to/binary" - subject := &ExecutionVersionDiskLayer{ - versionRootDir: mockFilePath, - exec: mockExec, - loader: func(v *version.Version, destPath string) (models.FilePath, error) { - if destPath == expectedLoaderPath && v == versionInput { - return models.LocalFilePath(""), errors.New("error") - } - - t.Fatalf("unexpected inputs to loader") - - return models.LocalFilePath(""), nil - }, - keySerializer: mockSerializer, - binaryName: binaryName, - } - - When(mockSerializer.Serialize(versionInput)).ThenReturn(binaryVersion, nil) - When(mockExec.LookPath(binaryVersion)).ThenReturn("", errors.New("error")) - - When(mockFilePath.Join(binaryVersion)).ThenReturn(mockFilePath) - - When(mockFilePath.NotExists()).ThenReturn(true) - - When(mockFilePath.Join(binaryName, "versions", versionInput.Original())).ThenReturn(mockLoaderPath) - - When(mockLoaderPath.Resolve()).ThenReturn(expectedLoaderPath) - - _, err := subject.Get(versionInput) - - Assert(t, err != nil, "path is expected") - }) -} - -func TestExecutionVersionMemoryLayer(t *testing.T) { - expectedPath := "some/path" - versionInput, _ := version.NewVersion("1.0") - - RegisterMockTestingT(t) - - mockLayer := cache_mocks.NewMockExecutionVersionCache() - - cache := make(map[string]string) - - subject := &ExecutionVersionMemoryLayer{ - diskLayer: mockLayer, - cache: cache, - } - - t.Run("exists in cache", func(t *testing.T) { - cache[versionInput.String()] = expectedPath - - resultPath, err := subject.Get(versionInput) - - Ok(t, err) - - Assert(t, resultPath == expectedPath, "path is expected") - }) - - t.Run("disk layer error", func(t *testing.T) { - delete(cache, versionInput.String()) - - When(mockLayer.Get(versionInput)).ThenReturn("", errors.New("error")) - - _, err := subject.Get(versionInput) - - Assert(t, err != nil, "error is expected") - }) - - t.Run("disk layer success", func(t *testing.T) { - delete(cache, versionInput.String()) - - When(mockLayer.Get(versionInput)).ThenReturn(expectedPath, nil) - - resultPath, err := subject.Get(versionInput) - - Ok(t, err) - - Assert(t, resultPath == expectedPath, "path is expected") - Assert(t, cache[versionInput.String()] == resultPath, "path is cached") - }) -} diff --git a/server/legacy/core/runtime/env_step_runner.go b/server/legacy/core/runtime/env_step_runner.go deleted file mode 100644 index 9c7b2c198..000000000 --- a/server/legacy/core/runtime/env_step_runner.go +++ /dev/null @@ -1,28 +0,0 @@ -package runtime - -import ( - 
"context" - "strings" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -// EnvStepRunner set environment variables. -type EnvStepRunner struct { - RunStepRunner *RunStepRunner -} - -// Run runs the env step command. -// value is the value for the environment variable. If set this is returned as -// the value. Otherwise command is run and its output is the value returned. -func (r *EnvStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, command string, value string, path string, envs map[string]string) (string, error) { - if value != "" { - return value, nil - } - res, err := r.RunStepRunner.Run(ctx, prjCtx, command, path, envs) - // Trim newline from res to support running `echo env_value` which has - // a newline. We don't recommend users run echo -n env_value to remove the - // newline because -n doesn't work in the sh shell which is what we use - // to run commands. - return strings.TrimSuffix(res, "\n"), err -} diff --git a/server/legacy/core/runtime/env_step_runner_test.go b/server/legacy/core/runtime/env_step_runner_test.go deleted file mode 100644 index cf50bb656..000000000 --- a/server/legacy/core/runtime/env_step_runner_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package runtime_test - -import ( - "context" - "testing" - - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - - . "github.com/petergtz/pegomock" - . "github.com/runatlantis/atlantis/testing" -) - -func TestEnvStepRunner_Run(t *testing.T) { - cases := []struct { - Command string - Value string - ProjectName string - ExpValue string - ExpErr string - }{ - { - Command: "echo 123", - ExpValue: "123", - }, - { - Value: "test", - ExpValue: "test", - }, - { - Command: "echo 321", - Value: "test", - ExpValue: "test", - }, - } - RegisterMockTestingT(t) - tfClient := mocks.NewMockClient() - tfVersion, err := version.NewVersion("0.12.0") - Ok(t, err) - runStepRunner := runtime.RunStepRunner{ - TerraformExecutor: tfClient, - DefaultTFVersion: tfVersion, - } - envRunner := runtime.EnvStepRunner{ - RunStepRunner: &runStepRunner, - } - for _, c := range cases { - t.Run(c.Command, func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - ctx := context.Background() - prjCtx := command.ProjectContext{ - BaseRepo: models.Repo{ - Name: "basename", - Owner: "baseowner", - }, - HeadRepo: models.Repo{ - Name: "headname", - Owner: "headowner", - }, - Pull: models.PullRequest{ - Num: 2, - HeadBranch: "add-feat", - BaseBranch: "master", - Author: "acme", - }, - User: models.User{ - Username: "acme-user", - }, - Log: logging.NewNoopCtxLogger(t), - Workspace: "myworkspace", - RepoRelDir: "mydir", - TerraformVersion: tfVersion, - ProjectName: c.ProjectName, - RequestCtx: context.TODO(), - } - value, err := envRunner.Run(ctx, prjCtx, c.Command, c.Value, tmpDir, map[string]string(nil)) - if c.ExpErr != "" { - ErrContains(t, c.ExpErr, err) - return - } - Ok(t, err) - Equals(t, c.ExpValue, value) - }) - } -} diff --git a/server/legacy/core/runtime/executor.go b/server/legacy/core/runtime/executor.go deleted file mode 100644 index 7618c78fd..000000000 --- a/server/legacy/core/runtime/executor.go +++ /dev/null @@ -1,19 +0,0 @@ -package runtime - -import ( - "context" - - version "github.com/hashicorp/go-version" - 
"github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" -) - -// Executor runs an executable with provided environment variables and arguments and returns stdout -type Executor interface { - Run(ctx context.Context, prjCtx command.ProjectContext, executablePath string, envs map[string]string, workdir string, extraArgs []string) (string, error) -} - -// ExecutorVersionEnsurer ensures a given version exists and outputs a path to the executable -type ExecutorVersionEnsurer interface { - EnsureExecutorVersion(log logging.Logger, v *version.Version) (string, error) -} diff --git a/server/legacy/core/runtime/init_step_runner.go b/server/legacy/core/runtime/init_step_runner.go deleted file mode 100644 index 7b4decbec..000000000 --- a/server/legacy/core/runtime/init_step_runner.go +++ /dev/null @@ -1,67 +0,0 @@ -package runtime - -import ( - "context" - "fmt" - "os" - "path/filepath" - - version "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/runtime/common" -) - -// InitStep runs `terraform init`. -type InitStepRunner struct { - TerraformExecutor TerraformExec - DefaultTFVersion *version.Version -} - -func (i *InitStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - lockFileName := ".terraform.lock.hcl" - terraformLockfilePath := filepath.Join(path, lockFileName) - terraformLockFileTracked, err := common.IsFileTracked(path, lockFileName) - if err != nil { - prjCtx.Log.WarnContext(prjCtx.RequestCtx, fmt.Sprintf("Error checking if %s is tracked in %s", lockFileName, path)) - } - // If .terraform.lock.hcl is not tracked in git and it exists prior to init - // delete it as it probably has been created by a previous run of - // terraform init - if common.FileExists(terraformLockfilePath) && !terraformLockFileTracked { - delErr := os.Remove(terraformLockfilePath) - if delErr != nil { - prjCtx.Log.InfoContext(prjCtx.RequestCtx, fmt.Sprintf("Error Deleting `%s`", lockFileName)) - } - } - - tfVersion := i.DefaultTFVersion - if prjCtx.TerraformVersion != nil { - tfVersion = prjCtx.TerraformVersion - } - - terraformInitVerb := []string{"init"} - terraformInitArgs := []string{"-input=false"} - - // If we're running < 0.9 we have to use `terraform get` instead of `init`. - if MustConstraint("< 0.9.0").Check(tfVersion) { - prjCtx.Log.InfoContext(prjCtx.RequestCtx, fmt.Sprintf("running terraform version %s so will use `get` instead of `init`", tfVersion)) - terraformInitVerb = []string{"get"} - terraformInitArgs = []string{} - } - - if MustConstraint("< 0.14.0").Check(tfVersion) || !common.FileExists(terraformLockfilePath) { - terraformInitArgs = append(terraformInitArgs, "-upgrade") - } - - finalArgs := common.DeDuplicateExtraArgs(terraformInitArgs, extraArgs) - - terraformInitCmd := append(terraformInitVerb, finalArgs...) - - out, err := i.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, path, terraformInitCmd, envs, tfVersion, prjCtx.Workspace) - // Only include the init output if there was an error. Otherwise it's - // unnecessary and lengthens the comment. 
- if err != nil { - return out, err - } - return "", nil -} diff --git a/server/legacy/core/runtime/init_step_runner_test.go b/server/legacy/core/runtime/init_step_runner_test.go deleted file mode 100644 index 8def8fc18..000000000 --- a/server/legacy/core/runtime/init_step_runner_test.go +++ /dev/null @@ -1,302 +0,0 @@ -package runtime_test - -import ( - "context" - "os" - "os/exec" - "path/filepath" - "strings" - "testing" - - version "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/pkg/errors" - - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - matchers2 "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks/matchers" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -func TestRun_UsesGetOrInitForRightVersion(t *testing.T) { - RegisterMockTestingT(t) - cases := []struct { - version string - expCmd string - }{ - { - "0.8.9", - "get", - }, - { - "0.9.0", - "init", - }, - { - "0.9.1", - "init", - }, - { - "0.10.0", - "init", - }, - } - - for _, c := range cases { - t.Run(c.version, func(t *testing.T) { - terraform := mocks.NewMockClient() - - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - RequestCtx: context.TODO(), - } - - tfVersion, _ := version.NewVersion(c.version) - iso := runtime.InitStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - - output, err := iso.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - // When there is no error, should not return init output to PR. - Equals(t, "", output) - - // If using init then we specify -input=false but not for get. - expArgs := []string{c.expCmd, "-input=false", "-upgrade", "extra", "args"} - if c.expCmd == "get" { - expArgs = []string{c.expCmd, "-upgrade", "extra", "args"} - } - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, "/path", expArgs, map[string]string(nil), tfVersion, "workspace") - }) - } -} - -func TestRun_ShowInitOutputOnError(t *testing.T) { - // If there was an error during init then we want the output to be returned. - RegisterMockTestingT(t) - tfClient := mocks.NewMockClient() - logger := logging.NewNoopCtxLogger(t) - When(tfClient.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
- ThenReturn("output", errors.New("error")) - - tfVersion, _ := version.NewVersion("0.11.0") - iso := runtime.InitStepRunner{ - TerraformExecutor: tfClient, - DefaultTFVersion: tfVersion, - } - - ctx := context.Background() - output, err := iso.Run(ctx, command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - RequestCtx: context.TODO(), - }, nil, "/path", map[string]string(nil)) - ErrEquals(t, "error", err) - Equals(t, "output", output) -} - -func TestRun_InitKeepsUpgradeFlagIfLockFileNotPresent(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - RequestCtx: context.TODO(), - } - - tfVersion, _ := version.NewVersion("0.14.0") - iso := runtime.InitStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - - output, err := iso.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - // When there is no error, should not return init output to PR. - Equals(t, "", output) - - expectedArgs := []string{"init", "-input=false", "-upgrade", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") -} - -func TestRun_InitKeepUpgradeFlagIfLockFilePresentAndTFLessThanPoint14(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - lockFilePath := filepath.Join(tmpDir, ".terraform.lock.hcl") - err := os.WriteFile(lockFilePath, nil, 0600) - Ok(t, err) - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - RequestCtx: context.TODO(), - } - - tfVersion, _ := version.NewVersion("0.13.0") - iso := runtime.InitStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - - output, err := iso.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - // When there is no error, should not return init output to PR. 
- Equals(t, "", output) - - expectedArgs := []string{"init", "-input=false", "-upgrade", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") -} - -func TestRun_InitExtraArgsDeDupe(t *testing.T) { - RegisterMockTestingT(t) - cases := []struct { - description string - extraArgs []string - expectedArgs []string - }{ - { - "No extra args", - []string{}, - []string{"init", "-input=false", "-upgrade"}, - }, - { - "Override -upgrade", - []string{"-upgrade=false"}, - []string{"init", "-input=false", "-upgrade=false"}, - }, - { - "Override -input", - []string{"-input=true"}, - []string{"init", "-input=true", "-upgrade"}, - }, - { - "Override -input and -upgrade", - []string{"-input=true", "-upgrade=false"}, - []string{"init", "-input=true", "-upgrade=false"}, - }, - { - "Non duplicate extra args", - []string{"extra", "args"}, - []string{"init", "-input=false", "-upgrade", "extra", "args"}, - }, - { - "Override upgrade with extra args", - []string{"extra", "args", "-upgrade=false"}, - []string{"init", "-input=false", "-upgrade=false", "extra", "args"}, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - terraform := mocks.NewMockClient() - - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - RequestCtx: context.TODO(), - } - - tfVersion, _ := version.NewVersion("0.10.0") - iso := runtime.InitStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - - output, err := iso.Run(ctx, prjCtx, c.extraArgs, "/path", map[string]string(nil)) - Ok(t, err) - // When there is no error, should not return init output to PR. - Equals(t, "", output) - - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, "/path", c.expectedArgs, map[string]string(nil), tfVersion, "workspace") - }) - } -} - -func TestRun_InitDeletesLockFileIfPresentAndNotTracked(t *testing.T) { - // Initialize the git repo. - repoDir, cleanup := initRepo(t) - defer cleanup() - - lockFilePath := filepath.Join(repoDir, ".terraform.lock.hcl") - err := os.WriteFile(lockFilePath, nil, 0600) - Ok(t, err) - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - - logger := logging.NewNoopCtxLogger(t) - - tfVersion, _ := version.NewVersion("0.14.0") - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - Log: logger, - } - iso := runtime.InitStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - - output, err := iso.Run(ctx, prjCtx, []string{"extra", "args"}, repoDir, map[string]string(nil)) - Ok(t, err) - // When there is no error, should not return init output to PR. 
- Equals(t, "", output) - - expectedArgs := []string{"init", "-input=false", "-upgrade", "extra", "args"} - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, repoDir, expectedArgs, map[string]string(nil), tfVersion, "workspace") -} - -func runCmd(t *testing.T, dir string, name string, args ...string) { - t.Helper() - cpCmd := exec.Command(name, args...) - cpCmd.Dir = dir - cpOut, err := cpCmd.CombinedOutput() - Assert(t, err == nil, "err running %q: %s", strings.Join(append([]string{name}, args...), " "), cpOut) -} - -func initRepo(t *testing.T) (string, func()) { - repoDir, cleanup := TempDir(t) - runCmd(t, repoDir, "git", "init") - runCmd(t, repoDir, "touch", ".gitkeep") - runCmd(t, repoDir, "git", "add", ".gitkeep") - runCmd(t, repoDir, "git", "config", "--local", "user.email", "atlantisbot@runatlantis.io") - runCmd(t, repoDir, "git", "config", "--local", "user.name", "atlantisbot") - runCmd(t, repoDir, "git", "commit", "-m", "initial commit") - runCmd(t, repoDir, "git", "branch", "branch") - return repoDir, cleanup -} diff --git a/server/legacy/core/runtime/minimum_version_step_runner_delegate.go b/server/legacy/core/runtime/minimum_version_step_runner_delegate.go deleted file mode 100644 index a5cda80ab..000000000 --- a/server/legacy/core/runtime/minimum_version_step_runner_delegate.go +++ /dev/null @@ -1,45 +0,0 @@ -package runtime - -import ( - "context" - "fmt" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -// MinimumVersionStepRunnerDelegate ensures that a given step runner can't run unless the command version being used -// is greater than a provided minimum -type MinimumVersionStepRunnerDelegate struct { - minimumVersion *version.Version - defaultTfVersion *version.Version - delegate Runner -} - -func NewMinimumVersionStepRunnerDelegate(minimumVersionStr string, defaultVersion *version.Version, delegate Runner) (Runner, error) { - minimumVersion, err := version.NewVersion(minimumVersionStr) - - if err != nil { - return &MinimumVersionStepRunnerDelegate{}, errors.Wrap(err, "initializing minimum version") - } - - return &MinimumVersionStepRunnerDelegate{ - minimumVersion: minimumVersion, - defaultTfVersion: defaultVersion, - delegate: delegate, - }, nil -} - -func (r *MinimumVersionStepRunnerDelegate) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - tfVersion := r.defaultTfVersion - if prjCtx.TerraformVersion != nil { - tfVersion = prjCtx.TerraformVersion - } - - if tfVersion.LessThan(r.minimumVersion) { - return fmt.Sprintf("Version: %s is unsupported for this step. Minimum version is: %s", tfVersion.String(), r.minimumVersion.String()), nil - } - - return r.delegate.Run(ctx, prjCtx, extraArgs, path, envs) -} diff --git a/server/legacy/core/runtime/minimum_version_step_runner_delegate_test.go b/server/legacy/core/runtime/minimum_version_step_runner_delegate_test.go deleted file mode 100644 index 534f53084..000000000 --- a/server/legacy/core/runtime/minimum_version_step_runner_delegate_test.go +++ /dev/null @@ -1,128 +0,0 @@ -package runtime - -import ( - "context" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestRunMinimumVersionDelegate(t *testing.T) { - RegisterMockTestingT(t) - - mockDelegate := mocks.NewMockRunner() - - tfVersion12, _ := version.NewVersion("0.12.0") - tfVersion11, _ := version.NewVersion("0.11.15") - - // these stay the same for all tests - extraArgs := []string{"extra", "args"} - envs := map[string]string{} - path := "" - - expectedOut := "some valid output from delegate" - - t.Run("default version success", func(t *testing.T) { - subject := &MinimumVersionStepRunnerDelegate{ - defaultTfVersion: tfVersion12, - minimumVersion: tfVersion12, - delegate: mockDelegate, - } - - ctx := context.Background() - prjCtx := command.ProjectContext{} - - When(mockDelegate.Run(ctx, prjCtx, extraArgs, path, envs)).ThenReturn(expectedOut, nil) - - output, err := subject.Run( - ctx, - prjCtx, - extraArgs, - path, - envs, - ) - - Equals(t, expectedOut, output) - Ok(t, err) - }) - - t.Run("prjCtx version success", func(t *testing.T) { - subject := &MinimumVersionStepRunnerDelegate{ - defaultTfVersion: tfVersion11, - minimumVersion: tfVersion12, - delegate: mockDelegate, - } - - ctx := context.Background() - prjCtx := command.ProjectContext{ - TerraformVersion: tfVersion12, - } - - When(mockDelegate.Run(ctx, prjCtx, extraArgs, path, envs)).ThenReturn(expectedOut, nil) - - output, err := subject.Run( - ctx, - prjCtx, - extraArgs, - path, - envs, - ) - - Equals(t, expectedOut, output) - Ok(t, err) - }) - - t.Run("default version failure", func(t *testing.T) { - subject := &MinimumVersionStepRunnerDelegate{ - defaultTfVersion: tfVersion11, - minimumVersion: tfVersion12, - delegate: mockDelegate, - } - - ctx := context.Background() - prjCtx := command.ProjectContext{} - - output, err := subject.Run( - ctx, - prjCtx, - extraArgs, - path, - envs, - ) - - mockDelegate.VerifyWasCalled(Never()) - - Equals(t, "Version: 0.11.15 is unsupported for this step. Minimum version is: 0.12.0", output) - Ok(t, err) - }) - - t.Run("prjCtx version failure", func(t *testing.T) { - subject := &MinimumVersionStepRunnerDelegate{ - defaultTfVersion: tfVersion12, - minimumVersion: tfVersion12, - delegate: mockDelegate, - } - - ctx := context.Background() - prjCtx := command.ProjectContext{ - TerraformVersion: tfVersion11, - } - - output, err := subject.Run( - ctx, - prjCtx, - extraArgs, - path, - envs, - ) - - mockDelegate.VerifyWasCalled(Never()) - - Equals(t, "Version: 0.11.15 is unsupported for this step. Minimum version is: 0.12.0", output) - Ok(t, err) - }) -} diff --git a/server/legacy/core/runtime/mocks/matchers/command_projectcontext.go b/server/legacy/core/runtime/mocks/matchers/command_projectcontext.go deleted file mode 100644 index 8722b7ba3..000000000 --- a/server/legacy/core/runtime/mocks/matchers/command_projectcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyCommandProjectContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func CommandProjectContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/context_context.go b/server/legacy/core/runtime/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/core/runtime/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/logging_logger.go b/server/legacy/core/runtime/mocks/matchers/logging_logger.go deleted file mode 100644 index d43fd90e9..000000000 --- a/server/legacy/core/runtime/mocks/matchers/logging_logger.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingLogger() logging.Logger { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.Logger))(nil)).Elem())) - var nullValue logging.Logger - return nullValue -} - -func EqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func NotEqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func LoggingLoggerThat(matcher pegomock.ArgumentMatcher) logging.Logger { - pegomock.RegisterMatcher(matcher) - var nullValue logging.Logger - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/logging_simplelogging.go b/server/legacy/core/runtime/mocks/matchers/logging_simplelogging.go deleted file mode 100644 index af87b9432..000000000 --- a/server/legacy/core/runtime/mocks/matchers/logging_simplelogging.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" - "reflect" -) - -func AnyLoggingSimpleLogging() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/map_of_string_to_string.go b/server/legacy/core/runtime/mocks/matchers/map_of_string_to_string.go deleted file mode 100644 index 65175de1a..000000000 --- a/server/legacy/core/runtime/mocks/matchers/map_of_string_to_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyMapOfStringToString() map[string]string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(map[string]string))(nil)).Elem())) - var nullValue map[string]string - return nullValue -} - -func EqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} - -func NotEqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} - -func MapOfStringToStringThat(matcher pegomock.ArgumentMatcher) map[string]string { - pegomock.RegisterMatcher(matcher) - var nullValue map[string]string - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/models_preworkflowhookcommandcontext.go b/server/legacy/core/runtime/mocks/matchers/models_preworkflowhookcommandcontext.go deleted file mode 100644 index 4ee9a18bd..000000000 --- a/server/legacy/core/runtime/mocks/matchers/models_preworkflowhookcommandcontext.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
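The generated matcher files in this directory all follow the same shape: register an argument matcher globally and return a zero value so the call can slot into a When or Verify expression. The sketch below shows how they were typically consumed together with a generated mock from the packages removed in this diff; the test itself is illustrative, and the concrete arguments used in verification mirror the style of the init step tests above.

package runtime_test

import (
	"context"
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks"
	"github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks/matchers"
	"github.com/runatlantis/atlantis/server/legacy/events/command"
	. "github.com/runatlantis/atlantis/testing"
)

// Any* matchers make the stub match any invocation; verification can then pass
// concrete values to pin down exactly what was called.
func TestRunnerMatcherUsage(t *testing.T) {
	RegisterMockTestingT(t)
	runner := mocks.NewMockRunner()

	When(runner.Run(
		matchers.AnyContextContext(),
		matchers.AnyCommandProjectContext(),
		AnyStringSlice(),
		AnyString(),
		matchers.AnyMapOfStringToString(),
	)).ThenReturn("stubbed output", nil)

	ctx := context.Background()
	prjCtx := command.ProjectContext{}
	out, err := runner.Run(ctx, prjCtx, []string{"-flag"}, "/path", nil)
	Ok(t, err)
	Equals(t, "stubbed output", out)

	// Verification with concrete arguments, as in the init step tests above.
	runner.VerifyWasCalledOnce().Run(ctx, prjCtx, []string{"-flag"}, "/path", nil)
}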
-package matchers - -import ( - "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" -) - -func AnyModelsPreWorkflowHookCommandContext() models.PreWorkflowHookCommandContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PreWorkflowHookCommandContext))(nil)).Elem())) - var nullValue models.PreWorkflowHookCommandContext - return nullValue -} - -func EqModelsPreWorkflowHookCommandContext(value models.PreWorkflowHookCommandContext) models.PreWorkflowHookCommandContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PreWorkflowHookCommandContext - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/models_projectcommandcontext.go b/server/legacy/core/runtime/mocks/matchers/models_projectcommandcontext.go deleted file mode 100644 index dbde44f15..000000000 --- a/server/legacy/core/runtime/mocks/matchers/models_projectcommandcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsProjectCommandContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func ModelsProjectCommandContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/models_pullrequest.go b/server/legacy/core/runtime/mocks/matchers/models_pullrequest.go deleted file mode 100644 index d3b691438..000000000 --- a/server/legacy/core/runtime/mocks/matchers/models_pullrequest.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" -) - -func AnyModelsPullRequest() models.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequest))(nil)).Elem())) - var nullValue models.PullRequest - return nullValue -} - -func EqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/models_repo.go b/server/legacy/core/runtime/mocks/matchers/models_repo.go deleted file mode 100644 index 0ff69cc6e..000000000 --- a/server/legacy/core/runtime/mocks/matchers/models_repo.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/ptr_to_go_version_version.go b/server/legacy/core/runtime/mocks/matchers/ptr_to_go_version_version.go deleted file mode 100644 index bb596fe3d..000000000 --- a/server/legacy/core/runtime/mocks/matchers/ptr_to_go_version_version.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - go_version "github.com/hashicorp/go-version" -) - -func AnyPtrToGoVersionVersion() *go_version.Version { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*go_version.Version))(nil)).Elem())) - var nullValue *go_version.Version - return nullValue -} - -func EqPtrToGoVersionVersion(value *go_version.Version) *go_version.Version { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *go_version.Version - return nullValue -} - -func NotEqPtrToGoVersionVersion(value *go_version.Version) *go_version.Version { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *go_version.Version - return nullValue -} - -func PtrToGoVersionVersionThat(matcher pegomock.ArgumentMatcher) *go_version.Version { - pegomock.RegisterMatcher(matcher) - var nullValue *go_version.Version - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/ptr_to_logging_simplelogger.go b/server/legacy/core/runtime/mocks/matchers/ptr_to_logging_simplelogger.go deleted file mode 100644 index e7c8b942f..000000000 --- a/server/legacy/core/runtime/mocks/matchers/ptr_to_logging_simplelogger.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyPtrToLoggingSimpleLogger() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqPtrToLoggingSimpleLogger(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/slice_of_string.go b/server/legacy/core/runtime/mocks/matchers/slice_of_string.go deleted file mode 100644 index f9281819d..000000000 --- a/server/legacy/core/runtime/mocks/matchers/slice_of_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfString() []string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) - var nullValue []string - return nullValue -} - -func EqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func NotEqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { - pegomock.RegisterMatcher(matcher) - var nullValue []string - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/matchers/slice_of_valid_step.go b/server/legacy/core/runtime/mocks/matchers/slice_of_valid_step.go deleted file mode 100644 index b8d3c8bde..000000000 --- a/server/legacy/core/runtime/mocks/matchers/slice_of_valid_step.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - valid "github.com/runatlantis/atlantis/server/config/valid" - "reflect" -) - -func AnySliceOfValidStep() []valid.Step { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]valid.Step))(nil)).Elem())) - var nullValue []valid.Step - return nullValue -} - -func EqSliceOfValidStep(value []valid.Step) []valid.Step { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []valid.Step - return nullValue -} diff --git a/server/legacy/core/runtime/mocks/mock_custom_runner.go b/server/legacy/core/runtime/mocks/mock_custom_runner.go deleted file mode 100644 index e84288421..000000000 --- a/server/legacy/core/runtime/mocks/mock_custom_runner.go +++ /dev/null @@ -1,126 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: CustomRunner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockCustomRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockCustomRunner(options ...pegomock.Option) *MockCustomRunner { - mock := &MockCustomRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockCustomRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockCustomRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockCustomRunner) Run(ctx context.Context, prjCtx command.ProjectContext, cmd string, path string, envs map[string]string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockCustomRunner().") - } - params := []pegomock.Param{ctx, prjCtx, cmd, path, envs} - result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockCustomRunner) VerifyWasCalledOnce() *VerifierMockCustomRunner { - return &VerifierMockCustomRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockCustomRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockCustomRunner { - return &VerifierMockCustomRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockCustomRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockCustomRunner { - return &VerifierMockCustomRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockCustomRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockCustomRunner { - return &VerifierMockCustomRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockCustomRunner struct { - mock *MockCustomRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockCustomRunner) Run(ctx context.Context, prjCtx command.ProjectContext, cmd string, path string, envs map[string]string) *MockCustomRunner_Run_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, cmd, path, envs} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockCustomRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCustomRunner_Run_OngoingVerification struct { - mock *MockCustomRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCustomRunner_Run_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, string, string, map[string]string) { - ctx, prjCtx, cmd, path, envs := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], cmd[len(cmd)-1], path[len(path)-1], envs[len(envs)-1] -} - -func (c *MockCustomRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string, _param3 []string, _param4 []map[string]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - _param4 = make([]map[string]string, len(c.methodInvocations)) - for 
u, param := range params[4] { - _param4[u] = param.(map[string]string) - } - } - return -} diff --git a/server/legacy/core/runtime/mocks/mock_env_runner.go b/server/legacy/core/runtime/mocks/mock_env_runner.go deleted file mode 100644 index 0b2d70579..000000000 --- a/server/legacy/core/runtime/mocks/mock_env_runner.go +++ /dev/null @@ -1,130 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: EnvRunner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockEnvRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockEnvRunner(options ...pegomock.Option) *MockEnvRunner { - mock := &MockEnvRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockEnvRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockEnvRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockEnvRunner) Run(ctx context.Context, prjCtx command.ProjectContext, cmd string, value string, path string, envs map[string]string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEnvRunner().") - } - params := []pegomock.Param{ctx, prjCtx, cmd, value, path, envs} - result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockEnvRunner) VerifyWasCalledOnce() *VerifierMockEnvRunner { - return &VerifierMockEnvRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockEnvRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockEnvRunner { - return &VerifierMockEnvRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockEnvRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockEnvRunner { - return &VerifierMockEnvRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockEnvRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockEnvRunner { - return &VerifierMockEnvRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockEnvRunner struct { - mock *MockEnvRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockEnvRunner) Run(ctx context.Context, prjCtx command.ProjectContext, cmd string, value string, path string, envs map[string]string) *MockEnvRunner_Run_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, cmd, value, path, envs} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockEnvRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEnvRunner_Run_OngoingVerification struct { - mock *MockEnvRunner - 
methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEnvRunner_Run_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, string, string, string, map[string]string) { - ctx, prjCtx, cmd, value, path, envs := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], cmd[len(cmd)-1], value[len(value)-1], path[len(path)-1], envs[len(envs)-1] -} - -func (c *MockEnvRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string, _param3 []string, _param4 []string, _param5 []map[string]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) - } - _param5 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(map[string]string) - } - } - return -} diff --git a/server/legacy/core/runtime/mocks/mock_pre_workflows_hook_runner.go b/server/legacy/core/runtime/mocks/mock_pre_workflows_hook_runner.go deleted file mode 100644 index 06552fe33..000000000 --- a/server/legacy/core/runtime/mocks/mock_pre_workflows_hook_runner.go +++ /dev/null @@ -1,122 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: PreWorkflowHookRunner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockPreWorkflowHookRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPreWorkflowHookRunner(options ...pegomock.Option) *MockPreWorkflowHookRunner { - mock := &MockPreWorkflowHookRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPreWorkflowHookRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockPreWorkflowHookRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPreWorkflowHookRunner) Run(ctx context.Context, preCtx models.PreWorkflowHookCommandContext, command string, path string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockPreWorkflowHookRunner().") - } - params := []pegomock.Param{ctx, preCtx, command, path} - result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockPreWorkflowHookRunner) VerifyWasCalledOnce() *VerifierMockPreWorkflowHookRunner { - return &VerifierMockPreWorkflowHookRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPreWorkflowHookRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockPreWorkflowHookRunner { - return &VerifierMockPreWorkflowHookRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPreWorkflowHookRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPreWorkflowHookRunner { - return &VerifierMockPreWorkflowHookRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPreWorkflowHookRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockPreWorkflowHookRunner { - return &VerifierMockPreWorkflowHookRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPreWorkflowHookRunner struct { - mock *MockPreWorkflowHookRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockPreWorkflowHookRunner) Run(ctx context.Context, preCtx models.PreWorkflowHookCommandContext, command string, path string) *MockPreWorkflowHookRunner_Run_OngoingVerification { - params := []pegomock.Param{ctx, preCtx, command, path} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockPreWorkflowHookRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPreWorkflowHookRunner_Run_OngoingVerification struct { - mock *MockPreWorkflowHookRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPreWorkflowHookRunner_Run_OngoingVerification) GetCapturedArguments() (context.Context, models.PreWorkflowHookCommandContext, string, string) { - ctx, preCtx, command, path := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], preCtx[len(preCtx)-1], command[len(command)-1], path[len(path)-1] -} - -func (c *MockPreWorkflowHookRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []models.PreWorkflowHookCommandContext, _param2 []string, _param3 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]models.PreWorkflowHookCommandContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PreWorkflowHookCommandContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 
= make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - } - return -} diff --git a/server/legacy/core/runtime/mocks/mock_pull_approved_checker.go b/server/legacy/core/runtime/mocks/mock_pull_approved_checker.go deleted file mode 100644 index 2e94609d3..000000000 --- a/server/legacy/core/runtime/mocks/mock_pull_approved_checker.go +++ /dev/null @@ -1,113 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: PullApprovedChecker) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockPullApprovedChecker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPullApprovedChecker(options ...pegomock.Option) *MockPullApprovedChecker { - mock := &MockPullApprovedChecker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPullApprovedChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockPullApprovedChecker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockPullApprovedChecker().") - } - params := []pegomock.Param{baseRepo, pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockPullApprovedChecker) VerifyWasCalledOnce() *VerifierMockPullApprovedChecker { - return &VerifierMockPullApprovedChecker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPullApprovedChecker) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockPullApprovedChecker { - return &VerifierMockPullApprovedChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPullApprovedChecker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPullApprovedChecker { - return &VerifierMockPullApprovedChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPullApprovedChecker) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockPullApprovedChecker { - return &VerifierMockPullApprovedChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPullApprovedChecker struct { - mock *MockPullApprovedChecker - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) *MockPullApprovedChecker_PullIsApproved_OngoingVerification { - params := []pegomock.Param{baseRepo, pull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) - return 
&MockPullApprovedChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPullApprovedChecker_PullIsApproved_OngoingVerification struct { - mock *MockPullApprovedChecker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - baseRepo, pull := c.GetAllCapturedArguments() - return baseRepo[len(baseRepo)-1], pull[len(pull)-1] -} - -func (c *MockPullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} diff --git a/server/legacy/core/runtime/mocks/mock_pull_status_checker.go b/server/legacy/core/runtime/mocks/mock_pull_status_checker.go deleted file mode 100644 index d3115e549..000000000 --- a/server/legacy/core/runtime/mocks/mock_pull_status_checker.go +++ /dev/null @@ -1,164 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/runtime (interfaces: PullStatusChecker) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockPullStatusChecker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPullStatusChecker(options ...pegomock.Option) *MockPullStatusChecker { - mock := &MockPullStatusChecker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPullStatusChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockPullStatusChecker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPullStatusChecker) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) (bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockPullStatusChecker().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockPullStatusChecker) PullIsLocked(_param0 models.Repo, _param1 models.PullRequest) (bool, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockPullStatusChecker().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsLocked", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockPullStatusChecker) VerifyWasCalledOnce() *VerifierMockPullStatusChecker { - return &VerifierMockPullStatusChecker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPullStatusChecker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockPullStatusChecker { - return &VerifierMockPullStatusChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPullStatusChecker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPullStatusChecker { - return &VerifierMockPullStatusChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPullStatusChecker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockPullStatusChecker { - return &VerifierMockPullStatusChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPullStatusChecker struct { - mock *MockPullStatusChecker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockPullStatusChecker) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) *MockPullStatusChecker_PullIsApproved_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) - return &MockPullStatusChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPullStatusChecker_PullIsApproved_OngoingVerification struct { - mock *MockPullStatusChecker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPullStatusChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockPullStatusChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockPullStatusChecker) PullIsLocked(_param0 models.Repo, _param1 models.PullRequest) *MockPullStatusChecker_PullIsLocked_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, 
verifier.invocationCountMatcher, "PullIsLocked", params, verifier.timeout) - return &MockPullStatusChecker_PullIsLocked_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPullStatusChecker_PullIsLocked_OngoingVerification struct { - mock *MockPullStatusChecker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPullStatusChecker_PullIsLocked_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockPullStatusChecker_PullIsLocked_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} diff --git a/server/legacy/core/runtime/mocks/mock_runner.go b/server/legacy/core/runtime/mocks/mock_runner.go deleted file mode 100644 index 320f6a045..000000000 --- a/server/legacy/core/runtime/mocks/mock_runner.go +++ /dev/null @@ -1,126 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: Runner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockRunner(options ...pegomock.Option) *MockRunner { - mock := &MockRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockRunner().") - } - params := []pegomock.Param{ctx, prjCtx, extraArgs, path, envs} - result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockRunner) VerifyWasCalledOnce() *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockRunner struct { - mock *MockRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) *MockRunner_Run_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, extraArgs, path, envs} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockRunner_Run_OngoingVerification struct { - mock *MockRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockRunner_Run_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, []string, string, map[string]string) { - ctx, prjCtx, extraArgs, path, envs := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], extraArgs[len(extraArgs)-1], path[len(path)-1], envs[len(envs)-1] -} - -func (c *MockRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 [][]string, _param3 []string, _param4 []map[string]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([][]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.([]string) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - _param4 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(map[string]string) - } - } - 
return -} diff --git a/server/legacy/core/runtime/mocks/mock_steps_runner.go b/server/legacy/core/runtime/mocks/mock_steps_runner.go deleted file mode 100644 index 942eb28a5..000000000 --- a/server/legacy/core/runtime/mocks/mock_steps_runner.go +++ /dev/null @@ -1,118 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: StepsRunner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockStepsRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockStepsRunner(options ...pegomock.Option) *MockStepsRunner { - mock := &MockStepsRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockStepsRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockStepsRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockStepsRunner) Run(ctx context.Context, cmdCtx command.ProjectContext, absPath string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockStepsRunner().") - } - params := []pegomock.Param{ctx, cmdCtx, absPath} - result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockStepsRunner) VerifyWasCalledOnce() *VerifierMockStepsRunner { - return &VerifierMockStepsRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockStepsRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockStepsRunner { - return &VerifierMockStepsRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockStepsRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockStepsRunner { - return &VerifierMockStepsRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockStepsRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockStepsRunner { - return &VerifierMockStepsRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockStepsRunner struct { - mock *MockStepsRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockStepsRunner) Run(ctx context.Context, cmdCtx command.ProjectContext, absPath string) *MockStepsRunner_Run_OngoingVerification { - params := []pegomock.Param{ctx, cmdCtx, absPath} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockStepsRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockStepsRunner_Run_OngoingVerification struct { - mock *MockStepsRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockStepsRunner_Run_OngoingVerification) GetCapturedArguments() (context.Context, 
command.ProjectContext, string) { - ctx, cmdCtx, absPath := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], cmdCtx[len(cmdCtx)-1], absPath[len(absPath)-1] -} - -func (c *MockStepsRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} diff --git a/server/legacy/core/runtime/mocks/mock_versionedexecutorworkflow.go b/server/legacy/core/runtime/mocks/mock_versionedexecutorworkflow.go deleted file mode 100644 index 4c436c1c9..000000000 --- a/server/legacy/core/runtime/mocks/mock_versionedexecutorworkflow.go +++ /dev/null @@ -1,182 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime (interfaces: VersionedExecutorWorkflow) - -package mocks - -import ( - context "context" - go_version "github.com/hashicorp/go-version" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - logging "github.com/runatlantis/atlantis/server/logging" - "reflect" - "time" -) - -type MockVersionedExecutorWorkflow struct { - fail func(message string, callerSkip ...int) -} - -func NewMockVersionedExecutorWorkflow(options ...pegomock.Option) *MockVersionedExecutorWorkflow { - mock := &MockVersionedExecutorWorkflow{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockVersionedExecutorWorkflow) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockVersionedExecutorWorkflow) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockVersionedExecutorWorkflow) EnsureExecutorVersion(log logging.Logger, v *go_version.Version) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockVersionedExecutorWorkflow().") - } - params := []pegomock.Param{log, v} - result := pegomock.GetGenericMockFrom(mock).Invoke("EnsureExecutorVersion", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockVersionedExecutorWorkflow) Run(ctx context.Context, prjCtx command.ProjectContext, executablePath string, envs map[string]string, workdir string, extraArgs []string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockVersionedExecutorWorkflow().") - } - params := []pegomock.Param{ctx, prjCtx, executablePath, envs, workdir, extraArgs} - result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockVersionedExecutorWorkflow) VerifyWasCalledOnce() *VerifierMockVersionedExecutorWorkflow { - return &VerifierMockVersionedExecutorWorkflow{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockVersionedExecutorWorkflow) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockVersionedExecutorWorkflow { - return &VerifierMockVersionedExecutorWorkflow{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockVersionedExecutorWorkflow) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockVersionedExecutorWorkflow { - return &VerifierMockVersionedExecutorWorkflow{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockVersionedExecutorWorkflow) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockVersionedExecutorWorkflow { - return &VerifierMockVersionedExecutorWorkflow{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockVersionedExecutorWorkflow struct { - mock *MockVersionedExecutorWorkflow - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockVersionedExecutorWorkflow) EnsureExecutorVersion(log logging.Logger, v *go_version.Version) *MockVersionedExecutorWorkflow_EnsureExecutorVersion_OngoingVerification { - params := []pegomock.Param{log, v} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "EnsureExecutorVersion", params, verifier.timeout) - return &MockVersionedExecutorWorkflow_EnsureExecutorVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockVersionedExecutorWorkflow_EnsureExecutorVersion_OngoingVerification struct { - mock *MockVersionedExecutorWorkflow - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockVersionedExecutorWorkflow_EnsureExecutorVersion_OngoingVerification) GetCapturedArguments() (logging.Logger, *go_version.Version) { - log, v := c.GetAllCapturedArguments() - return log[len(log)-1], v[len(v)-1] -} - -func (c *MockVersionedExecutorWorkflow_EnsureExecutorVersion_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []*go_version.Version) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*go_version.Version) - } - } - return -} - -func (verifier *VerifierMockVersionedExecutorWorkflow) Run(ctx context.Context, 
prjCtx command.ProjectContext, executablePath string, envs map[string]string, workdir string, extraArgs []string) *MockVersionedExecutorWorkflow_Run_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, executablePath, envs, workdir, extraArgs} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockVersionedExecutorWorkflow_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockVersionedExecutorWorkflow_Run_OngoingVerification struct { - mock *MockVersionedExecutorWorkflow - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockVersionedExecutorWorkflow_Run_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, string, map[string]string, string, []string) { - ctx, prjCtx, executablePath, envs, workdir, extraArgs := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], executablePath[len(executablePath)-1], envs[len(envs)-1], workdir[len(workdir)-1], extraArgs[len(extraArgs)-1] -} - -func (c *MockVersionedExecutorWorkflow_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string, _param3 []map[string]string, _param4 []string, _param5 [][]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(map[string]string) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) - } - _param5 = make([][]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.([]string) - } - } - return -} diff --git a/server/legacy/core/runtime/models/exec.go b/server/legacy/core/runtime/models/exec.go deleted file mode 100644 index 6950b731e..000000000 --- a/server/legacy/core/runtime/models/exec.go +++ /dev/null @@ -1,48 +0,0 @@ -package models - -import ( - "fmt" - "os" - "os/exec" - "strings" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_exec.go Exec - -type Exec interface { - LookPath(file string) (string, error) - CombinedOutput(args []string, envs map[string]string, workdir string) (string, error) -} - -type LocalExec struct{} - -func (e LocalExec) LookPath(file string) (string, error) { - return exec.LookPath(file) -} - -// CombinedOutput encapsulates creating a command and running it. We should think about -// how to flexibly add parameters here as this is meant to satisfy very simple usecases -// for more complex usecases we can add a Command function to this method which will -// allow us to edit a Cmd directly. 
-func (e LocalExec) CombinedOutput(args []string, envs map[string]string, workdir string) (string, error) { - formattedArgs := strings.Join(args, " ") - - envVars := []string{} - for key, val := range envs { - envVars = append(envVars, fmt.Sprintf("%s=%s", key, val)) - } - - // TODO: move this os.Environ call out to the server so this - // can happen once at the beginning - envVars = append(envVars, os.Environ()...) - - // honestly not entirely sure why we're using sh -c but it's used - // for the terraform binary so copying it for now - cmd := exec.Command("sh", "-c", formattedArgs) - cmd.Env = envVars - cmd.Dir = workdir - - output, err := cmd.CombinedOutput() - - return string(output), err -} diff --git a/server/legacy/core/runtime/models/filepath.go b/server/legacy/core/runtime/models/filepath.go deleted file mode 100644 index 45e40a37c..000000000 --- a/server/legacy/core/runtime/models/filepath.go +++ /dev/null @@ -1,40 +0,0 @@ -package models - -import ( - "os" - "path/filepath" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_filepath.go FilePath - -type FilePath interface { - NotExists() bool - Join(elem ...string) FilePath - Symlink(newname string) (FilePath, error) - Resolve() string -} - -type LocalFilePath string - -func (fp LocalFilePath) NotExists() bool { - _, err := os.Stat(string(fp)) - - return os.IsNotExist(err) -} - -func (fp LocalFilePath) Join(elem ...string) FilePath { - pathComponents := []string{} - - pathComponents = append(pathComponents, string(fp)) - pathComponents = append(pathComponents, elem...) - - return LocalFilePath(filepath.Join(pathComponents...)) -} - -func (fp LocalFilePath) Symlink(newname string) (FilePath, error) { - return LocalFilePath(newname), os.Symlink(fp.Resolve(), newname) -} - -func (fp LocalFilePath) Resolve() string { - return string(fp) -} diff --git a/server/legacy/core/runtime/models/mocks/matchers/map_of_string_to_string.go b/server/legacy/core/runtime/models/mocks/matchers/map_of_string_to_string.go deleted file mode 100644 index cfcc47c96..000000000 --- a/server/legacy/core/runtime/models/mocks/matchers/map_of_string_to_string.go +++ /dev/null @@ -1,19 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyMapOfStringToString() map[string]string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(map[string]string))(nil)).Elem())) - var nullValue map[string]string - return nullValue -} - -func EqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} diff --git a/server/legacy/core/runtime/models/mocks/matchers/models_filepath.go b/server/legacy/core/runtime/models/mocks/matchers/models_filepath.go deleted file mode 100644 index 28b845c19..000000000 --- a/server/legacy/core/runtime/models/mocks/matchers/models_filepath.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
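The LocalExec and LocalFilePath helpers removed above are small enough that a usage sketch makes their contracts obvious. The snippet below is hypothetical (it is not part of this diff) and assumes the deleted package were still importable at the path shown in the file headers above; it shows the args being joined into a single `sh -c` invocation and paths being composed immutably.

package main

import (
	"fmt"

	models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models"
)

func main() {
	e := models.LocalExec{}

	// Args are joined with spaces and handed to `sh -c`; the env map is
	// combined with os.Environ() before the command runs, so the shell
	// can see EXAMPLE_VAR (a made-up variable for this sketch).
	out, err := e.CombinedOutput(
		[]string{"echo", "$EXAMPLE_VAR"},
		map[string]string{"EXAMPLE_VAR": "from-atlantis"},
		"/tmp",
	)
	fmt.Println(out, err)

	// Join never mutates the receiver; Resolve returns the underlying string.
	p := models.LocalFilePath("/tmp").Join("env", "default.tfvars")
	fmt.Println(p.Resolve(), p.NotExists())
}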
-package matchers - -import ( - "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - "reflect" -) - -func AnyModelsFilePath() models.FilePath { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.FilePath))(nil)).Elem())) - var nullValue models.FilePath - return nullValue -} - -func EqModelsFilePath(value models.FilePath) models.FilePath { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.FilePath - return nullValue -} diff --git a/server/legacy/core/runtime/models/mocks/matchers/slice_of_string.go b/server/legacy/core/runtime/models/mocks/matchers/slice_of_string.go deleted file mode 100644 index 207d54d74..000000000 --- a/server/legacy/core/runtime/models/mocks/matchers/slice_of_string.go +++ /dev/null @@ -1,19 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfString() []string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) - var nullValue []string - return nullValue -} - -func EqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []string - return nullValue -} diff --git a/server/legacy/core/runtime/models/mocks/mock_exec.go b/server/legacy/core/runtime/models/mocks/mock_exec.go deleted file mode 100644 index 1df47bc9c..000000000 --- a/server/legacy/core/runtime/models/mocks/mock_exec.go +++ /dev/null @@ -1,162 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime/models (interfaces: Exec) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockExec struct { - fail func(message string, callerSkip ...int) -} - -func NewMockExec(options ...pegomock.Option) *MockExec { - mock := &MockExec{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockExec) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockExec) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockExec) LookPath(file string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockExec().") - } - params := []pegomock.Param{file} - result := pegomock.GetGenericMockFrom(mock).Invoke("LookPath", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockExec) CombinedOutput(args []string, envs map[string]string, workdir string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockExec().") - } - params := []pegomock.Param{args, envs, workdir} - result := pegomock.GetGenericMockFrom(mock).Invoke("CombinedOutput", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockExec) VerifyWasCalledOnce() *VerifierMockExec { - return &VerifierMockExec{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockExec) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockExec { - return &VerifierMockExec{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockExec) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockExec { - return &VerifierMockExec{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockExec) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockExec { - return &VerifierMockExec{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockExec struct { - mock *MockExec - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockExec) LookPath(file string) *MockExec_LookPath_OngoingVerification { - params := []pegomock.Param{file} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "LookPath", params, verifier.timeout) - return &MockExec_LookPath_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockExec_LookPath_OngoingVerification struct { - mock *MockExec - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockExec_LookPath_OngoingVerification) GetCapturedArguments() string { - file := c.GetAllCapturedArguments() - return file[len(file)-1] -} - -func (c *MockExec_LookPath_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockExec) CombinedOutput(args []string, envs map[string]string, workdir string) *MockExec_CombinedOutput_OngoingVerification { - params := []pegomock.Param{args, envs, workdir} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CombinedOutput", params, verifier.timeout) - return &MockExec_CombinedOutput_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockExec_CombinedOutput_OngoingVerification struct { - mock *MockExec - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockExec_CombinedOutput_OngoingVerification) GetCapturedArguments() ([]string, map[string]string, string) { - args, envs, workdir := c.GetAllCapturedArguments() - return args[len(args)-1], envs[len(envs)-1], workdir[len(workdir)-1] -} - -func (c *MockExec_CombinedOutput_OngoingVerification) GetAllCapturedArguments() (_param0 [][]string, _param1 
[]map[string]string, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.([]string) - } - _param1 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(map[string]string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} diff --git a/server/legacy/core/runtime/models/mocks/mock_filepath.go b/server/legacy/core/runtime/models/mocks/mock_filepath.go deleted file mode 100644 index 27be34e72..000000000 --- a/server/legacy/core/runtime/models/mocks/mock_filepath.go +++ /dev/null @@ -1,226 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime/models (interfaces: FilePath) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - "reflect" - "time" -) - -type MockFilePath struct { - fail func(message string, callerSkip ...int) -} - -func NewMockFilePath(options ...pegomock.Option) *MockFilePath { - mock := &MockFilePath{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockFilePath) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockFilePath) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockFilePath) NotExists() bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockFilePath().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("NotExists", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockFilePath) Join(elem ...string) models.FilePath { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockFilePath().") - } - params := []pegomock.Param{} - for _, param := range elem { - params = append(params, param) - } - result := pegomock.GetGenericMockFrom(mock).Invoke("Join", params, []reflect.Type{reflect.TypeOf((*models.FilePath)(nil)).Elem()}) - var ret0 models.FilePath - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.FilePath) - } - } - return ret0 -} - -func (mock *MockFilePath) Symlink(newname string) (models.FilePath, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockFilePath().") - } - params := []pegomock.Param{newname} - result := pegomock.GetGenericMockFrom(mock).Invoke("Symlink", params, []reflect.Type{reflect.TypeOf((*models.FilePath)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.FilePath - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.FilePath) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockFilePath) Resolve() string { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockFilePath().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("Resolve", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockFilePath) VerifyWasCalledOnce() *VerifierMockFilePath { - return &VerifierMockFilePath{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockFilePath) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockFilePath { - return &VerifierMockFilePath{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockFilePath) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockFilePath { - return &VerifierMockFilePath{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockFilePath) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockFilePath { - return &VerifierMockFilePath{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockFilePath struct { - mock *MockFilePath - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockFilePath) NotExists() *MockFilePath_NotExists_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "NotExists", params, verifier.timeout) - return &MockFilePath_NotExists_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockFilePath_NotExists_OngoingVerification struct { - mock *MockFilePath - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockFilePath_NotExists_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockFilePath_NotExists_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockFilePath) Join(elem ...string) *MockFilePath_Join_OngoingVerification { - params := []pegomock.Param{} - for _, param := range elem { - params = append(params, param) - } - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Join", params, verifier.timeout) - return &MockFilePath_Join_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockFilePath_Join_OngoingVerification struct { - mock *MockFilePath - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockFilePath_Join_OngoingVerification) GetCapturedArguments() []string { - elem := c.GetAllCapturedArguments() - return elem[len(elem)-1] -} - -func (c *MockFilePath_Join_OngoingVerification) GetAllCapturedArguments() (_param0 [][]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]string, len(c.methodInvocations)) - for u := 0; u < len(c.methodInvocations); u++ { - _param0[u] = make([]string, len(params)-0) - for x := 0; x < len(params); x++ { - if params[x][u] != nil { - _param0[u][x-0] = params[x][u].(string) - } - } - } - } - return -} - -func (verifier *VerifierMockFilePath) Symlink(newname string) *MockFilePath_Symlink_OngoingVerification { - params := []pegomock.Param{newname} - 
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Symlink", params, verifier.timeout) - return &MockFilePath_Symlink_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockFilePath_Symlink_OngoingVerification struct { - mock *MockFilePath - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockFilePath_Symlink_OngoingVerification) GetCapturedArguments() string { - newname := c.GetAllCapturedArguments() - return newname[len(newname)-1] -} - -func (c *MockFilePath_Symlink_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockFilePath) Resolve() *MockFilePath_Resolve_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Resolve", params, verifier.timeout) - return &MockFilePath_Resolve_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockFilePath_Resolve_OngoingVerification struct { - mock *MockFilePath - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockFilePath_Resolve_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockFilePath_Resolve_OngoingVerification) GetAllCapturedArguments() { -} diff --git a/server/legacy/core/runtime/plan_step_runner.go b/server/legacy/core/runtime/plan_step_runner.go deleted file mode 100644 index a7d080ad6..000000000 --- a/server/legacy/core/runtime/plan_step_runner.go +++ /dev/null @@ -1,342 +0,0 @@ -package runtime - -import ( - "context" - "fmt" - "os" - "path/filepath" - "regexp" - "strings" - - version "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -const ( - defaultWorkspace = "default" - refreshKeyword = "Refreshing state..." - refreshSeparator = "------------------------------------------------------------------------\n" -) - -var ( - plusDiffRegex = regexp.MustCompile(`(?m)^ {2}\+`) - tildeDiffRegex = regexp.MustCompile(`(?m)^ {2}~`) - minusDiffRegex = regexp.MustCompile(`(?m)^ {2}-`) -) - -type PlanStepRunner struct { - TerraformExecutor TerraformExec - DefaultTFVersion *version.Version - VCSStatusUpdater StatusUpdater - AsyncTFExec AsyncTFExec -} - -func (p *PlanStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - tfVersion := p.DefaultTFVersion - if prjCtx.TerraformVersion != nil { - tfVersion = prjCtx.TerraformVersion - } - - // We only need to switch workspaces in version 0.9.*. In older versions, - // there is no such thing as a workspace so we don't need to do anything. 
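The three diff regexes defined at the top of plan_step_runner.go above are applied by fmtPlanOutput later in this file so the plan body renders with diff highlighting in the PR comment. A standalone sketch of that transformation (the first two resource names are made up; the last one comes from the fmtPlanOutput comment):

package main

import (
	"fmt"
	"regexp"
)

// Same patterns as the plan step runner: strip the two leading spaces in
// front of +, ~ and - so syntax highlighters treat the output as a diff.
var (
	plusDiffRegex  = regexp.MustCompile(`(?m)^ {2}\+`)
	tildeDiffRegex = regexp.MustCompile(`(?m)^ {2}~`)
	minusDiffRegex = regexp.MustCompile(`(?m)^ {2}-`)
)

func main() {
	in := "  + aws_s3_bucket.logs\n  ~ aws_iam_role.ci\n  - aws_security_group_rule.allow_all\n"
	out := plusDiffRegex.ReplaceAllString(in, "+")
	out = tildeDiffRegex.ReplaceAllString(out, "~")
	out = minusDiffRegex.ReplaceAllString(out, "-")
	fmt.Print(out)
}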
- if err := p.switchWorkspace(ctx, prjCtx, path, tfVersion, envs); err != nil { - return "", err - } - - planFile := filepath.Join(path, GetPlanFilename(prjCtx.Workspace, prjCtx.ProjectName)) - planCmd := p.buildPlanCmd(prjCtx, extraArgs, path, tfVersion, planFile) - output, err := p.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, filepath.Clean(path), planCmd, envs, tfVersion, prjCtx.Workspace) - if p.isRemoteOpsErrorf(output, err) { - return p.remotePlan(ctx, prjCtx, extraArgs, path, tfVersion, planFile, envs) - } - if err != nil { - return output, err - } - return p.fmtPlanOutput(output, tfVersion), nil -} - -// isRemoteOpsErr returns true if there was an error caused due to this -// project using TFE remote operations. -func (p *PlanStepRunner) isRemoteOpsErrorf(output string, err error) bool { - if err == nil { - return false - } - return strings.Contains(output, remoteOpsErr01114) || strings.Contains(output, remoteOpsErr012) || strings.Contains(output, remoteOpsErr100) -} - -// remotePlan runs a terraform plan command compatible with TFE remote -// operations. -func (p *PlanStepRunner) remotePlan(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, tfVersion *version.Version, planFile string, envs map[string]string) (string, error) { - argList := [][]string{ - {"plan", "-input=false", "-refresh"}, - extraArgs, - prjCtx.EscapedCommentArgs, - } - args := p.flatten(argList) - output, err := p.runRemotePlan(ctx, prjCtx, args, path, tfVersion, envs) - if err != nil { - return output, err - } - - // If using remote ops, we create our own "fake" planfile with the - // text output of the plan. We do this for two reasons: - // 1) Atlantis relies on there being a planfile on disk to detect which - // projects have outstanding plans. - // 2) Remote ops don't support the -out parameter so we can't save the - // plan. To ensure that what gets applied is the plan we printed to the PR, - // during the apply phase, we diff the output we stored in the fake - // planfile with the pending apply output. - planOutput := StripRefreshingFromPlanOutput(output, tfVersion) - - // We also prepend our own remote ops header to the file so during apply we - // know this is a remote apply. - err = os.WriteFile(planFile, []byte(remoteOpsHeader+planOutput), 0600) - if err != nil { - return output, errors.Wrap(err, "unable to create planfile for remote ops") - } - - return p.fmtPlanOutput(output, tfVersion), nil -} - -// switchWorkspace changes the terraform workspace if necessary and will create -// it if it doesn't exist. It handles differences between versions. -func (p *PlanStepRunner) switchWorkspace(ctx context.Context, prjCtx command.ProjectContext, path string, tfVersion *version.Version, envs map[string]string) error { - // In versions less than 0.9 there is no support for workspaces. - noWorkspaceSupport := MustConstraint("<0.9").Check(tfVersion) - // If the user tried to set a specific workspace in the comment but their - // version of TF doesn't support workspaces then error out. - if noWorkspaceSupport && prjCtx.Workspace != defaultWorkspace { - return fmt.Errorf("terraform version %s does not support workspaces", tfVersion) - } - if noWorkspaceSupport { - return nil - } - - // In version 0.9.* the workspace command was called env. - workspaceCmd := "workspace" - runningZeroPointNine := MustConstraint(">=0.9,<0.10").Check(tfVersion) - if runningZeroPointNine { - workspaceCmd = "env" - } - - // Use `workspace show` to find out what workspace we're in now. 
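switchWorkspace above keys both the feature check and the command name off the Terraform version. A self-contained sketch of that gating, using hashicorp/go-version directly instead of the package's MustConstraint helper, matches the behaviour asserted in plan_step_runner_test.go later in this diff:

package main

import (
	"fmt"

	version "github.com/hashicorp/go-version"
)

// workspaceCommand mirrors the gating in switchWorkspace: workspaces do not
// exist before 0.9, are driven by `terraform env` on 0.9.x, and by
// `terraform workspace` from 0.10 onward.
func workspaceCommand(tf *version.Version) (string, error) {
	if version.MustConstraints(version.NewConstraint("<0.9")).Check(tf) {
		return "", fmt.Errorf("terraform version %s does not support workspaces", tf)
	}
	if version.MustConstraints(version.NewConstraint(">=0.9,<0.10")).Check(tf) {
		return "env", nil
	}
	return "workspace", nil
}

func main() {
	for _, s := range []string{"0.8.0", "0.9.11", "0.11.0"} {
		cmd, err := workspaceCommand(version.Must(version.NewVersion(s)))
		fmt.Printf("%s -> %q %v\n", s, cmd, err)
	}
}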
If we're - // already in the right workspace then no need to switch. This will save us - // about ten seconds. This command is only available in > 0.10. - if !runningZeroPointNine { - workspaceShowOutput, err := p.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, path, []string{workspaceCmd, "show"}, envs, tfVersion, prjCtx.Workspace) - if err != nil { - return err - } - // If `show` says we're already on this workspace then we're done. - if strings.Contains(workspaceShowOutput, prjCtx.Workspace) { - return nil - } - } - - // Finally we'll have to select the workspace. We need to figure out if this - // workspace exists so we can create it if it doesn't. - // To do this we can either select and catch the error or use list and then - // look for the workspace. Both commands take the same amount of time so - // that's why we're running select here. - _, err := p.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, path, []string{workspaceCmd, "select", prjCtx.Workspace}, envs, tfVersion, prjCtx.Workspace) - if err != nil { - // If terraform workspace select fails we run terraform workspace - // new to create a new workspace automatically. - out, err := p.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, path, []string{workspaceCmd, "new", prjCtx.Workspace}, envs, tfVersion, prjCtx.Workspace) - if err != nil { - return fmt.Errorf("%s: %s", err, out) - } - } - return nil -} - -func (p *PlanStepRunner) buildPlanCmd(prjCtx command.ProjectContext, extraArgs []string, path string, tfVersion *version.Version, planFile string) []string { - tfVars := p.tfVars(prjCtx, tfVersion) - - // Check if env/{workspace}.tfvars exist and include it. This is a use-case - // from Hootsuite where Atlantis was first created so we're keeping this as - // an homage and a favor so they don't need to refactor all their repos. - // It's also a nice way to structure your repos to reduce duplication. - var envFileArgs []string - envFile := filepath.Join(path, "env", prjCtx.Workspace+".tfvars") - if _, err := os.Stat(envFile); err == nil { - envFileArgs = []string{"-var-file", envFile} - } - - argList := [][]string{ - // NOTE: we need to quote the plan filename because Bitbucket Server can - // have spaces in its repo owner names. - {"plan", "-input=false", "-refresh", "-out", fmt.Sprintf("%q", planFile)}, - tfVars, - extraArgs, - prjCtx.EscapedCommentArgs, - envFileArgs, - } - - return p.flatten(argList) -} - -// tfVars returns a list of "-var", "key=value" pairs that identify who and which -// repo this command is running for. This can be used for naming the -// session name in AWS which will identify in CloudTrail the source of -// Atlantis API calls. -// If using Terraform >= 0.12 we don't set any of these variables because -// those versions don't allow setting -var flags for any variables that aren't -// actually used in the configuration. Since there's no way for us to detect -// if the configuration is using those variables, we don't set them. -func (p *PlanStepRunner) tfVars(prjCtx command.ProjectContext, tfVersion *version.Version) []string { - if tfVersion.GreaterThanOrEqual(version.Must(version.NewVersion("0.12.0"))) { - return nil - } - - // NOTE: not using maps and looping here because we need to keep the - // ordering for testing purposes. - // NOTE: quoting the values because in Bitbucket the owner can have - // spaces, ex -var atlantis_repo_owner="bitbucket owner". 
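The env/<workspace>.tfvars convention handled in buildPlanCmd above boils down to a simple existence check. A minimal, hypothetical sketch of the same logic outside the runner:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// envFileArgs mirrors buildPlanCmd's optional -var-file handling: if
// env/<workspace>.tfvars exists under the project path, it is appended
// to the plan arguments; otherwise nothing is added.
func envFileArgs(path, workspace string) []string {
	envFile := filepath.Join(path, "env", workspace+".tfvars")
	if _, err := os.Stat(envFile); err == nil {
		return []string{"-var-file", envFile}
	}
	return nil
}

func main() {
	fmt.Println(envFileArgs(".", "staging"))
}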
- return []string{ - "-var", - fmt.Sprintf("%s=%q", "atlantis_user", prjCtx.User.Username), - "-var", - fmt.Sprintf("%s=%q", "atlantis_repo", prjCtx.BaseRepo.FullName), - "-var", - fmt.Sprintf("%s=%q", "atlantis_repo_name", prjCtx.BaseRepo.Name), - "-var", - fmt.Sprintf("%s=%q", "atlantis_repo_owner", prjCtx.BaseRepo.Owner), - "-var", - fmt.Sprintf("%s=%d", "atlantis_pull_num", prjCtx.Pull.Num), - } -} - -func (p *PlanStepRunner) flatten(slices [][]string) []string { - var flattened []string - for _, v := range slices { - flattened = append(flattened, v...) - } - return flattened -} - -// fmtPlanOutput uses regex's to remove any leading whitespace in front of the -// terraform output so that the diff syntax highlighting works. Example: -// " - aws_security_group_rule.allow_all" => -// "- aws_security_group_rule.allow_all" -// We do it for +, ~ and -. -// It also removes the "Refreshing..." preamble. -func (p *PlanStepRunner) fmtPlanOutput(output string, tfVersion *version.Version) string { - output = StripRefreshingFromPlanOutput(output, tfVersion) - output = plusDiffRegex.ReplaceAllString(output, "+") - output = tildeDiffRegex.ReplaceAllString(output, "~") - return minusDiffRegex.ReplaceAllString(output, "-") -} - -// runRemotePlan runs a terraform command that utilizes the remote operations -// backend. It watches the command output for the run url to be printed, and -// then updates the commit status with a link to the run url. -// The run url is a link to the Terraform Enterprise UI where the output -// from the in-progress command can be viewed. -// cmdArgs is the args to terraform to execute. -// path is the path to where we need to execute. -func (p *PlanStepRunner) runRemotePlan( - ctx context.Context, - prjCtx command.ProjectContext, - cmdArgs []string, - path string, - tfVersion *version.Version, - envs map[string]string) (string, error) { - // updateStatusF will update the commit status and log any error. - updateStatusF := func(status models.VCSStatus, url string, statusID string) { - if _, err := p.VCSStatusUpdater.UpdateProject(ctx, prjCtx, command.Plan, status, url, statusID); err != nil { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, fmt.Sprintf("unable to update status: %s", err)) - } - } - - // Start the async command execution. - outCh := p.AsyncTFExec.RunCommandAsync(ctx, prjCtx, filepath.Clean(path), cmdArgs, envs, tfVersion, prjCtx.Workspace) - var lines []string - nextLineIsRunURL := false - var runURL string - var err error - - for line := range outCh { - if line.Err != nil { - err = line.Err - break - } - lines = append(lines, line.Line) - - // Here we're checking for the run url and updating the status - // if found. - if line.Line == lineBeforeRunURL { - nextLineIsRunURL = true - } else if nextLineIsRunURL { - runURL = strings.TrimSpace(line.Line) - updateStatusF(models.PendingVCSStatus, runURL, prjCtx.StatusID) - nextLineIsRunURL = false - } - } - - output := strings.Join(lines, "\n") - if err != nil { - updateStatusF(models.FailedVCSStatus, runURL, prjCtx.StatusID) - } else { - updateStatusF(models.SuccessVCSStatus, runURL, prjCtx.StatusID) - } - return output, err -} - -func StripRefreshingFromPlanOutput(output string, tfVersion *version.Version) string { - if tfVersion.GreaterThanOrEqual(version.Must(version.NewVersion("0.14.0"))) { - // Plan output contains a lot of "Refreshing..." 
lines, remove it - lines := strings.Split(output, "\n") - finalIndex := 0 - for i, line := range lines { - if strings.Contains(line, refreshKeyword) { - finalIndex = i - } - } - - if finalIndex != 0 { - output = strings.Join(lines[finalIndex+1:], "\n") - } - } else { - // Plan output contains a lot of "Refreshing..." lines followed by a - // separator. We want to remove everything before that separator. - sepIdx := strings.Index(output, refreshSeparator) - if sepIdx > -1 { - output = output[sepIdx+len(refreshSeparator):] - } - } - return output -} - -// remoteOpsErr01114 is the error terraform plan will return if this project is -// using TFE remote operations in TF 0.11.15. -var remoteOpsErr01114 = `Error: Saving a generated plan is currently not supported! - -The "remote" backend does not support saving the generated execution -plan locally at this time. - -` - -// remoteOpsErr012 is the error terraform plan will return if this project is -// using TFE remote operations in TF 0.12.{0-4}. Later versions haven't been -// released yet at this time. -var remoteOpsErr012 = `Error: Saving a generated plan is currently not supported - -The "remote" backend does not support saving the generated execution plan -locally at this time. - -` - -// remoteOpsErr100 is the error terraform plan will retrun if this project is -// using TFE remote operations in TF 1.0.{0,1}. -var remoteOpsErr100 = `Error: Saving a generated plan is currently not supported - -The "remote" backend does not support saving the generated execution plan -locally at this time. -` - -// remoteOpsHeader is the header we add to the planfile if this plan was -// generated using TFE remote operations. -var remoteOpsHeader = "Atlantis: this plan was created by remote ops\n" diff --git a/server/legacy/core/runtime/plan_step_runner_test.go b/server/legacy/core/runtime/plan_step_runner_test.go deleted file mode 100644 index 0a9d717ae..000000000 --- a/server/legacy/core/runtime/plan_step_runner_test.go +++ /dev/null @@ -1,1012 +0,0 @@ -package runtime_test - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/events/command" - mocks2 "github.com/runatlantis/atlantis/server/legacy/events/mocks" - - . "github.com/petergtz/pegomock" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - matchers2 "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks/matchers" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - - . "github.com/runatlantis/atlantis/testing" -) - -func TestRun_NoWorkspaceIn08(t *testing.T) { - // We don't want any workspace commands to be run in 0.8. 
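The three remoteOpsErr variables above share one sentence, and that text is what Run's isRemoteOpsErrorf check earlier in this file looks for before falling back to a remote plan. A simplified, hypothetical stand-in for that check, matching on the shared substring rather than the three full messages:

package main

import (
	"errors"
	"fmt"
	"strings"
)

// All three version-specific TFE messages contain this sentence fragment.
const remoteOpsMarker = "Saving a generated plan is currently not supported"

func isRemoteOpsError(output string, err error) bool {
	// Only a failed plan whose output mentions the marker is treated as a
	// remote-ops plan that should be retried without -out.
	return err != nil && strings.Contains(output, remoteOpsMarker)
}

func main() {
	out := "Error: Saving a generated plan is currently not supported\n\n" +
		"The \"remote\" backend does not support saving the generated execution plan\n" +
		"locally at this time.\n"
	fmt.Println(isRemoteOpsError(out, errors.New("exit status 1"))) // true
	fmt.Println(isRemoteOpsError(out, nil))                         // false: success is never remote-ops
}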
- RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - - tfVersion, _ := version.NewVersion("0.8") - - workspace := "default" - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - EscapedCommentArgs: []string{"comment", "args"}, - Workspace: workspace, - RepoRelDir: ".", - User: models.User{Username: "username"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - s := runtime.PlanStepRunner{ - DefaultTFVersion: tfVersion, - TerraformExecutor: terraform, - } - - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - - Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, - "/path", - []string{"plan", - "-input=false", - "-refresh", - "-out", - "\"/path/default.tfplan\"", - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args"}, - map[string]string(nil), - tfVersion, - workspace) - - // Verify that no env or workspace commands were run - terraform.VerifyWasCalled(Never()).RunCommandWithVersion( - ctx, - prjCtx, - "/path", - []string{"env", - "select", - "workspace"}, - map[string]string(nil), - tfVersion, - workspace) - terraform.VerifyWasCalled(Never()).RunCommandWithVersion( - ctx, - prjCtx, - "/path", - []string{"workspace", - "select", - "workspace"}, - map[string]string(nil), - tfVersion, - workspace) -} - -func TestRun_ErrWorkspaceIn08(t *testing.T) { - // If they attempt to use a workspace other than default in 0.8 - // we should error. - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - - ctx := context.Background() - tfVersion, _ := version.NewVersion("0.8") - logger := logging.NewNoopCtxLogger(t) - workspace := "notdefault" - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). 
- ThenReturn("output", nil) - _, err := s.Run( - ctx, - command.ProjectContext{ - Log: logger, - Workspace: workspace, - RepoRelDir: ".", - User: models.User{Username: "username"}, - }, []string{"extra", "args"}, "/path", map[string]string(nil)) - ErrEquals(t, "terraform version 0.8.0 does not support workspaces", err) -} - -func TestRun_SwitchesWorkspace(t *testing.T) { - RegisterMockTestingT(t) - - cases := []struct { - tfVersion string - expWorkspaceCmd string - }{ - { - "0.9.0", - "env", - }, - { - "0.9.11", - "env", - }, - { - "0.10.0", - "workspace", - }, - { - "0.11.0", - "workspace", - }, - } - - for _, c := range cases { - t.Run(c.tfVersion, func(t *testing.T) { - terraform := mocks.NewMockClient() - - tfVersion, _ := version.NewVersion(c.tfVersion) - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - - When(terraform.RunCommandWithVersion(matchers.AnyContextContext(), matchers.AnyModelsProjectCommandContext(), AnyString(), AnyStringSlice(), matchers2.AnyMapOfStringToString(), matchers2.AnyPtrToGoVersionVersion(), AnyString())). - ThenReturn("output", nil) - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - - Equals(t, "output", output) - // Verify that env select was called as well as plan. - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, - "/path", - []string{c.expWorkspaceCmd, - "select", - "workspace"}, - map[string]string(nil), - tfVersion, - "workspace") - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, - "/path", - []string{"plan", - "-input=false", - "-refresh", - "-out", - "\"/path/workspace.tfplan\"", - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args"}, - map[string]string(nil), - tfVersion, - "workspace") - }) - } -} - -func TestRun_CreatesWorkspace(t *testing.T) { - // Test that if `workspace select` fails, we call `workspace new`. - RegisterMockTestingT(t) - - cases := []struct { - tfVersion string - expWorkspaceCommand string - }{ - { - "0.9.0", - "env", - }, - { - "0.9.11", - "env", - }, - { - "0.10.0", - "workspace", - }, - { - "0.11.0", - "workspace", - }, - } - - for _, c := range cases { - t.Run(c.tfVersion, func(t *testing.T) { - terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion(c.tfVersion) - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - - // Ensure that we actually try to switch workspaces by making the - // output of `workspace show` to be a different name. 
- When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("diffworkzpace\n", nil) - - expWorkspaceArgs := []string{c.expWorkspaceCommand, "select", "workspace"} - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", expWorkspaceArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("", errors.New("workspace does not exist")) - - expPlanArgs := []string{"plan", - "-input=false", - "-refresh", - "-out", - "\"/path/workspace.tfplan\"", - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args"} - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - - Equals(t, "output", output) - // Verify that env select was called as well as plan. - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, "/path", expWorkspaceArgs, map[string]string(nil), tfVersion, "workspace") - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace") - }) - } -} - -func TestRun_NoWorkspaceSwitchIfNotNecessary(t *testing.T) { - // Tests that if workspace show says we're on the right workspace we don't - // switch. - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion("0.10.0") - logger := logging.NewNoopCtxLogger(t) - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("workspace\n", nil) - - expPlanArgs := []string{"plan", - "-input=false", - "-refresh", - "-out", - "\"/path/workspace.tfplan\"", - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args"} - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - - Equals(t, "output", output) - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "workspace") - - // Verify that workspace select was never called. 
- terraform.VerifyWasCalled(Never()).RunCommandWithVersion( - ctx, - prjCtx, "/path", []string{"workspace", "select", "workspace"}, map[string]string(nil), tfVersion, "workspace") -} - -func TestRun_AddsEnvVarFile(t *testing.T) { - // Test that if env/workspace.tfvars file exists we use -var-file option. - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - - // Create the env/workspace.tfvars file. - tmpDir, cleanup := TempDir(t) - defer cleanup() - err := os.MkdirAll(filepath.Join(tmpDir, "env"), 0700) - Ok(t, err) - envVarsFile := filepath.Join(tmpDir, "env/workspace.tfvars") - err = os.WriteFile(envVarsFile, nil, 0600) - Ok(t, err) - - // Using version >= 0.10 here so we don't expect any env commands. - tfVersion, _ := version.NewVersion("0.10.0") - logger := logging.NewNoopCtxLogger(t) - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - ctx := context.Background() - expPlanArgs := []string{"plan", - "-input=false", - "-refresh", - "-out", - fmt.Sprintf("%q", filepath.Join(tmpDir, "workspace.tfplan")), - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args", - "-var-file", - envVarsFile, - } - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, tmpDir, expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - - // Verify that env select was never called since we're in version >= 0.10 - terraform.VerifyWasCalled(Never()).RunCommandWithVersion( - ctx, - prjCtx, tmpDir, []string{"env", "select", "workspace"}, map[string]string(nil), tfVersion, "workspace") - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, tmpDir, expPlanArgs, map[string]string(nil), tfVersion, "workspace") - Equals(t, "output", output) -} - -func TestRun_UsesDiffPathForProject(t *testing.T) { - // Test that if running for a project, uses a different path for the plan - // file. 
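// A small sketch of the plan-file naming this test pins down, consistent with
// the "default.tfplan", "workspace.tfplan", and "projectname-default.tfplan"
// paths expected throughout this file (the deleted code derives the same value
// via GetPlanFilename; this helper name is illustrative):
func planFilename(workspace, project string) string {
	if project == "" {
		return workspace + ".tfplan"
	}
	return project + "-" + workspace + ".tfplan"
}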
- RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion("0.10.0") - logger := logging.NewNoopCtxLogger(t) - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "default", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - ProjectName: "projectname", - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", []string{"workspace", "show"}, map[string]string(nil), tfVersion, "workspace")).ThenReturn("workspace\n", nil) - - expPlanArgs := []string{"plan", - "-input=false", - "-refresh", - "-out", - "\"/path/projectname-default.tfplan\"", - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args", - } - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "default")).ThenReturn("output", nil) - - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - Equals(t, "output", output) -} - -// Test that we format the plan output for better rendering. -func TestRun_PlanFmt(t *testing.T) { - rawOutput := `Refreshing Terraform state in-memory prior to plan... -The refreshed state will be used to calculate this plan, but will not be -persisted to local or remote state storage. - - ------------------------------------------------------------------------- - -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - + create - ~ update in-place - - destroy - -Terraform will perform the following actions: - -+ null_resource.test[0] - id: - - + null_resource.test[1] - id: - - ~ aws_security_group_rule.allow_all - description: "" => "test3" - - - aws_security_group_rule.allow_all -` - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion("0.10.0") - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - When(terraform.RunCommandWithVersion( - matchers.AnyContextContext(), - matchers.AnyModelsProjectCommandContext(), - AnyString(), - AnyStringSlice(), - matchers2.AnyMapOfStringToString(), - matchers2.AnyPtrToGoVersionVersion(), - AnyString())). - Then(func(params []Param) ReturnValues { - // This code allows us to return different values depending on the - // tf command being run while still using the wildcard matchers above. - tfArgs := params[3].([]string) - if stringSliceEquals(tfArgs, []string{"workspace", "show"}) { - return []ReturnValue{"default", nil} - } else if tfArgs[0] == "plan" { - return []ReturnValue{rawOutput, nil} - } else { - return []ReturnValue{"", errors.New("unexpected call to RunCommandWithVersion")} - } - }) - ctx := context.Background() - actOutput, err := s.Run( - ctx, - command.ProjectContext{Workspace: "default"}, nil, "", map[string]string(nil)) - Ok(t, err) - Equals(t, ` -An execution plan has been generated and is shown below. 
-Resource actions are indicated with the following symbols: -+ create -~ update in-place -- destroy - -Terraform will perform the following actions: - -+ null_resource.test[0] - id: - -+ null_resource.test[1] - id: - -~ aws_security_group_rule.allow_all - description: "" => "test3" - -- aws_security_group_rule.allow_all -`, actOutput) -} - -// Test that even if there's an error, we get the returned output. -func TestRun_OutputOnErrorf(t *testing.T) { - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion("0.10.0") - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - expOutput := "expected output" - expErrMsg := "error!" - When(terraform.RunCommandWithVersion( - matchers.AnyContextContext(), - matchers.AnyModelsProjectCommandContext(), - AnyString(), - AnyStringSlice(), - matchers2.AnyMapOfStringToString(), - matchers2.AnyPtrToGoVersionVersion(), - AnyString())). - Then(func(params []Param) ReturnValues { - // This code allows us to return different values depending on the - // tf command being run while still using the wildcard matchers above. - tfArgs := params[3].([]string) - if stringSliceEquals(tfArgs, []string{"workspace", "show"}) { - return []ReturnValue{"default\n", nil} - } else if tfArgs[0] == "plan" { - return []ReturnValue{expOutput, errors.New(expErrMsg)} - } else { - return []ReturnValue{"", errors.New("unexpected call to RunCommandWithVersion")} - } - }) - ctx := context.Background() - actOutput, actErr := s.Run( - ctx, - command.ProjectContext{Workspace: "default"}, nil, "", map[string]string(nil)) - ErrEquals(t, expErrMsg, actErr) - Equals(t, expOutput, actOutput) -} - -// Test that if we're using 0.12, we don't set the optional -var atlantis_repo_name -// flags because in >= 0.12 you can't set -var flags if those variables aren't -// being used. -func TestRun_NoOptionalVarsIn012(t *testing.T) { - RegisterMockTestingT(t) - - expPlanArgs := []string{ - "plan", - "-input=false", - "-refresh", - "-out", - fmt.Sprintf("%q", "/path/default.tfplan"), - "extra", - "args", - "comment", - "args", - } - - cases := []struct { - name string - tfVersion string - }{ - { - "stable version", - "0.12.0", - }, - { - "with prerelease", - "0.14.0-rc1", - }, - } - - for _, c := range cases { - t.Run(c.name, func(t *testing.T) { - terraform := mocks.NewMockClient() - When(terraform.RunCommandWithVersion( - matchers.AnyContextContext(), - matchers.AnyModelsProjectCommandContext(), - AnyString(), - AnyStringSlice(), - matchers2.AnyMapOfStringToString(), - matchers2.AnyPtrToGoVersionVersion(), - AnyString())).ThenReturn("output", nil) - - tfVersion, _ := version.NewVersion(c.tfVersion) - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "default", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, "/path", map[string]string(nil)) - Ok(t, err) - Equals(t, "output", output) - - terraform.VerifyWasCalledOnce().RunCommandWithVersion( - ctx, - prjCtx, "/path", expPlanArgs, map[string]string(nil), tfVersion, "default") - }) - } -} - -// Test plans if using remote ops. 
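// The remote-ops flow asserted below: `plan -out` fails with one of the
// remoteOpsErr messages defined earlier, the plan is re-run asynchronously
// without -out, the streamed output has its refresh noise stripped, and a
// stand-in planfile prefixed with the remote-ops header is written so later
// steps can recognise it. A hedged sketch, with illustrative helper names and
// an assumed file mode:
func isRemoteOpsErr(planOutput string) bool {
	return strings.Contains(planOutput, "Saving a generated plan is currently not supported")
}

func writeRemotePlanfile(path, strippedOutput string) error {
	return os.WriteFile(path, []byte("Atlantis: this plan was created by remote ops\n"+strippedOutput), 0600)
}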
-func TestRun_RemoteOps(t *testing.T) { - cases := map[string]string{ - "0.11.15 error": `Error: Saving a generated plan is currently not supported! - -The "remote" backend does not support saving the generated execution -plan locally at this time. - -`, - "0.12.* error": `Error: Saving a generated plan is currently not supported - -The "remote" backend does not support saving the generated execution plan -locally at this time. - -`, - } - for name, remoteOpsErr := range cases { - t.Run(name, func(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - // Now that mocking is set up, we're ready to run the plan. - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "default", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - - tfVersion, _ := version.NewVersion("0.11.12") - updater := mocks2.NewMockVCSStatusUpdater() - asyncTf := &remotePlanMock{} - s := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - AsyncTFExec: asyncTf, - VCSStatusUpdater: updater, - } - absProjectPath, cleanup := TempDir(t) - defer cleanup() - - // First, terraform workspace gets run. - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, - absProjectPath, - []string{"workspace", "show"}, - map[string]string(nil), - tfVersion, - "default")).ThenReturn("default\n", nil) - - // Then the first call to terraform plan should return the remote ops error. - expPlanArgs := []string{"plan", - "-input=false", - "-refresh", - "-out", - fmt.Sprintf("%q", filepath.Join(absProjectPath, "default.tfplan")), - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args", - } - - planErr := errors.New("exit status 1: err") - planOutput := "\n" + remoteOpsErr - asyncTf.LinesToSend = remotePlanOutput - When(terraform.RunCommandWithVersion( - ctx, - prjCtx, absProjectPath, expPlanArgs, map[string]string(nil), tfVersion, "default")). - ThenReturn(planOutput, planErr) - - output, err := s.Run(ctx, prjCtx, []string{"extra", "args"}, absProjectPath, map[string]string(nil)) - Ok(t, err) - Equals(t, ` -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: -- destroy - -Terraform will perform the following actions: - -- null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.`, output) - - expRemotePlanArgs := []string{"plan", "-input=false", "-refresh", "extra", "args", "comment", "args"} - Equals(t, expRemotePlanArgs, asyncTf.CalledArgs) - - // Verify that the fake plan file we write has the correct contents. - bytes, err := os.ReadFile(filepath.Join(absProjectPath, "default.tfplan")) - Ok(t, err) - Equals(t, `Atlantis: this plan was created by remote ops - -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.`, string(bytes)) - - // Ensure that the status was updated with the runURL. 
- runURL := "https://app.terraform.io/app/lkysow-enterprises/atlantis-tfe-test/runs/run-is4oVvJfrkud1KvE" - updater.VerifyWasCalledOnce().UpdateProject(ctx, prjCtx, command.Plan, models.PendingVCSStatus, runURL, "") - updater.VerifyWasCalledOnce().UpdateProject(ctx, prjCtx, command.Plan, models.SuccessVCSStatus, runURL, "") - }) - } -} - -// Test striping output method -func TestStripRefreshingFromPlanOutput(t *testing.T) { - tfVersion0135, _ := version.NewVersion("0.13.5") - tfVersion0140, _ := version.NewVersion("0.14.0") - cases := []struct { - out string - tfVersion *version.Version - }{ - { - remotePlanOutput, - tfVersion0135, - }, - { - `Running plan in the remote backend. Output will stream here. Pressing Ctrl-C -will stop streaming the logs, but will not stop the plan running remotely. - -Preparing the remote plan... - -To view this run in a browser, visit: -https://app.terraform.io/app/lkysow-enterprises/atlantis-tfe-test/runs/run-is4oVvJfrkud1KvE - -Waiting for the plan to start... - -Terraform v0.14.0 - -Configuring remote state backend... -Initializing Terraform configuration... -2019/02/20 22:40:52 [DEBUG] Using modified User-Agent: Terraform/0.14.0TFE/202eeff -Refreshing Terraform state in-memory prior to plan... -The refreshed state will be used to calculate this plan, but will not be -persisted to local or remote state storage. - -null_resource.hi: Refreshing state... (ID: 217661332516885645) -null_resource.hi[1]: Refreshing state... (ID: 6064510335076839362) - -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.`, - tfVersion0140, - }, - } - - for _, c := range cases { - output := runtime.StripRefreshingFromPlanOutput(c.out, c.tfVersion) - Equals(t, ` -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.`, output) - } -} - -type remotePlanMock struct { - // LinesToSend will be sent on the channel. - LinesToSend string - // CalledArgs is what args we were called with. - CalledArgs []string -} - -func (r *remotePlanMock) RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) <-chan helpers.Line { - input := make(chan string) - defer close(input) - - return r.RunCommandAsyncWithInput(ctx, prjCtx, path, args, envs, v, workspace, input) -} - -func (r *remotePlanMock) RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string, input <-chan string) <-chan helpers.Line { - r.CalledArgs = args - out := make(chan helpers.Line) - go func() { - for _, line := range strings.Split(r.LinesToSend, "\n") { - out <- helpers.Line{Line: line} - } - close(out) - }() - return out -} - -func stringSliceEquals(a, b []string) bool { - if len(a) != len(b) { - return false - } - for i, v := range a { - if v != b[i] { - return false - } - } - return true -} - -var remotePlanOutput = `Running plan in the remote backend. Output will stream here. Pressing Ctrl-C -will stop streaming the logs, but will not stop the plan running remotely. - -Preparing the remote plan... 
- -To view this run in a browser, visit: -https://app.terraform.io/app/lkysow-enterprises/atlantis-tfe-test/runs/run-is4oVvJfrkud1KvE - -Waiting for the plan to start... - -Terraform v0.11.11 - -Configuring remote state backend... -Initializing Terraform configuration... -2019/02/20 22:40:52 [DEBUG] Using modified User-Agent: Terraform/0.11.11 TFE/202eeff -Refreshing Terraform state in-memory prior to plan... -The refreshed state will be used to calculate this plan, but will not be -persisted to local or remote state storage. - -null_resource.hi: Refreshing state... (ID: 217661332516885645) -null_resource.hi[1]: Refreshing state... (ID: 6064510335076839362) - ------------------------------------------------------------------------- - -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.` diff --git a/server/legacy/core/runtime/plan_type_step_runner_delegate.go b/server/legacy/core/runtime/plan_type_step_runner_delegate.go deleted file mode 100644 index 2b7ad9e5e..000000000 --- a/server/legacy/core/runtime/plan_type_step_runner_delegate.go +++ /dev/null @@ -1,62 +0,0 @@ -package runtime - -import ( - "context" - "os" - "path/filepath" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -// NullRunner is a runner that isn't configured for a given plan type but outputs nothing -type NullRunner struct{} - -func (p NullRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - return "", nil -} - -// RemoteBackendUnsupportedRunner is a runner that is responsible for outputting that the remote backend is unsupported -type RemoteBackendUnsupportedRunner struct{} - -func (p RemoteBackendUnsupportedRunner) Run(ctx context.Context, cmdCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - return "Remote backend is unsupported for this step.", nil -} - -func NewPlanTypeStepRunnerDelegate(defaultRunner Runner, remotePlanRunner Runner) Runner { - return &PlanTypeStepRunnerDelegate{ - defaultRunner: defaultRunner, - remotePlanRunner: remotePlanRunner, - } -} - -// PlanTypeStepRunnerDelegate delegates based on the type of plan, ie. 
remote backend which doesn't support certain functions -type PlanTypeStepRunnerDelegate struct { - defaultRunner Runner - remotePlanRunner Runner -} - -func (p *PlanTypeStepRunnerDelegate) isRemotePlan(planFile string) (bool, error) { - data, err := os.ReadFile(planFile) - - if err != nil { - return false, errors.Wrapf(err, "unable to read %s", planFile) - } - - return IsRemotePlan(data), nil -} - -func (p *PlanTypeStepRunnerDelegate) Run(ctx context.Context, cmdCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - planFile := filepath.Join(path, GetPlanFilename(cmdCtx.Workspace, cmdCtx.ProjectName)) - remotePlan, err := p.isRemotePlan(planFile) - - if err != nil { - return "", err - } - - if remotePlan { - return p.remotePlanRunner.Run(ctx, cmdCtx, extraArgs, path, envs) - } - - return p.defaultRunner.Run(ctx, cmdCtx, extraArgs, path, envs) -} diff --git a/server/legacy/core/runtime/plan_type_step_runner_delegate_test.go b/server/legacy/core/runtime/plan_type_step_runner_delegate_test.go deleted file mode 100644 index 0247ef0f2..000000000 --- a/server/legacy/core/runtime/plan_type_step_runner_delegate_test.go +++ /dev/null @@ -1,159 +0,0 @@ -package runtime_test - -import ( - "context" - "errors" - "os" - "path/filepath" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - . "github.com/runatlantis/atlantis/testing" - - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -var planFileContents = ` -An execution plan has been generated and is shown below. -Resource actions are indicated with the following symbols: - - destroy - -Terraform will perform the following actions: - - - null_resource.hi[1] - - -Plan: 0 to add, 0 to change, 1 to destroy.` - -func TestRunDelegate(t *testing.T) { - RegisterMockTestingT(t) - - mockDefaultRunner := mocks.NewMockRunner() - mockRemoteRunner := mocks.NewMockRunner() - - subject := runtime.NewPlanTypeStepRunnerDelegate( - mockDefaultRunner, - mockRemoteRunner, - ) - - tfVersion, _ := version.NewVersion("0.12.0") - - t.Run("Remote Runner Success", func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) - Ok(t, err) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: tfVersion, - } - extraArgs := []string{"extra", "args"} - envs := map[string]string{} - - expectedOut := "some random output" - - When(mockRemoteRunner.Run(ctx, prjCtx, extraArgs, tmpDir, envs)).ThenReturn(expectedOut, nil) - - output, err := subject.Run(ctx, prjCtx, extraArgs, tmpDir, envs) - - mockDefaultRunner.VerifyWasCalled(Never()) - - Equals(t, expectedOut, output) - Ok(t, err) - }) - - t.Run("Remote Runner Failure", func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, []byte("Atlantis: this plan was created by remote ops\n"+planFileContents), 0600) - Ok(t, err) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: 
tfVersion, - } - extraArgs := []string{"extra", "args"} - envs := map[string]string{} - - expectedOut := "some random output" - - When(mockRemoteRunner.Run(ctx, prjCtx, extraArgs, tmpDir, envs)).ThenReturn(expectedOut, errors.New("err")) - - output, err := subject.Run(ctx, prjCtx, extraArgs, tmpDir, envs) - - mockDefaultRunner.VerifyWasCalled(Never()) - - Equals(t, expectedOut, output) - Assert(t, err != nil, "err should not be nil") - }) - - t.Run("Local Runner Success", func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, []byte(planFileContents), 0600) - Ok(t, err) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: tfVersion, - } - extraArgs := []string{"extra", "args"} - envs := map[string]string{} - - expectedOut := "some random output" - - When(mockDefaultRunner.Run(ctx, prjCtx, extraArgs, tmpDir, envs)).ThenReturn(expectedOut, nil) - - output, err := subject.Run(ctx, prjCtx, extraArgs, tmpDir, envs) - - mockRemoteRunner.VerifyWasCalled(Never()) - - Equals(t, expectedOut, output) - Ok(t, err) - }) - - t.Run("Local Runner Failure", func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - planPath := filepath.Join(tmpDir, "workspace.tfplan") - err := os.WriteFile(planPath, []byte(planFileContents), 0600) - Ok(t, err) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "workspace", - RepoRelDir: ".", - EscapedCommentArgs: []string{"comment", "args"}, - TerraformVersion: tfVersion, - } - extraArgs := []string{"extra", "args"} - envs := map[string]string{} - - expectedOut := "some random output" - - When(mockDefaultRunner.Run(ctx, prjCtx, extraArgs, tmpDir, envs)).ThenReturn(expectedOut, errors.New("err")) - - output, err := subject.Run(ctx, prjCtx, extraArgs, tmpDir, envs) - - mockRemoteRunner.VerifyWasCalled(Never()) - - Equals(t, expectedOut, output) - Assert(t, err != nil, "err should not be nil") - }) -} diff --git a/server/legacy/core/runtime/policy/conftest_client.go b/server/legacy/core/runtime/policy/conftest_client.go deleted file mode 100644 index 1e4e18c89..000000000 --- a/server/legacy/core/runtime/policy/conftest_client.go +++ /dev/null @@ -1,157 +0,0 @@ -package policy - -import ( - "fmt" - "os" - "path/filepath" - "runtime" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/cache" - runtime_models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - "github.com/runatlantis/atlantis/server/legacy/core/terraform" - "github.com/runatlantis/atlantis/server/logging" - "golang.org/x/text/cases" - "golang.org/x/text/language" -) - -const ( - DefaultConftestVersionEnvKey = "DEFAULT_CONFTEST_VERSION" - conftestBinaryName = "conftest" - conftestDownloadURLPrefix = "https://github.com/open-policy-agent/conftest/releases/download/v" - conftestArch = "x86_64" -) - -type Arg struct { - Param string - Option string -} - -func (a Arg) build() []string { - return []string{a.Option, a.Param} -} - -func NewPolicyArg(parameter string) Arg { - return Arg{ - Param: parameter, - Option: "-p", - } -} - -type ConftestTestCommandArgs struct { - PolicyArgs []Arg - ExtraArgs []string - InputFile string - Command string -} - -func (c ConftestTestCommandArgs) build() []string { - // add the subcommand - commandArgs := 
[]string{c.Command, "test"} - - for _, a := range c.PolicyArgs { - commandArgs = append(commandArgs, a.build()...) - } - - // add hardcoded options - commandArgs = append(commandArgs, c.InputFile, "--no-color") - - // add extra args provided through server config - commandArgs = append(commandArgs, c.ExtraArgs...) - - return commandArgs -} - -type ConfTestVersionDownloader struct { - downloader terraform.Downloader -} - -func (c ConfTestVersionDownloader) downloadConfTestVersion(v *version.Version, destPath string) (runtime_models.FilePath, error) { - versionURLPrefix := fmt.Sprintf("%s%s", conftestDownloadURLPrefix, v.Original()) - - // download binary in addition to checksum file - binURL := fmt.Sprintf("%s/conftest_%s_%s_%s.tar.gz", versionURLPrefix, v.Original(), cases.Title(language.English).String(runtime.GOOS), conftestArch) - checksumURL := fmt.Sprintf("%s/checksums.txt", versionURLPrefix) - - // underlying implementation uses go-getter so the URL is formatted as such. - // i know i know, I'm assuming an interface implementation with my inputs. - // realistically though the interface just exists for testing so ¯\_(ツ)_/¯ - fullSrcURL := fmt.Sprintf("%s?checksum=file:%s", binURL, checksumURL) - - if err := c.downloader.GetAny(destPath, fullSrcURL); err != nil { - return runtime_models.LocalFilePath(""), errors.Wrapf(err, "downloading conftest version %s at %q", v.String(), fullSrcURL) - } - - binPath := filepath.Join(destPath, "conftest") - - return runtime_models.LocalFilePath(binPath), nil -} - -type ConfTestVersionEnsurer struct { - VersionCache cache.ExecutionVersionCache - DefaultConftestVersion *version.Version -} - -func NewConfTestVersionEnsurer(log logging.Logger, versionRootDir string, conftestDownloder terraform.Downloader) *ConfTestVersionEnsurer { - downloader := ConfTestVersionDownloader{ - downloader: conftestDownloder, - } - version, err := getDefaultVersion() - - if err != nil { - // conftest default versions are not essential to service startup so let's not block on it. - log.Warn(fmt.Sprintf("failed to get default conftest version. Will attempt request scoped lazy loads %s", err.Error())) - } - - versionCache := cache.NewExecutionVersionLayeredLoadingCache( - conftestBinaryName, - versionRootDir, - downloader.downloadConfTestVersion, - ) - - return &ConfTestVersionEnsurer{ - VersionCache: versionCache, - DefaultConftestVersion: version, - } -} - -func (c *ConfTestVersionEnsurer) EnsureExecutorVersion(log logging.Logger, v *version.Version) (string, error) { - // we have no information to proceed so fail hard - if c.DefaultConftestVersion == nil && v == nil { - return "", errors.New("no conftest version configured/specified") - } - - var versionToRetrieve *version.Version - - if v == nil { - versionToRetrieve = c.DefaultConftestVersion - } else { - versionToRetrieve = v - } - - localPath, err := c.VersionCache.Get(versionToRetrieve) - - if err != nil { - return "", err - } - - return localPath, nil -} - -func getDefaultVersion() (*version.Version, error) { - // ensure version is not default version. 
- // first check for the env var and if that doesn't exist use the local executable version - defaultVersion, exists := os.LookupEnv(DefaultConftestVersionEnvKey) - - if !exists { - return nil, fmt.Errorf("%s not set", DefaultConftestVersionEnvKey) - } - - wrappedVersion, err := version.NewVersion(defaultVersion) - - if err != nil { - return nil, errors.Wrapf(err, "wrapping version %s", defaultVersion) - } - return wrappedVersion, nil -} diff --git a/server/legacy/core/runtime/policy/conftest_client_test.go b/server/legacy/core/runtime/policy/conftest_client_test.go deleted file mode 100644 index ce847d5d1..000000000 --- a/server/legacy/core/runtime/policy/conftest_client_test.go +++ /dev/null @@ -1,116 +0,0 @@ -package policy - -import ( - "errors" - "fmt" - "path/filepath" - "runtime" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/cache/mocks" - terraform_mocks "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" - "golang.org/x/text/cases" - "golang.org/x/text/language" -) - -func TestConfTestVersionDownloader(t *testing.T) { - version, _ := version.NewVersion("0.25.0") - destPath := "some/path" - - fullURL := fmt.Sprintf("https://github.com/open-policy-agent/conftest/releases/download/v0.25.0/conftest_0.25.0_%s_x86_64.tar.gz?checksum=file:https://github.com/open-policy-agent/conftest/releases/download/v0.25.0/checksums.txt", cases.Title(language.English).String(runtime.GOOS)) - - RegisterMockTestingT(t) - - mockDownloader := terraform_mocks.NewMockDownloader() - - subject := ConfTestVersionDownloader{downloader: mockDownloader} - - t.Run("success", func(t *testing.T) { - When(mockDownloader.GetFile(EqString(destPath), EqString(fullURL))).ThenReturn(nil) - binPath, err := subject.downloadConfTestVersion(version, destPath) - - mockDownloader.VerifyWasCalledOnce().GetAny(EqString(destPath), EqString(fullURL)) - - Ok(t, err) - - Assert(t, binPath.Resolve() == filepath.Join(destPath, "conftest"), "expected binpath") - }) - - t.Run("error", func(t *testing.T) { - When(mockDownloader.GetAny(EqString(destPath), EqString(fullURL))).ThenReturn(errors.New("err")) - _, err := subject.downloadConfTestVersion(version, destPath) - - Assert(t, err != nil, "err is expected") - }) -} - -func TestEnsureExecutorVersion(t *testing.T) { - defaultVersion, _ := version.NewVersion("1.0") - expectedPath := "some/path" - - RegisterMockTestingT(t) - - mockCache := mocks.NewMockExecutionVersionCache() - log := logging.NewNoopCtxLogger(t) - - t.Run("no specified version or default version", func(t *testing.T) { - subject := &ConfTestVersionEnsurer{ - VersionCache: mockCache, - } - - _, err := subject.EnsureExecutorVersion(log, nil) - - Assert(t, err != nil, "expected error finding version") - }) - - t.Run("use default version", func(t *testing.T) { - subject := &ConfTestVersionEnsurer{ - VersionCache: mockCache, - DefaultConftestVersion: defaultVersion, - } - - When(mockCache.Get(defaultVersion)).ThenReturn(expectedPath, nil) - - path, err := subject.EnsureExecutorVersion(log, nil) - - Ok(t, err) - - Assert(t, path == expectedPath, "path is expected") - }) - - t.Run("use specified version", func(t *testing.T) { - subject := &ConfTestVersionEnsurer{ - VersionCache: mockCache, - DefaultConftestVersion: defaultVersion, - } - - versionInput, _ := version.NewVersion("2.0") - - 
When(mockCache.Get(versionInput)).ThenReturn(expectedPath, nil) - - path, err := subject.EnsureExecutorVersion(log, versionInput) - - Ok(t, err) - - Assert(t, path == expectedPath, "path is expected") - }) - - t.Run("cache error", func(t *testing.T) { - subject := &ConfTestVersionEnsurer{ - VersionCache: mockCache, - DefaultConftestVersion: defaultVersion, - } - - versionInput, _ := version.NewVersion("2.0") - - When(mockCache.Get(versionInput)).ThenReturn(expectedPath, errors.New("some err")) - - _, err := subject.EnsureExecutorVersion(log, versionInput) - - Assert(t, err != nil, "path is expected") - }) -} diff --git a/server/legacy/core/runtime/policy/conftest_executor.go b/server/legacy/core/runtime/policy/conftest_executor.go deleted file mode 100644 index 70479ebb3..000000000 --- a/server/legacy/core/runtime/policy/conftest_executor.go +++ /dev/null @@ -1,135 +0,0 @@ -package policy - -import ( - "context" - "fmt" - "path/filepath" - "strings" - - "github.com/palantir/go-githubapp/githubapp" - runtime_models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - "github.com/runatlantis/atlantis/server/vcs/provider/github" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -type policyFilter interface { - Filter(ctx context.Context, installationToken int64, repo models.Repo, prNum int, trigger command.CommandTrigger, failedPolicies []valid.PolicySet) ([]valid.PolicySet, error) -} - -type exec interface { - CombinedOutput(args []string, envs map[string]string, workdir string) (string, error) -} - -const ( - conftestScope = "conftest.policies" - // use internal server error message for user to understand error is from atlantis - internalError = "internal server error" -) - -// ConfTestExecutor runs a versioned conftest binary with the args built from the project context. -// Project context defines whether conftest runs a local policy set or runs a test on a remote policy set. -type ConfTestExecutor struct { - Exec exec - PolicyFilter policyFilter -} - -func NewConfTestExecutor(creator githubapp.ClientCreator, policySets valid.PolicySets, allocator feature.Allocator, logger logging.Logger) *ConfTestExecutor { - reviewFetcher := &github.PRReviewFetcher{ - ClientCreator: creator, - } - reviewDismisser := &github.PRReviewDismisser{ - ClientCreator: creator, - } - teamMemberFetcher := &github.TeamMemberFetcher{ - ClientCreator: creator, - Org: policySets.Organization, - } - return &ConfTestExecutor{ - Exec: runtime_models.LocalExec{}, - PolicyFilter: events.NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamMemberFetcher, allocator, policySets.PolicySets, logger), - } -} - -// Run performs conftest policy tests against changes and fails if any policy does not pass. It also runs an all-or-nothing -// filter that will filter out all policy failures based on the filter criteria. 
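// For orientation, an illustrative use of ConftestTestCommandArgs.build as
// exercised below; the policy path, input file, and binary path are
// hypothetical values rather than anything taken from the deleted code.
func exampleConftestArgs() []string {
	args := ConftestTestCommandArgs{
		PolicyArgs: []Arg{NewPolicyArg("policies/security")},
		InputFile:  "showfile.json",
		Command:    "/path/to/conftest",
	}
	// expected shape: ["/path/to/conftest" "test" "-p" "policies/security" "showfile.json" "--no-color"]
	// followed by any server-configured extra args.
	return args.build()
}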
-func (c *ConfTestExecutor) Run(_ context.Context, prjCtx command.ProjectContext, executablePath string, envs map[string]string, workdir string, extraArgs []string) (string, error) { - var policyNames []string - var failedPolicies []valid.PolicySet - var totalCmdOutput []string - var policyErr error - - inputFile := filepath.Join(workdir, prjCtx.GetShowResultFileName()) - scope := prjCtx.Scope.SubScope(conftestScope) - - for _, policySet := range prjCtx.PolicySets.PolicySets { - var policyArgs []Arg - for _, path := range policySet.Paths { - policyArgs = append(policyArgs, NewPolicyArg(path)) - } - policyNames = append(policyNames, policySet.Name) - args := ConftestTestCommandArgs{ - PolicyArgs: policyArgs, - ExtraArgs: extraArgs, - InputFile: inputFile, - Command: executablePath, - } - serializedArgs := args.build() - policyScope := scope.SubScope(policySet.Name) - cmdOutput, cmdErr := c.Exec.CombinedOutput(serializedArgs, envs, workdir) - // Continue running other policies if one fails since it might not be the only failing one - if cmdErr != nil { - policyErr = cmdErr - failedPolicies = append(failedPolicies, policySet) - policyScope.Counter(metrics.ExecutionFailureMetric).Inc(1) - } else { - policyScope.Counter(metrics.ExecutionSuccessMetric).Inc(1) - } - totalCmdOutput = append(totalCmdOutput, c.processOutput(cmdOutput, policySet, cmdErr)) - } - - title := c.buildTitle(policyNames) - output := c.sanitizeOutput(inputFile, title+strings.Join(totalCmdOutput, "\n")) - if prjCtx.InstallationToken == 0 { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, "missing installation token") - scope.Counter(metrics.ExecutionErrorMetric).Inc(1) - return output, errors.New(internalError) - } - - failedPolicies, err := c.PolicyFilter.Filter(prjCtx.RequestCtx, prjCtx.InstallationToken, prjCtx.HeadRepo, prjCtx.Pull.Num, prjCtx.Trigger, failedPolicies) - if err != nil { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, fmt.Sprintf("error filtering out approved policies: %s", err.Error())) - scope.Counter(metrics.ExecutionErrorMetric).Inc(1) - return output, errors.New(internalError) - } - if len(failedPolicies) == 0 { - scope.Counter(metrics.ExecutionSuccessMetric).Inc(1) - return output, nil - } - // use policyErr here as policy error output is what the user should see - scope.Counter(metrics.ExecutionFailureMetric).Inc(1) - return output, policyErr -} - -func (c *ConfTestExecutor) buildTitle(policySetNames []string) string { - return fmt.Sprintf("Checking plan against the following policies: \n %s\n\n", strings.Join(policySetNames, "\n ")) -} - -func (c *ConfTestExecutor) sanitizeOutput(inputFile string, output string) string { - return strings.Replace(output, inputFile, "", -1) -} - -func (c *ConfTestExecutor) processOutput(output string, policySet valid.PolicySet, err error) string { - // errored results need an extra newline - if err != nil { - return policySet.Name + ":\n" + output - } - return policySet.Name + ":" + output -} diff --git a/server/legacy/core/runtime/policy/conftest_executor_test.go b/server/legacy/core/runtime/policy/conftest_executor_test.go deleted file mode 100644 index 81a1234c5..000000000 --- a/server/legacy/core/runtime/policy/conftest_executor_test.go +++ /dev/null @@ -1,203 +0,0 @@ -package policy_test - -import ( - "context" - "fmt" - "strings" - "testing" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/policy" - "github.com/runatlantis/atlantis/server/legacy/events/command" - 
"github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - contextInternal "github.com/runatlantis/atlantis/server/neptune/context" - "github.com/stretchr/testify/assert" - "github.com/uber-go/tally/v4" -) - -const ( - path = "/path/to/some/place" - path2 = "/path/to/another/place" - output = "test output" - workDir = "workDir" - executablePath = "executablepath" - policyA = "A" - policyB = "B" -) - -func buildTestProjectCtx(t *testing.T, policySets []valid.PolicySet) command.ProjectContext { - ctx := context.WithValue(context.Background(), contextInternal.InstallationIDKey, int64(1)) - return command.ProjectContext{ - PolicySets: valid.PolicySets{ - Version: nil, - PolicySets: policySets, - }, - Log: logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("test", map[string]string{}), - RequestCtx: ctx, - InstallationToken: 1, - } -} - -func buildTestTitle(policySets []valid.PolicySet) string { - var names []string - for _, policy := range policySets { - names = append(names, policy.Name) - } - return fmt.Sprintf("Checking plan against the following policies: \n %s\n", strings.Join(names, "\n ")) -} - -func TestConfTestExecutor_PolicySuccess(t *testing.T) { - exec := &mockExec{ - output: output, - } - policyFilter := &mockPolicyFilter{} - executor := policy.ConfTestExecutor{ - Exec: exec, - PolicyFilter: policyFilter, - } - var args []string - policySets := []valid.PolicySet{ - {Name: policyA, Paths: []string{path, path2}}, - {Name: policyB, Paths: []string{path, path2}}, - } - prjCtx := buildTestProjectCtx(t, policySets) - expectedTitle := buildTestTitle(policySets) - cmdOutput, err := executor.Run(context.Background(), prjCtx, executablePath, map[string]string{}, workDir, args) - assert.NoError(t, err) - assert.Equal(t, exec.numCalls, 2) - assert.True(t, policyFilter.isCalled) - assert.Contains(t, cmdOutput, expectedTitle) - assert.Contains(t, cmdOutput, output) -} - -func TestConfTestExecutor_PolicySuccess_FilteredFailures(t *testing.T) { - exec := &mockExec{ - output: output, - error: assert.AnError, - } - policyFilter := &mockPolicyFilter{} - executor := policy.ConfTestExecutor{ - Exec: exec, - PolicyFilter: policyFilter, - } - var args []string - policySets := []valid.PolicySet{ - {Name: policyA, Paths: []string{path}}, - {Name: policyB, Paths: []string{path2}}, - } - prjCtx := buildTestProjectCtx(t, policySets) - expectedTitle := buildTestTitle(policySets) - cmdOutput, err := executor.Run(context.Background(), prjCtx, executablePath, map[string]string{}, workDir, args) - assert.NoError(t, err) - assert.Equal(t, exec.numCalls, 2) - assert.True(t, policyFilter.isCalled) - assert.Contains(t, cmdOutput, expectedTitle) - assert.Contains(t, cmdOutput, output) -} - -func TestConfTestExecutor_PolicyFailure_NotFiltered(t *testing.T) { - exec := &mockExec{ - output: output, - error: assert.AnError, - } - policySets := []valid.PolicySet{ - {Name: policyA, Paths: []string{path}}, - {Name: policyB, Paths: []string{path2}}, - } - policyFilter := &mockPolicyFilter{ - policies: policySets, - } - executor := policy.ConfTestExecutor{ - Exec: exec, - PolicyFilter: policyFilter, - } - var args []string - prjCtx := buildTestProjectCtx(t, policySets) - cmdOutput, err := executor.Run(context.Background(), prjCtx, executablePath, map[string]string{}, workDir, args) - expectedTitle := buildTestTitle(policySets) - assert.Error(t, err) - assert.Equal(t, exec.numCalls, 2) - assert.True(t, policyFilter.isCalled) - assert.Contains(t, cmdOutput, expectedTitle) - 
assert.Contains(t, cmdOutput, output) -} - -func TestConfTestExecutor_FilterFailure(t *testing.T) { - exec := &mockExec{ - output: output, - } - policySets := []valid.PolicySet{ - {Name: policyA, Paths: []string{path}}, - {Name: policyB, Paths: []string{path2}}, - } - policyFilter := &mockPolicyFilter{error: assert.AnError} - executor := policy.ConfTestExecutor{ - Exec: exec, - PolicyFilter: policyFilter, - } - var args []string - prjCtx := buildTestProjectCtx(t, policySets) - expectedTitle := buildTestTitle(policySets) - cmdOutput, err := executor.Run(context.Background(), prjCtx, executablePath, map[string]string{}, workDir, args) - assert.Error(t, err) - assert.Equal(t, exec.numCalls, 2) - assert.True(t, policyFilter.isCalled) - assert.Contains(t, cmdOutput, expectedTitle) - assert.Contains(t, cmdOutput, output) -} - -func TestConfTestExecutor_MissingInstallationToken(t *testing.T) { - exec := &mockExec{ - output: output, - } - policyFilter := &mockPolicyFilter{} - executor := policy.ConfTestExecutor{ - Exec: exec, - PolicyFilter: policyFilter, - } - var args []string - policySets := []valid.PolicySet{ - {Name: policyA, Paths: []string{path}}, - {Name: policyB, Paths: []string{path2}}, - } - prjCtx := command.ProjectContext{ - PolicySets: valid.PolicySets{ - Version: nil, - PolicySets: policySets, - }, - Log: logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("test", map[string]string{}), - RequestCtx: context.Background(), - } - expectedTitle := buildTestTitle(policySets) - cmdOutput, err := executor.Run(context.Background(), prjCtx, executablePath, map[string]string{}, workDir, args) - assert.Error(t, err) - assert.Equal(t, exec.numCalls, 2) - assert.False(t, policyFilter.isCalled) - assert.Contains(t, cmdOutput, expectedTitle) - assert.Contains(t, cmdOutput, output) -} - -type mockPolicyFilter struct { - isCalled bool - policies []valid.PolicySet - error error -} - -func (r *mockPolicyFilter) Filter(_ context.Context, _ int64, _ models.Repo, _ int, _ command.CommandTrigger, _ []valid.PolicySet) ([]valid.PolicySet, error) { - r.isCalled = true - return r.policies, r.error -} - -type mockExec struct { - numCalls int - output string - error error -} - -func (r *mockExec) CombinedOutput(_ []string, _ map[string]string, _ string) (string, error) { - r.numCalls++ - return r.output, r.error -} diff --git a/server/legacy/core/runtime/policy/mocks/matchers/valid_policyset.go b/server/legacy/core/runtime/policy/mocks/matchers/valid_policyset.go deleted file mode 100644 index da1b709bc..000000000 --- a/server/legacy/core/runtime/policy/mocks/matchers/valid_policyset.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - valid "github.com/runatlantis/atlantis/server/config/valid" - "reflect" -) - -func AnyValidPolicySet() valid.PolicySet { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(valid.PolicySet))(nil)).Elem())) - var nullValue valid.PolicySet - return nullValue -} - -func EqValidPolicySet(value valid.PolicySet) valid.PolicySet { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue valid.PolicySet - return nullValue -} diff --git a/server/legacy/core/runtime/policy/mocks/mock_conftest_client.go b/server/legacy/core/runtime/policy/mocks/mock_conftest_client.go deleted file mode 100644 index 3d1c15a3e..000000000 --- a/server/legacy/core/runtime/policy/mocks/mock_conftest_client.go +++ /dev/null @@ -1,109 +0,0 @@ -// Code generated by pegomock. 
DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/runtime/policy (interfaces: SourceResolver) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - valid "github.com/runatlantis/atlantis/server/config/valid" - "reflect" - "time" -) - -type MockSourceResolver struct { - fail func(message string, callerSkip ...int) -} - -func NewMockSourceResolver(options ...pegomock.Option) *MockSourceResolver { - mock := &MockSourceResolver{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockSourceResolver) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockSourceResolver) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockSourceResolver) Resolve(policySet valid.PolicySet) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockSourceResolver().") - } - params := []pegomock.Param{policySet} - result := pegomock.GetGenericMockFrom(mock).Invoke("Resolve", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockSourceResolver) VerifyWasCalledOnce() *VerifierMockSourceResolver { - return &VerifierMockSourceResolver{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockSourceResolver) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockSourceResolver { - return &VerifierMockSourceResolver{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockSourceResolver) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockSourceResolver { - return &VerifierMockSourceResolver{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockSourceResolver) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockSourceResolver { - return &VerifierMockSourceResolver{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockSourceResolver struct { - mock *MockSourceResolver - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockSourceResolver) Resolve(policySet valid.PolicySet) *MockSourceResolver_Resolve_OngoingVerification { - params := []pegomock.Param{policySet} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Resolve", params, verifier.timeout) - return &MockSourceResolver_Resolve_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockSourceResolver_Resolve_OngoingVerification struct { - mock *MockSourceResolver - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockSourceResolver_Resolve_OngoingVerification) GetCapturedArguments() valid.PolicySet { - policySet := c.GetAllCapturedArguments() - return policySet[len(policySet)-1] -} - -func (c *MockSourceResolver_Resolve_OngoingVerification) GetAllCapturedArguments() (_param0 []valid.PolicySet) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = 
make([]valid.PolicySet, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(valid.PolicySet) - } - } - return -} diff --git a/server/legacy/core/runtime/policy_check_step_runner.go b/server/legacy/core/runtime/policy_check_step_runner.go deleted file mode 100644 index d90a596fb..000000000 --- a/server/legacy/core/runtime/policy_check_step_runner.go +++ /dev/null @@ -1,40 +0,0 @@ -package runtime - -import ( - "context" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -// PolicyCheckStepRunner runs a policy check command given a ctx -type PolicyCheckStepRunner struct { - VersionEnsurer ExecutorVersionEnsurer - Executor Executor -} - -// NewPolicyCheckStepRunner creates a new step runner from an Executor workflow -func NewPolicyCheckStepRunner( - defaultTfVersion *version.Version, - versionEnsurer ExecutorVersionEnsurer, - executor Executor) (Runner, error) { - runner := &PlanTypeStepRunnerDelegate{ - defaultRunner: &PolicyCheckStepRunner{ - VersionEnsurer: versionEnsurer, - Executor: executor, - }, - remotePlanRunner: RemoteBackendUnsupportedRunner{}, - } - - return NewMinimumVersionStepRunnerDelegate(minimumShowTfVersion, defaultTfVersion, runner) -} - -// Run ensures a given version for the executable, builds the args from the project context and then runs executable returning the result -func (p *PolicyCheckStepRunner) Run(ctx context.Context, cmdCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - executable, err := p.VersionEnsurer.EnsureExecutorVersion(cmdCtx.Log, cmdCtx.PolicySets.Version) - if err != nil { - return "", errors.Wrapf(err, "ensuring policy Executor version") - } - return p.Executor.Run(ctx, cmdCtx, executable, envs, path, extraArgs) -} diff --git a/server/legacy/core/runtime/policy_check_step_runner_test.go b/server/legacy/core/runtime/policy_check_step_runner_test.go deleted file mode 100644 index a99660d3c..000000000 --- a/server/legacy/core/runtime/policy_check_step_runner_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package runtime_test - -import ( - "context" - "testing" - - "github.com/stretchr/testify/assert" - - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -const ( - expectedOutput = "output" - executablePath = "some/path/conftest" -) - -func buildTestPrjCtx(t *testing.T) command.ProjectContext { - v, err := version.NewVersion("1.0") - assert.NoError(t, err) - return command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - BaseRepo: models.Repo{ - FullName: "owner/repo", - }, - PolicySets: valid.PolicySets{ - Version: v, - PolicySets: []valid.PolicySet{}, - }, - } -} - -func TestRun_Successful(t *testing.T) { - prjCtx := buildTestPrjCtx(t) - ensurer := &mockEnsurer{} - executor := &mockExecutor{ - output: expectedOutput, - } - runner := &runtime.PolicyCheckStepRunner{ - VersionEnsurer: ensurer, - Executor: executor, - } - output, err := runner.Run(context.Background(), prjCtx, []string{}, executablePath, map[string]string{}) - assert.NoError(t, err) - assert.Equal(t, output, expectedOutput) - assert.True(t, ensurer.isCalled) - assert.True(t, executor.isCalled) -} - -func TestRun_EnsurerFailure(t *testing.T) { - prjCtx := 
buildTestPrjCtx(t) - ensurer := &mockEnsurer{ - err: assert.AnError, - } - executor := &mockExecutor{} - runner := &runtime.PolicyCheckStepRunner{ - VersionEnsurer: ensurer, - Executor: executor, - } - output, err := runner.Run(context.Background(), prjCtx, []string{}, executablePath, map[string]string{}) - assert.Error(t, err) - assert.Empty(t, output) - assert.True(t, ensurer.isCalled) - assert.False(t, executor.isCalled) -} - -type mockExecutor struct { - output string - err error - isCalled bool -} - -func (t *mockExecutor) Run(_ context.Context, _ command.ProjectContext, _ string, _ map[string]string, _ string, _ []string) (string, error) { - t.isCalled = true - return t.output, t.err -} - -type mockEnsurer struct { - output string - err error - isCalled bool -} - -func (t *mockEnsurer) EnsureExecutorVersion(_ logging.Logger, _ *version.Version) (string, error) { - t.isCalled = true - return t.output, t.err -} diff --git a/server/legacy/core/runtime/pre_workflow_hook_runner.go b/server/legacy/core/runtime/pre_workflow_hook_runner.go deleted file mode 100644 index 8902ecfbf..000000000 --- a/server/legacy/core/runtime/pre_workflow_hook_runner.go +++ /dev/null @@ -1,55 +0,0 @@ -package runtime - -import ( - "context" - "fmt" - "os" - "os/exec" - - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pre_workflows_hook_runner.go PreWorkflowHookRunner -type PreWorkflowHookRunner interface { - Run(ctx context.Context, preCtx models.PreWorkflowHookCommandContext, command string, path string) (string, error) -} - -type DefaultPreWorkflowHookRunner struct{} - -func (wh DefaultPreWorkflowHookRunner) Run(ctx context.Context, preCtx models.PreWorkflowHookCommandContext, command string, path string) (string, error) { - cmd := exec.Command("sh", "-c", command) // #nosec - cmd.Dir = path - - baseEnvVars := os.Environ() - customEnvVars := map[string]string{ - "BASE_BRANCH_NAME": preCtx.Pull.BaseBranch, - "BASE_REPO_NAME": preCtx.BaseRepo.Name, - "BASE_REPO_OWNER": preCtx.BaseRepo.Owner, - "DIR": path, - "HEAD_BRANCH_NAME": preCtx.Pull.HeadBranch, - "HEAD_COMMIT": preCtx.Pull.HeadCommit, - "HEAD_REPO_NAME": preCtx.HeadRepo.Name, - "HEAD_REPO_OWNER": preCtx.HeadRepo.Owner, - "PULL_AUTHOR": preCtx.Pull.Author, - "PULL_NUM": fmt.Sprintf("%d", preCtx.Pull.Num), - "USER_NAME": preCtx.User.Username, - } - - finalEnvVars := baseEnvVars - for key, val := range customEnvVars { - finalEnvVars = append(finalEnvVars, fmt.Sprintf("%s=%s", key, val)) - } - cmd.Env = finalEnvVars - - // pre-workflow hooks operate different than our terraform steps - // it's up to the underlying implementation to log errors/output accordingly. - // The only required step is to share Stdout and Stderr with the underlying - // process, so that our logging sidecar can forward the logs to kibana - cmd.Stdout = os.Stdout - cmd.Stderr = os.Stderr - - if err := cmd.Run(); err != nil { - return "", err - } - return "", nil -} diff --git a/server/legacy/core/runtime/pre_workflow_hook_runner_test.go b/server/legacy/core/runtime/pre_workflow_hook_runner_test.go deleted file mode 100644 index e95839951..000000000 --- a/server/legacy/core/runtime/pre_workflow_hook_runner_test.go +++ /dev/null @@ -1,79 +0,0 @@ -package runtime_test - -import ( - "context" - "strings" - "testing" - - . 
"github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func TestPreWorkflowHookRunner_Run(t *testing.T) { - cases := []struct { - Command string - ExpOut string - ExpErr string - }{ - { - Command: "echo hi", - ExpOut: "", - }, - { - Command: "echo 'a", - ExpErr: "exit status 2", - }, - } - - for _, c := range cases { - var err error - - Ok(t, err) - - RegisterMockTestingT(t) - - logger := logging.NewNoopCtxLogger(t) - - r := runtime.DefaultPreWorkflowHookRunner{} - t.Run(c.Command, func(t *testing.T) { - tmpDir, cleanup := TempDir(t) - defer cleanup() - ctx := context.Background() - prjCtx := models.PreWorkflowHookCommandContext{ - BaseRepo: models.Repo{ - Name: "basename", - Owner: "baseowner", - }, - HeadRepo: models.Repo{ - Name: "headname", - Owner: "headowner", - }, - Pull: models.PullRequest{ - Num: 2, - HeadBranch: "add-feat", - HeadCommit: "12345abcdef", - BaseBranch: "master", - Author: "acme", - }, - User: models.User{ - Username: "acme-user", - }, - Log: logger, - } - out, err := r.Run(ctx, prjCtx, c.Command, tmpDir) - if c.ExpErr != "" { - ErrContains(t, c.ExpErr, err) - return - } - Ok(t, err) - // Replace $DIR in the exp with the actual temp dir. We do this - // here because when constructing the cases we don't yet know the - // temp dir. - expOut := strings.Replace(c.ExpOut, "$DIR", tmpDir, -1) - Equals(t, expOut, out) - }) - } -} diff --git a/server/legacy/core/runtime/pull_approved_checker.go b/server/legacy/core/runtime/pull_approved_checker.go deleted file mode 100644 index 384e6253e..000000000 --- a/server/legacy/core/runtime/pull_approved_checker.go +++ /dev/null @@ -1,11 +0,0 @@ -package runtime - -import ( - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pull_approved_checker.go PullApprovedChecker - -type PullApprovedChecker interface { - PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error) -} diff --git a/server/legacy/core/runtime/run_step_runner.go b/server/legacy/core/runtime/run_step_runner.go deleted file mode 100644 index 36991df46..000000000 --- a/server/legacy/core/runtime/run_step_runner.go +++ /dev/null @@ -1,89 +0,0 @@ -package runtime - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -// RunStepRunner runs custom commands. -type RunStepRunner struct { - TerraformExecutor TerraformExec - DefaultTFVersion *version.Version - // TerraformBinDir is the directory where Atlantis downloads Terraform binaries. 
- TerraformBinDir string -} - -func (r *RunStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, command string, path string, envs map[string]string) (string, error) { - tfVersion := r.DefaultTFVersion - if prjCtx.TerraformVersion != nil { - tfVersion = prjCtx.TerraformVersion - } - - err := r.TerraformExecutor.EnsureVersion(prjCtx.Log, tfVersion) - if err != nil { - err = fmt.Errorf("%s: Downloading terraform Version %s", err, tfVersion.String()) - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, fmt.Sprintf("error: %s", err)) - return "", err - } - - cmd := exec.Command("sh", "-c", command) // #nosec - cmd.Dir = path - - dynamicEnvVars := make(map[string]string) - - if loc, ok := prjCtx.Tags["manifest_path"]; ok { - dynamicEnvVars["MANIFEST_FILEPATH"] = loc - } - - baseEnvVars := os.Environ() - - customEnvVars := map[string]string{ - "ATLANTIS_TERRAFORM_VERSION": tfVersion.String(), - "BASE_BRANCH_NAME": prjCtx.Pull.BaseBranch, - "BASE_REPO_NAME": prjCtx.BaseRepo.Name, - "BASE_REPO_OWNER": prjCtx.BaseRepo.Owner, - "COMMENT_ARGS": strings.Join(prjCtx.EscapedCommentArgs, ","), - "DIR": path, - "HEAD_BRANCH_NAME": prjCtx.Pull.HeadBranch, - "HEAD_COMMIT": prjCtx.Pull.HeadCommit, - "HEAD_REPO_NAME": prjCtx.HeadRepo.Name, - "HEAD_REPO_OWNER": prjCtx.HeadRepo.Owner, - "PATH": fmt.Sprintf("%s:%s", os.Getenv("PATH"), r.TerraformBinDir), - "PLANFILE": filepath.Join(path, GetPlanFilename(prjCtx.Workspace, prjCtx.ProjectName)), - "SHOWFILE": filepath.Join(path, prjCtx.GetShowResultFileName()), - "PROJECT_NAME": prjCtx.ProjectName, - "PULL_AUTHOR": prjCtx.Pull.Author, - "PULL_NUM": fmt.Sprintf("%d", prjCtx.Pull.Num), - "REPO_REL_DIR": prjCtx.RepoRelDir, - "USER_NAME": prjCtx.User.Username, - "WORKSPACE": prjCtx.Workspace, - } - - finalEnvVars := baseEnvVars - for key, val := range customEnvVars { - finalEnvVars = append(finalEnvVars, fmt.Sprintf("%s=%s", key, val)) - } - for key, val := range envs { - finalEnvVars = append(finalEnvVars, fmt.Sprintf("%s=%s", key, val)) - } - for key, val := range dynamicEnvVars { - finalEnvVars = append(finalEnvVars, fmt.Sprintf("%s=%s", key, val)) - } - cmd.Env = finalEnvVars - out, err := cmd.CombinedOutput() - - if err != nil { - err = fmt.Errorf("%s: running %q in %q: \n%s", err, command, path, out) - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, fmt.Sprintf("error: %s", err)) - return "", err - } - prjCtx.Log.InfoContext(prjCtx.RequestCtx, fmt.Sprintf("successfully ran %q in %q", command, path)) - return string(out), nil -} diff --git a/server/legacy/core/runtime/run_step_runner_test.go b/server/legacy/core/runtime/run_step_runner_test.go deleted file mode 100644 index 5254e24fc..000000000 --- a/server/legacy/core/runtime/run_step_runner_test.go +++ /dev/null @@ -1,156 +0,0 @@ -package runtime_test - -import ( - "context" - // "fmt" - // "os" - "strings" - "testing" - - version "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - matchers2 "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks/matchers" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestRunStepRunner_Run(t *testing.T) { - cases := []struct { - Command string - ProjectName string - ExpOut string - ExpErr string - Version string - }{ - { - Command: "", - ExpOut: "", - Version: "v1.2.3", - }, - { - Command: "echo hi", - ExpOut: "hi\n", - Version: "v2.3.4", - }, - { - Command: `printf \'your main.tf file does not provide default region.\\ncheck\'`, - ExpOut: `'your`, - }, - { - Command: `printf 'your main.tf file does not provide default region.\ncheck'`, - ExpOut: "your main.tf file does not provide default region.\ncheck", - }, - { - Command: "echo 'a", - ExpErr: "exit status 2: running \"echo 'a\" in", - }, - { - Command: "echo hi >> file && cat file", - ExpOut: "hi\n", - }, - { - Command: "lkjlkj", - ExpErr: "exit status 127: running \"lkjlkj\" in", - }, - { - Command: "echo workspace=$WORKSPACE version=$ATLANTIS_TERRAFORM_VERSION dir=$DIR planfile=$PLANFILE showfile=$SHOWFILE project=$PROJECT_NAME", - ExpOut: "workspace=myworkspace version=0.11.0 dir=$DIR planfile=$DIR/myworkspace.tfplan showfile=$DIR/myworkspace.json project=\n", - }, - { - Command: "echo workspace=$WORKSPACE version=$ATLANTIS_TERRAFORM_VERSION dir=$DIR planfile=$PLANFILE showfile=$SHOWFILE project=$PROJECT_NAME", - ProjectName: "my/project/name", - ExpOut: "workspace=myworkspace version=0.11.0 dir=$DIR planfile=$DIR/my::project::name-myworkspace.tfplan showfile=$DIR/my::project::name-myworkspace.json project=my/project/name\n", - }, - { - Command: "echo base_repo_name=$BASE_REPO_NAME base_repo_owner=$BASE_REPO_OWNER head_repo_name=$HEAD_REPO_NAME head_repo_owner=$HEAD_REPO_OWNER head_branch_name=$HEAD_BRANCH_NAME head_commit=$HEAD_COMMIT base_branch_name=$BASE_BRANCH_NAME pull_num=$PULL_NUM pull_author=$PULL_AUTHOR repo_rel_dir=$REPO_REL_DIR", - ExpOut: "base_repo_name=basename base_repo_owner=baseowner head_repo_name=headname head_repo_owner=headowner head_branch_name=add-feat head_commit=12345abcdef base_branch_name=master pull_num=2 pull_author=acme repo_rel_dir=mydir\n", - }, - { - Command: "echo user_name=$USER_NAME", - ExpOut: "user_name=acme-user\n", - }, - { - Command: "echo args=$COMMENT_ARGS", - ExpOut: "args=-target=resource1,-target=resource2\n", - }, - } - - for _, c := range cases { - var projVersion *version.Version - var err error - - projVersion, err = version.NewVersion("v0.11.0") - - if c.Version != "" { - projVersion, err = version.NewVersion(c.Version) - Ok(t, err) - } - - Ok(t, err) - - defaultVersion, _ := version.NewVersion("0.8") - - RegisterMockTestingT(t) - terraform := mocks.NewMockClient() - When(terraform.EnsureVersion(matchers.AnyLoggingLogger(), matchers2.AnyPtrToGoVersionVersion())). 
- ThenReturn(nil) - - logger := logging.NewNoopCtxLogger(t) - - r := runtime.RunStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: defaultVersion, - TerraformBinDir: "/bin/dir", - } - t.Run("test", func(t *testing.T) { - tmpDir := t.TempDir() - ctx := context.Background() - prjCtx := command.ProjectContext{ - BaseRepo: models.Repo{ - Name: "basename", - Owner: "baseowner", - }, - HeadRepo: models.Repo{ - Name: "headname", - Owner: "headowner", - }, - Pull: models.PullRequest{ - Num: 2, - HeadBranch: "add-feat", - HeadCommit: "12345abcdef", - BaseBranch: "master", - Author: "acme", - }, - User: models.User{ - Username: "acme-user", - }, - Log: logger, - RequestCtx: context.TODO(), - Workspace: "myworkspace", - RepoRelDir: "mydir", - TerraformVersion: projVersion, - ProjectName: c.ProjectName, - EscapedCommentArgs: []string{"-target=resource1", "-target=resource2"}, - } - out, err := r.Run(ctx, prjCtx, c.Command, tmpDir, map[string]string{"test": "var"}) - if c.ExpErr != "" { - ErrContains(t, c.ExpErr, err) - return - } - Ok(t, err) - // Replace $DIR in the exp with the actual temp dir. We do this - // here because when constructing the cases we don't yet know the - // temp dir. - expOut := strings.Replace(c.ExpOut, "$DIR", tmpDir, -1) - Equals(t, expOut, out) - - terraform.VerifyWasCalledOnce().EnsureVersion(logger, projVersion) - terraform.VerifyWasCalled(Never()).EnsureVersion(logger, defaultVersion) - }) - } -} diff --git a/server/legacy/core/runtime/runtime.go b/server/legacy/core/runtime/runtime.go deleted file mode 100644 index 7a6b256fa..000000000 --- a/server/legacy/core/runtime/runtime.go +++ /dev/null @@ -1,119 +0,0 @@ -// Package runtime holds code for actually running commands vs. preparing -// and constructing. -package runtime - -import ( - "bytes" - "context" - "fmt" - "regexp" - "strings" - - version "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -const ( - // lineBeforeRunURL is the line output during a remote operation right before - // a link to the run url will be output. - lineBeforeRunURL = "To view this run in a browser, visit:" - planfileSlashReplace = "::" -) - -// TerraformExec brings the interface from TerraformClient into this package -// without causing circular imports. -type TerraformExec interface { - RunCommandWithVersion(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) (string, error) - EnsureVersion(log logging.Logger, v *version.Version) error -} - -// AsyncTFExec brings the interface from TerraformClient into this package -// without causing circular imports. -// It's split from TerraformExec because due to a bug in pegomock with channels, -// we can't generate a mock for it so we hand-write it for this specific method. -type AsyncTFExec interface { - // RunCommandAsync runs terraform with args. It immediately returns an - // input and output channel. Callers can use the output channel to - // get the realtime output from the command. - // Callers can use the input channel to pass stdin input to the command. - // If any error is passed on the out channel, there will be no - // further output (so callers are free to exit). 
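A rough illustration of the consumption pattern the comment above describes (an illustrative sketch only, mirroring the waitCh helper in the async client tests later in this diff; the import path is the legacy helpers package referenced above):

package example

import (
	"strings"

	"github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers"
)

// drainLines collects output until the channel closes and returns the joined
// lines plus the first error, if any. Because no further output follows an
// error, it is safe to return as soon as one is seen.
func drainLines(out <-chan helpers.Line) (string, error) {
	var lines []string
	for line := range out {
		if line.Err != nil {
			return strings.Join(lines, "\n"), line.Err
		}
		lines = append(lines, line.Line)
	}
	return strings.Join(lines, "\n"), nil
}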
- RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) <-chan helpers.Line - RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string, input <-chan string) <-chan helpers.Line -} - -// StatusUpdater brings the interface from StatusUpdater into this package -// without causing circular imports. -type StatusUpdater interface { - UpdateProject(ctx context.Context, projectCtx command.ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) -} - -// Runner mirrors events.StepRunner as a way to bring it into this package -// -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_runner.go Runner -type Runner interface { - Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) -} - -// CustomRunner runs custom run steps. -// -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_custom_runner.go CustomRunner -type CustomRunner interface { - // Run cmd in path. - Run(ctx context.Context, prjCtx command.ProjectContext, cmd string, path string, envs map[string]string) (string, error) -} - -// EnvRunner runs env steps. -// -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_env_runner.go EnvRunner -type EnvRunner interface { - Run(ctx context.Context, prjCtx command.ProjectContext, cmd string, value string, path string, envs map[string]string) (string, error) -} - -// MustConstraint returns a constraint. It panics on error. -func MustConstraint(constraint string) version.Constraints { - c, err := version.NewConstraint(constraint) - if err != nil { - panic(err) - } - return c -} - -// GetPlanFilename returns the filename (not the path) of the generated tf plan -// given a workspace and project name. -func GetPlanFilename(workspace string, projName string) string { - if projName == "" { - return fmt.Sprintf("%s.tfplan", workspace) - } - projName = strings.Replace(projName, "/", planfileSlashReplace, -1) - return fmt.Sprintf("%s-%s.tfplan", projName, workspace) -} - -// isRemotePlan returns true if planContents are from a plan that was generated -// using TFE remote operations. -func IsRemotePlan(planContents []byte) bool { - // We add a header to plans generated by the remote backend so we can - // detect that they're remote in the apply phase. - remoteOpsHeaderBytes := []byte(remoteOpsHeader) - return bytes.Equal(planContents[:len(remoteOpsHeaderBytes)], remoteOpsHeaderBytes) -} - -// ProjectNameFromPlanfile returns the project name that a planfile with name -// filename is for. If filename is for a project without a name then it will -// return an empty string. workspace is the workspace this project is in. 
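To make the naming scheme concrete (an illustrative sketch only; the import path is the legacy runtime package deleted in this change), GetPlanFilename and ProjectNameFromPlanfile round-trip a project name, encoding "/" as "::" inside the plan filename:

package main

import (
	"fmt"
	"log"

	"github.com/runatlantis/atlantis/server/legacy/core/runtime"
)

func main() {
	// "my/project" in workspace "default" becomes "my::project-default.tfplan".
	planfile := runtime.GetPlanFilename("default", "my/project")
	fmt.Println(planfile)

	// Reversing the encoding recovers "my/project".
	name, err := runtime.ProjectNameFromPlanfile("default", planfile)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(name)
}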
-func ProjectNameFromPlanfile(workspace string, filename string) (string, error) { - r, err := regexp.Compile(fmt.Sprintf(`(.*?)-%s\.tfplan`, workspace)) - if err != nil { - return "", errors.Wrap(err, "compiling project name regex, this is a bug") - } - projMatch := r.FindAllStringSubmatch(filename, 1) - if projMatch == nil { - return "", nil - } - rawProjName := projMatch[0][1] - return strings.Replace(rawProjName, planfileSlashReplace, "/", -1), nil -} diff --git a/server/legacy/core/runtime/runtime_test.go b/server/legacy/core/runtime/runtime_test.go deleted file mode 100644 index dfed3d7cc..000000000 --- a/server/legacy/core/runtime/runtime_test.go +++ /dev/null @@ -1,95 +0,0 @@ -package runtime_test - -import ( - "fmt" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - . "github.com/runatlantis/atlantis/testing" -) - -func TestGetPlanFilename(t *testing.T) { - cases := []struct { - workspace string - projectName string - exp string - }{ - { - "workspace", - "", - "workspace.tfplan", - }, - { - "workspace", - "project", - "project-workspace.tfplan", - }, - { - "workspace", - "project/with/slash", - "project::with::slash-workspace.tfplan", - }, - { - "workspace", - "project with space", - "project with space-workspace.tfplan", - }, - { - "workspace😀", - "project😀", - "project😀-workspace😀.tfplan", - }, - // Previously we replaced invalid chars with -'s, however we now - // rely on validation of the atlantis.yaml file to ensure the name's - // don't contain chars that need to be url encoded. So now these - // chars shouldn't get replaced. - { - "default", - `all.invalid.chars \/"*?<>`, - "all.invalid.chars \\::\"*?<>-default.tfplan", - }, - } - - for i, c := range cases { - t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) { - Equals(t, c.exp, runtime.GetPlanFilename(c.workspace, c.projectName)) - }) - } -} - -func TestProjectNameFromPlanfile(t *testing.T) { - cases := []struct { - workspace string - filename string - exp string - }{ - { - "workspace", - "workspace.tfplan", - "", - }, - { - "workspace", - "project-workspace.tfplan", - "project", - }, - { - "workspace", - "project-workspace-workspace.tfplan", - "project-workspace", - }, - { - "workspace", - "project::with::slashes::-workspace.tfplan", - "project/with/slashes/", - }, - } - - for i, c := range cases { - t.Run(fmt.Sprintf("case %d", i), func(t *testing.T) { - act, err := runtime.ProjectNameFromPlanfile(c.workspace, c.filename) - Ok(t, err) - Equals(t, c.exp, act) - }) - } -} diff --git a/server/legacy/core/runtime/show_step_runner.go b/server/legacy/core/runtime/show_step_runner.go deleted file mode 100644 index 0f8e99406..000000000 --- a/server/legacy/core/runtime/show_step_runner.go +++ /dev/null @@ -1,62 +0,0 @@ -package runtime - -import ( - "context" - "os" - "path/filepath" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -const minimumShowTfVersion string = "0.12.0" - -func NewShowStepRunner(executor TerraformExec, defaultTFVersion *version.Version) (Runner, error) { - runner := &PlanTypeStepRunnerDelegate{ - defaultRunner: &ShowStepRunner{ - TerraformExecutor: executor, - DefaultTFVersion: defaultTFVersion, - }, - remotePlanRunner: NullRunner{}, - } - - return NewMinimumVersionStepRunnerDelegate(minimumShowTfVersion, defaultTFVersion, runner) -} - -// ShowStepRunner runs terraform show on an existing plan file and outputs it to a json file -type ShowStepRunner struct { - TerraformExecutor TerraformExec - 
DefaultTFVersion *version.Version -} - -func (p *ShowStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - tfVersion := p.DefaultTFVersion - if prjCtx.TerraformVersion != nil { - tfVersion = prjCtx.TerraformVersion - } - - planFile := filepath.Join(path, GetPlanFilename(prjCtx.Workspace, prjCtx.ProjectName)) - showResultFile := filepath.Join(path, prjCtx.GetShowResultFileName()) - - output, err := p.TerraformExecutor.RunCommandWithVersion( - ctx, - prjCtx, - path, - []string{"show", "-json", filepath.Clean(planFile)}, - envs, - tfVersion, - prjCtx.Workspace, - ) - - if err != nil { - return output, errors.Wrap(err, "running terraform show") - } - - if err := os.WriteFile(showResultFile, []byte(output), os.ModePerm); err != nil { - return "", errors.Wrap(err, "writing terraform show result") - } - - // don't return the output if it's successful since this is too large - return "", nil -} diff --git a/server/legacy/core/runtime/show_step_runner_test.go b/server/legacy/core/runtime/show_step_runner_test.go deleted file mode 100644 index 1c9d50208..000000000 --- a/server/legacy/core/runtime/show_step_runner_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package runtime - -import ( - "context" - "errors" - "fmt" - "os" - "path/filepath" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -func TestShowStepRunnner(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - path := t.TempDir() - resultPath := filepath.Join(path, "test-default.json") - envs := map[string]string{"key": "val"} - tfVersion, _ := version.NewVersion("0.12") - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "default", - ProjectName: "test", - Log: logger, - } - - RegisterMockTestingT(t) - - mockExecutor := mocks.NewMockClient() - - subject := ShowStepRunner{ - TerraformExecutor: mockExecutor, - DefaultTFVersion: tfVersion, - } - - t.Run("success", func(t *testing.T) { - When(mockExecutor.RunCommandWithVersion( - ctx, prjCtx, path, []string{"show", "-json", filepath.Join(path, "test-default.tfplan")}, envs, tfVersion, prjCtx.Workspace, - )).ThenReturn("success", nil) - - _, err := subject.Run(ctx, prjCtx, []string{}, path, envs) - - Ok(t, err) - - actual, _ := os.ReadFile(resultPath) - - actualStr := string(actual) - Assert(t, actualStr == "success", fmt.Sprintf("expected '%s' to be success", actualStr)) - }) - - t.Run("success w/ version override", func(t *testing.T) { - v, _ := version.NewVersion("0.13.0") - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Workspace: "default", - ProjectName: "test", - Log: logger, - TerraformVersion: v, - } - - When(mockExecutor.RunCommandWithVersion( - ctx, prjCtx, path, []string{"show", "-json", filepath.Join(path, "test-default.tfplan")}, envs, v, prjCtx.Workspace, - )).ThenReturn("success", nil) - - _, err := subject.Run(ctx, prjCtx, []string{}, path, envs) - - Ok(t, err) - - actual, _ := os.ReadFile(resultPath) - - actualStr := string(actual) - Assert(t, actualStr == "success", "got expected result") - }) - - t.Run("failure running command", func(t *testing.T) { - When(mockExecutor.RunCommandWithVersion( - ctx, prjCtx, path, []string{"show", "-json", filepath.Join(path, 
"test-default.tfplan")}, envs, tfVersion, prjCtx.Workspace, - )).ThenReturn("err", errors.New("error")) - - r, err := subject.Run(ctx, prjCtx, []string{}, path, envs) - - Assert(t, err != nil, "error is returned") - Assert(t, r == "err", "returned expected result") - }) -} diff --git a/server/legacy/core/runtime/steps_runner.go b/server/legacy/core/runtime/steps_runner.go deleted file mode 100644 index 800c1b047..000000000 --- a/server/legacy/core/runtime/steps_runner.go +++ /dev/null @@ -1,90 +0,0 @@ -package runtime - -import ( - "context" - "strings" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_steps_runner.go StepsRunner - -// StepsRunner executes steps defined in project config -type StepsRunner interface { - Run(ctx context.Context, cmdCtx command.ProjectContext, absPath string) (string, error) -} - -func NewStepsRunner( - initStepRunner Runner, - planStepRunner Runner, - showStepRunner Runner, - policyCheckStepRunner Runner, - applyStepRunner Runner, - versionStepRunner Runner, - runStepRunner CustomRunner, - envStepRunner EnvRunner, -) *stepsRunner { //nolint:revive // avoiding refactor while adding linter action - stepsRunner := &stepsRunner{} - - stepsRunner.InitRunner = initStepRunner - stepsRunner.PlanRunner = planStepRunner - stepsRunner.ShowRunner = showStepRunner - stepsRunner.PolicyCheckRunner = policyCheckStepRunner - stepsRunner.ApplyRunner = applyStepRunner - stepsRunner.VersionRunner = versionStepRunner - stepsRunner.RunRunner = runStepRunner - stepsRunner.EnvRunner = envStepRunner - - return stepsRunner -} - -func (r *stepsRunner) Run(ctx context.Context, cmdCtx command.ProjectContext, absPath string) (string, error) { - var outputs []string - - envs := make(map[string]string) - for _, step := range cmdCtx.Steps { - var out string - var err error - switch step.StepName { - case "init": - out, err = r.InitRunner.Run(ctx, cmdCtx, step.ExtraArgs, absPath, envs) - case "plan": - out, err = r.PlanRunner.Run(ctx, cmdCtx, step.ExtraArgs, absPath, envs) - case "show": - out, err = r.ShowRunner.Run(ctx, cmdCtx, step.ExtraArgs, absPath, envs) - case "policy_check": - out, err = r.PolicyCheckRunner.Run(ctx, cmdCtx, step.ExtraArgs, absPath, envs) - case "apply": - out, err = r.ApplyRunner.Run(ctx, cmdCtx, step.ExtraArgs, absPath, envs) - case "version": - out, err = r.VersionRunner.Run(ctx, cmdCtx, step.ExtraArgs, absPath, envs) - case "run": - out, err = r.RunRunner.Run(ctx, cmdCtx, step.RunCommand, absPath, envs) - case "env": - out, err = r.EnvRunner.Run(ctx, cmdCtx, step.RunCommand, step.EnvVarValue, absPath, envs) - envs[step.EnvVarName] = out - // We reset out to the empty string because we don't want it to - // be printed to the PR, it's solely to set the environment variable. 
- out = "" - } - - if out != "" { - outputs = append(outputs, out) - } - if err != nil { - return strings.Join(outputs, "\n"), err - } - } - return strings.Join(outputs, "\n"), nil -} - -type stepsRunner struct { - InitRunner Runner - PlanRunner Runner - ShowRunner Runner - PolicyCheckRunner Runner - ApplyRunner Runner - VersionRunner Runner - EnvRunner EnvRunner - RunRunner CustomRunner -} diff --git a/server/legacy/core/runtime/steps_runner_test.go b/server/legacy/core/runtime/steps_runner_test.go deleted file mode 100644 index 079cd0ce0..000000000 --- a/server/legacy/core/runtime/steps_runner_test.go +++ /dev/null @@ -1,210 +0,0 @@ -package runtime_test - -import ( - "context" - "testing" - - "github.com/hashicorp/go-getter" - version "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks" - tfMocks "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" - "github.com/stretchr/testify/assert" -) - -type NoopTFDownloader struct{} - -func (m *NoopTFDownloader) GetFile(dst, src string, opts ...getter.ClientOption) error { - return nil -} - -func (m *NoopTFDownloader) GetAny(dst, src string, opts ...getter.ClientOption) error { - return nil -} - -func TestStepsRunner_Run(t *testing.T) { - cases := []struct { - description string - steps []valid.Step - applyReqs []string - - expSteps []string - expOut string - expFailure string - pullMergeable bool - }{ - { - description: "workflow with custom apply stage", - steps: []valid.Step{ - { - StepName: "env", - EnvVarName: "key", - EnvVarValue: "value", - }, - { - StepName: "run", - }, - { - StepName: "apply", - }, - { - StepName: "plan", - }, - { - StepName: "init", - }, - }, - expSteps: []string{"env", "run", "apply", "plan", "init"}, - expOut: "run\napply\nplan\ninit", - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - RegisterMockTestingT(t) - mockInit := mocks.NewMockRunner() - mockPlan := mocks.NewMockRunner() - mockShow := mocks.NewMockRunner() - mockApply := mocks.NewMockRunner() - mockRun := mocks.NewMockCustomRunner() - mockEnv := mocks.NewMockEnvRunner() - mockPolicyCheck := mocks.NewMockRunner() - mockVersion := mocks.NewMockRunner() - - runner := runtime.NewStepsRunner( - mockInit, - mockPlan, - mockShow, - mockPolicyCheck, - mockApply, - mockVersion, - mockRun, - mockEnv, - ) - repoDir, cleanup := TempDir(t) - defer cleanup() - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Steps: c.steps, - Workspace: "default", - RepoRelDir: ".", - } - expEnvs := map[string]string{ - "key": "value", - } - When(mockInit.Run(ctx, prjCtx, nil, repoDir, expEnvs)).ThenReturn("init", nil) - When(mockPlan.Run(ctx, prjCtx, nil, repoDir, expEnvs)).ThenReturn("plan", nil) - When(mockApply.Run(ctx, prjCtx, nil, repoDir, expEnvs)).ThenReturn("apply", nil) - When(mockRun.Run(ctx, prjCtx, "", repoDir, expEnvs)).ThenReturn("run", nil) - When(mockEnv.Run(ctx, prjCtx, "", "value", repoDir, make(map[string]string))).ThenReturn("value", nil) - - _, err := runner.Run(ctx, prjCtx, repoDir) - assert.NoError(t, err) - - for _, step := range c.expSteps { - switch step { - case "init": - 
mockInit.VerifyWasCalledOnce().Run(ctx, prjCtx, nil, repoDir, expEnvs) - case "plan": - mockPlan.VerifyWasCalledOnce().Run(ctx, prjCtx, nil, repoDir, expEnvs) - case "apply": - mockApply.VerifyWasCalledOnce().Run(ctx, prjCtx, nil, repoDir, expEnvs) - case "run": - mockRun.VerifyWasCalledOnce().Run(ctx, prjCtx, "", repoDir, expEnvs) - case "env": - mockEnv.VerifyWasCalledOnce().Run(ctx, prjCtx, "", "value", repoDir, expEnvs) - } - } - }) - } -} - -// Test run and env steps. We don't use mocks for this test since we're -// not running any Terraform. -func TestStepsRuinner_RunEnvSteps(t *testing.T) { - RegisterMockTestingT(t) - - terraform := tfMocks.NewMockClient() - tfVersion, err := version.NewVersion("0.12.0") - Ok(t, err) - mockInit := mocks.NewMockRunner() - mockPlan := mocks.NewMockRunner() - mockShow := mocks.NewMockRunner() - mockApply := mocks.NewMockRunner() - mockPolicyCheck := mocks.NewMockRunner() - mockVersion := mocks.NewMockRunner() - - run := &runtime.RunStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - - runner := runtime.NewStepsRunner( - mockInit, - mockPlan, - mockShow, - mockPolicyCheck, - mockApply, - mockVersion, - run, - &runtime.EnvStepRunner{ - RunStepRunner: run, - }, - ) - - repoDir, cleanup := TempDir(t) - defer cleanup() - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - RequestCtx: context.TODO(), - Steps: []valid.Step{ - { - StepName: "run", - RunCommand: "echo var=$var", - }, - { - StepName: "env", - EnvVarName: "var", - EnvVarValue: "value", - }, - { - StepName: "run", - RunCommand: "echo var=$var", - }, - { - StepName: "env", - EnvVarName: "dynamic_var", - RunCommand: "echo dynamic_value", - }, - { - StepName: "run", - RunCommand: "echo dynamic_var=$dynamic_var", - }, - // Test overriding the variable - { - StepName: "env", - EnvVarName: "dynamic_var", - EnvVarValue: "overridden", - }, - { - StepName: "run", - RunCommand: "echo dynamic_var=$dynamic_var", - }, - }, - Workspace: "default", - RepoRelDir: ".", - } - res, err := runner.Run(ctx, prjCtx, repoDir) - Ok(t, err) - - Equals(t, "var=\n\nvar=value\n\ndynamic_var=dynamic_value\n\ndynamic_var=overridden\n", res) -} diff --git a/server/legacy/core/runtime/version_step_runner.go b/server/legacy/core/runtime/version_step_runner.go deleted file mode 100644 index 3d5d8d6e3..000000000 --- a/server/legacy/core/runtime/version_step_runner.go +++ /dev/null @@ -1,26 +0,0 @@ -package runtime - -import ( - "context" - "path/filepath" - - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -// VersionStepRunner runs a version command given a prjCtx -type VersionStepRunner struct { - TerraformExecutor TerraformExec - DefaultTFVersion *version.Version -} - -// Run ensures a given version for the executable, builds the args from the project context and then runs executable returning the result -func (v *VersionStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - tfVersion := v.DefaultTFVersion - if prjCtx.TerraformVersion != nil { - tfVersion = prjCtx.TerraformVersion - } - - versionCmd := []string{"version"} - return v.TerraformExecutor.RunCommandWithVersion(ctx, prjCtx, filepath.Clean(path), versionCmd, envs, tfVersion, prjCtx.Workspace) -} diff --git a/server/legacy/core/runtime/version_step_runner_test.go b/server/legacy/core/runtime/version_step_runner_test.go deleted file mode 100644 
index c316f3692..000000000 --- a/server/legacy/core/runtime/version_step_runner_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package runtime - -import ( - "context" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func TestRunVersionStep(t *testing.T) { - RegisterMockTestingT(t) - logger := logging.NewNoopCtxLogger(t) - workspace := "default" - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - EscapedCommentArgs: []string{"comment", "args"}, - Workspace: workspace, - RepoRelDir: ".", - User: models.User{Username: "username"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - - terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion("0.15.0") - tmpDir, cleanup := TempDir(t) - defer cleanup() - - s := &VersionStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - - t.Run("ensure runs", func(t *testing.T) { - _, err := s.Run(ctx, prjCtx, []string{}, tmpDir, map[string]string(nil)) - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, []string{"version"}, map[string]string(nil), tfVersion, "default") - Ok(t, err) - }) -} diff --git a/server/legacy/core/terraform/async_client.go b/server/legacy/core/terraform/async_client.go deleted file mode 100644 index 18b8b4243..000000000 --- a/server/legacy/core/terraform/async_client.go +++ /dev/null @@ -1,133 +0,0 @@ -package terraform - -import ( - "bufio" - "context" - "fmt" - "io" - "sync" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -// Setting the buffer size to 10mb -const BufioScannerBufferSize = 10 * 1024 * 1024 - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_terraform_client_async.go ClientAsync - -type ClientAsync interface { - RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) <-chan helpers.Line - - RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string, input <-chan string) <-chan helpers.Line -} - -type AsyncClient struct { - projectCmdOutputHandler jobs.ProjectCommandOutputHandler - commandBuilder commandBuilder -} - -// RunCommandAsync runs terraform with args. It immediately returns an -// input and output channel. Callers can use the output channel to -// get the realtime output from the command. -// Callers can use the input channel to pass stdin input to the command. -// If any error is passed on the out channel, there will be no -// further output (so callers are free to exit). 
-func (c *AsyncClient) RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) <-chan helpers.Line { - input := make(chan string) - defer close(input) - - return c.RunCommandAsyncWithInput(ctx, prjCtx, path, args, customEnvVars, v, workspace, input) -} -func (c *AsyncClient) RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string, input <-chan string) <-chan helpers.Line { - outCh := make(chan helpers.Line) - - // We start a goroutine to do our work asynchronously and then immediately - // return our channels. - go func() { - // Ensure we close our channels when we exit. - defer func() { - close(outCh) - }() - - cmd, err := c.commandBuilder.Build(v, workspace, path, args) - if err != nil { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, err.Error()) - outCh <- helpers.Line{Err: err} - return - } - stdout, _ := cmd.StdoutPipe() - stderr, _ := cmd.StderrPipe() - stdin, _ := cmd.StdinPipe() - envVars := cmd.Env - for key, val := range customEnvVars { - envVars = append(envVars, fmt.Sprintf("%s=%s", key, val)) - } - cmd.Env = envVars - - err = cmd.Start() - if err != nil { - err = errors.Wrapf(err, "running %q in %q", cmd.String(), path) - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, err.Error()) - outCh <- helpers.Line{Err: err} - return - } - - // If we get anything on inCh, write it to stdin. - // This function will exit when inCh is closed which we do in our defer. - go func() { - for line := range input { - _, err := io.WriteString(stdin, line) - if err != nil { - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, errors.Wrapf(err, "writing %q to process", line).Error()) - } - } - }() - - // Use a waitgroup to block until our stdout/err copying is complete. - wg := new(sync.WaitGroup) - wg.Add(2) - // Asynchronously copy from stdout/err to outCh. - go func() { - defer wg.Done() - c.WriteOutput(stdout, outCh, prjCtx) - }() - go func() { - defer wg.Done() - c.WriteOutput(stderr, outCh, prjCtx) - }() - - // Wait for our copying to complete. This *must* be done before - // calling cmd.Wait(). (see https://github.com/golang/go/issues/19685) - wg.Wait() - - // Wait for the command to complete. - err = cmd.Wait() - - // We're done now. Send an error if there was one. - if err != nil { - err = errors.Wrapf(err, "running %q in %q", cmd.String(), path) - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, err.Error()) - outCh <- helpers.Line{Err: err} - } else { - prjCtx.Log.InfoContext(prjCtx.RequestCtx, fmt.Sprintf("successfully ran %q in %q", cmd.String(), path)) - } - }() - - return outCh -} - -func (c *AsyncClient) WriteOutput(stdReader io.Reader, outCh chan helpers.Line, prjCtx command.ProjectContext) { - s := bufio.NewScanner(stdReader) - buf := []byte{} - s.Buffer(buf, BufioScannerBufferSize) - - for s.Scan() { - message := s.Text() - outCh <- helpers.Line{Line: message} - c.projectCmdOutputHandler.Send(prjCtx, message) - } -} diff --git a/server/legacy/core/terraform/async_client_test.go b/server/legacy/core/terraform/async_client_test.go deleted file mode 100644 index 58bd566be..000000000 --- a/server/legacy/core/terraform/async_client_test.go +++ /dev/null @@ -1,200 +0,0 @@ -package terraform - -import ( - "context" - "fmt" - "os" - "os/exec" - "path/filepath" - "strings" - "testing" - - . 
"github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - jobmocks "github.com/runatlantis/atlantis/server/legacy/jobs/mocks" - "github.com/stretchr/testify/assert" - - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -func TestDefaultClient_RunCommandAsync_Success(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - logger := logging.NewNoopCtxLogger(t) - echoCommand := exec.Command("sh", "-c", "echo hello") - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - RequestCtx: context.TODO(), - } - - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - client := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(echoCommand, nil) - outCh := client.RunCommandAsync(ctx, prjCtx, path, args, map[string]string{}, nil, workspace) - - out, err := waitCh(outCh) - Ok(t, err) - Equals(t, "hello", out) -} - -// Our implementation is bottlenecked on large output due to the way we pipe each line. -func TestDefaultClient_RunCommandAsync_BigOutput(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - logger := logging.NewNoopCtxLogger(t) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - RequestCtx: context.TODO(), - } - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - client := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - - // set up big file to test limitations. 
- tmp, cleanup := TempDir(t) - defer cleanup() - - filename := filepath.Join(tmp, "data") - f, err := os.OpenFile(filename, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644) - Ok(t, err) - - var exp string - for i := 0; i < 1024; i++ { - s := strings.Repeat("0", 10) + "\n" - exp += s - _, err = f.WriteString(s) - Ok(t, err) - } - - cmdStr := fmt.Sprintf("cat %s", filename) - cat := exec.Command("sh", "-c", cmdStr) - - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(cat, nil) - outCh := client.RunCommandAsync(ctx, prjCtx, path, args, map[string]string{}, nil, workspace) - - out, err := waitCh(outCh) - Ok(t, err) - Equals(t, strings.TrimRight(exp, "\n"), out) -} - -func TestDefaultClient_RunCommandAsync_StderrOutput(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - echoCommand := exec.Command("sh", "-c", "echo stderr >&2") - - logger := logging.NewNoopCtxLogger(t) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - RequestCtx: context.TODO(), - } - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - client := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(echoCommand, nil) - outCh := client.RunCommandAsync(ctx, prjCtx, path, args, map[string]string{}, nil, workspace) - - out, err := waitCh(outCh) - Ok(t, err) - Equals(t, "stderr", out) -} - -func TestDefaultClient_RunCommandAsync_ExitOne(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - echoCommand := exec.Command("sh", "-c", "echo dying && exit 1") - logger := logging.NewNoopCtxLogger(t) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - RequestCtx: context.TODO(), - } - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - client := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(echoCommand, nil) - outCh := client.RunCommandAsync(ctx, prjCtx, path, args, map[string]string{}, nil, workspace) - - out, err := waitCh(outCh) - assert.ErrorContains(t, err, fmt.Sprintf(`echo dying && exit 1" in %q: exit status 1`, path)) - // Test that we still get our output. 
- Equals(t, "dying", out) -} - -func TestDefaultClient_RunCommandAsync_Input(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - echoCommand := exec.Command("sh", "-c", "read a && echo $a") - logger := logging.NewNoopCtxLogger(t) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - RequestCtx: context.TODO(), - } - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - client := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - - inCh := make(chan string) - - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(echoCommand, nil) - outCh := client.RunCommandAsyncWithInput(ctx, prjCtx, path, args, map[string]string{}, nil, workspace, inCh) - inCh <- "echo me\n" - - out, err := waitCh(outCh) - Ok(t, err) - Equals(t, "echo me", out) -} - -func waitCh(ch <-chan helpers.Line) (string, error) { - var ls []string - for line := range ch { - if line.Err != nil { - return strings.Join(ls, "\n"), line.Err - } - ls = append(ls, line.Line) - } - return strings.Join(ls, "\n"), nil -} diff --git a/server/legacy/core/terraform/cloud/config.go b/server/legacy/core/terraform/cloud/config.go deleted file mode 100644 index da7ccd66b..000000000 --- a/server/legacy/core/terraform/cloud/config.go +++ /dev/null @@ -1,47 +0,0 @@ -package cloud - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/pkg/errors" -) - -// rcFileContents is a format string to be used with Sprintf that can be used -// to generate the contents of a ~/.terraformrc file for authenticating with -// Terraform Enterprise. -var rcFileContents = `credentials "%s" { - token = %q -}` - -const rcFilename = ".terraformrc" - -// GenerateConfigFile generates a .terraformrc file containing config for tfeToken -// and hostname tfeHostname. -// It will create the file in home/.terraformrc. -func GenerateConfigFile(tfeToken string, tfeHostname string, home string) error { - config := fmt.Sprintf(rcFileContents, tfeHostname, tfeToken) - rcFile := filepath.Join(home, rcFilename) - - // If there is already a .terraformrc file and its contents aren't exactly - // what we would have written to it, then we error out because we don't - // want to overwrite anything. - if _, err := os.Stat(rcFile); err == nil { - currContents, err := os.ReadFile(rcFile) // nolint: gosec - if err != nil { - return errors.Wrapf(err, "trying to read %s to ensure we're not overwriting it", rcFile) - } - if config != string(currContents) { - return fmt.Errorf("can't write TFE token to %s because that file has contents that would be overwritten", rcFile) - } - // Otherwise we don't need to write the file because it already has - // what we need. - return nil - } - - if err := os.WriteFile(rcFile, []byte(config), 0600); err != nil { - return errors.Wrapf(err, "writing generated %s file with TFE token to %s", rcFilename, rcFile) - } - return nil -} diff --git a/server/legacy/core/terraform/cloud/config_test.go b/server/legacy/core/terraform/cloud/config_test.go deleted file mode 100644 index 6a5b7bfc3..000000000 --- a/server/legacy/core/terraform/cloud/config_test.go +++ /dev/null @@ -1,81 +0,0 @@ -package cloud - -import ( - "fmt" - "os" - "path/filepath" - "testing" - - . 
"github.com/runatlantis/atlantis/testing" -) - -// Test that we write the file as expected -func TestGenerateRCFile_WritesFile(t *testing.T) { - tmp, cleanup := TempDir(t) - defer cleanup() - - err := GenerateConfigFile("token", "hostname", tmp) - Ok(t, err) - - expContents := `credentials "hostname" { - token = "token" -}` - actContents, err := os.ReadFile(filepath.Join(tmp, ".terraformrc")) - Ok(t, err) - Equals(t, expContents, string(actContents)) -} - -// Test that if the file already exists and its contents will be modified if -// we write our config that we error out. -func TestGenerateRCFile_WillNotOverwrite(t *testing.T) { - tmp, cleanup := TempDir(t) - defer cleanup() - - rcFile := filepath.Join(tmp, ".terraformrc") - err := os.WriteFile(rcFile, []byte("contents"), 0600) - Ok(t, err) - - actErr := GenerateConfigFile("token", "hostname", tmp) - expErr := fmt.Sprintf("can't write TFE token to %s because that file has contents that would be overwritten", tmp+"/.terraformrc") - ErrEquals(t, expErr, actErr) -} - -// Test that if the file already exists and its contents will NOT be modified if -// we write our config that we don't error. -func TestGenerateRCFile_NoErrIfContentsSame(t *testing.T) { - tmp, cleanup := TempDir(t) - defer cleanup() - - rcFile := filepath.Join(tmp, ".terraformrc") - contents := `credentials "app.terraform.io" { - token = "token" -}` - err := os.WriteFile(rcFile, []byte(contents), 0600) - Ok(t, err) - - err = GenerateConfigFile("token", "app.terraform.io", tmp) - Ok(t, err) -} - -// Test that if we can't read the existing file to see if the contents will be -// the same that we just error out. -func TestGenerateRCFile_ErrIfCannotRead(t *testing.T) { - tmp, cleanup := TempDir(t) - defer cleanup() - - rcFile := filepath.Join(tmp, ".terraformrc") - err := os.WriteFile(rcFile, []byte("can't see me!"), 0000) - Ok(t, err) - - expErr := fmt.Sprintf("trying to read %s to ensure we're not overwriting it: open %s: permission denied", rcFile, rcFile) - actErr := GenerateConfigFile("token", "hostname", tmp) - ErrEquals(t, expErr, actErr) -} - -// Test that if we can't write, we error out. 
-func TestGenerateRCFile_ErrIfCannotWrite(t *testing.T) { - rcFile := "/this/dir/does/not/exist/.terraformrc" - expErr := fmt.Sprintf("writing generated .terraformrc file with TFE token to %s: open %s: no such file or directory", rcFile, rcFile) - actErr := GenerateConfigFile("token", "hostname", "/this/dir/does/not/exist") - ErrEquals(t, expErr, actErr) -} diff --git a/server/legacy/core/terraform/cmd.go b/server/legacy/core/terraform/cmd.go deleted file mode 100644 index e234d5c42..000000000 --- a/server/legacy/core/terraform/cmd.go +++ /dev/null @@ -1,57 +0,0 @@ -package terraform - -import ( - "fmt" - "os" - "os/exec" - "strings" - - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/cache" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_command_builder.go commandBuilder -type commandBuilder interface { - Build(v *version.Version, workspace string, path string, args []string) (*exec.Cmd, error) -} - -type CommandBuilder struct { - defaultVersion *version.Version - versionCache cache.ExecutionVersionCache - terraformPluginCacheDir string -} - -func (c *CommandBuilder) Build(v *version.Version, workspace string, path string, args []string) (*exec.Cmd, error) { - if v == nil { - v = c.defaultVersion - } - - binPath, err := c.versionCache.Get(v) - if err != nil { - return nil, errors.Wrapf(err, "getting version from cache %s", v.String()) - } - - // We add custom variables so that if `extra_args` is specified with env - // vars then they'll be substituted. - envVars := []string{ - // Will de-emphasize specific commands to run in output. - "TF_IN_AUTOMATION=true", - // Cache plugins so terraform init runs faster. - fmt.Sprintf("WORKSPACE=%s", workspace), - fmt.Sprintf("TF_WORKSPACE=%s", workspace), - fmt.Sprintf("ATLANTIS_TERRAFORM_VERSION=%s", v.String()), - fmt.Sprintf("DIR=%s", path), - } - if c.terraformPluginCacheDir != "" { - envVars = append(envVars, fmt.Sprintf("TF_PLUGIN_CACHE_DIR=%s", c.terraformPluginCacheDir)) - } - // Append current Atlantis process's environment variables, ex. - // AWS_ACCESS_KEY. - envVars = append(envVars, os.Environ()...) - tfCmd := fmt.Sprintf("%s %s", binPath, strings.Join(args, " ")) - cmd := exec.Command("sh", "-c", tfCmd) - cmd.Dir = path - cmd.Env = envVars - return cmd, nil -} diff --git a/server/legacy/core/terraform/helpers/line.go b/server/legacy/core/terraform/helpers/line.go deleted file mode 100644 index 60509ac17..000000000 --- a/server/legacy/core/terraform/helpers/line.go +++ /dev/null @@ -1,9 +0,0 @@ -package helpers - -// Line represents a line that was output from a terraform command. -type Line struct { - // Line is the contents of the line (without the newline). - Line string - // Err is set if there was an error. - Err error -} diff --git a/server/legacy/core/terraform/mocks/matchers/command_projectcontext.go b/server/legacy/core/terraform/mocks/matchers/command_projectcontext.go deleted file mode 100644 index 8722b7ba3..000000000 --- a/server/legacy/core/terraform/mocks/matchers/command_projectcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyCommandProjectContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func CommandProjectContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/context_context.go b/server/legacy/core/terraform/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/core/terraform/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/go_getter_clientoption.go b/server/legacy/core/terraform/mocks/matchers/go_getter_clientoption.go deleted file mode 100644 index 610330926..000000000 --- a/server/legacy/core/terraform/mocks/matchers/go_getter_clientoption.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - go_getter "github.com/hashicorp/go-getter" - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyGoGetterClientOption() go_getter.ClientOption { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(go_getter.ClientOption))(nil)).Elem())) - var nullValue go_getter.ClientOption - return nullValue -} - -func EqGoGetterClientOption(value go_getter.ClientOption) go_getter.ClientOption { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue go_getter.ClientOption - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/logging_logger.go b/server/legacy/core/terraform/mocks/matchers/logging_logger.go deleted file mode 100644 index d43fd90e9..000000000 --- a/server/legacy/core/terraform/mocks/matchers/logging_logger.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingLogger() logging.Logger { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.Logger))(nil)).Elem())) - var nullValue logging.Logger - return nullValue -} - -func EqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func NotEqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func LoggingLoggerThat(matcher pegomock.ArgumentMatcher) logging.Logger { - pegomock.RegisterMatcher(matcher) - var nullValue logging.Logger - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/logging_simplelogging.go b/server/legacy/core/terraform/mocks/matchers/logging_simplelogging.go deleted file mode 100644 index af87b9432..000000000 --- a/server/legacy/core/terraform/mocks/matchers/logging_simplelogging.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" - "reflect" -) - -func AnyLoggingSimpleLogging() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/map_of_string_to_string.go b/server/legacy/core/terraform/mocks/matchers/map_of_string_to_string.go deleted file mode 100644 index 65175de1a..000000000 --- a/server/legacy/core/terraform/mocks/matchers/map_of_string_to_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyMapOfStringToString() map[string]string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(map[string]string))(nil)).Elem())) - var nullValue map[string]string - return nullValue -} - -func EqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} - -func NotEqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} - -func MapOfStringToStringThat(matcher pegomock.ArgumentMatcher) map[string]string { - pegomock.RegisterMatcher(matcher) - var nullValue map[string]string - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/models_projectcommandcontext.go b/server/legacy/core/terraform/mocks/matchers/models_projectcommandcontext.go deleted file mode 100644 index dbde44f15..000000000 --- a/server/legacy/core/terraform/mocks/matchers/models_projectcommandcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsProjectCommandContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func ModelsProjectCommandContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/ptr_to_exec_cmd.go b/server/legacy/core/terraform/mocks/matchers/ptr_to_exec_cmd.go deleted file mode 100644 index d493ea556..000000000 --- a/server/legacy/core/terraform/mocks/matchers/ptr_to_exec_cmd.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - exec "os/exec" - "reflect" -) - -func AnyPtrToExecCmd() *exec.Cmd { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*exec.Cmd))(nil)).Elem())) - var nullValue *exec.Cmd - return nullValue -} - -func EqPtrToExecCmd(value *exec.Cmd) *exec.Cmd { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *exec.Cmd - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/ptr_to_go_version_version.go b/server/legacy/core/terraform/mocks/matchers/ptr_to_go_version_version.go deleted file mode 100644 index bb596fe3d..000000000 --- a/server/legacy/core/terraform/mocks/matchers/ptr_to_go_version_version.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - go_version "github.com/hashicorp/go-version" -) - -func AnyPtrToGoVersionVersion() *go_version.Version { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*go_version.Version))(nil)).Elem())) - var nullValue *go_version.Version - return nullValue -} - -func EqPtrToGoVersionVersion(value *go_version.Version) *go_version.Version { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *go_version.Version - return nullValue -} - -func NotEqPtrToGoVersionVersion(value *go_version.Version) *go_version.Version { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *go_version.Version - return nullValue -} - -func PtrToGoVersionVersionThat(matcher pegomock.ArgumentMatcher) *go_version.Version { - pegomock.RegisterMatcher(matcher) - var nullValue *go_version.Version - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/ptr_to_logging_simplelogger.go b/server/legacy/core/terraform/mocks/matchers/ptr_to_logging_simplelogger.go deleted file mode 100644 index e7c8b942f..000000000 --- a/server/legacy/core/terraform/mocks/matchers/ptr_to_logging_simplelogger.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyPtrToLoggingSimpleLogger() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqPtrToLoggingSimpleLogger(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/recv_chan_of_helpers_line.go b/server/legacy/core/terraform/mocks/matchers/recv_chan_of_helpers_line.go deleted file mode 100644 index 8cae5ef92..000000000 --- a/server/legacy/core/terraform/mocks/matchers/recv_chan_of_helpers_line.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - helpers "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "reflect" -) - -func AnyRecvChanOfHelpersLine() <-chan helpers.Line { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(<-chan helpers.Line))(nil)).Elem())) - var nullValue <-chan helpers.Line - return nullValue -} - -func EqRecvChanOfHelpersLine(value <-chan helpers.Line) <-chan helpers.Line { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue <-chan helpers.Line - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/recv_chan_of_string.go b/server/legacy/core/terraform/mocks/matchers/recv_chan_of_string.go deleted file mode 100644 index e2c14d38a..000000000 --- a/server/legacy/core/terraform/mocks/matchers/recv_chan_of_string.go +++ /dev/null @@ -1,19 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyRecvChanOfString() <-chan string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(<-chan string))(nil)).Elem())) - var nullValue <-chan string - return nullValue -} - -func EqRecvChanOfString(value <-chan string) <-chan string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue <-chan string - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/recv_chan_of_terraform_line.go b/server/legacy/core/terraform/mocks/matchers/recv_chan_of_terraform_line.go deleted file mode 100644 index 61e1dd76d..000000000 --- a/server/legacy/core/terraform/mocks/matchers/recv_chan_of_terraform_line.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - helpers "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - "reflect" -) - -func AnyRecvChanOfTerraformLine() <-chan helpers.Line { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(<-chan helpers.Line))(nil)).Elem())) - var nullValue <-chan helpers.Line - return nullValue -} - -func EqRecvChanOfTerraformLine(value <-chan helpers.Line) <-chan helpers.Line { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue <-chan helpers.Line - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/matchers/slice_of_string.go b/server/legacy/core/terraform/mocks/matchers/slice_of_string.go deleted file mode 100644 index f9281819d..000000000 --- a/server/legacy/core/terraform/mocks/matchers/slice_of_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfString() []string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) - var nullValue []string - return nullValue -} - -func EqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func NotEqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { - pegomock.RegisterMatcher(matcher) - var nullValue []string - return nullValue -} diff --git a/server/legacy/core/terraform/mocks/mock_command_builder.go b/server/legacy/core/terraform/mocks/mock_command_builder.go deleted file mode 100644 index f4dae8552..000000000 --- a/server/legacy/core/terraform/mocks/mock_command_builder.go +++ /dev/null @@ -1,122 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/terraform (interfaces: commandBuilder) - -package mocks - -import ( - go_version "github.com/hashicorp/go-version" - pegomock "github.com/petergtz/pegomock" - exec "os/exec" - "reflect" - "time" -) - -type MockcommandBuilder struct { - fail func(message string, callerSkip ...int) -} - -func NewMockcommandBuilder(options ...pegomock.Option) *MockcommandBuilder { - mock := &MockcommandBuilder{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockcommandBuilder) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockcommandBuilder) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockcommandBuilder) Build(v *go_version.Version, workspace string, path string, args []string) (*exec.Cmd, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockcommandBuilder().") - } - params := []pegomock.Param{v, workspace, path, args} - result := pegomock.GetGenericMockFrom(mock).Invoke("Build", params, []reflect.Type{reflect.TypeOf((**exec.Cmd)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *exec.Cmd - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*exec.Cmd) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockcommandBuilder) VerifyWasCalledOnce() *VerifierMockcommandBuilder { - return &VerifierMockcommandBuilder{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockcommandBuilder) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockcommandBuilder { - return &VerifierMockcommandBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockcommandBuilder) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockcommandBuilder { - return &VerifierMockcommandBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockcommandBuilder) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockcommandBuilder { - return &VerifierMockcommandBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockcommandBuilder struct { - mock *MockcommandBuilder - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockcommandBuilder) Build(v *go_version.Version, workspace string, path string, args []string) *MockcommandBuilder_Build_OngoingVerification { - params := []pegomock.Param{v, workspace, path, args} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Build", params, verifier.timeout) - return &MockcommandBuilder_Build_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockcommandBuilder_Build_OngoingVerification struct { - mock *MockcommandBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockcommandBuilder_Build_OngoingVerification) GetCapturedArguments() (*go_version.Version, string, string, []string) { - v, workspace, path, args := c.GetAllCapturedArguments() - return v[len(v)-1], workspace[len(workspace)-1], path[len(path)-1], args[len(args)-1] -} - -func (c *MockcommandBuilder_Build_OngoingVerification) GetAllCapturedArguments() (_param0 []*go_version.Version, _param1 []string, _param2 []string, _param3 [][]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*go_version.Version) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([][]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.([]string) - } - } - return -} diff --git a/server/legacy/core/terraform/mocks/mock_downloader.go b/server/legacy/core/terraform/mocks/mock_downloader.go deleted 
file mode 100644 index aaeed9a08..000000000 --- a/server/legacy/core/terraform/mocks/mock_downloader.go +++ /dev/null @@ -1,185 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/terraform (interfaces: Downloader) - -package mocks - -import ( - go_getter "github.com/hashicorp/go-getter" - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockDownloader struct { - fail func(message string, callerSkip ...int) -} - -func NewMockDownloader(options ...pegomock.Option) *MockDownloader { - mock := &MockDownloader{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockDownloader) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockDownloader) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockDownloader) GetFile(dst string, src string, opts ...go_getter.ClientOption) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockDownloader().") - } - params := []pegomock.Param{dst, src} - for _, param := range opts { - params = append(params, param) - } - result := pegomock.GetGenericMockFrom(mock).Invoke("GetFile", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockDownloader) GetAny(dst string, src string, opts ...go_getter.ClientOption) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockDownloader().") - } - params := []pegomock.Param{dst, src} - for _, param := range opts { - params = append(params, param) - } - result := pegomock.GetGenericMockFrom(mock).Invoke("GetAny", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockDownloader) VerifyWasCalledOnce() *VerifierMockDownloader { - return &VerifierMockDownloader{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockDownloader) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockDownloader { - return &VerifierMockDownloader{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockDownloader) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockDownloader { - return &VerifierMockDownloader{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockDownloader) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockDownloader { - return &VerifierMockDownloader{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockDownloader struct { - mock *MockDownloader - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockDownloader) GetFile(dst string, src string, opts ...go_getter.ClientOption) *MockDownloader_GetFile_OngoingVerification { - params := []pegomock.Param{dst, src} - for _, param := range opts { - params = append(params, param) - } - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetFile", params, verifier.timeout) - return 
&MockDownloader_GetFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockDownloader_GetFile_OngoingVerification struct { - mock *MockDownloader - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockDownloader_GetFile_OngoingVerification) GetCapturedArguments() (string, string, []go_getter.ClientOption) { - dst, src, opts := c.GetAllCapturedArguments() - return dst[len(dst)-1], src[len(src)-1], opts[len(opts)-1] -} - -func (c *MockDownloader_GetFile_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 [][]go_getter.ClientOption) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([][]go_getter.ClientOption, len(c.methodInvocations)) - for u := 0; u < len(c.methodInvocations); u++ { - _param2[u] = make([]go_getter.ClientOption, len(params)-2) - for x := 2; x < len(params); x++ { - if params[x][u] != nil { - _param2[u][x-2] = params[x][u].(go_getter.ClientOption) - } - } - } - } - return -} - -func (verifier *VerifierMockDownloader) GetAny(dst string, src string, opts ...go_getter.ClientOption) *MockDownloader_GetAny_OngoingVerification { - params := []pegomock.Param{dst, src} - for _, param := range opts { - params = append(params, param) - } - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetAny", params, verifier.timeout) - return &MockDownloader_GetAny_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockDownloader_GetAny_OngoingVerification struct { - mock *MockDownloader - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockDownloader_GetAny_OngoingVerification) GetCapturedArguments() (string, string, []go_getter.ClientOption) { - dst, src, opts := c.GetAllCapturedArguments() - return dst[len(dst)-1], src[len(src)-1], opts[len(opts)-1] -} - -func (c *MockDownloader_GetAny_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 [][]go_getter.ClientOption) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([][]go_getter.ClientOption, len(c.methodInvocations)) - for u := 0; u < len(c.methodInvocations); u++ { - _param2[u] = make([]go_getter.ClientOption, len(params)-2) - for x := 2; x < len(params); x++ { - if params[x][u] != nil { - _param2[u][x-2] = params[x][u].(go_getter.ClientOption) - } - } - } - } - return -} diff --git a/server/legacy/core/terraform/mocks/mock_terraform_client.go b/server/legacy/core/terraform/mocks/mock_terraform_client.go deleted file mode 100644 index a8161e9b7..000000000 --- a/server/legacy/core/terraform/mocks/mock_terraform_client.go +++ /dev/null @@ -1,182 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
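// A short usage sketch for the generated MockDownloader above, mirroring the pattern
// the (also removed) client tests use further down: register the fail handler, stub
// GetAny with Eq matchers, exercise the mock, then verify the interaction. The test
// package name, destination path, and URL are placeholders.
package mocks_test

import (
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks"
)

func TestDownloaderStubAndVerify(t *testing.T) {
	RegisterMockTestingT(t)
	mockDownloader := mocks.NewMockDownloader()

	// Stub: a GetAny call with exactly these two arguments returns no error.
	When(mockDownloader.GetAny(EqString("/tmp/tf"), EqString("https://example.com/terraform.zip"))).ThenReturn(nil)

	// Exercise the mock as the production code would.
	if err := mockDownloader.GetAny("/tmp/tf", "https://example.com/terraform.zip"); err != nil {
		t.Fatalf("expected stubbed nil error, got %v", err)
	}

	// Verify the call happened exactly once with the expected arguments.
	mockDownloader.VerifyWasCalledOnce().GetAny(EqString("/tmp/tf"), EqString("https://example.com/terraform.zip"))
}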
-// Source: github.com/runatlantis/atlantis/server/legacy/core/terraform (interfaces: Client) - -package mocks - -import ( - context "context" - go_version "github.com/hashicorp/go-version" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - logging "github.com/runatlantis/atlantis/server/logging" - "reflect" - "time" -) - -type MockClient struct { - fail func(message string, callerSkip ...int) -} - -func NewMockClient(options ...pegomock.Option) *MockClient { - mock := &MockClient{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockClient) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockClient) RunCommandWithVersion(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{ctx, prjCtx, path, args, envs, v, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("RunCommandWithVersion", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) EnsureVersion(log logging.Logger, v *go_version.Version) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{log, v} - result := pegomock.GetGenericMockFrom(mock).Invoke("EnsureVersion", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockClient) VerifyWasCalledOnce() *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockClient) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockClient) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockClient) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockClient struct { - mock *MockClient - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockClient) RunCommandWithVersion(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) *MockClient_RunCommandWithVersion_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, path, args, envs, v, workspace} - methodInvocations := 
pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandWithVersion", params, verifier.timeout) - return &MockClient_RunCommandWithVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_RunCommandWithVersion_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_RunCommandWithVersion_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, string, []string, map[string]string, *go_version.Version, string) { - ctx, prjCtx, path, args, envs, v, workspace := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], path[len(path)-1], args[len(args)-1], envs[len(envs)-1], v[len(v)-1], workspace[len(workspace)-1] -} - -func (c *MockClient_RunCommandWithVersion_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string, _param3 [][]string, _param4 []map[string]string, _param5 []*go_version.Version, _param6 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([][]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.([]string) - } - _param4 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(map[string]string) - } - _param5 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(*go_version.Version) - } - _param6 = make([]string, len(c.methodInvocations)) - for u, param := range params[6] { - _param6[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockClient) EnsureVersion(log logging.Logger, v *go_version.Version) *MockClient_EnsureVersion_OngoingVerification { - params := []pegomock.Param{log, v} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "EnsureVersion", params, verifier.timeout) - return &MockClient_EnsureVersion_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_EnsureVersion_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_EnsureVersion_OngoingVerification) GetCapturedArguments() (logging.Logger, *go_version.Version) { - log, v := c.GetAllCapturedArguments() - return log[len(log)-1], v[len(v)-1] -} - -func (c *MockClient_EnsureVersion_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []*go_version.Version) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = 
param.(*go_version.Version) - } - } - return -} diff --git a/server/legacy/core/terraform/mocks/mock_terraform_client_async.go b/server/legacy/core/terraform/mocks/mock_terraform_client_async.go deleted file mode 100644 index fcc68afd3..000000000 --- a/server/legacy/core/terraform/mocks/mock_terraform_client_async.go +++ /dev/null @@ -1,210 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/core/terraform (interfaces: ClientAsync) - -package mocks - -import ( - context "context" - go_version "github.com/hashicorp/go-version" - pegomock "github.com/petergtz/pegomock" - helpers "github.com/runatlantis/atlantis/server/legacy/core/terraform/helpers" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockClientAsync struct { - fail func(message string, callerSkip ...int) -} - -func NewMockClientAsync(options ...pegomock.Option) *MockClientAsync { - mock := &MockClientAsync{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockClientAsync) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockClientAsync) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockClientAsync) RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *go_version.Version, workspace string) <-chan helpers.Line { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClientAsync().") - } - params := []pegomock.Param{ctx, prjCtx, path, args, customEnvVars, v, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("RunCommandAsync", params, []reflect.Type{reflect.TypeOf((*<-chan helpers.Line)(nil)).Elem()}) - var ret0 <-chan helpers.Line - if len(result) != 0 { - if result[0] != nil { - var ok bool - ret0, ok = result[0].(chan helpers.Line) - if !ok { - ret0 = result[0].(<-chan helpers.Line) - } - } - } - return ret0 -} - -func (mock *MockClientAsync) RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *go_version.Version, workspace string, input <-chan string) <-chan helpers.Line { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockClientAsync().") - } - params := []pegomock.Param{ctx, prjCtx, path, args, customEnvVars, v, workspace, input} - result := pegomock.GetGenericMockFrom(mock).Invoke("RunCommandAsyncWithInput", params, []reflect.Type{reflect.TypeOf((*<-chan helpers.Line)(nil)).Elem()}) - var ret0 <-chan helpers.Line - if len(result) != 0 { - if result[0] != nil { - var ok bool - ret0, ok = result[0].(chan helpers.Line) - if !ok { - ret0 = result[0].(<-chan helpers.Line) - } - } - } - return ret0 -} - -func (mock *MockClientAsync) VerifyWasCalledOnce() *VerifierMockClientAsync { - return &VerifierMockClientAsync{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockClientAsync) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockClientAsync { - return &VerifierMockClientAsync{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockClientAsync) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockClientAsync { - return &VerifierMockClientAsync{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockClientAsync) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockClientAsync { - return &VerifierMockClientAsync{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockClientAsync struct { - mock *MockClientAsync - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockClientAsync) RunCommandAsync(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *go_version.Version, workspace string) *MockClientAsync_RunCommandAsync_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, path, args, customEnvVars, v, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandAsync", params, verifier.timeout) - return &MockClientAsync_RunCommandAsync_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClientAsync_RunCommandAsync_OngoingVerification struct { - mock *MockClientAsync - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClientAsync_RunCommandAsync_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, string, []string, map[string]string, *go_version.Version, string) { - ctx, prjCtx, path, args, customEnvVars, v, workspace := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], path[len(path)-1], args[len(args)-1], customEnvVars[len(customEnvVars)-1], v[len(v)-1], workspace[len(workspace)-1] -} - -func (c *MockClientAsync_RunCommandAsync_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string, _param3 [][]string, _param4 []map[string]string, _param5 []*go_version.Version, _param6 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] 
= param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([][]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.([]string) - } - _param4 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(map[string]string) - } - _param5 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(*go_version.Version) - } - _param6 = make([]string, len(c.methodInvocations)) - for u, param := range params[6] { - _param6[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockClientAsync) RunCommandAsyncWithInput(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *go_version.Version, workspace string, input <-chan string) *MockClientAsync_RunCommandAsyncWithInput_OngoingVerification { - params := []pegomock.Param{ctx, prjCtx, path, args, customEnvVars, v, workspace, input} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandAsyncWithInput", params, verifier.timeout) - return &MockClientAsync_RunCommandAsyncWithInput_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClientAsync_RunCommandAsyncWithInput_OngoingVerification struct { - mock *MockClientAsync - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClientAsync_RunCommandAsyncWithInput_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, string, []string, map[string]string, *go_version.Version, string, <-chan string) { - ctx, prjCtx, path, args, customEnvVars, v, workspace, input := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], prjCtx[len(prjCtx)-1], path[len(path)-1], args[len(args)-1], customEnvVars[len(customEnvVars)-1], v[len(v)-1], workspace[len(workspace)-1], input[len(input)-1] -} - -func (c *MockClientAsync_RunCommandAsyncWithInput_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []string, _param3 [][]string, _param4 []map[string]string, _param5 []*go_version.Version, _param6 []string, _param7 []<-chan string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([][]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.([]string) - } - _param4 = make([]map[string]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(map[string]string) - } - _param5 = make([]*go_version.Version, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(*go_version.Version) - } - _param6 = make([]string, len(c.methodInvocations)) - for u, param := range params[6] { - _param6[u] = param.(string) - } - _param7 = make([]<-chan string, len(c.methodInvocations)) - for u, param := 
range params[7] { - _param7[u] = param.(<-chan string) - } - } - return -} diff --git a/server/legacy/core/terraform/terraform_client.go b/server/legacy/core/terraform/terraform_client.go deleted file mode 100644 index 868ed1d3b..000000000 --- a/server/legacy/core/terraform/terraform_client.go +++ /dev/null @@ -1,342 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. -// -// Package terraform handles the actual running of terraform commands. -package terraform - -import ( - "context" - "fmt" - "os/exec" - "path/filepath" - "regexp" - "runtime" - "strings" - - "github.com/hashicorp/go-getter" - "github.com/hashicorp/go-version" - "github.com/pkg/errors" - - "github.com/runatlantis/atlantis/server/legacy/core/runtime/cache" - runtime_models "github.com/runatlantis/atlantis/server/legacy/core/runtime/models" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/terraform/ansi" - "github.com/runatlantis/atlantis/server/legacy/jobs" - "github.com/runatlantis/atlantis/server/logging" -) - -var LogStreamingValidCmds = [...]string{"init", "plan", "apply"} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_terraform_client.go Client - -type Client interface { - // RunCommandWithVersion executes terraform with args in path. If v is nil, - // it will use the default Terraform version. workspace is the Terraform - // workspace which should be set as an environment variable. - RunCommandWithVersion(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, envs map[string]string, v *version.Version, workspace string) (string, error) - - // EnsureVersion makes sure that terraform version `v` is available to use - EnsureVersion(log logging.Logger, v *version.Version) error -} - -type DefaultClient struct { - // defaultVersion is the default version of terraform to use if another - // version isn't specified. - defaultVersion *version.Version - binDir string - // downloader downloads terraform versions. - downloader Downloader - downloadBaseURL string - - versionCache cache.ExecutionVersionCache - commandBuilder commandBuilder - *AsyncClient -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_downloader.go Downloader - -// Downloader is for downloading terraform versions. -type Downloader interface { - GetFile(dst, src string, opts ...getter.ClientOption) error - GetAny(dst, src string, opts ...getter.ClientOption) error -} - -// versionRegex extracts the version from `terraform version` output. 
-// -// Terraform v0.12.0-alpha4 (2c36829d3265661d8edbd5014de8090ea7e2a076) -// => 0.12.0-alpha4 -// -// Terraform v0.11.10 -// => 0.11.10 -var versionRegex = regexp.MustCompile("Terraform v(.*?)(\\s.*)?\n") - -// NewClientWithDefaultVersion creates a new terraform client and pre-fetches the default version -func NewClientWithVersionCache( - binDir string, - cacheDir string, - defaultVersionStr string, - defaultVersionFlagName string, - tfDownloadURL string, - tfDownloader Downloader, - usePluginCache bool, - projectCmdOutputHandler jobs.ProjectCommandOutputHandler, - versionCache cache.ExecutionVersionCache, -) (*DefaultClient, error) { - version, err := getDefaultVersion(defaultVersionStr, defaultVersionFlagName) - - if err != nil { - return nil, errors.Wrapf(err, "getting default version") - } - - // warm the cache with this version - _, err = versionCache.Get(version) - - if err != nil { - return nil, errors.Wrapf(err, "getting default terraform version %s", defaultVersionStr) - } - - builder := &CommandBuilder{ - defaultVersion: version, - versionCache: versionCache, - } - - if usePluginCache { - builder.terraformPluginCacheDir = cacheDir - } - - asyncClient := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: builder, - } - - return &DefaultClient{ - defaultVersion: version, - binDir: binDir, - downloader: tfDownloader, - downloadBaseURL: tfDownloadURL, - AsyncClient: asyncClient, - commandBuilder: builder, - versionCache: versionCache, - }, nil -} - -func NewE2ETestClient( - binDir string, - cacheDir string, - tfeToken string, - tfeHostname string, - defaultVersionStr string, - defaultVersionFlagName string, - tfDownloadURL string, - tfDownloader Downloader, - usePluginCache bool, - projectCmdOutputHandler jobs.ProjectCommandOutputHandler, -) (*DefaultClient, error) { - versionCache := cache.NewLocalBinaryCache("terraform") - return NewClientWithVersionCache( - binDir, - cacheDir, - defaultVersionStr, - defaultVersionFlagName, - tfDownloadURL, - tfDownloader, - usePluginCache, - projectCmdOutputHandler, - versionCache, - ) -} - -func NewClient( - binDir string, - cacheDir string, - defaultVersionStr string, - defaultVersionFlagName string, - tfDownloadURL string, - tfDownloader Downloader, - usePluginCache bool, - projectCmdOutputHandler jobs.ProjectCommandOutputHandler, -) (*DefaultClient, error) { - loader := VersionLoader{ - downloader: tfDownloader, - downloadURL: tfDownloadURL, - } - - versionCache := cache.NewExecutionVersionLayeredLoadingCache( - "terraform", - binDir, - loader.LoadVersion, - ) - return NewClientWithVersionCache( - binDir, - cacheDir, - defaultVersionStr, - defaultVersionFlagName, - tfDownloadURL, - tfDownloader, - usePluginCache, - projectCmdOutputHandler, - versionCache, - ) -} - -// Version returns the default version of Terraform we use if no other version -// is defined. -func (c *DefaultClient) DefaultVersion() *version.Version { - return c.defaultVersion -} - -// TerraformBinDir returns the directory where we download Terraform binaries. -func (c *DefaultClient) TerraformBinDir() string { - return c.binDir -} - -func (c *DefaultClient) EnsureVersion(log logging.Logger, v *version.Version) error { - if v == nil { - v = c.defaultVersion - } - - _, err := c.versionCache.Get(v) - - if err != nil { - return errors.Wrapf(err, "getting version %s", v) - } - - return nil -} - -// See Client.RunCommandWithVersion. 
-func (c *DefaultClient) RunCommandWithVersion(ctx context.Context, prjCtx command.ProjectContext, path string, args []string, customEnvVars map[string]string, v *version.Version, workspace string) (string, error) { - // if the feature is enabled, we use the async workflow else we default to the original sync workflow - // Don't stream terraform show output to outCh - if len(args) > 0 && isAsyncEligibleCommand(args[0]) { - outCh := c.RunCommandAsync(ctx, prjCtx, path, args, customEnvVars, v, workspace) - - var lines []string - var err error - for line := range outCh { - if line.Err != nil { - err = line.Err - break - } - lines = append(lines, line.Line) - } - output := strings.Join(lines, "\n") - - // sanitize output by stripping out any ansi characters. - output = ansi.Strip(output) - return fmt.Sprintf("%s\n", output), err - } - - cmd, err := c.commandBuilder.Build(v, workspace, path, args) - if err != nil { - return "", err - } - envVars := cmd.Env - for key, val := range customEnvVars { - envVars = append(envVars, fmt.Sprintf("%s=%s", key, val)) - } - cmd.Env = envVars - out, err := cmd.CombinedOutput() - if err != nil { - err = errors.Wrapf(err, "running %q in %q", cmd.String(), path) - prjCtx.Log.ErrorContext(prjCtx.RequestCtx, err.Error()) - return ansi.Strip(string(out)), err - } - prjCtx.Log.InfoContext(prjCtx.RequestCtx, fmt.Sprintf("successfully ran %q in %q", cmd.String(), path)) - - return ansi.Strip(string(out)), nil -} - -type VersionLoader struct { - downloader Downloader - downloadURL string -} - -func NewVersionLoader(downloader Downloader, downloadURL string) *VersionLoader { - return &VersionLoader{ - downloader: downloader, - downloadURL: downloadURL, - } -} - -func (l *VersionLoader) LoadVersion(v *version.Version, destPath string) (runtime_models.FilePath, error) { - urlPrefix := fmt.Sprintf("%s/terraform/%s/terraform_%s", l.downloadURL, v.String(), v.String()) - binURL := fmt.Sprintf("%s_%s_%s.zip", urlPrefix, runtime.GOOS, runtime.GOARCH) - checksumURL := fmt.Sprintf("%s_SHA256SUMS", urlPrefix) - fullSrcURL := fmt.Sprintf("%s?checksum=file:%s", binURL, checksumURL) - if err := l.downloader.GetAny(destPath, fullSrcURL); err != nil { - return runtime_models.LocalFilePath(""), errors.Wrapf(err, "downloading terraform version %s at %q", v.String(), fullSrcURL) - } - - binPath := filepath.Join(destPath, "terraform") - - return runtime_models.LocalFilePath(binPath), nil -} - -func isAsyncEligibleCommand(cmd string) bool { - for _, validCmd := range LogStreamingValidCmds { - if validCmd == cmd { - return true - } - } - return false -} - -func getDefaultVersion(overrideVersion string, versionFlagName string) (*version.Version, error) { - if overrideVersion != "" { - v, err := version.NewVersion(overrideVersion) - if err != nil { - return nil, errors.Wrapf(err, "parsing version %s", overrideVersion) - } - - return v, nil - } - - // look for the binary directly on disk and query the version - // we shouldn't really be doing this, but don't want to break existing clients. - // this implementation assumes that versions in the format our cache assumes - // and if thats the case we won't be redownloading the version of this binary to our cache - localPath, err := exec.LookPath("terraform") - if err != nil { - return nil, fmt.Errorf("terraform not found in $PATH. 
Set --%s or download terraform from https://www.terraform.io/downloads.html", versionFlagName) - } - - return getVersion(localPath) -} - -func getVersion(tfBinary string) (*version.Version, error) { - versionOutBytes, err := exec.Command(tfBinary, "version").Output() // #nosec - versionOutput := string(versionOutBytes) - if err != nil { - return nil, errors.Wrapf(err, "running terraform version: %s", versionOutput) - } - match := versionRegex.FindStringSubmatch(versionOutput) - if len(match) <= 1 { - return nil, fmt.Errorf("could not parse terraform version from %s", versionOutput) - } - return version.NewVersion(match[1]) -} - -type DefaultDownloader struct{} - -// See go-getter.GetFile. -func (d *DefaultDownloader) GetFile(dst, src string, opts ...getter.ClientOption) error { - return getter.GetFile(dst, src, opts...) -} - -// See go-getter.GetFile. -func (d *DefaultDownloader) GetAny(dst, src string, opts ...getter.ClientOption) error { - return getter.GetAny(dst, src, opts...) -} diff --git a/server/legacy/core/terraform/terraform_client_internal_test.go b/server/legacy/core/terraform/terraform_client_internal_test.go deleted file mode 100644 index 55e4803fb..000000000 --- a/server/legacy/core/terraform/terraform_client_internal_test.go +++ /dev/null @@ -1,131 +0,0 @@ -package terraform - -import ( - "context" - "fmt" - "os/exec" - "path/filepath" - "runtime" - "testing" - - "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/command" - jobmocks "github.com/runatlantis/atlantis/server/legacy/jobs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" - "github.com/stretchr/testify/assert" -) - -// Test that it executes successfully -func TestDefaultClient_Synchronous_RunCommandWithVersion(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - logger := logging.NewNoopCtxLogger(t) - echoCommand := exec.Command("sh", "-c", "echo hello") - - ctx := context.Background() - prjCtx := command.ProjectContext{ - RequestCtx: context.TODO(), - Log: logger, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - asyncClient := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - client := &DefaultClient{ - commandBuilder: mockBuilder, - AsyncClient: asyncClient, - } - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(echoCommand, nil) - - customEnvVars := map[string]string{} - out, err := client.RunCommandWithVersion(ctx, prjCtx, path, args, customEnvVars, nil, workspace) - Ok(t, err) - Equals(t, "hello\n", out) -} - -func TestVersionLoader_buildsURL(t *testing.T) { - v, _ := version.NewVersion("0.15.0") - - destPath := "some/path" - fullURL := fmt.Sprintf("https://releases.hashicorp.com/terraform/0.15.0/terraform_0.15.0_%s_%s.zip?checksum=file:https://releases.hashicorp.com/terraform/0.15.0/terraform_0.15.0_SHA256SUMS", runtime.GOOS, runtime.GOARCH) - - RegisterMockTestingT(t) - - mockDownloader := mocks.NewMockDownloader() - - subject := VersionLoader{ - downloader: mockDownloader, - downloadURL: "https://releases.hashicorp.com", - } - - t.Run("success", func(t *testing.T) { - When(mockDownloader.GetAny(EqString(destPath), EqString(fullURL))).ThenReturn(nil) - binPath, err := subject.LoadVersion(v, destPath) - - mockDownloader.VerifyWasCalledOnce().GetAny(EqString(destPath), EqString(fullURL)) - - Ok(t, err) - - Assert(t, binPath.Resolve() == filepath.Join(destPath, "terraform"), "expected binpath") - }) - - t.Run("error", func(t *testing.T) { - When(mockDownloader.GetAny(EqString(destPath), EqString(fullURL))).ThenReturn(fmt.Errorf("err")) - _, err := subject.LoadVersion(v, destPath) - - Assert(t, err != nil, "err is expected") - }) -} - -// Test that it returns an error on error. -func TestDefaultClient_Synchronous_RunCommandWithVersion_Error(t *testing.T) { - path := "some/path" - args := []string{ - "ARG1=$ARG1", - } - workspace := "workspace" - logger := logging.NewNoopCtxLogger(t) - echoCommand := exec.Command("sh", "-c", "echo dying && exit 1") - - ctx := context.Background() - prjCtx := command.ProjectContext{ - RequestCtx: context.TODO(), - Log: logger, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - } - mockBuilder := mocks.NewMockcommandBuilder() - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - asyncClient := &AsyncClient{ - projectCmdOutputHandler: projectCmdOutputHandler, - commandBuilder: mockBuilder, - } - - client := &DefaultClient{ - commandBuilder: mockBuilder, - AsyncClient: asyncClient, - } - - When(mockBuilder.Build(nil, workspace, path, args)).ThenReturn(echoCommand, nil) - out, err := client.RunCommandWithVersion(ctx, prjCtx, path, args, map[string]string{}, nil, workspace) - assert.ErrorContains(t, err, fmt.Sprintf(`echo dying && exit 1" in %q: exit status 1`, path)) - // Test that we still get our output. 
- Equals(t, "dying\n", out) -} diff --git a/server/legacy/core/terraform/terraform_client_test.go b/server/legacy/core/terraform/terraform_client_test.go deleted file mode 100644 index 1d93bd6ce..000000000 --- a/server/legacy/core/terraform/terraform_client_test.go +++ /dev/null @@ -1,141 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package terraform_test - -import ( - "context" - "fmt" - "os" - "path/filepath" - "testing" - - "github.com/runatlantis/atlantis/cmd" - "github.com/runatlantis/atlantis/server/legacy/core/terraform" - "github.com/runatlantis/atlantis/server/legacy/events/command" - jobmocks "github.com/runatlantis/atlantis/server/legacy/jobs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -// Test that if terraform is not in PATH and we didn't set the default-tf flag -// that we error. -func TestNewClient_NoTF(t *testing.T) { - tmp, binDir, cacheDir, cleanup := mkSubDirs(t) - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - defer cleanup() - - // Set PATH to only include our empty directory. - defer tempSetEnv(t, "PATH", tmp)() - - _, err := terraform.NewClient(binDir, cacheDir, "", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) - ErrEquals(t, "getting default version: terraform not found in $PATH. Set --default-tf-version or download terraform from https://www.terraform.io/downloads.html", err) -} - -// Test that if the default-tf flag is set and that binary is in our PATH -// that we use it. -func TestNewClient_DefaultTFFlagInPath(t *testing.T) { - fakeBinOut := "Terraform v0.11.10\n" - tmp, binDir, cacheDir, cleanup := mkSubDirs(t) - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - ctx := context.Background() - prjCtx := command.ProjectContext{ - RequestCtx: context.TODO(), - Log: logging.NewNoopCtxLogger(t), - Workspace: "default", - RepoRelDir: ".", - BaseRepo: models.Repo{FullName: "owner/repo"}, - } - defer cleanup() - - // We're testing this by adding our own "fake" terraform binary to path that - // outputs what would normally come from terraform version. 
- err := os.WriteFile(filepath.Join(tmp, "terraform0.11.10"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 - Ok(t, err) - defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - - c, err := terraform.NewClient(binDir, cacheDir, "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) - Ok(t, err) - - Ok(t, err) - Equals(t, "0.11.10", c.DefaultVersion().String()) - - output, err := c.RunCommandWithVersion(ctx, prjCtx, tmp, nil, map[string]string{}, nil, "") - Ok(t, err) - Equals(t, fakeBinOut+"\n", output) -} - -// Test that if the default-tf flag is set and that binary is in our download -// bin dir that we use it. -func TestNewClient_DefaultTFFlagInBinDir(t *testing.T) { - fakeBinOut := "Terraform v0.11.10\n" - tmp, binDir, cacheDir, cleanup := mkSubDirs(t) - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - ctx := context.Background() - prjCtx := command.ProjectContext{ - RequestCtx: context.TODO(), - Log: logging.NewNoopCtxLogger(t), - Workspace: "default", - RepoRelDir: ".", - BaseRepo: models.Repo{FullName: "owner/repo"}, - } - defer cleanup() - - // Add our fake binary to {datadir}/bin/terraform{version}. - err := os.WriteFile(filepath.Join(binDir, "terraform0.11.10"), []byte(fmt.Sprintf("#!/bin/sh\necho '%s'", fakeBinOut)), 0700) // #nosec G306 - Ok(t, err) - defer tempSetEnv(t, "PATH", fmt.Sprintf("%s:%s", tmp, os.Getenv("PATH")))() - - c, err := terraform.NewClient(binDir, cacheDir, "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) - Ok(t, err) - - Ok(t, err) - Equals(t, "0.11.10", c.DefaultVersion().String()) - - output, err := c.RunCommandWithVersion(ctx, prjCtx, tmp, nil, map[string]string{}, nil, "") - Ok(t, err) - Equals(t, fakeBinOut+"\n", output) -} - -// Test that we get an error if the terraform version flag is malformed. -func TestNewClient_BadVersion(t *testing.T) { - _, binDir, cacheDir, cleanup := mkSubDirs(t) - projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() - defer cleanup() - - _, err := terraform.NewClient(binDir, cacheDir, "malformed", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, nil, true, projectCmdOutputHandler) - ErrEquals(t, "getting default version: parsing version malformed: Malformed version: malformed", err) -} - -// tempSetEnv sets env var key to value. It returns a function that when called -// will reset the env var to its original value. 
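[Editor's aside] The two DefaultTFFlag tests above rely on one trick: drop an executable shell stub named terraform<version> onto PATH (or into the bin dir) so the client's version lookup resolves to the stub instead of a real binary. A minimal stand-alone sketch of that pattern, using the stdlib t.TempDir/t.Setenv helpers instead of the mkSubDirs/tempSetEnv helpers defined below; Unix-only, as in the original tests, and not part of the deleted suite:

package terraform_test

import (
	"os"
	"os/exec"
	"path/filepath"
	"testing"
)

func TestFakeTerraformOnPath(t *testing.T) {
	tmp := t.TempDir()
	stub := filepath.Join(tmp, "terraform0.11.10")
	// An executable shell script that mimics `terraform version` output.
	if err := os.WriteFile(stub, []byte("#!/bin/sh\necho 'Terraform v0.11.10'"), 0o700); err != nil {
		t.Fatal(err)
	}
	// Prepend the temp dir so PATH lookup finds the stub first.
	t.Setenv("PATH", tmp+string(os.PathListSeparator)+os.Getenv("PATH"))

	out, err := exec.Command("terraform0.11.10").Output()
	if err != nil {
		t.Fatal(err)
	}
	if got := string(out); got != "Terraform v0.11.10\n" {
		t.Fatalf("unexpected stub output: %q", got)
	}
}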
-func tempSetEnv(t *testing.T, key string, value string) func() { - orig := os.Getenv(key) - Ok(t, os.Setenv(key, value)) - return func() { os.Setenv(key, orig) } -} - -// returns parent, bindir, cachedir, cleanup func -func mkSubDirs(t *testing.T) (string, string, string, func()) { - tmp, cleanup := TempDir(t) - binDir := filepath.Join(tmp, "bin") - err := os.MkdirAll(binDir, 0700) - Ok(t, err) - - cachedir := filepath.Join(tmp, "plugin-cache") - err = os.MkdirAll(cachedir, 0700) - Ok(t, err) - - return tmp, binDir, cachedir, cleanup -} diff --git a/server/legacy/events/apply_command_runner.go b/server/legacy/events/apply_command_runner.go deleted file mode 100644 index 99f382dbd..000000000 --- a/server/legacy/events/apply_command_runner.go +++ /dev/null @@ -1,181 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/models" -) - -func NewApplyCommandRunner( - vcsClient vcs.Client, - disableApplyAll bool, - applyCommandLocker locking.ApplyLockChecker, - vcsStatusUpdater VCSStatusUpdater, - prjCommandBuilder ProjectApplyCommandBuilder, - prjCmdRunner ProjectApplyCommandRunner, - outputUpdater OutputUpdater, - dbUpdater *DBUpdater, - parallelPoolSize int, - pullReqStatusFetcher vcs.PullReqStatusFetcher, -) *ApplyCommandRunner { - return &ApplyCommandRunner{ - vcsClient: vcsClient, - DisableApplyAll: disableApplyAll, - locker: applyCommandLocker, - vcsStatusUpdater: vcsStatusUpdater, - prjCmdBuilder: prjCommandBuilder, - prjCmdRunner: prjCmdRunner, - outputUpdater: outputUpdater, - dbUpdater: dbUpdater, - parallelPoolSize: parallelPoolSize, - pullReqStatusFetcher: pullReqStatusFetcher, - } -} - -type ApplyCommandRunner struct { - DisableApplyAll bool - locker locking.ApplyLockChecker - vcsClient vcs.Client - vcsStatusUpdater VCSStatusUpdater - prjCmdBuilder ProjectApplyCommandBuilder - prjCmdRunner ProjectApplyCommandRunner - outputUpdater OutputUpdater - dbUpdater *DBUpdater - parallelPoolSize int - pullReqStatusFetcher vcs.PullReqStatusFetcher -} - -func (a *ApplyCommandRunner) Run(ctx *command.Context, cmd *command.Comment) { - var err error - baseRepo := ctx.Pull.BaseRepo - pull := ctx.Pull - - locked, err := a.IsLocked() - // CheckApplyLock falls back to DisableApply flag if fetching the lock - // raises an error - // We will log failure as warning - if err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("checking global apply lock: %s", err)) - } - - if locked { - ctx.Log.InfoContext(ctx.RequestCtx, "ignoring apply command since apply disabled globally") - if err := a.vcsClient.CreateComment(baseRepo, pull.Num, applyDisabledComment, command.Apply.String()); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to comment on pull request: %s", err)) - } - - return - } - - if a.DisableApplyAll && !cmd.IsForSpecificProject() { - ctx.Log.InfoContext(ctx.RequestCtx, "ignoring apply command without flags since apply all is disabled") - if err := a.vcsClient.CreateComment(baseRepo, pull.Num, applyAllDisabledComment, command.Apply.String()); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to comment on pull request: %s", err)) - } - - return - } - - statusID, err := a.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, baseRepo, pull, models.PendingVCSStatus, cmd.CommandName(), "", "") - if err != nil { - 
ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - - // Get the mergeable status before we set any build statuses of our own. - // We do this here because when we set a "Pending" status, if users have - // required the Atlantis status checks to pass, then we've now changed - // the mergeability status of the pull request. - // This sets the approved, mergeable, and sqlocked status in the context. - ctx.PullRequestStatus, err = a.pullReqStatusFetcher.FetchPullStatus(baseRepo, pull) - if err != nil { - // On error we continue the request with mergeable assumed false. - // We want to continue because not all apply's will need this status, - // only if they rely on the mergeability requirement. - // All PullRequestStatus fields are set to false by default when error. - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to get pull request status: %s. Continuing with mergeable and approved assumed false", err)) - } - - var projectCmds []command.ProjectContext - projectCmds, err = a.prjCmdBuilder.BuildApplyCommands(ctx, cmd) - - if err != nil { - if _, statusErr := a.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, models.FailedVCSStatus, cmd.CommandName(), statusID, ""); statusErr != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", statusErr)) - } - a.outputUpdater.UpdateOutput(ctx, cmd, command.Result{Error: err}) - return - } - - // Only run commands in parallel if enabled - var result command.Result - if a.isParallelEnabled(projectCmds) { - ctx.Log.InfoContext(ctx.RequestCtx, "Running applies in parallel") - result = runProjectCmdsParallel(projectCmds, a.prjCmdRunner.Apply, a.parallelPoolSize) - } else { - result = runProjectCmds(projectCmds, a.prjCmdRunner.Apply) - } - - a.outputUpdater.UpdateOutput( - ctx, - cmd, - result) - - pullStatus, err := a.dbUpdater.updateDB(ctx, pull, result.ProjectResults) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("writing results: %s", err)) - return - } - - a.updateVcsStatus(ctx, pullStatus, statusID) -} - -func (a *ApplyCommandRunner) IsLocked() (bool, error) { - lock, err := a.locker.CheckApplyLock() - - return lock.Locked, err -} - -func (a *ApplyCommandRunner) isParallelEnabled(projectCmds []command.ProjectContext) bool { - return len(projectCmds) > 0 && projectCmds[0].ParallelApplyEnabled -} - -func (a *ApplyCommandRunner) updateVcsStatus(ctx *command.Context, pullStatus models.PullStatus, statusID string) { - var numSuccess int - var numErrored int - status := models.SuccessVCSStatus - - numSuccess = pullStatus.StatusCount(models.AppliedPlanStatus) - numErrored = pullStatus.StatusCount(models.ErroredApplyStatus) - - if numErrored > 0 { - status = models.FailedVCSStatus - } else if numSuccess < len(pullStatus.Projects) { - // If there are plans that haven't been applied yet, we'll use a pending - // status. - status = models.PendingVCSStatus - } - - if _, err := a.vcsStatusUpdater.UpdateCombinedCount( - ctx.RequestCtx, - ctx.Pull.BaseRepo, - ctx.Pull, - status, - command.Apply, - numSuccess, - len(pullStatus.Projects), - statusID, - ); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } -} - -// applyAllDisabledComment is posted when apply all commands (i.e. "atlantis apply") -// are disabled and an apply all command is issued. -var applyAllDisabledComment = "**Error:** Running `atlantis apply` without flags is disabled." 
+ - " You must specify which project to apply via the `-d `, `-w ` or `-p ` flags." - -// applyDisabledComment is posted when apply commands are disabled globally and an apply command is issued. -var applyDisabledComment = "**Error:** Running `atlantis apply` is disabled." diff --git a/server/legacy/events/apply_command_runner_test.go b/server/legacy/events/apply_command_runner_test.go deleted file mode 100644 index 0fc8ca0f7..000000000 --- a/server/legacy/events/apply_command_runner_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package events_test - -import ( - "context" - "errors" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/models/fixtures" -) - -func TestApplyCommandRunner_IsLocked(t *testing.T) { - RegisterMockTestingT(t) - - cases := []struct { - Description string - ApplyLocked bool - ApplyLockError error - ExpComment string - }{ - { - Description: "When global apply lock is present IsDisabled returns true", - ApplyLocked: true, - ApplyLockError: nil, - ExpComment: "**Error:** Running `atlantis apply` is disabled.", - }, - { - Description: "When no global apply lock is present and DisableApply flag is false IsDisabled returns false", - ApplyLocked: false, - ApplyLockError: nil, - ExpComment: "Ran Apply for 0 projects:\n\n\n\n", - }, - { - Description: "If ApplyLockChecker returns an error IsDisabled return value of DisableApply flag", - ApplyLockError: errors.New("error"), - ApplyLocked: false, - ExpComment: "Ran Apply for 0 projects:\n\n\n\n", - }, - } - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - vcsClient := setup(t) - - scopeNull, _, _ := metrics.NewLoggingScope(logger, "atlantis") - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState, Num: fixtures.Pull.Num} - ctx := &command.Context{ - User: fixtures.User, - Log: logger, - Pull: modelPull, - HeadRepo: fixtures.GithubRepo, - Trigger: command.CommentTrigger, - Scope: scopeNull, - RequestCtx: context.TODO(), - } - - When(applyLockChecker.CheckApplyLock()).ThenReturn(locking.ApplyCommandLock{Locked: c.ApplyLocked}, c.ApplyLockError) - applyCommandRunner.Run(ctx, &command.Comment{Name: command.Apply}) - - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, c.ExpComment, "apply") - }) - } -} diff --git a/server/legacy/events/apply_requirement_handler.go b/server/legacy/events/apply_requirement_handler.go deleted file mode 100644 index cc2324d37..000000000 --- a/server/legacy/events/apply_requirement_handler.go +++ /dev/null @@ -1,47 +0,0 @@ -package events - -import ( - "github.com/runatlantis/atlantis/server/config/raw" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --package mocks -o mocks/mock_apply_handler.go ApplyRequirement -type ApplyRequirement interface { - ValidateProject(repoDir string, ctx command.ProjectContext) (string, error) -} - -type AggregateApplyRequirements struct { - WorkingDir WorkingDir -} - -func (a *AggregateApplyRequirements) ValidateProject(repoDir string, ctx command.ProjectContext) (failure 
string, err error) { - for _, req := range ctx.ApplyRequirements { - switch req { - case raw.ApprovedApplyRequirement: - if !ctx.PullReqStatus.ApprovalStatus.IsApproved { - return "Pull request must be approved by at least one person other than the author before running apply.", nil - } - // this should come before mergeability check since mergeability is a superset of this check. - case valid.PoliciesPassedApplyReq: - if ctx.ProjectPlanStatus != models.PassedPolicyCheckStatus { - return "All policies must pass for project before running apply", nil - } - case raw.MergeableApplyRequirement: - if !ctx.PullReqStatus.Mergeable { - return "Pull request must be mergeable before running apply.", nil - } - case raw.UnDivergedApplyRequirement: - if a.WorkingDir.HasDiverged(ctx.Log, repoDir, ctx.BaseRepo) { - return "Default branch must be rebased onto pull request before running apply.", nil - } - case raw.UnlockedApplyRequirement: - if ctx.PullReqStatus.SQLocked { - return "Pull request must be unlocked using the 🔓 emoji before running apply.", nil - } - } - } - // Passed all apply requirements configured. - return "", nil -} diff --git a/server/legacy/events/command/apply/runner.go b/server/legacy/events/command/apply/runner.go deleted file mode 100644 index 006debe4a..000000000 --- a/server/legacy/events/command/apply/runner.go +++ /dev/null @@ -1,26 +0,0 @@ -package apply - -import ( - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func NewDisabledRunner(outputUpdater events.OutputUpdater) *DisabledRunner { - return &DisabledRunner{ - pullUpdater: outputUpdater, - } -} - -type DisabledRunner struct { - pullUpdater events.OutputUpdater -} - -func (r *DisabledRunner) Run(ctx *command.Context, cmd *command.Comment) { - r.pullUpdater.UpdateOutput( - ctx, - cmd, - command.Result{ - Failure: "Atlantis apply is being deprecated, please merge the PR to apply your changes", - }, - ) -} diff --git a/server/legacy/events/command/comment.go b/server/legacy/events/command/comment.go deleted file mode 100644 index e1d96a154..000000000 --- a/server/legacy/events/command/comment.go +++ /dev/null @@ -1,73 +0,0 @@ -package command - -import ( - "fmt" - "path" - "strings" -) - -// NewComment constructs a Command, setting all missing fields to defaults. -func NewComment(repoRelDir string, flags []string, name Name, forceApply bool, workspace string, project string, logLevel string) *Comment { - // If repoRelDir was empty we want to keep it that way to indicate that it - // wasn't specified in the comment. - if repoRelDir != "" { - repoRelDir = path.Clean(repoRelDir) - if repoRelDir == "/" { - repoRelDir = "." - } - } - return &Comment{ - RepoRelDir: repoRelDir, - Flags: flags, - Name: name, - Workspace: workspace, - ProjectName: project, - ForceApply: forceApply, - LogLevel: logLevel, - } -} - -// Comment is a command that was triggered by a pull request comment. -type Comment struct { - // RepoRelDir is the path relative to the repo root to run the command in. - // Will never end in "/". If empty then the comment specified no directory. - RepoRelDir string - // Flags are the extra arguments appended to the comment, - // ex. atlantis plan -- -target=resource - Flags []string - // Name is the name of the command the comment specified. - Name Name - //ForceApply is true of the command should ignore apply_requirments. - ForceApply bool - // Workspace is the name of the Terraform workspace to run the command in. 
- // If empty then the comment specified no workspace. - Workspace string - // ProjectName is the name of a project to run the command on. It refers to a - // project specified in an atlantis.yaml file. - // If empty then the comment specified no project. - ProjectName string - // LogLevel is the name log level verbosity requested on the underlying Terraform operation. - LogLevel string -} - -// IsForSpecificProject returns true if the command is for a specific dir, workspace -// or project name. Otherwise it's a command like "atlantis plan" or "atlantis -// apply". -func (c Comment) IsForSpecificProject() bool { - return c.RepoRelDir != "" || c.Workspace != "" || c.ProjectName != "" -} - -// CommandName returns the name of this command. -func (c Comment) CommandName() Name { - return c.Name -} - -// IsAutoplan will be false for comment commands. -func (c Comment) IsAutoplan() bool { - return false -} - -// String returns a string representation of the command. -func (c Comment) String() string { - return fmt.Sprintf("command=%q dir=%q workspace=%q project=%q loglevel=%q flags=%q", c.Name.String(), c.RepoRelDir, c.Workspace, c.ProjectName, c.LogLevel, strings.Join(c.Flags, ",")) -} diff --git a/server/legacy/events/command/context.go b/server/legacy/events/command/context.go deleted file mode 100644 index 8b56299e6..000000000 --- a/server/legacy/events/command/context.go +++ /dev/null @@ -1,52 +0,0 @@ -package command - -import ( - "context" - "time" - - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/uber-go/tally/v4" -) - -// CommandTrigger represents the how the command was triggered -type CommandTrigger int //nolint:revive // avoiding refactor while adding linter action - -const ( - // Commands that are automatically triggered (ie. automatic plans) - AutoTrigger CommandTrigger = iota - - // Commands that are triggered by comments (ie. atlantis plan) - CommentTrigger - - // Commands that are triggered by PR reviews (ie. atlantis policy checks) - PRReviewTrigger -) - -// Context represents the context of a command that should be executed -// for a pull request. -type Context struct { - // HeadRepo is the repository that is getting merged into the BaseRepo. - // If the pull request branch is from the same repository then HeadRepo will - // be the same as BaseRepo. - // See https://help.github.com/articles/about-pull-request-merges/. - HeadRepo models.Repo - Pull models.PullRequest - Scope tally.Scope - // User is the user that triggered this command. - User models.User - Log logging.Logger - - // Current PR state - PullRequestStatus models.PullReqStatus - - PullStatus *models.PullStatus - - Trigger CommandTrigger - - // Time Atlantis received VCS event, triggering command to be executed - TriggerTimestamp time.Time - RequestCtx context.Context - - InstallationToken int64 -} diff --git a/server/legacy/events/command/lock.go b/server/legacy/events/command/lock.go deleted file mode 100644 index e95cc38db..000000000 --- a/server/legacy/events/command/lock.go +++ /dev/null @@ -1,26 +0,0 @@ -package command - -import ( - "time" -) - -// LockMetadata contains additional data provided to the lock -type LockMetadata struct { - UnixTime int64 -} - -// Lock represents a global lock for an atlantis command (plan, apply, policy_check). -// It is used to prevent commands from being executed -type Lock struct { - // Time is the time at which the lock was first created. 
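[Editor's aside] A short hypothetical snippet, assuming the legacy events/command package as it existed before this deletion, showing how NewComment (comment.go above) normalizes the directory argument and how IsForSpecificProject distinguishes targeted commands from repo-wide ones:

package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/legacy/events/command"
)

func main() {
	// Trailing slashes are removed by path.Clean inside NewComment.
	targeted := command.NewComment("infra/prod/", nil, command.Plan, false, "", "", "")
	fmt.Println(targeted.RepoRelDir)             // infra/prod
	fmt.Println(targeted.IsForSpecificProject()) // true: a directory was given

	// No dir, workspace, or project: behaves like a bare "atlantis plan".
	repoWide := command.NewComment("", nil, command.Plan, false, "", "", "")
	fmt.Println(repoWide.IsForSpecificProject()) // false
	fmt.Println(repoWide.String())               // command="plan" dir="" workspace="" ...
}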
- LockMetadata LockMetadata - CommandName Name -} - -func (l *Lock) LockTime() time.Time { - return time.Unix(l.LockMetadata.UnixTime, 0) -} - -func (l *Lock) IsLocked() bool { - return !l.LockTime().IsZero() -} diff --git a/server/legacy/events/command/mocks/matchers/context_context.go b/server/legacy/events/command/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/events/command/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/events/command/mocks/matchers/models_repo.go b/server/legacy/events/command/mocks/matchers/models_repo.go deleted file mode 100644 index b36c3ee7c..000000000 --- a/server/legacy/events/command/mocks/matchers/models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func NotEqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func ModelsRepoThat(matcher pegomock.ArgumentMatcher) models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/events/command/mocks/matchers/ptr_to_command_comment.go b/server/legacy/events/command/mocks/matchers/ptr_to_command_comment.go deleted file mode 100644 index 682e74d7a..000000000 --- a/server/legacy/events/command/mocks/matchers/ptr_to_command_comment.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
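[Editor's aside] One subtlety in lock.go worth flagging: LockTime converts LockMetadata.UnixTime with time.Unix, and time.Unix(0, 0) is the 1970 epoch rather than time.Time's zero value, so IsLocked as written reports true even for a zero-valued Lock. A stand-alone copy (trimmed for illustration, not the original package) that demonstrates this:

package main

import (
	"fmt"
	"time"
)

// Trimmed-down copies of the deleted command.Lock types, for illustration only.
type LockMetadata struct{ UnixTime int64 }

type Lock struct{ LockMetadata LockMetadata }

func (l *Lock) LockTime() time.Time { return time.Unix(l.LockMetadata.UnixTime, 0) }
func (l *Lock) IsLocked() bool      { return !l.LockTime().IsZero() }

func main() {
	var unset Lock // UnixTime == 0, never locked
	fmt.Println(unset.LockTime().UTC()) // 1970-01-01 00:00:00 +0000 UTC
	fmt.Println(unset.IsLocked())       // true, despite never being locked
}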
-package matchers - -import ( - "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" -) - -func AnyPtrToCommandComment() *command.Comment { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Comment))(nil)).Elem())) - var nullValue *command.Comment - return nullValue -} - -func EqPtrToCommandComment(value *command.Comment) *command.Comment { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Comment - return nullValue -} diff --git a/server/legacy/events/command/mocks/matchers/ptr_to_command_context.go b/server/legacy/events/command/mocks/matchers/ptr_to_command_context.go deleted file mode 100644 index 342d40d55..000000000 --- a/server/legacy/events/command/mocks/matchers/ptr_to_command_context.go +++ /dev/null @@ -1,20 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" -) - -func AnyPtrToCommandContext() *command.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Context))(nil)).Elem())) - var nullValue *command.Context - return nullValue -} - -func EqPtrToCommandContext(value *command.Context) *command.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Context - return nullValue -} diff --git a/server/legacy/events/command/mocks/mock_job_closer.go b/server/legacy/events/command/mocks/mock_job_closer.go deleted file mode 100644 index 47d3e8cdb..000000000 --- a/server/legacy/events/command/mocks/mock_job_closer.go +++ /dev/null @@ -1,107 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/command (interfaces: JobCloser) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockJobCloser struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobCloser(options ...pegomock.Option) *MockJobCloser { - mock := &MockJobCloser{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobCloser) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobCloser) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobCloser) CloseJob(_param0 context.Context, _param1 string, _param2 models.Repo) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockJobCloser().") - } - params := []pegomock.Param{_param0, _param1, _param2} - pegomock.GetGenericMockFrom(mock).Invoke("CloseJob", params, []reflect.Type{}) -} - -func (mock *MockJobCloser) VerifyWasCalledOnce() *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobCloser) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobCloser) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobCloser) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobCloser struct { - mock *MockJobCloser - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobCloser) CloseJob(_param0 context.Context, _param1 string, _param2 models.Repo) *MockJobCloser_CloseJob_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CloseJob", params, verifier.timeout) - return &MockJobCloser_CloseJob_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobCloser_CloseJob_OngoingVerification struct { - mock *MockJobCloser - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobCloser_CloseJob_OngoingVerification) GetCapturedArguments() (context.Context, string, models.Repo) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockJobCloser_CloseJob_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string, _param2 []models.Repo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.Repo) - } - } - return -} diff --git a/server/legacy/events/command/mocks/mock_project_job_url_generator.go b/server/legacy/events/command/mocks/mock_project_job_url_generator.go deleted file mode 100644 index be61cc201..000000000 --- a/server/legacy/events/command/mocks/mock_project_job_url_generator.go +++ /dev/null @@ -1,108 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events/command (interfaces: JobURLGenerator) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockJobURLGenerator struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobURLGenerator(options ...pegomock.Option) *MockJobURLGenerator { - mock := &MockJobURLGenerator{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobURLGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobURLGenerator) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobURLGenerator) GenerateProjectJobURL(_param0 string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobURLGenerator().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateProjectJobURL", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockJobURLGenerator) VerifyWasCalledOnce() *VerifierMockJobURLGenerator { - return &VerifierMockJobURLGenerator{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobURLGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobURLGenerator { - return &VerifierMockJobURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobURLGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobURLGenerator { - return &VerifierMockJobURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobURLGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobURLGenerator { - return &VerifierMockJobURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobURLGenerator struct { - mock *MockJobURLGenerator - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobURLGenerator) GenerateProjectJobURL(_param0 string) *MockJobURLGenerator_GenerateProjectJobURL_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateProjectJobURL", params, verifier.timeout) - return &MockJobURLGenerator_GenerateProjectJobURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobURLGenerator_GenerateProjectJobURL_OngoingVerification struct { - mock *MockJobURLGenerator - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobURLGenerator_GenerateProjectJobURL_OngoingVerification) GetCapturedArguments() string { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockJobURLGenerator_GenerateProjectJobURL_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/command/mocks/mock_runner.go b/server/legacy/events/command/mocks/mock_runner.go deleted file mode 100644 index 6fecf6179..000000000 --- a/server/legacy/events/command/mocks/mock_runner.go +++ /dev/null @@ -1,102 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/command (interfaces: Runner) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockRunner(options ...pegomock.Option) *MockRunner { - mock := &MockRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockRunner) Run(ctx *command.Context, cmd *command.Comment) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockRunner().") - } - params := []pegomock.Param{ctx, cmd} - pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{}) -} - -func (mock *MockRunner) VerifyWasCalledOnce() *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockRunner { - return &VerifierMockRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockRunner struct { - mock *MockRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockRunner) Run(ctx *command.Context, cmd *command.Comment) *MockRunner_Run_OngoingVerification { - params := []pegomock.Param{ctx, cmd} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockRunner_Run_OngoingVerification struct { - mock *MockRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockRunner_Run_OngoingVerification) GetCapturedArguments() (*command.Context, *command.Comment) { - ctx, cmd := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], cmd[len(cmd)-1] -} - -func (c *MockRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*command.Comment) { - params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - _param1 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Comment) - } - } - return -} diff --git a/server/legacy/events/command/name.go b/server/legacy/events/command/name.go deleted file mode 100644 index 7bc69ad5c..000000000 --- a/server/legacy/events/command/name.go +++ /dev/null @@ -1,50 +0,0 @@ -package command - -import ( - "strings" - - "golang.org/x/text/cases" - "golang.org/x/text/language" -) - -// Name is which command to run. -type Name int - -const ( - // Apply is a command to run terraform apply. - Apply Name = iota - // Plan is a command to run terraform plan. - Plan - // Unlock is a command to discard previous plans as well as the atlantis locks. - Unlock - // PolicyCheck is a command to run conftest test. - PolicyCheck - // Autoplan is a command to run terrafor plan on PR open/update if autoplan is enabled - Autoplan - // Version is a command to run terraform version. - Version - // Adding more? Don't forget to update String() below -) - -// TitleString returns the string representation in title form. -// ie. policy_check becomes Policy Check -func (c Name) TitleString() string { - return cases.Title(language.English).String(strings.ReplaceAll(strings.ToLower(c.String()), "_", " ")) -} - -// String returns the string representation of c. -func (c Name) String() string { - switch c { - case Apply: - return "apply" - case Plan, Autoplan: - return "plan" - case Unlock: - return "unlock" - case PolicyCheck: - return "policy_check" - case Version: - return "version" - } - return "" -} diff --git a/server/legacy/events/command/name_test.go b/server/legacy/events/command/name_test.go deleted file mode 100644 index 42542373a..000000000 --- a/server/legacy/events/command/name_test.go +++ /dev/null @@ -1,32 +0,0 @@ -package command_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestApplyCommand_String(t *testing.T) { - uc := command.Apply - - Equals(t, "apply", uc.String()) -} - -func TestPlanCommand_String(t *testing.T) { - uc := command.Plan - - Equals(t, "plan", uc.String()) -} - -func TestPolicyCheckCommand_String(t *testing.T) { - uc := command.PolicyCheck - - Equals(t, "policy_check", uc.String()) -} - -func TestUnlockCommand_String(t *testing.T) { - uc := command.Unlock - - Equals(t, "unlock", uc.String()) -} diff --git a/server/legacy/events/command/plan/runner.go b/server/legacy/events/command/plan/runner.go deleted file mode 100644 index e385ed7e9..000000000 --- a/server/legacy/events/command/plan/runner.go +++ /dev/null @@ -1,24 +0,0 @@ -package plan - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" -) - -func NewRunner(vcsClient vcs.Client) *Runner { - return &Runner{ - vcsClient: vcsClient, - } -} - -type Runner struct { - vcsClient vcs.Client -} - -func (r *Runner) Run(ctx *command.Context, cmd *command.Comment) { - if err := r.vcsClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, "I'm a platform mode plan runner", command.Plan.String()); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to comment: %s", err)) - } -} diff --git a/server/legacy/events/command/policy_check_ouptut_store.go b/server/legacy/events/command/policy_check_ouptut_store.go deleted file mode 100644 index 1f3dc7fee..000000000 --- a/server/legacy/events/command/policy_check_ouptut_store.go +++ /dev/null @@ -1,39 +0,0 @@ -package command - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/models" -) - -const KeySeparator = "||" - -type PolicyCheckOutputStore struct { - store map[string]*models.PolicyCheckSuccess -} - -func NewPolicyCheckOutputStore() *PolicyCheckOutputStore { - return &PolicyCheckOutputStore{ - store: map[string]*models.PolicyCheckSuccess{}, - } -} - -func buildKey(projectName string, workspace string) string { - return fmt.Sprintf("%s%s%s", projectName, KeySeparator, workspace) -} - -func (p *PolicyCheckOutputStore) Get(projectName string, workspace string) *models.PolicyCheckSuccess { - key := buildKey(projectName, workspace) - - if output, ok := p.store[key]; ok { - return output - } - return nil -} - -func (p *PolicyCheckOutputStore) Set(projectName string, workspace string, output string) { - key := buildKey(projectName, workspace) - p.store[key] = &models.PolicyCheckSuccess{ - PolicyCheckOutput: output, - } -} diff --git a/server/legacy/events/command/project_context.go b/server/legacy/events/command/project_context.go deleted file mode 100644 index 1eb88ab84..000000000 --- a/server/legacy/events/command/project_context.go +++ /dev/null @@ -1,206 +0,0 @@ -package command - -import ( - "context" - "fmt" - "path/filepath" - "strings" - - "github.com/google/uuid" - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/uber-go/tally/v4" -) - -const ( - planfileSlashReplace = "::" -) - -type ContextFlags struct { - ParallelApply, - ParallelPlan, - ForceApply bool - LogLevel string -} - -func NewProjectContext( - ctx *Context, - cmd Name, - applyCmd string, - planCmd string, - projCfg valid.MergedProjectCfg, - steps []valid.Step, - policySets valid.PolicySets, - escapedCommentArgs []string, - contextFlags *ContextFlags, - scope 
tally.Scope, - pullStatus models.PullReqStatus, -) ProjectContext { - var projectPlanStatus models.ProjectPlanStatus - - if ctx.PullStatus != nil { - for _, project := range ctx.PullStatus.Projects { - if project.ProjectName == projCfg.Name { - projectPlanStatus = project.Status - break - } - } - } - - return ProjectContext{ - CommandName: cmd, - ApplyCmd: applyCmd, - BaseRepo: ctx.Pull.BaseRepo, - EscapedCommentArgs: escapedCommentArgs, - ParallelApplyEnabled: contextFlags.ParallelApply, - ParallelPlanEnabled: contextFlags.ParallelPlan, - AutoplanEnabled: projCfg.AutoplanEnabled, - Steps: steps, - HeadRepo: ctx.HeadRepo, - Log: ctx.Log, - Scope: scope, - ProjectPlanStatus: projectPlanStatus, - Pull: ctx.Pull, - ProjectName: projCfg.Name, - ApplyRequirements: projCfg.ApplyRequirements, - RePlanCmd: planCmd, - RepoRelDir: projCfg.RepoRelDir, - RepoConfigVersion: projCfg.RepoCfgVersion, - TerraformVersion: projCfg.TerraformVersion, - User: ctx.User, - ForceApply: contextFlags.ForceApply, - Workspace: projCfg.Workspace, - PolicySets: policySets, - Tags: projCfg.Tags, - PullReqStatus: pullStatus, - JobID: uuid.New().String(), - RequestCtx: ctx.RequestCtx, - WorkflowModeType: valid.PlatformWorkflowMode, - InstallationToken: ctx.InstallationToken, - Trigger: ctx.Trigger, - } -} - -// ProjectContext defines the context for a plan or apply stage that will -// be executed for a project. -type ProjectContext struct { - CommandName Name - // ApplyCmd is the command that users should run to apply this plan. If - // this is an apply then this will be empty. - ApplyCmd string - // ApplyRequirements is the list of requirements that must be satisfied - // before we will run the apply stage. - ApplyRequirements []string - // ParallelApplyEnabled is true if parallel apply is enabled for this project. - ParallelApplyEnabled bool - // ParallelPlanEnabled is true if parallel plan is enabled for this project. - ParallelPlanEnabled bool - // ParallelPolicyCheckEnabled is true if parallel policy_check is enabled for this project. - ParallelPolicyCheckEnabled bool - // AutoplanEnabled is true if autoplanning is enabled for this project. - AutoplanEnabled bool - // BaseRepo is the repository that the pull request will be merged into. - BaseRepo models.Repo - // EscapedCommentArgs are the extra arguments that were added to the atlantis - // command, ex. atlantis plan -- -target=resource. We then escape them - // by adding a \ before each character so that they can be used within - // sh -c safely, i.e. sh -c "terraform plan $(touch bad)". - EscapedCommentArgs []string - // HeadRepo is the repository that is getting merged into the BaseRepo. - // If the pull request branch is from the same repository then HeadRepo will - // be the same as BaseRepo. - HeadRepo models.Repo - // Log is a logger that's been set up for this context. - Log logging.Logger - // Scope is the scope for reporting stats setup for this context - Scope tally.Scope - // PullReqStatus holds state about the PR that requires additional computation outside models.PullRequest - PullReqStatus models.PullReqStatus - // CurrentProjectPlanStatus is the status of the current project prior to this command. - ProjectPlanStatus models.ProjectPlanStatus - // Pull is the pull request we're responding to. - Pull models.PullRequest - // ProjectName is the name of the project set in atlantis.yaml. If there was - // no name this will be an empty string. - ProjectName string - // RepoConfigVersion is the version of the repo's atlantis.yaml file. 
If - // there was no file, this will be 0. - RepoConfigVersion int - // RePlanCmd is the command that users should run to re-plan this project. - // If this is an apply then this will be empty. - RePlanCmd string - // RepoRelDir is the directory of this project relative to the repo root. - RepoRelDir string - // Steps are the sequence of commands we need to run for this project and this - // stage. - Steps []valid.Step - // TerraformVersion is the version of terraform we should use when executing - // commands for this project. This can be set to nil in which case we will - // use the default Atlantis terraform version. - TerraformVersion *version.Version - // Configuration metadata for a given project. - Tags map[string]string - // User is the user that triggered this command. - User models.User - // ForceApply is true when the apply should ignore apply_requirements. - ForceApply bool - // Workspace is the Terraform workspace this project is in. It will always - // be set. - Workspace string - // PolicySets represent the policies that are run on the plan as part of the - // policy check stage - PolicySets valid.PolicySets - // UUID for atlantis logs - JobID string - // RequestCtx is the context generated when request is first received from VCS - RequestCtx context.Context - // StatusID is used for consecutive status updates in the step runners - StatusID string - - WorkflowModeType valid.WorkflowModeType - InstallationToken int64 - Trigger CommandTrigger -} - -// ProjectCloneDir creates relative path to clone the repo to. If we are running -// plans and apply in parallel we want to have a directory per project. -func (p ProjectContext) ProjectCloneDir() string { - if p.ParallelPlanEnabled || p.ParallelApplyEnabled { - return filepath.Join(p.ProjectName, p.Workspace) - } - - return p.Workspace -} - -// SetScope sets the scope of the stats object field. 
Note: we deliberately set this on the value -// instead of a pointer since we want scopes to mirror our function stack -func (p ProjectContext) SetScope(scope string) { - p.Scope = p.Scope.SubScope(scope) //nolint -} - -// GetShowResultFileName returns the filename (not the path) to store the tf show result -func (p ProjectContext) GetShowResultFileName() string { - if p.ProjectName == "" { - return fmt.Sprintf("%s.json", p.Workspace) - } - projName := strings.Replace(p.ProjectName, "/", planfileSlashReplace, -1) - return fmt.Sprintf("%s-%s.json", projName, p.Workspace) -} - -// Gets a unique identifier for the current pull request as a single string -func (p ProjectContext) PullInfof() string { - return buildPullInfof(p.BaseRepo.FullName, p.Pull.Num, p.ProjectName, p.RepoRelDir, p.Workspace) -} -func buildPullInfof(repoName string, pullNum int, projectName string, relDir string, workspace string) string { - projectIdentifier := getProjectIdentifier(relDir, projectName) - return fmt.Sprintf("%s/%d/%s/%s", repoName, pullNum, projectIdentifier, workspace) -} - -func getProjectIdentifier(relRepoDir string, projectName string) string { - if projectName != "" { - return projectName - } - return strings.ReplaceAll(relRepoDir, "/", "-") -} diff --git a/server/legacy/events/command/project_result.go b/server/legacy/events/command/project_result.go deleted file mode 100644 index ced3ad7b9..000000000 --- a/server/legacy/events/command/project_result.go +++ /dev/null @@ -1,66 +0,0 @@ -package command - -import ( - "github.com/runatlantis/atlantis/server/models" -) - -// ProjectResult is the result of executing a plan/policy_check/apply for a specific project. -type ProjectResult struct { - Command Name - RepoRelDir string - Workspace string - Error error - Failure string - PlanSuccess *models.PlanSuccess - PolicyCheckSuccess *models.PolicyCheckSuccess - ApplySuccess string - VersionSuccess string - ProjectName string - StatusID string - JobID string -} - -// VcsStatus returns the vcs commit status of this project result. -func (p ProjectResult) VcsStatus() models.VCSStatus { - if p.Error != nil { - return models.FailedVCSStatus - } - if p.Failure != "" { - return models.FailedVCSStatus - } - return models.SuccessVCSStatus -} - -// PlanStatus returns the plan status. -func (p ProjectResult) PlanStatus() models.ProjectPlanStatus { - switch p.Command { - case Plan: - if p.Error != nil { - return models.ErroredPlanStatus - } else if p.Failure != "" { - return models.ErroredPlanStatus - } - return models.PlannedPlanStatus - case PolicyCheck: - if p.Error != nil { - return models.ErroredPolicyCheckStatus - } else if p.Failure != "" { - return models.ErroredPolicyCheckStatus - } - return models.PassedPolicyCheckStatus - case Apply: - if p.Error != nil { - return models.ErroredApplyStatus - } else if p.Failure != "" { - return models.ErroredApplyStatus - } - return models.AppliedPlanStatus - } - - panic("PlanStatus() missing a combination") -} - -// IsSuccessful returns true if this project result had no errors. 
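[Editor's aside] A quick illustration of the naming helpers above, with hypothetical values and import paths referring to the package as it existed before this PR: GetShowResultFileName escapes slashes in the project name with "::", while PullInfof prefers the project name over the relative dir when building the pull identifier.

package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/legacy/events/command"
	"github.com/runatlantis/atlantis/server/models"
)

func main() {
	p := command.ProjectContext{
		BaseRepo:    models.Repo{FullName: "owner/repo"},
		Pull:        models.PullRequest{Num: 42},
		ProjectName: "network/dev",
		RepoRelDir:  "terraform/network",
		Workspace:   "default",
	}
	fmt.Println(p.GetShowResultFileName()) // network::dev-default.json
	fmt.Println(p.PullInfof())             // owner/repo/42/network/dev/default

	// Without a project name the relative dir is used, with "/" replaced by "-".
	p.ProjectName = ""
	fmt.Println(p.GetShowResultFileName()) // default.json
	fmt.Println(p.PullInfof())             // owner/repo/42/terraform-network/default
}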
-func (p ProjectResult) IsSuccessful() bool { - return p.PlanSuccess != nil || p.PolicyCheckSuccess != nil || p.ApplySuccess != "" -} diff --git a/server/legacy/events/command/project_result_test.go b/server/legacy/events/command/project_result_test.go deleted file mode 100644 index 698466a1d..000000000 --- a/server/legacy/events/command/project_result_test.go +++ /dev/null @@ -1,215 +0,0 @@ -package command_test - -import ( - "errors" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func TestProjectResult_IsSuccessful(t *testing.T) { - cases := map[string]struct { - pr command.ProjectResult - exp bool - }{ - "plan success": { - command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{}, - }, - true, - }, - "policy_check success": { - command.ProjectResult{ - PolicyCheckSuccess: &models.PolicyCheckSuccess{}, - }, - true, - }, - "apply success": { - command.ProjectResult{ - ApplySuccess: "success", - }, - true, - }, - "failure": { - command.ProjectResult{ - Failure: "failure", - }, - false, - }, - "error": { - command.ProjectResult{ - Error: errors.New("error"), - }, - false, - }, - } - - for name, c := range cases { - t.Run(name, func(t *testing.T) { - Equals(t, c.exp, c.pr.IsSuccessful()) - }) - } -} - -func TestProjectResult_PlanStatus(t *testing.T) { - cases := []struct { - p command.ProjectResult - expStatus models.ProjectPlanStatus - }{ - { - p: command.ProjectResult{ - Command: command.Plan, - Error: errors.New("err"), - }, - expStatus: models.ErroredPlanStatus, - }, - { - p: command.ProjectResult{ - Command: command.Plan, - Failure: "failure", - }, - expStatus: models.ErroredPlanStatus, - }, - { - p: command.ProjectResult{ - Command: command.Plan, - PlanSuccess: &models.PlanSuccess{}, - }, - expStatus: models.PlannedPlanStatus, - }, - { - p: command.ProjectResult{ - Command: command.Apply, - Error: errors.New("err"), - }, - expStatus: models.ErroredApplyStatus, - }, - { - p: command.ProjectResult{ - Command: command.Apply, - Failure: "failure", - }, - expStatus: models.ErroredApplyStatus, - }, - { - p: command.ProjectResult{ - Command: command.Apply, - ApplySuccess: "success", - }, - expStatus: models.AppliedPlanStatus, - }, - { - p: command.ProjectResult{ - Command: command.PolicyCheck, - PolicyCheckSuccess: &models.PolicyCheckSuccess{}, - }, - expStatus: models.PassedPolicyCheckStatus, - }, - { - p: command.ProjectResult{ - Command: command.PolicyCheck, - Failure: "failure", - }, - expStatus: models.ErroredPolicyCheckStatus, - }, - } - - for _, c := range cases { - t.Run(c.expStatus.String(), func(t *testing.T) { - Equals(t, c.expStatus, c.p.PlanStatus()) - }) - } -} - -func TestPlanSuccess_Summary(t *testing.T) { - cases := []struct { - p command.ProjectResult - expResult string - }{ - { - p: command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: ` - An execution plan has been generated and is shown below. - Resource actions are indicated with the following symbols: - - destroy - - Terraform will perform the following actions: - - - null_resource.hi[1] - - - Plan: 0 to add, 0 to change, 1 to destroy.`, - }, - }, - expResult: "Plan: 0 to add, 0 to change, 1 to destroy.", - }, - { - p: command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: ` - An execution plan has been generated and is shown below. - Resource actions are indicated with the following symbols: - - No changes. 
Infrastructure is up-to-date.`, - }, - }, - expResult: "No changes. Infrastructure is up-to-date.", - }, - { - //nolint:dupword - p: command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: ` - Note: Objects have changed outside of Terraform - - Terraform detected the following changes made outside of Terraform since the - last "terraform apply": - - No changes. Your infrastructure matches the configuration.`, - }, - }, - expResult: "\n**Note: Objects have changed outside of Terraform**\nNo changes. Your infrastructure matches the configuration.", - }, - { - //nolint:dupword - p: command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: ` - Note: Objects have changed outside of Terraform - - Terraform detected the following changes made outside of Terraform since the - last "terraform apply": - - An execution plan has been generated and is shown below. - Resource actions are indicated with the following symbols: - - destroy - - Terraform will perform the following actions: - - - null_resource.hi[1] - - - Plan: 0 to add, 0 to change, 1 to destroy.`, - }, - }, - expResult: "\n**Note: Objects have changed outside of Terraform**\nPlan: 0 to add, 0 to change, 1 to destroy.", - }, - { - p: command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{ - TerraformOutput: `No match, expect empty`, - }, - }, - expResult: "", - }, - } - - for _, c := range cases { - t.Run(c.expResult, func(t *testing.T) { - Equals(t, c.expResult, c.p.PlanSuccess.Summary()) - }) - } -} diff --git a/server/legacy/events/command/project_status_updater.go b/server/legacy/events/command/project_status_updater.go deleted file mode 100644 index b5629670b..000000000 --- a/server/legacy/events/command/project_status_updater.go +++ /dev/null @@ -1,50 +0,0 @@ -package command - -import ( - "context" - "fmt" - - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_job_closer.go JobCloser - -// Job Closer closes a job by marking op complete and clearing up buffers if logs are successfully persisted -type JobCloser interface { - CloseJob(ctx context.Context, jobID string, repo models.Repo) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_job_url_generator.go ProjectJobURLGenerator - -// JobURLGenerator generates urls to view project's progress. -type JobURLGenerator interface { - GenerateProjectJobURL(jobID string) (string, error) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_status_updater.go ProjectStatusUpdater - -type projectVCSStatusUpdater interface { - // UpdateProject sets the commit status for the project represented by - // ctx. 
- UpdateProject(ctx context.Context, projectCtx ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) -} - -type ProjectStatusUpdater struct { - ProjectJobURLGenerator JobURLGenerator - JobCloser JobCloser - ProjectVCSStatusUpdater projectVCSStatusUpdater -} - -func (p ProjectStatusUpdater) UpdateProjectStatus(ctx ProjectContext, status models.VCSStatus) (string, error) { - url, err := p.ProjectJobURLGenerator.GenerateProjectJobURL(ctx.JobID) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - statusID, err := p.ProjectVCSStatusUpdater.UpdateProject(ctx.RequestCtx, ctx, ctx.CommandName, status, url, ctx.StatusID) - - // Close the Job if the operation is complete - if status == models.SuccessVCSStatus || status == models.FailedVCSStatus { - p.JobCloser.CloseJob(ctx.RequestCtx, ctx.JobID, ctx.BaseRepo) - } - return statusID, err -} diff --git a/server/legacy/events/command/project_status_updater_test.go b/server/legacy/events/command/project_status_updater_test.go deleted file mode 100644 index ecc2f9bd2..000000000 --- a/server/legacy/events/command/project_status_updater_test.go +++ /dev/null @@ -1,104 +0,0 @@ -package command_test - -import ( - "context" - "fmt" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -type testJobURLGenerator struct { - expectedURL string - expectedErr error -} - -func (t *testJobURLGenerator) GenerateProjectJobURL(jobID string) (string, error) { - return t.expectedURL, t.expectedErr -} - -type testJobCloser struct { - called bool -} - -func (t *testJobCloser) CloseJob(ctx context.Context, jobID string, repo models.Repo) { - t.called = true -} - -type testCommitStatusUpdater struct { - expectedStatusID string - expectedError error -} - -func (t *testCommitStatusUpdater) UpdateProject(ctx context.Context, projectCtx command.ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) { - return t.expectedStatusID, t.expectedError -} - -func TestProjectStatusUpdater_CloseJobWhenOperationComplete(t *testing.T) { - jobURLGenerator := testJobURLGenerator{ - expectedURL: "url", - expectedErr: nil, - } - - jobCloser := testJobCloser{} - - commitStatusUpdater := testCommitStatusUpdater{ - expectedStatusID: "1234", - expectedError: nil, - } - - prjStatusUpdater := command.ProjectStatusUpdater{ - ProjectJobURLGenerator: &jobURLGenerator, - JobCloser: &jobCloser, - ProjectVCSStatusUpdater: &commitStatusUpdater, - } - - statusID, err := prjStatusUpdater.UpdateProjectStatus(command.ProjectContext{}, models.SuccessVCSStatus) - - if err != nil { - t.FailNow() - } - - if statusID != "1234" { - t.FailNow() - } - - if jobCloser.called != true { - t.FailNow() - } -} - -func TestProjectStatusUpdater_DoNotCloseJobWhenInProgress(t *testing.T) { - jobURLGenerator := testJobURLGenerator{ - expectedURL: "url", - expectedErr: nil, - } - - jobCloser := testJobCloser{} - - commitStatusUpdater := testCommitStatusUpdater{ - expectedStatusID: "1234", - expectedError: nil, - } - - prjStatusUpdater := command.ProjectStatusUpdater{ - ProjectJobURLGenerator: &jobURLGenerator, - JobCloser: &jobCloser, - ProjectVCSStatusUpdater: &commitStatusUpdater, - } - - statusID, err := prjStatusUpdater.UpdateProjectStatus(command.ProjectContext{}, models.PendingVCSStatus) - - if err != nil { - t.FailNow() - } - - if statusID != "1234" { - t.FailNow() - } - - if 
jobCloser.called != false { - t.FailNow() - } -} diff --git a/server/legacy/events/command/result.go b/server/legacy/events/command/result.go deleted file mode 100644 index ffa6cc59b..000000000 --- a/server/legacy/events/command/result.go +++ /dev/null @@ -1,22 +0,0 @@ -package command - -// Result is the result of running a Command. -type Result struct { - Error error - Failure string - ProjectResults []ProjectResult -} - -// HasErrors returns true if there were any errors during the execution, -// even if it was only in one project. -func (c Result) HasErrors() bool { - if c.Error != nil || c.Failure != "" { - return true - } - for _, r := range c.ProjectResults { - if !r.IsSuccessful() { - return true - } - } - return false -} diff --git a/server/legacy/events/command/result_test.go b/server/legacy/events/command/result_test.go deleted file mode 100644 index fadb323a4..000000000 --- a/server/legacy/events/command/result_test.go +++ /dev/null @@ -1,108 +0,0 @@ -package command_test - -import ( - "errors" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func TestCommandResult_HasErrors(t *testing.T) { - cases := map[string]struct { - cr command.Result - exp bool - }{ - "error": { - cr: command.Result{ - Error: errors.New("err"), - }, - exp: true, - }, - "failure": { - cr: command.Result{ - Failure: "failure", - }, - exp: true, - }, - "empty results list": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{}, - }, - exp: false, - }, - "successful plan": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{ - { - PlanSuccess: &models.PlanSuccess{}, - }, - }, - }, - exp: false, - }, - "successful apply": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{ - { - ApplySuccess: "success", - }, - }, - }, - exp: false, - }, - "single errored project": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{ - { - Error: errors.New("err"), - }, - }, - }, - exp: true, - }, - "single failed project": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{ - { - Failure: "failure", - }, - }, - }, - exp: true, - }, - "two successful projects": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{ - { - PlanSuccess: &models.PlanSuccess{}, - }, - { - ApplySuccess: "success", - }, - }, - }, - exp: false, - }, - "one successful, one failed project": { - cr: command.Result{ - ProjectResults: []command.ProjectResult{ - { - PlanSuccess: &models.PlanSuccess{}, - }, - { - Failure: "failed", - }, - }, - }, - exp: true, - }, - } - - for descrip, c := range cases { - t.Run(descrip, func(t *testing.T) { - Equals(t, c.exp, c.cr.HasErrors()) - }) - } -} diff --git a/server/legacy/events/command/runners.go b/server/legacy/events/command/runners.go deleted file mode 100644 index fee68f1a5..000000000 --- a/server/legacy/events/command/runners.go +++ /dev/null @@ -1,8 +0,0 @@ -package command - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_runner.go Runner - -// Runner runs individual command workflows. 
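// Implementations are wired up per command name (plan, apply, unlock, version)
// in DefaultCommandRunner.CommentCommandRunnerByCmd; see command_runner.go below.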
-type Runner interface { - Run(ctx *Context, cmd *Comment) -} diff --git a/server/legacy/events/command/vcs.go b/server/legacy/events/command/vcs.go deleted file mode 100644 index fb6133ef6..000000000 --- a/server/legacy/events/command/vcs.go +++ /dev/null @@ -1,125 +0,0 @@ -package command - -import ( - "context" - "fmt" - "strconv" - "strings" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" - "golang.org/x/text/cases" - "golang.org/x/text/language" -) - -// VCSStatusUpdater updates the status of a commit with the VCS host. We set -// the status to signify whether the plan/apply succeeds. -type VCSStatusUpdater struct { - Client vcs.Client - TitleBuilder vcs.StatusTitleBuilder - DefaultDetailsURL string -} - -func (d *VCSStatusUpdater) UpdateCombined(ctx context.Context, repo models.Repo, pull models.PullRequest, status models.VCSStatus, cmdName fmt.Stringer, statusID string, output string) (string, error) { - src := d.TitleBuilder.Build(cmdName.String()) - descrip := fmt.Sprintf("%s %s", cases.Title(language.English).String(cmdName.String()), d.statusDescription(status)) - - request := types.UpdateStatusRequest{ - Repo: repo, - PullNum: pull.Num, - Ref: pull.HeadCommit, - StatusName: src, - State: status, - Description: descrip, - DetailsURL: d.DefaultDetailsURL, - PullCreationTime: pull.CreatedAt, - StatusID: statusID, - CommandName: titleString(cmdName), - Output: output, - } - return d.Client.UpdateStatus(ctx, request) -} - -func (d *VCSStatusUpdater) UpdateCombinedCount(ctx context.Context, repo models.Repo, pull models.PullRequest, status models.VCSStatus, cmdName fmt.Stringer, numSuccess int, numTotal int, statusID string) (string, error) { - src := d.TitleBuilder.Build(cmdName.String()) - cmdVerb := "unknown" - - switch cmdName { - case Plan: - cmdVerb = "planned" - case PolicyCheck: - cmdVerb = "policies checked" - case Apply: - cmdVerb = "applied" - } - - request := types.UpdateStatusRequest{ - Repo: repo, - PullNum: pull.Num, - Ref: pull.HeadCommit, - StatusName: src, - State: status, - Description: fmt.Sprintf("%d/%d projects %s successfully.", numSuccess, numTotal, cmdVerb), - DetailsURL: d.DefaultDetailsURL, - PullCreationTime: pull.CreatedAt, - StatusID: statusID, - CommandName: titleString(cmdName), - - // Additional fields for github checks rendering - NumSuccess: strconv.FormatInt(int64(numSuccess), 10), - NumTotal: strconv.FormatInt(int64(numTotal), 10), - } - - return d.Client.UpdateStatus(ctx, request) -} - -func (d *VCSStatusUpdater) UpdateProject(ctx context.Context, projectCtx ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) { - projectID := projectCtx.ProjectName - if projectID == "" { - projectID = fmt.Sprintf("%s/%s", projectCtx.RepoRelDir, projectCtx.Workspace) - } - statusName := d.TitleBuilder.Build(cmdName.String(), vcs.StatusTitleOptions{ - ProjectName: projectID, - }) - - description := fmt.Sprintf("%s %s", cases.Title(language.English).String(cmdName.String()), d.statusDescription(status)) - request := types.UpdateStatusRequest{ - Repo: projectCtx.BaseRepo, - PullNum: projectCtx.Pull.Num, - Ref: projectCtx.Pull.HeadCommit, - StatusName: statusName, - State: status, - Description: description, - DetailsURL: url, - PullCreationTime: projectCtx.Pull.CreatedAt, - StatusID: statusID, - - CommandName: titleString(cmdName), - Project: projectCtx.ProjectName, - Workspace: 
projectCtx.Workspace, - Directory: projectCtx.RepoRelDir, - } - - return d.Client.UpdateStatus(ctx, request) -} - -func (d *VCSStatusUpdater) statusDescription(status models.VCSStatus) string { - var description string - switch status { - case models.QueuedVCSStatus: - description = "queued." - case models.PendingVCSStatus: - description = "in progress..." - case models.FailedVCSStatus: - description = "failed." - case models.SuccessVCSStatus: - description = "succeeded." - } - - return description -} - -func titleString(cmdName fmt.Stringer) string { - return cases.Title(language.English).String(strings.ReplaceAll(strings.ToLower(cmdName.String()), "_", " ")) -} diff --git a/server/legacy/events/command/vcs_test.go b/server/legacy/events/command/vcs_test.go deleted file mode 100644 index bb66278a2..000000000 --- a/server/legacy/events/command/vcs_test.go +++ /dev/null @@ -1,334 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package command_test - -import ( - "context" - "fmt" - "strconv" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestUpdateCombined(t *testing.T) { - cases := []struct { - status models.VCSStatus - command command.Name - expDescrip string - }{ - { - status: models.QueuedVCSStatus, - command: command.Plan, - expDescrip: "Plan queued.", - }, - { - status: models.PendingVCSStatus, - command: command.Plan, - expDescrip: "Plan in progress...", - }, - { - status: models.FailedVCSStatus, - command: command.Plan, - expDescrip: "Plan failed.", - }, - { - status: models.SuccessVCSStatus, - command: command.Plan, - expDescrip: "Plan succeeded.", - }, - { - status: models.PendingVCSStatus, - command: command.Apply, - expDescrip: "Apply in progress...", - }, - { - status: models.FailedVCSStatus, - command: command.Apply, - expDescrip: "Apply failed.", - }, - { - status: models.SuccessVCSStatus, - command: command.Apply, - expDescrip: "Apply succeeded.", - }, - } - - for _, c := range cases { - t.Run(c.expDescrip, func(t *testing.T) { - RegisterMockTestingT(t) - client := mocks.NewMockClient() - - titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis"} - s := command.VCSStatusUpdater{Client: client, TitleBuilder: titleBuilder} - ctx := context.Background() - - _, err := s.UpdateCombined(ctx, models.Repo{}, models.PullRequest{}, c.status, c.command, "", "") - Ok(t, err) - - expSrc := fmt.Sprintf("atlantis/%s", c.command) - client.VerifyWasCalledOnce().UpdateStatus(ctx, types.UpdateStatusRequest{ - Repo: models.Repo{}, - PullNum: 0, - State: c.status, - StatusName: expSrc, - Description: c.expDescrip, - CommandName: c.command.TitleString(), - }) - }) - } -} - -func TestUpdateCombinedCount(t *testing.T) { - cases := []struct { - status models.VCSStatus - command command.Name - numSuccess int - numTotal int - expDescrip string - }{ - { - status: models.QueuedVCSStatus, - command: command.Plan, - numSuccess: 0, - numTotal: 2, - expDescrip: "0/2 projects planned successfully.", - }, - { - status: models.PendingVCSStatus, - command: command.Plan, - numSuccess: 0, - numTotal: 2, - expDescrip: "0/2 projects planned successfully.", - }, - { - status: models.FailedVCSStatus, - command: command.Plan, - numSuccess: 1, - numTotal: 2, - expDescrip: "1/2 projects planned successfully.", - }, - { - status: models.SuccessVCSStatus, - command: command.Plan, - numSuccess: 2, - numTotal: 2, - expDescrip: "2/2 projects planned successfully.", - }, - { - status: models.FailedVCSStatus, - command: command.Apply, - numSuccess: 0, - numTotal: 2, - expDescrip: "0/2 projects applied successfully.", - }, - { - status: models.PendingVCSStatus, - command: command.Apply, - numSuccess: 1, - numTotal: 2, - expDescrip: "1/2 projects applied successfully.", - }, - { - status: models.SuccessVCSStatus, - command: command.Apply, - numSuccess: 2, - numTotal: 2, - expDescrip: "2/2 projects applied successfully.", - }, - } - - for _, c := range cases { - t.Run(c.expDescrip, func(t *testing.T) { - RegisterMockTestingT(t) - client := mocks.NewMockClient() - titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis-test"} - s := command.VCSStatusUpdater{Client: client, TitleBuilder: titleBuilder} - ctx := context.Background() - _, err := s.UpdateCombinedCount(ctx, models.Repo{}, models.PullRequest{}, c.status, c.command, c.numSuccess, c.numTotal, "") - Ok(t, err) - - expSrc := fmt.Sprintf("%s/%s", titleBuilder.TitlePrefix, c.command) - client.VerifyWasCalledOnce().UpdateStatus(ctx, types.UpdateStatusRequest{ - Repo: models.Repo{}, - PullNum: 0, - State: c.status, - StatusName: expSrc, - 
Description: c.expDescrip, - CommandName: c.command.TitleString(), - NumSuccess: strconv.FormatInt(int64(c.numSuccess), 10), - NumTotal: strconv.FormatInt(int64(c.numTotal), 10), - }) - }) - } -} - -// Test that it sets the "source" properly depending on if the project is -// named or not. -func TestDefaultCommitStatusUpdater_UpdateProjectSrc(t *testing.T) { - RegisterMockTestingT(t) - cases := []struct { - projectName string - repoRelDir string - workspace string - expSrc string - }{ - { - projectName: "name", - repoRelDir: ".", - workspace: "default", - expSrc: "atlantis/plan: name", - }, - { - projectName: "", - repoRelDir: "dir1/dir2", - workspace: "workspace", - expSrc: "atlantis/plan: dir1/dir2/workspace", - }, - } - - for _, c := range cases { - t.Run(c.expSrc, func(t *testing.T) { - client := mocks.NewMockClient() - titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis"} - s := command.VCSStatusUpdater{Client: client, TitleBuilder: titleBuilder} - ctx := context.Background() - _, err := s.UpdateProject(ctx, command.ProjectContext{ - ProjectName: c.projectName, - RepoRelDir: c.repoRelDir, - Workspace: c.workspace, - }, - command.Plan, - models.PendingVCSStatus, - "url", "") - Ok(t, err) - client.VerifyWasCalledOnce().UpdateStatus(ctx, types.UpdateStatusRequest{ - Repo: models.Repo{}, - PullNum: 0, - State: models.PendingVCSStatus, - StatusName: c.expSrc, - Description: "Plan in progress...", - DetailsURL: "url", - - CommandName: "Plan", - Workspace: c.workspace, - Directory: c.repoRelDir, - Project: c.projectName, - }) - }) - } -} - -// Test that it uses the right words in the description. -func TestDefaultCommitStatusUpdater_UpdateProject(t *testing.T) { - RegisterMockTestingT(t) - cases := []struct { - status models.VCSStatus - cmd command.Name - expDescrip string - }{ - { - models.PendingVCSStatus, - command.Plan, - "Plan in progress...", - }, - { - models.FailedVCSStatus, - command.Plan, - "Plan failed.", - }, - { - models.SuccessVCSStatus, - command.Plan, - "Plan succeeded.", - }, - { - models.PendingVCSStatus, - command.Apply, - "Apply in progress...", - }, - { - models.FailedVCSStatus, - command.Apply, - "Apply failed.", - }, - { - models.SuccessVCSStatus, - command.Apply, - "Apply succeeded.", - }, - } - - for _, c := range cases { - t.Run(c.expDescrip, func(t *testing.T) { - client := mocks.NewMockClient() - titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "atlantis"} - s := command.VCSStatusUpdater{Client: client, TitleBuilder: titleBuilder} - ctx := context.Background() - _, err := s.UpdateProject(ctx, command.ProjectContext{ - RepoRelDir: ".", - Workspace: "default", - }, - c.cmd, - c.status, - "url", "") - Ok(t, err) - client.VerifyWasCalledOnce().UpdateStatus(ctx, types.UpdateStatusRequest{ - Repo: models.Repo{}, - PullNum: 0, - State: c.status, - StatusName: fmt.Sprintf("atlantis/%s: ./default", c.cmd.String()), - Description: c.expDescrip, - DetailsURL: "url", - CommandName: c.cmd.TitleString(), - Workspace: "default", - Directory: ".", - }) - }) - } -} - -// Test that we can set the status name. 
-func TestDefaultCommitStatusUpdater_UpdateProjectCustomStatusName(t *testing.T) { - RegisterMockTestingT(t) - client := mocks.NewMockClient() - titleBuilder := vcs.StatusTitleBuilder{TitlePrefix: "custom"} - s := command.VCSStatusUpdater{Client: client, TitleBuilder: titleBuilder} - ctx := context.Background() - _, err := s.UpdateProject(ctx, command.ProjectContext{ - RepoRelDir: ".", - Workspace: "default", - }, - command.Apply, - models.SuccessVCSStatus, - "url", "") - Ok(t, err) - client.VerifyWasCalledOnce().UpdateStatus(ctx, types.UpdateStatusRequest{ - Repo: models.Repo{}, - PullNum: 0, - State: models.SuccessVCSStatus, - StatusName: "custom/apply: ./default", - Description: "Apply succeeded.", - DetailsURL: "url", - Workspace: "default", - CommandName: "Apply", - Directory: ".", - }) -} diff --git a/server/legacy/events/command_runner.go b/server/legacy/events/command_runner.go deleted file mode 100644 index 9a49b524f..000000000 --- a/server/legacy/events/command_runner.go +++ /dev/null @@ -1,329 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "context" - "fmt" - "time" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/logging/fields" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/recovery" - "github.com/uber-go/tally/v4" -) - -const ( - ShutdownComment = "Atlantis server is shutting down, please try again later." -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_command_runner.go CommandRunner - -// CommandRunner is the first step after a command request has been parsed. -type CommandRunner interface { - // RunCommentCommand is the first step after a command request has been parsed. - // It handles gathering additional information needed to execute the command - // and then calling the appropriate services to finish executing the command. 
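	// Together with RunAutoplanCommand and RunPRReviewCommand below, this covers the
	// three ways work enters the runner: PR comment commands, autoplans on PR
	// open/update, and PR review events.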
- RunCommentCommand(ctx context.Context, baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User, pullNum int, cmd *command.Comment, timestamp time.Time, installationToken int64) - RunAutoplanCommand(ctx context.Context, baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User, timestamp time.Time, installationToken int64) - RunPRReviewCommand(ctx context.Context, repo models.Repo, pull models.PullRequest, user models.User, timestamp time.Time, installationToken int64) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_stale_command_checker.go StaleCommandChecker - -// StaleCommandChecker handles checks to validate if current command is stale and can be dropped. -type StaleCommandChecker interface { - // CommandIsStale returns true if currentEventTimestamp is earlier than timestamp set in DB's latest pull model. - CommandIsStale(ctx *command.Context) bool -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_comment_command_runner.go CommentCommandRunner - -// CommentCommandRunner runs individual command workflows. -type CommentCommandRunner interface { - Run(*command.Context, *command.Comment) -} - -type policyCommandRunner interface { - Run(ctx *command.Context) -} - -func buildCommentCommandRunner( - cmdRunner *DefaultCommandRunner, - cmdName command.Name, -) CommentCommandRunner { - // panic here, we want to fail fast and hard since - // this would be an internal service configuration error. - runner, ok := cmdRunner.CommentCommandRunnerByCmd[cmdName] - - if !ok { - panic(fmt.Sprintf("command runner not configured for command %s", cmdName.String())) - } - - return runner -} - -// DefaultCommandRunner is the first step when processing a comment command. -type DefaultCommandRunner struct { - VCSClient vcs.Client - DisableAutoplan bool - GlobalCfg valid.GlobalCfg - StatsScope tally.Scope - // ParallelPoolSize controls the size of the wait group used to run - // parallel plans and applies (if enabled). - ParallelPoolSize int - CommentCommandRunnerByCmd map[command.Name]command.Runner - Drainer *Drainer - PreWorkflowHooksCommandRunner PreWorkflowHooksCommandRunner - VCSStatusUpdater VCSStatusUpdater - PullStatusFetcher PullStatusFetcher - StaleCommandChecker StaleCommandChecker - Logger logging.Logger - PolicyCommandRunner policyCommandRunner -} - -// RunAutoplanCommand runs plan and policy_checks when a pull request is opened or updated. 
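// The method below guards the run with the drainer, panic recovery, and the stale
// command check, then runs pre-workflow hooks before delegating to the plan runner;
// a hook failure is reported as a failed combined VCS status for the plan command.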
-func (c *DefaultCommandRunner) RunAutoplanCommand(ctx context.Context, baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User, timestamp time.Time, installationToken int64) { - if opStarted := c.Drainer.StartOp(); !opStarted { - if commentErr := c.VCSClient.CreateComment(baseRepo, pull.Num, ShutdownComment, command.Plan.String()); commentErr != nil { - c.Logger.ErrorContext(ctx, commentErr.Error()) - } - return - } - defer c.Drainer.OpDone() - - defer c.logPanics(ctx) - status, err := c.PullStatusFetcher.GetPullStatus(pull) - - if err != nil { - c.Logger.ErrorContext(ctx, err.Error()) - } - - scope := c.StatsScope.SubScope("autoplan") - timer := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer timer.Stop() - - cmdCtx := &command.Context{ - User: user, - Log: c.Logger, - Scope: scope, - Pull: pull, - HeadRepo: headRepo, - PullStatus: status, - Trigger: command.AutoTrigger, - TriggerTimestamp: timestamp, - RequestCtx: ctx, - InstallationToken: installationToken, - } - if !c.validateCtxAndComment(cmdCtx) { - return - } - if c.DisableAutoplan { - return - } - // Drop request if a more recent VCS event updated Atlantis state - if c.StaleCommandChecker.CommandIsStale(cmdCtx) { - return - } - - if err := c.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cmdCtx); err != nil { - c.Logger.ErrorContext(ctx, "Error running pre-workflow hooks", fields.PullRequestWithErr(pull, err)) - _, err := c.VCSStatusUpdater.UpdateCombined(ctx, cmdCtx.HeadRepo, cmdCtx.Pull, models.FailedVCSStatus, command.Plan, "", err.Error()) - if err != nil { - c.Logger.ErrorContext(ctx, err.Error()) - } - return - } - - autoPlanRunner := buildCommentCommandRunner(c, command.Plan) - - autoPlanRunner.Run(cmdCtx, nil) -} - -// RunCommentCommand executes the command. -// We take in a pointer for maybeHeadRepo because for some events there isn't -// enough data to construct the Repo model and callers might want to wait until -// the event is further validated before making an additional (potentially -// wasteful) call to get the necessary data. 
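// As with autoplans, a pre-workflow hook failure here short-circuits the run and is
// surfaced as a failed combined VCS status for the requested command.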
-func (c *DefaultCommandRunner) RunCommentCommand(ctx context.Context, baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User, pullNum int, cmd *command.Comment, timestamp time.Time, installationToken int64) { - if opStarted := c.Drainer.StartOp(); !opStarted { - if commentErr := c.VCSClient.CreateComment(baseRepo, pullNum, ShutdownComment, ""); commentErr != nil { - c.Logger.ErrorContext(ctx, commentErr.Error()) - } - return - } - defer c.Drainer.OpDone() - - defer c.logPanics(ctx) - - scope := c.StatsScope.SubScope("comment") - - if cmd != nil { - scope = scope.SubScope(cmd.Name.String()) - } - timer := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer timer.Stop() - - status, err := c.PullStatusFetcher.GetPullStatus(pull) - - if err != nil { - c.Logger.ErrorContext(ctx, err.Error()) - } - - cmdCtx := &command.Context{ - User: user, - Log: c.Logger, - Pull: pull, - PullStatus: status, - HeadRepo: headRepo, - Trigger: command.CommentTrigger, - Scope: scope, - TriggerTimestamp: timestamp, - RequestCtx: ctx, - InstallationToken: installationToken, - } - - if !c.validateCtxAndComment(cmdCtx) { - return - } - - // Drop request if a more recent VCS event updated Atlantis state - if c.StaleCommandChecker.CommandIsStale(cmdCtx) { - return - } - - if err := c.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cmdCtx); err != nil { - // Replace approve policies command with policy check if preworkflow hook fails since we don't use - // approve policies statuses - cmdName := cmd.Name - - c.Logger.ErrorContext(ctx, "Error running pre-workflow hooks", fields.PullRequestWithErr(pull, err)) - _, err := c.VCSStatusUpdater.UpdateCombined(ctx, cmdCtx.HeadRepo, cmdCtx.Pull, models.FailedVCSStatus, cmdName, "", err.Error()) - if err != nil { - c.Logger.ErrorContext(ctx, err.Error()) - } - return - } - - cmdRunner := buildCommentCommandRunner(c, cmd.CommandName()) - - cmdRunner.Run(cmdCtx, cmd) -} - -// RunPRReviewCommand executes the policy check command. 
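// It applies the same drainer, stale-command, and pre-workflow-hook guards as the
// other entry points, then delegates to PolicyCommandRunner instead of a comment
// command runner.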
-func (c *DefaultCommandRunner) RunPRReviewCommand(ctx context.Context, repo models.Repo, pull models.PullRequest, user models.User, timestamp time.Time, installationToken int64) { - if opStarted := c.Drainer.StartOp(); !opStarted { - if commentErr := c.VCSClient.CreateComment(repo, pull.Num, ShutdownComment, ""); commentErr != nil { - c.Logger.ErrorContext(ctx, commentErr.Error()) - } - return - } - defer c.Drainer.OpDone() - scope := c.StatsScope.SubScope("pr_approval") - timer := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer timer.Stop() - - // only log error here, like with Atlantis autoplans/commands - status, err := c.PullStatusFetcher.GetPullStatus(pull) - if err != nil { - c.Logger.ErrorContext(ctx, err.Error()) - } - - cmdCtx := &command.Context{ - User: user, - Log: c.Logger, - Pull: pull, - PullStatus: status, - HeadRepo: repo, - Trigger: command.PRReviewTrigger, - Scope: scope, - TriggerTimestamp: timestamp, - RequestCtx: ctx, - InstallationToken: installationToken, - } - if !c.validateCtxAndComment(cmdCtx) { - return - } - - // Drop request if a more recent VCS event updated Atlantis state - if c.StaleCommandChecker.CommandIsStale(cmdCtx) { - return - } - - if err := c.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cmdCtx); err != nil { - c.Logger.ErrorContext(ctx, "Error running pre-workflow hooks", fields.PullRequestWithErr(pull, err)) - _, err := c.VCSStatusUpdater.UpdateCombined(ctx, cmdCtx.HeadRepo, cmdCtx.Pull, models.FailedVCSStatus, command.PolicyCheck, "", err.Error()) - if err != nil { - c.Logger.ErrorContext(ctx, err.Error()) - } - return - } - // run policy check command runner - c.PolicyCommandRunner.Run(cmdCtx) -} - -func (c *DefaultCommandRunner) validateCtxAndComment(cmdCtx *command.Context) bool { - if cmdCtx.HeadRepo.Owner != cmdCtx.Pull.BaseRepo.Owner { - c.Logger.InfoContext(cmdCtx.RequestCtx, "command was run on a fork pull request which is disallowed") - if err := c.VCSClient.CreateComment(cmdCtx.Pull.BaseRepo, cmdCtx.Pull.Num, "Atlantis commands can't be run on fork pull requests.", ""); err != nil { - c.Logger.ErrorContext(cmdCtx.RequestCtx, err.Error()) - } - return false - } - - if cmdCtx.Pull.State != models.OpenPullState { - c.Logger.InfoContext(cmdCtx.RequestCtx, "command was run on closed pull request") - if err := c.VCSClient.CreateComment(cmdCtx.Pull.BaseRepo, cmdCtx.Pull.Num, "Atlantis commands can't be run on closed pull requests", ""); err != nil { - c.Logger.ErrorContext(cmdCtx.RequestCtx, err.Error()) - } - return false - } - - repo := c.GlobalCfg.MatchingRepo(cmdCtx.Pull.BaseRepo.ID()) - if !repo.BranchMatches(cmdCtx.Pull.BaseBranch) { - c.Logger.InfoContext(cmdCtx.RequestCtx, "command was run on a pull request which doesn't match base branches") - // just ignore it to allow us to use any git workflows without malicious intentions. - return false - } - return true -} - -// logPanics logs and creates a comment on the pull request for panics. 
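// Note that the implementation below only logs the recovered panic and stack trace;
// it does not create a pull request comment.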
-func (c *DefaultCommandRunner) logPanics(ctx context.Context) { - if err := recover(); err != nil { - stack := recovery.Stack(3) - c.Logger.ErrorContext(ctx, fmt.Sprintf("PANIC: %s\n%s", err, stack)) - } -} - -type ForceApplyCommandRunner struct { - CommandRunner - VCSClient vcs.Client - Logger logging.Logger -} - -func (f *ForceApplyCommandRunner) RunCommentCommand(ctx context.Context, baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User, pullNum int, cmd *command.Comment, timestamp time.Time, installationToken int64) { - if cmd.ForceApply { - warningMessage := "⚠️ WARNING ⚠️\n\n You have bypassed all apply requirements for this PR 🚀 . This can have unpredictable consequences 🙏🏽 and should only be used in an emergency 🆘 .\n\n 𝐓𝐡𝐢𝐬 𝐚𝐜𝐭𝐢𝐨𝐧 𝐰𝐢𝐥𝐥 𝐛𝐞 𝐚𝐮𝐝𝐢𝐭𝐞𝐝.\n" - if commentErr := f.VCSClient.CreateComment(baseRepo, pullNum, warningMessage, ""); commentErr != nil { - f.Logger.ErrorContext(ctx, commentErr.Error()) - } - } - f.CommandRunner.RunCommentCommand(ctx, baseRepo, headRepo, pull, user, pullNum, cmd, timestamp, installationToken) -} diff --git a/server/legacy/events/command_runner_internal_test.go b/server/legacy/events/command_runner_internal_test.go deleted file mode 100644 index 18a821379..000000000 --- a/server/legacy/events/command_runner_internal_test.go +++ /dev/null @@ -1,178 +0,0 @@ -package events - -import ( - "context" - "fmt" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func TestApplyUpdateCommitStatus(t *testing.T) { - cases := map[string]struct { - cmd command.Name - pullStatus models.PullStatus - expStatus models.VCSStatus - expNumSuccess int - expNumTotal int - }{ - "apply, one pending": { - cmd: command.Apply, - pullStatus: models.PullStatus{ - Projects: []models.ProjectStatus{ - { - Status: models.PlannedPlanStatus, - }, - { - Status: models.AppliedPlanStatus, - }, - }, - }, - expStatus: models.PendingVCSStatus, - expNumSuccess: 1, - expNumTotal: 2, - }, - "apply, all successful": { - cmd: command.Apply, - pullStatus: models.PullStatus{ - Projects: []models.ProjectStatus{ - { - Status: models.AppliedPlanStatus, - }, - { - Status: models.AppliedPlanStatus, - }, - }, - }, - expStatus: models.SuccessVCSStatus, - expNumSuccess: 2, - expNumTotal: 2, - }, - "apply, one errored, one pending": { - cmd: command.Apply, - pullStatus: models.PullStatus{ - Projects: []models.ProjectStatus{ - { - Status: models.AppliedPlanStatus, - }, - { - Status: models.ErroredApplyStatus, - }, - { - Status: models.PlannedPlanStatus, - }, - }, - }, - expStatus: models.FailedVCSStatus, - expNumSuccess: 1, - expNumTotal: 3, - }, - } - - for name, c := range cases { - t.Run(name, func(t *testing.T) { - csu := &MockCSU{} - cr := &ApplyCommandRunner{ - vcsStatusUpdater: csu, - } - cr.updateVcsStatus(&command.Context{}, c.pullStatus, "") - Equals(t, models.Repo{}, csu.CalledRepo) - Equals(t, models.PullRequest{}, csu.CalledPull) - Equals(t, c.expStatus, csu.CalledStatus) - Equals(t, c.cmd.String(), csu.CalledCommand) - Equals(t, c.expNumSuccess, csu.CalledNumSuccess) - Equals(t, c.expNumTotal, csu.CalledNumTotal) - }) - } -} - -func TestPlanUpdateCommitStatus(t *testing.T) { - cases := map[string]struct { - cmd command.Name - pullStatus models.PullStatus - expStatus models.VCSStatus - expNumSuccess int - expNumTotal int - }{ - "single plan success": { - cmd: command.Plan, - pullStatus: models.PullStatus{ - Projects: 
[]models.ProjectStatus{ - { - Status: models.PlannedPlanStatus, - }, - }, - }, - expStatus: models.SuccessVCSStatus, - expNumSuccess: 1, - expNumTotal: 1, - }, - "one plan error, other errors": { - cmd: command.Plan, - pullStatus: models.PullStatus{ - Projects: []models.ProjectStatus{ - { - Status: models.ErroredPlanStatus, - }, - { - Status: models.PlannedPlanStatus, - }, - { - Status: models.AppliedPlanStatus, - }, - { - Status: models.ErroredApplyStatus, - }, - }, - }, - expStatus: models.FailedVCSStatus, - expNumSuccess: 3, - expNumTotal: 4, - }, - } - - for name, c := range cases { - t.Run(name, func(t *testing.T) { - csu := &MockCSU{} - cr := &PlanCommandRunner{ - vcsStatusUpdater: csu, - } - cr.updateVcsStatus(&command.Context{}, c.pullStatus, "") - Equals(t, models.Repo{}, csu.CalledRepo) - Equals(t, models.PullRequest{}, csu.CalledPull) - Equals(t, c.expStatus, csu.CalledStatus) - Equals(t, c.cmd.String(), csu.CalledCommand) - Equals(t, c.expNumSuccess, csu.CalledNumSuccess) - Equals(t, c.expNumTotal, csu.CalledNumTotal) - }) - } -} - -type MockCSU struct { - CalledRepo models.Repo - CalledPull models.PullRequest - CalledStatus models.VCSStatus - CalledCommand string - CalledNumSuccess int - CalledNumTotal int - CalledStatusID string -} - -func (m *MockCSU) UpdateCombinedCount(ctx context.Context, repo models.Repo, pull models.PullRequest, status models.VCSStatus, command fmt.Stringer, numSuccess int, numTotal int, statusID string) (string, error) { - m.CalledRepo = repo - m.CalledPull = pull - m.CalledStatus = status - m.CalledCommand = command.String() - m.CalledNumSuccess = numSuccess - m.CalledNumTotal = numTotal - m.CalledStatusID = statusID - return "", nil -} -func (m *MockCSU) UpdateCombined(ctx context.Context, repo models.Repo, pull models.PullRequest, status models.VCSStatus, command fmt.Stringer, statusID string, output string) (string, error) { - return "", nil -} -func (m *MockCSU) UpdateProject(ctx context.Context, projectCtx command.ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) { - return "", nil -} diff --git a/server/legacy/events/command_runner_test.go b/server/legacy/events/command_runner_test.go deleted file mode 100644 index 3fe4db789..000000000 --- a/server/legacy/events/command_runner_test.go +++ /dev/null @@ -1,576 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. 
- -package events_test - -import ( - "context" - "errors" - "fmt" - "regexp" - "testing" - "time" - - "github.com/stretchr/testify/assert" - "github.com/uber-go/tally/v4" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - lyft_vcs "github.com/runatlantis/atlantis/server/legacy/events/vcs/lyft" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - - . "github.com/petergtz/pegomock" - lockingmocks "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - eventmocks "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/models/fixtures" - "github.com/runatlantis/atlantis/server/vcs/markdown" - . "github.com/runatlantis/atlantis/testing" -) - -var projectCommandBuilder *mocks.MockProjectCommandBuilder -var projectCommandRunner *mocks.MockProjectCommandRunner -var ch events.DefaultCommandRunner -var fa events.ForceApplyCommandRunner -var workingDir events.WorkingDir -var pendingPlanFinder *mocks.MockPendingPlanFinder -var drainer *events.Drainer -var deleteLockCommand *mocks.MockDeleteLockCommand -var vcsUpdater *mocks.MockVCSStatusUpdater -var staleCommandChecker *mocks.MockStaleCommandChecker -var logger logging.Logger - -// TODO: refactor these into their own unit tests. -// these were all split out from default command runner in an effort to improve -// readability however the tests were kept as is. 
-var dbUpdater *events.DBUpdater -var pullUpdater events.OutputUpdater -var policyCheckCommandRunner *events.PolicyCheckCommandRunner -var planCommandRunner *events.PlanCommandRunner -var applyLockChecker *lockingmocks.MockApplyLockChecker -var applyCommandRunner *events.ApplyCommandRunner -var unlockCommandRunner *events.UnlockCommandRunner -var preWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner - -func setup(t *testing.T) *vcsmocks.MockClient { - RegisterMockTestingT(t) - projectCommandBuilder = mocks.NewMockProjectCommandBuilder() - vcsClient := vcsmocks.NewMockClient() - githubClient := vcsmocks.NewMockIGithubClient() - logger = logging.NewNoopCtxLogger(t) - projectCommandRunner = mocks.NewMockProjectCommandRunner() - workingDir = mocks.NewMockWorkingDir() - pendingPlanFinder = mocks.NewMockPendingPlanFinder() - vcsUpdater = mocks.NewMockVCSStatusUpdater() - tmp, cleanup := TempDir(t) - defer cleanup() - defaultBoltDB, err := db.New(tmp) - Ok(t, err) - - drainer = &events.Drainer{} - deleteLockCommand = eventmocks.NewMockDeleteLockCommand() - applyLockChecker = lockingmocks.NewMockApplyLockChecker() - - dbUpdater = &events.DBUpdater{ - DB: defaultBoltDB, - } - - pullUpdater = &events.PullOutputUpdater{ - HidePrevPlanComments: false, - VCSClient: vcsClient, - MarkdownRenderer: &markdown.Renderer{}, - } - - parallelPoolSize := 1 - policyCheckCommandRunner = events.NewPolicyCheckCommandRunner( - dbUpdater, - pullUpdater, - vcsUpdater, - projectCommandRunner, - parallelPoolSize, - ) - - planCommandRunner = events.NewPlanCommandRunner( - vcsClient, - pendingPlanFinder, - workingDir, - vcsUpdater, - projectCommandBuilder, - projectCommandRunner, - dbUpdater, - pullUpdater, - policyCheckCommandRunner, - parallelPoolSize, - ) - - pullReqStatusFetcher := lyft_vcs.NewSQBasedPullStatusFetcher(githubClient, vcs.NewLyftPullMergeabilityChecker("atlantis")) - - applyCommandRunner = events.NewApplyCommandRunner( - vcsClient, - false, - applyLockChecker, - vcsUpdater, - projectCommandBuilder, - projectCommandRunner, - pullUpdater, - dbUpdater, - parallelPoolSize, - pullReqStatusFetcher, - ) - - unlockCommandRunner = events.NewUnlockCommandRunner( - deleteLockCommand, - vcsClient, - ) - - versionCommandRunner := events.NewVersionCommandRunner( - pullUpdater, - projectCommandBuilder, - projectCommandRunner, - parallelPoolSize, - ) - - commentCommandRunnerByCmd := map[command.Name]command.Runner{ - command.Plan: planCommandRunner, - command.Apply: applyCommandRunner, - command.Unlock: unlockCommandRunner, - command.Version: versionCommandRunner, - } - - preWorkflowHooksCommandRunner = mocks.NewMockPreWorkflowHooksCommandRunner() - - When(preWorkflowHooksCommandRunner.RunPreHooks(matchers.AnyContextContext(), matchers.AnyPtrToEventsCommandContext())).ThenReturn(nil) - - globalCfg := valid.NewGlobalCfg("somedir") - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - staleCommandChecker = mocks.NewMockStaleCommandChecker() - - ch = events.DefaultCommandRunner{ - VCSClient: vcsClient, - CommentCommandRunnerByCmd: commentCommandRunnerByCmd, - Logger: logging.NewNoopCtxLogger(t), - GlobalCfg: globalCfg, - StatsScope: scope, - Drainer: drainer, - PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, - PullStatusFetcher: defaultBoltDB, - StaleCommandChecker: staleCommandChecker, - VCSStatusUpdater: vcsUpdater, - } - return vcsClient -} - -func TestRunCommentCommand_ForkPRDisabled(t *testing.T) { - t.Log("if a command is run on a forked pull request and this is disabled atlantis 
should" + - " comment saying that this is not allowed") - vcsClient := setup(t) - ctx := context.Background() - modelPull := models.PullRequest{ - BaseRepo: fixtures.GithubRepo, - State: models.OpenPullState, - } - - headRepo := fixtures.GithubRepo - headRepo.FullName = "forkrepo/atlantis" - headRepo.Owner = "forkrepo" - ch.RunCommentCommand(ctx, fixtures.GithubRepo, headRepo, modelPull, fixtures.User, fixtures.Pull.Num, nil, time.Now(), 0) - commentMessage := "Atlantis commands can't be run on fork pull requests." - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, commentMessage, "") -} - -func TestRunCommentCommand_PreWorkflowHookError(t *testing.T) { - t.Log("if pre workflow hook errors out stop the execution") - for _, cmd := range []command.Name{command.Plan, command.Apply} { - t.Run(cmd.String(), func(t *testing.T) { - RegisterMockTestingT(t) - _ = setup(t) - ctx := context.Background() - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, Num: fixtures.Pull.Num, State: models.OpenPullState} - preWorkflowHooksCommandRunner = mocks.NewMockPreWorkflowHooksCommandRunner() - - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - When(preWorkflowHooksCommandRunner.RunPreHooks(matchers.AnyContextContext(), matchers.AnyPtrToEventsCommandContext())).ThenReturn(fmt.Errorf("catastrophic error")) - - ch.PreWorkflowHooksCommandRunner = preWorkflowHooksCommandRunner - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, modelPull.BaseRepo, modelPull, fixtures.User, fixtures.Pull.Num, &command.Comment{Name: cmd}, time.Now(), 0) - _, _, _, status, cmdName, _, _ := vcsUpdater.VerifyWasCalledOnce().UpdateCombined(matchers.AnyContextContext(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), matchers.AnyModelsVcsStatus(), matchers.AnyCommandName(), AnyString(), AnyString()).GetCapturedArguments() - Equals(t, models.FailedVCSStatus, status) - Equals(t, cmd, cmdName) - }) - } -} - -func TestRunCommentCommand_DisableApplyAllDisabled(t *testing.T) { - t.Log("if \"atlantis apply\" is run and this is disabled atlantis should" + - " comment saying that this is not allowed") - vcsClient := setup(t) - ctx := context.Background() - applyCommandRunner.DisableApplyAll = true - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState, Num: fixtures.Pull.Num} - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, modelPull.Num, &command.Comment{Name: command.Apply}, time.Now(), 0) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "**Error:** Running `atlantis apply` without flags is disabled. 
You must specify which project to apply via the `-d `, `-w ` or `-p ` flags.", "apply") -} - -func TestForceApplyRunCommentCommandRunner_CommentWhenEnabled(t *testing.T) { - t.Log("if \"atlantis apply --force\" is run and this is enabled atlantis should" + - " comment with a warning") - vcsClient := setup(t) - ctx := context.Background() - - fa = events.ForceApplyCommandRunner{ - CommandRunner: &ch, - VCSClient: vcsClient, - } - - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState, Num: fixtures.Pull.Num} - - fa.RunCommentCommand(ctx, fixtures.GithubRepo, models.Repo{}, models.PullRequest{}, fixtures.User, modelPull.Num, &command.Comment{Name: command.Apply, ForceApply: true}, time.Now(), 0) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "⚠️ WARNING ⚠️\n\n You have bypassed all apply requirements for this PR 🚀 . This can have unpredictable consequences 🙏🏽 and should only be used in an emergency 🆘 .\n\n 𝐓𝐡𝐢𝐬 𝐚𝐜𝐭𝐢𝐨𝐧 𝐰𝐢𝐥𝐥 𝐛𝐞 𝐚𝐮𝐝𝐢𝐭𝐞𝐝.\n", "") -} - -func TestRunCommentCommand_DisableDisableAutoplan(t *testing.T) { - t.Log("if \"DisableAutoplan is true\" are disabled and we are silencing return and do not comment with error") - setup(t) - ctx := context.Background() - ch.DisableAutoplan = true - defer func() { ch.DisableAutoplan = false }() - - When(projectCommandBuilder.BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext())). - ThenReturn([]command.ProjectContext{ - { - CommandName: command.Plan, - }, - { - CommandName: command.Plan, - }, - }, nil) - - ch.RunAutoplanCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - projectCommandBuilder.VerifyWasCalled(Never()).BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext()) -} - -func TestRunCommentCommand_ClosedPull(t *testing.T) { - t.Log("if a command is run on a closed pull request atlantis should" + - " comment saying that this is not allowed") - vcsClient := setup(t) - ctx := context.Background() - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.ClosedPullState, Num: fixtures.Pull.Num} - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, fixtures.Pull.Num, nil, time.Now(), 0) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "Atlantis commands can't be run on closed pull requests", "") -} - -func TestRunCommentCommand_MatchedBranch(t *testing.T) { - t.Log("if a command is run on a pull request which matches base branches run plan successfully") - vcsClient := setup(t) - ctx := context.Background() - - ch.GlobalCfg.Repos = append(ch.GlobalCfg.Repos, valid.Repo{ - IDRegex: regexp.MustCompile(".*"), - BranchRegex: regexp.MustCompile("^main$"), - }) - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, BaseBranch: "main"} - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, fixtures.Pull.Num, &command.Comment{Name: command.Plan}, time.Now(), 0) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "Ran Plan for 0 projects:\n\n\n\n", "plan") -} - -func TestRunCommentCommand_UnmatchedBranch(t *testing.T) { - t.Log("if a command is run on a pull request which doesn't match base branches do not comment with error") - vcsClient := setup(t) - ctx := context.Background() - - ch.GlobalCfg.Repos = append(ch.GlobalCfg.Repos, 
valid.Repo{ - IDRegex: regexp.MustCompile(".*"), - BranchRegex: regexp.MustCompile("^main$"), - }) - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, BaseBranch: "foo"} - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, fixtures.Pull.Num, &command.Comment{Name: command.Plan}, time.Now(), 0) - vcsClient.VerifyWasCalled(Never()).CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()) -} - -func TestRunUnlockCommand_VCSComment(t *testing.T) { - t.Log("if unlock PR command is run, atlantis should" + - " invoke the delete command and comment on PR accordingly") - - vcsClient := setup(t) - ctx := context.Background() - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState, Num: fixtures.Pull.Num} - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, fixtures.Pull.Num, &command.Comment{Name: command.Unlock}, time.Now(), 0) - - deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, fixtures.Pull.Num, "All Atlantis locks for this PR have been unlocked and plans discarded", "unlock") -} - -func TestRunUnlockCommandFail_VCSComment(t *testing.T) { - t.Log("if unlock PR command is run and delete fails, atlantis should" + - " invoke comment on PR with error message") - - vcsClient := setup(t) - ctx := context.Background() - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState, Num: fixtures.Pull.Num} - When(deleteLockCommand.DeleteLocksByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(0, errors.New("err")) - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, fixtures.Pull.Num, &command.Comment{Name: command.Unlock}, time.Now(), 0) - - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, fixtures.Pull.Num, "Failed to delete PR locks", "unlock") -} - -func TestRunAutoplanCommand_PreWorkflowHookError(t *testing.T) { - t.Log("if pre workflow hook errors out stop the execution") - setup(t) - ctx := context.Background() - preWorkflowHooksCommandRunner = mocks.NewMockPreWorkflowHooksCommandRunner() - - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - When(preWorkflowHooksCommandRunner.RunPreHooks(matchers.AnyContextContext(), matchers.AnyPtrToEventsCommandContext())).ThenReturn(fmt.Errorf("catastrophic error")) - When(projectCommandRunner.Plan(matchers.AnyCommandProjectContext())).ThenReturn(command.ProjectResult{PlanSuccess: &models.PlanSuccess{}}) - - ch.PreWorkflowHooksCommandRunner = preWorkflowHooksCommandRunner - - ch.RunAutoplanCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - _, _, _, status, cmdName, _, _ := vcsUpdater.VerifyWasCalledOnce().UpdateCombined(matchers.AnyContextContext(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), matchers.AnyModelsVcsStatus(), matchers.AnyCommandName(), AnyString(), AnyString()).GetCapturedArguments() - Equals(t, models.FailedVCSStatus, status) - Equals(t, command.Plan, cmdName) -} - -func TestApplyMergeablityWhenPolicyCheckFails(t *testing.T) { - t.Log("if 
\"atlantis apply\" is run with failing policy check then apply is not performed") - setup(t) - ctx := context.Background() - tmp, cleanup := TempDir(t) - defer cleanup() - boltDB, err := db.New(tmp) - Ok(t, err) - dbUpdater.DB = boltDB - modelPull := models.PullRequest{ - BaseRepo: fixtures.GithubRepo, - State: models.OpenPullState, - Num: fixtures.Pull.Num, - } - - _, _ = boltDB.UpdatePullWithResults(modelPull, []command.ProjectResult{ - { - Command: command.PolicyCheck, - Error: fmt.Errorf("failing policy"), - ProjectName: "default", - Workspace: "default", - RepoRelDir: ".", - }, - }) - - When(ch.VCSClient.PullIsMergeable(fixtures.GithubRepo, modelPull)).ThenReturn(true, nil) - - When(projectCommandBuilder.BuildApplyCommands(matchers.AnyPtrToEventsCommandContext(), matchers.AnyPtrToEventsCommentCommand())).Then(func(args []Param) ReturnValues { - return ReturnValues{ - []command.ProjectContext{ - { - CommandName: command.Apply, - ProjectName: "default", - Workspace: "default", - RepoRelDir: ".", - ProjectPlanStatus: models.ErroredPolicyCheckStatus, - }, - }, - nil, - } - }) - - When(workingDir.GetPullDir(fixtures.GithubRepo, modelPull)).ThenReturn(tmp, nil) - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, modelPull, fixtures.User, fixtures.Pull.Num, &command.Comment{Name: command.Apply}, time.Now(), 0) -} - -func TestRunCommentCommand_DrainOngoing(t *testing.T) { - t.Log("if drain is ongoing then a message should be displayed") - vcsClient := setup(t) - ctx := context.Background() - drainer.ShutdownBlocking() - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, models.PullRequest{}, fixtures.User, fixtures.Pull.Num, nil, time.Now(), 0) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, fixtures.Pull.Num, "Atlantis server is shutting down, please try again later.", "") -} - -func TestRunCommentCommand_DrainNotOngoing(t *testing.T) { - t.Log("if drain is not ongoing then remove ongoing operation must be called even if panic occurred") - setup(t) - ctx := context.Background() - ch.RunCommentCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, models.PullRequest{}, fixtures.User, fixtures.Pull.Num, nil, time.Now(), 0) - Equals(t, 0, drainer.GetStatus().InProgressOps) -} - -func TestRunAutoplanCommand_DrainOngoing(t *testing.T) { - t.Log("if drain is ongoing then a message should be displayed") - vcsClient := setup(t) - ctx := context.Background() - drainer.ShutdownBlocking() - ch.RunAutoplanCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, fixtures.Pull.Num, "Atlantis server is shutting down, please try again later.", "plan") -} - -func TestRunAutoplanCommand_DrainNotOngoing(t *testing.T) { - t.Log("if drain is not ongoing then remove ongoing operation must be called even if panic occurred") - setup(t) - ctx := context.Background() - fixtures.Pull.BaseRepo = fixtures.GithubRepo - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(false) - When(projectCommandBuilder.BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext())).ThenPanic("panic test - if you're seeing this in a test failure this isn't the failing test") - ch.RunAutoplanCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - projectCommandBuilder.VerifyWasCalledOnce().BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext()) - Equals(t, 0, 
drainer.GetStatus().InProgressOps) -} - -func TestRunCommentCommand_DropStaleRequest(t *testing.T) { - t.Log("if comment command is stale then entire request should be dropped") - vcsClient := setup(t) - ctx := context.Background() - modelPull := models.PullRequest{BaseRepo: fixtures.GithubRepo, State: models.OpenPullState, Num: fixtures.Pull.Num} - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(true) - - ch.RunCommentCommand(ctx, fixtures.GithubRepo, models.Repo{}, models.PullRequest{}, fixtures.User, modelPull.Num, &command.Comment{Name: command.Apply}, time.Now(), 0) - vcsClient.VerifyWasCalled(Never()).CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()) -} - -func TestRunAutoplanCommand_DropStaleRequest(t *testing.T) { - t.Log("if autoplan command is stale then entire request should be dropped") - vcsClient := setup(t) - ctx := context.Background() - When(staleCommandChecker.CommandIsStale(matchers.AnyPtrToModelsCommandContext())).ThenReturn(true) - - ch.RunAutoplanCommand(ctx, fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - vcsClient.VerifyWasCalled(Never()).CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()) -} - -func TestRunPRRCommand_RunPRReviewCommand(t *testing.T) { - staleChecker := &testStaleCommandChecker{} - policyCommandRunner := &testPolicyCommandRunner{} - preWorkflowHooksRunner := &testPreWorkflowHooksRunnerRunner{} - ch = events.DefaultCommandRunner{ - Drainer: &events.Drainer{}, - Logger: logging.NewNoopCtxLogger(t), - GlobalCfg: valid.NewGlobalCfg("somedir"), - StatsScope: tally.NewTestScope("atlantis", map[string]string{}), - PullStatusFetcher: &testPullStatusFetcher{}, - StaleCommandChecker: staleChecker, - PreWorkflowHooksCommandRunner: preWorkflowHooksRunner, - PolicyCommandRunner: policyCommandRunner, - } - ctx := context.Background() - ch.RunPRReviewCommand(ctx, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - assert.True(t, policyCommandRunner.wasCalled) -} - -func TestRunPRRCommand_RunPRReviewCommand_StaleCommand(t *testing.T) { - staleChecker := &testStaleCommandChecker{ - stale: true, - } - policyCommandRunner := &testPolicyCommandRunner{} - ch = events.DefaultCommandRunner{ - Drainer: &events.Drainer{}, - Logger: logging.NewNoopCtxLogger(t), - GlobalCfg: valid.NewGlobalCfg("somedir"), - StatsScope: tally.NewTestScope("atlantis", map[string]string{}), - PullStatusFetcher: &testPullStatusFetcher{}, - StaleCommandChecker: staleChecker, - PolicyCommandRunner: policyCommandRunner, - } - ctx := context.Background() - ch.RunPRReviewCommand(ctx, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - assert.False(t, policyCommandRunner.wasCalled) -} - -func TestRunPRRCommand_RunPRReviewCommand_HooksError(t *testing.T) { - staleChecker := &testStaleCommandChecker{} - preWorkflowHooksRunner := &testPreWorkflowHooksRunnerRunner{ - error: assert.AnError, - } - policyCommandRunner := &testPolicyCommandRunner{} - vcsStatusUpdater := &testVCSStatusUpdater{} - ch = events.DefaultCommandRunner{ - Drainer: &events.Drainer{}, - Logger: logging.NewNoopCtxLogger(t), - GlobalCfg: valid.NewGlobalCfg("somedir"), - StatsScope: tally.NewTestScope("atlantis", map[string]string{}), - PullStatusFetcher: &testPullStatusFetcher{}, - StaleCommandChecker: staleChecker, - PreWorkflowHooksCommandRunner: preWorkflowHooksRunner, - VCSStatusUpdater: vcsStatusUpdater, - PolicyCommandRunner: policyCommandRunner, - } - ctx := 
context.Background() - ch.RunPRReviewCommand(ctx, fixtures.GithubRepo, fixtures.Pull, fixtures.User, time.Now(), 0) - assert.False(t, policyCommandRunner.wasCalled) -} - -type testVCSStatusUpdater struct { - output string - error error -} - -func (u testVCSStatusUpdater) UpdateCombined(context.Context, models.Repo, models.PullRequest, models.VCSStatus, fmt.Stringer, string, string) (string, error) { - return u.output, u.error -} - -func (u testVCSStatusUpdater) UpdateCombinedCount(context.Context, models.Repo, models.PullRequest, models.VCSStatus, fmt.Stringer, int, int, string) (string, error) { - return u.output, u.error -} - -func (u testVCSStatusUpdater) UpdateProject(context.Context, command.ProjectContext, fmt.Stringer, models.VCSStatus, string, string) (string, error) { - return u.output, u.error -} - -type testPullStatusFetcher struct { - error error - pull *models.PullStatus -} - -func (p testPullStatusFetcher) GetPullStatus(_ models.PullRequest) (*models.PullStatus, error) { - return p.pull, p.error -} - -type testPreWorkflowHooksRunnerRunner struct { - error error -} - -func (h testPreWorkflowHooksRunnerRunner) RunPreHooks(context.Context, *command.Context) error { - return h.error -} - -type testStaleCommandChecker struct { - stale bool -} - -func (s testStaleCommandChecker) CommandIsStale(_ *command.Context) bool { - return s.stale -} - -type testPolicyCommandRunner struct { - wasCalled bool -} - -func (r *testPolicyCommandRunner) Run(_ *command.Context) { - r.wasCalled = true -} diff --git a/server/legacy/events/comment_parser.go b/server/legacy/events/comment_parser.go deleted file mode 100644 index 5382b267e..000000000 --- a/server/legacy/events/comment_parser.go +++ /dev/null @@ -1,426 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "bytes" - "fmt" - "io" - "net/url" - "path/filepath" - "regexp" - "strings" - "text/template" - - "github.com/flynn-archive/go-shlex" - "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - "github.com/spf13/pflag" -) - -const ( - workspaceFlagLong = "workspace" - workspaceFlagShort = "w" - dirFlagLong = "dir" - dirFlagShort = "d" - projectFlagLong = "project" - projectFlagShort = "p" - forceFlagLong = "force" - forceFlagShort = "f" - logFlagLong = "log-level" - logFlagShort = "l" - atlantisExecutable = "atlantis" -) - -// multiLineRegex is used to ignore multi-line comments since those aren't valid -// Atlantis commands. If the second line just has newlines then we let it pass -// through because when you double click on a comment in GitHub and then you -// paste it again, GitHub adds two newlines and so we wanted to allow copying -// and pasting GitHub comments. 
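// For example, "atlantis plan\nsome second line" matches the regex below and is
// ignored, while "atlantis plan\n\n" (trailing newlines only, as produced by
// copy/paste on GitHub) does not match and is parsed normally.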
-var ( - multiLineRegex = regexp.MustCompile(`.*\r?\n[^\r\n]+`) - ValidLogLevels = []string{"trace", "debug", "info", "warn", "error"} -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_comment_parsing.go CommentParsing - -// CommentParsing handles parsing pull request comments. -type CommentParsing interface { - // Parse attempts to parse a pull request comment to see if it's an Atlantis - // command. - Parse(comment string, vcsHost models.VCSHostType) CommentParseResult -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_comment_building.go CommentBuilder - -// CommentBuilder builds comment commands that can be used on pull requests. -type CommentBuilder interface { - // BuildPlanComment builds a plan comment for the specified args. - BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string - // BuildApplyComment builds an apply comment for the specified args. - BuildApplyComment(repoRelDir string, workspace string, project string) string - // BuildVersionComment builds a version comment for the specified args. - BuildVersionComment(repoRelDir string, workspace string, project string) string -} - -// CommentParser implements CommentParsing -type CommentParser struct { - GithubUser string - ApplyDisabled bool -} - -// CommentParseResult describes the result of parsing a comment as a command. -type CommentParseResult struct { - // Command is the successfully parsed command. Will be nil if - // CommentResponse or Ignore is set. - Command *command.Comment - // CommentResponse is set when we should respond immediately to the command - // for example for atlantis help. - CommentResponse string - // Ignore is set to true when we should just ignore this comment. - Ignore bool -} - -// Parse parses the comment as an Atlantis command. -// -// Valid commands contain: -// - The initial "executable" name, 'run' or 'atlantis' or '@GithubUser' -// where GithubUser is the API user Atlantis is running as. -// - Then a cmd, either 'plan', 'apply', or 'help'. -// - Then optional flags, then an optional separator '--' followed by optional -// extra flags to be appended to the terraform plan/apply command. -// -// Examples: -// - atlantis help -// - run plan -// - @GithubUser plan -w staging -// - atlantis plan -w staging -d dir -// - atlantis plan -- -key=value -key2 value2 -func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) CommentParseResult { - if multiLineRegex.MatchString(comment) { - return CommentParseResult{Ignore: true} - } - - // We first use strings.Fields to parse and do an initial evaluation. - // Later we use a proper shell parser and re-parse. - args := strings.Fields(comment) - if len(args) < 1 { - return CommentParseResult{Ignore: true} - } - - // Helpfully warn the user if they're using "terraform" instead of "atlantis" - if args[0] == "terraform" { - return CommentParseResult{CommentResponse: DidYouMeanAtlantisComment} - } - - // Atlantis can be invoked using the name of the VCS host user we're - // running under. Need to be able to match against that user. - var vcsUser string - switch vcsHost { - case models.Github: - vcsUser = e.GithubUser - } - executableNames := []string{"run", atlantisExecutable, "@" + vcsUser} - if !e.stringInSlice(args[0], executableNames) { - return CommentParseResult{Ignore: true} - } - - // Now that we know Atlantis is being invoked, re-parse using a shell-style - // parser. 
- args, err := shlex.Split(comment) - if err != nil { - return CommentParseResult{CommentResponse: fmt.Sprintf("```\nError parsing command: %s\n```", err)} - } - if len(args) < 1 { - return CommentParseResult{Ignore: true} - } - - // If they've just typed the name of the executable then give them the help - // output. - if len(args) == 1 { - return CommentParseResult{CommentResponse: e.HelpComment(e.ApplyDisabled)} - } - cmd := args[1] - - // Help output. - if e.stringInSlice(cmd, []string{"help", "-h", "--help"}) { - return CommentParseResult{CommentResponse: e.HelpComment(e.ApplyDisabled)} - } - - // Need to have a plan, apply, approve_policy or unlock at this point. - if !e.stringInSlice(cmd, []string{command.Plan.String(), command.Apply.String(), command.Unlock.String(), command.Version.String()}) { - return CommentParseResult{CommentResponse: fmt.Sprintf("```\nError: unknown command %q.\nRun 'atlantis --help' for usage.\n```", cmd)} - } - - var workspace string - var dir string - var project string - var force bool - var logLevel string - var flagSet *pflag.FlagSet - var name command.Name - - // Set up the flag parsing depending on the command. - switch cmd { - case command.Plan.String(): - name = command.Plan - flagSet = pflag.NewFlagSet(command.Plan.String(), pflag.ContinueOnError) - flagSet.SetOutput(io.Discard) - flagSet.StringVarP(&workspace, workspaceFlagLong, workspaceFlagShort, "", "Switch to this Terraform workspace before planning.") - flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Which directory to run plan in relative to root of repo, ex. 'child/dir'.") - flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Which project to run plan for. Refers to the name of the project configured in %s. Cannot be used at same time as workspace or dir flags.", config.AtlantisYAMLFilename)) - flagSet.StringVarP(&logLevel, logFlagLong, logFlagShort, "", "Which log level to use when emitting terraform results, ex. 'trace'.") - case command.Apply.String(): - name = command.Apply - flagSet = pflag.NewFlagSet(command.Apply.String(), pflag.ContinueOnError) - flagSet.SetOutput(io.Discard) - flagSet.StringVarP(&workspace, workspaceFlagLong, workspaceFlagShort, "", "Apply the plan for this Terraform workspace.") - flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Apply the plan for this directory, relative to root of repo, ex. 'child/dir'.") - flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Apply the plan for this project. Refers to the name of the project configured in %s. Cannot be used at same time as workspace or dir flags.", config.AtlantisYAMLFilename)) - flagSet.BoolVarP(&force, forceFlagLong, forceFlagShort, false, "Force Atlantis to ignore apply requirements.") - flagSet.StringVarP(&logLevel, logFlagLong, logFlagShort, "", "Which log level to use when emitting terraform results, ex. 'trace'.") - case command.Unlock.String(): - name = command.Unlock - flagSet = pflag.NewFlagSet(command.Unlock.String(), pflag.ContinueOnError) - flagSet.SetOutput(io.Discard) - case command.Version.String(): - name = command.Version - flagSet = pflag.NewFlagSet(command.Version.String(), pflag.ContinueOnError) - flagSet.StringVarP(&workspace, workspaceFlagLong, workspaceFlagShort, "", "Switch to this Terraform workspace before running version.") - flagSet.StringVarP(&dir, dirFlagLong, dirFlagShort, "", "Which directory to run version in relative to root of repo, ex. 
'child/dir'.") - flagSet.StringVarP(&project, projectFlagLong, projectFlagShort, "", fmt.Sprintf("Print the version for this project. Refers to the name of the project configured in %s.", config.AtlantisYAMLFilename)) - default: - return CommentParseResult{CommentResponse: fmt.Sprintf("Error: unknown command %q – this is a bug", cmd)} - } - - // Now parse the flags. - // It's safe to use [2:] because we know there's at least 2 elements in args. - err = flagSet.Parse(args[2:]) - if err == pflag.ErrHelp { - return CommentParseResult{CommentResponse: fmt.Sprintf("```\nUsage of %s:\n%s\n```", cmd, flagSet.FlagUsagesWrapped(usagesCols))} - } - if err != nil { - if cmd == command.Unlock.String() { - return CommentParseResult{CommentResponse: UnlockUsage} - } - return CommentParseResult{CommentResponse: e.errMarkdown(err.Error(), cmd, flagSet)} - } - - var unusedArgs []string - if flagSet.ArgsLenAtDash() == -1 { - unusedArgs = flagSet.Args() - } else { - unusedArgs = flagSet.Args()[0:flagSet.ArgsLenAtDash()] - } - if len(unusedArgs) > 0 { - return CommentParseResult{CommentResponse: e.errMarkdown(fmt.Sprintf("unknown argument(s) – %s", strings.Join(unusedArgs, " ")), cmd, flagSet)} - } - - var extraArgs []string - if flagSet.ArgsLenAtDash() != -1 { - extraArgs = flagSet.Args()[flagSet.ArgsLenAtDash():] - } - - dir, err = e.validateDir(dir) - if err != nil { - return CommentParseResult{CommentResponse: e.errMarkdown(err.Error(), cmd, flagSet)} - } - - // Use the same validation that Terraform uses: https://git.io/vxGhU. Plus - // we also don't allow '..'. We don't want the workspace to contain a path - // since we create files based on the name. - if workspace != url.PathEscape(workspace) || strings.Contains(workspace, "..") { - return CommentParseResult{CommentResponse: e.errMarkdown(fmt.Sprintf("invalid workspace: %q", workspace), cmd, flagSet)} - } - - // If project is specified, dir or workspace should not be set. Since we - // dir/workspace have defaults we can't detect if the user set the flag - // to the default or didn't set the flag so there is an edge case here we - // don't detect, ex. atlantis plan -p project -d . -w default won't cause - // an error. - if project != "" && (workspace != "" || dir != "") { - err := fmt.Sprintf("cannot use -%s/--%s at same time as -%s/--%s or -%s/--%s", projectFlagShort, projectFlagLong, dirFlagShort, dirFlagLong, workspaceFlagShort, workspaceFlagLong) - return CommentParseResult{CommentResponse: e.errMarkdown(err, cmd, flagSet)} - } - - caseInsensitiveLogLevel := strings.ToLower(logLevel) - if e.invalidLogLevel(caseInsensitiveLogLevel) { - return CommentParseResult{CommentResponse: e.errMarkdown(fmt.Sprintf("invalid log level: %q", logLevel), cmd, flagSet)} - } - - return CommentParseResult{ - Command: command.NewComment(dir, extraArgs, name, force, workspace, project, caseInsensitiveLogLevel), - } -} - -func (e *CommentParser) invalidLogLevel(logLevel string) bool { - if logLevel == "" { - return false - } - return !e.stringInSlice(logLevel, ValidLogLevels) -} - -// BuildPlanComment builds a plan comment for the specified args. 
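-// For example, BuildPlanComment(".", "default", "", nil) returns "atlantis plan -d ."; any comment args are appended after a "--" separator.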
-func (e *CommentParser) BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string { - flags := e.buildFlags(repoRelDir, workspace, project) - commentFlags := "" - if len(commentArgs) > 0 { - var flagsWithoutQuotes []string - for _, f := range commentArgs { - f = strings.TrimPrefix(f, "\"") - f = strings.TrimSuffix(f, "\"") - flagsWithoutQuotes = append(flagsWithoutQuotes, f) - } - commentFlags = fmt.Sprintf(" -- %s", strings.Join(flagsWithoutQuotes, " ")) - } - return fmt.Sprintf("%s %s%s%s", atlantisExecutable, command.Plan.String(), flags, commentFlags) -} - -// BuildApplyComment builds an apply comment for the specified args. -func (e *CommentParser) BuildApplyComment(repoRelDir string, workspace string, project string) string { - flags := e.buildFlags(repoRelDir, workspace, project) - return fmt.Sprintf("%s %s%s", atlantisExecutable, command.Apply.String(), flags) -} - -// BuildVersionComment builds a version comment for the specified args. -func (e *CommentParser) BuildVersionComment(repoRelDir string, workspace string, project string) string { - flags := e.buildFlags(repoRelDir, workspace, project) - return fmt.Sprintf("%s %s%s", atlantisExecutable, command.Version.String(), flags) -} - -func (e *CommentParser) buildFlags(repoRelDir string, workspace string, project string) string { - // Add quotes if dir has spaces. - if strings.Contains(repoRelDir, " ") { - repoRelDir = fmt.Sprintf("%q", repoRelDir) - } - - var flags string - switch { - // If project is specified we can just use its name. - case project != "": - flags = fmt.Sprintf(" -%s %s", projectFlagShort, project) - case repoRelDir == DefaultRepoRelDir && workspace == DefaultWorkspace: - // If it's the root and default workspace then we just need to specify one - // of the flags and the other will get defaulted. - flags = fmt.Sprintf(" -%s %s", dirFlagShort, DefaultRepoRelDir) - case repoRelDir == DefaultRepoRelDir: - // If dir is the default then we just need to specify workspace. - flags = fmt.Sprintf(" -%s %s", workspaceFlagShort, workspace) - case workspace == DefaultWorkspace: - // If workspace is the default then we just need to specify the dir. - flags = fmt.Sprintf(" -%s %s", dirFlagShort, repoRelDir) - default: - // Otherwise we have to specify both flags. - flags = fmt.Sprintf(" -%s %s -%s %s", dirFlagShort, repoRelDir, workspaceFlagShort, workspace) - } - - return flags -} - -func (e *CommentParser) validateDir(dir string) (string, error) { - if dir == "" { - return dir, nil - } - validatedDir := filepath.Clean(dir) - // Join with . so the path is relative. This helps us if they use '/', - // and is safe to do if their path is relative since it's a no-op. - validatedDir = filepath.Join(".", validatedDir) - // Need to clean again to resolve relative validatedDirs. - validatedDir = filepath.Clean(validatedDir) - // Detect relative dirs since they're not allowed. 
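- // e.g. "a/../.." cleans to ".." and is rejected below.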
- if strings.HasPrefix(validatedDir, "..") { - return "", fmt.Errorf("using a relative path %q with -%s/--%s is not allowed", dir, dirFlagShort, dirFlagLong) - } - - return validatedDir, nil -} - -func (e *CommentParser) stringInSlice(a string, list []string) bool { - for _, b := range list { - if b == a { - return true - } - } - return false -} - -func (e *CommentParser) errMarkdown(errMsg string, cmd string, flagSet *pflag.FlagSet) string { - return fmt.Sprintf("```\nError: %s.\nUsage of %s:\n%s```", errMsg, cmd, flagSet.FlagUsagesWrapped(usagesCols)) -} - -func (e *CommentParser) HelpComment(applyDisabled bool) string { - buf := &bytes.Buffer{} - tmpl := template.Must(template.New("").Parse(helpCommentTemplate)) - if err := tmpl.Execute(buf, struct { - ApplyDisabled bool - }{ - ApplyDisabled: applyDisabled, - }); err != nil { - return fmt.Sprintf("Failed to render template, this is a bug: %v", err) - } - return buf.String() -} - -var helpCommentTemplate = "```cmake\n" + - `atlantis -Terraform Pull Request Automation - -Usage: - atlantis [options] -- [terraform options] - -Examples: - # run plan in the root directory passing the -target flag to terraform - atlantis plan -d . -- -target=resource - {{- if not .ApplyDisabled }} - - # apply all unapplied plans from this pull request - atlantis apply - - # apply the plan for the root directory and staging workspace - atlantis apply -d . -w staging -{{- end }} - -Commands: - plan Runs 'terraform plan' for the changes in this pull request. - To plan a specific project, use the -d, -w and -p flags. -{{- if not .ApplyDisabled }} - apply Runs 'terraform apply' on all unapplied plans from this pull request. - To only apply a specific plan, use the -d, -w and -p flags. -{{- end }} - unlock Removes all atlantis locks and discards all plans for this PR. - To unlock a specific plan you can use the Atlantis UI. - version Print the output of 'terraform version' - help View help. - -Flags: - -h, --help help for atlantis - -Use "atlantis [command] --help" for more information about a command.` + - "\n```" - -// DidYouMeanAtlantisComment is the comment we add to the pull request when -// someone runs a command with terraform instead of atlantis. -var DidYouMeanAtlantisComment = "Did you mean to use `atlantis` instead of `terraform`?" - -// UnlockUsage is the comment we add to the pull request when someone runs -// `atlantis unlock` with flags. - -var UnlockUsage = "`Usage of unlock:`\n\n ```cmake\n" + - `atlantis unlock - - Unlocks the entire PR and discards all plans in this PR. - Arguments or flags are not supported at the moment. - If you need to unlock a specific project please use the atlantis UI.` + - "\n```" diff --git a/server/legacy/events/comment_parser_test.go b/server/legacy/events/comment_parser_test.go deleted file mode 100644 index 91a20c317..000000000 --- a/server/legacy/events/comment_parser_test.go +++ /dev/null @@ -1,797 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
-// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "fmt" - "strings" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -var commentParser = events.CommentParser{ - GithubUser: "github-user", -} - -func TestParse_Ignored(t *testing.T) { - ignoreComments := []string{ - "", - "a", - "abc", - "atlantis plan\nbut with newlines", - "terraform plan\nbut with newlines", - "This shouldn't error, but it does.", - } - for _, c := range ignoreComments { - r := commentParser.Parse(c, models.Github) - Assert(t, r.Ignore, "expected Ignore to be true for comment %q", c) - } -} - -func TestParse_HelpResponse(t *testing.T) { - helpComments := []string{ - "run", - "atlantis", - "@github-user", - "atlantis help", - "atlantis --help", - "atlantis -h", - "atlantis help something else", - "atlantis help plan", - } - for _, c := range helpComments { - r := commentParser.Parse(c, models.Github) - Equals(t, commentParser.HelpComment(false), r.CommentResponse) - } -} - -func TestParse_HelpResponseWithApplyDisabled(t *testing.T) { - helpComments := []string{ - "run", - "atlantis", - "@github-user", - "atlantis help", - "atlantis --help", - "atlantis -h", - "atlantis help something else", - "atlantis help plan", - } - for _, c := range helpComments { - commentParser.ApplyDisabled = true - r := commentParser.Parse(c, models.Github) - Equals(t, commentParser.HelpComment(true), r.CommentResponse) - } -} - -func TestParse_UnusedArguments(t *testing.T) { - t.Log("if there are unused flags we return an error") - cases := []struct { - Command command.Name - Args string - Unused string - }{ - { - command.Plan, - "-d . arg", - "arg", - }, - { - command.Plan, - "arg -d .", - "arg", - }, - { - command.Plan, - "arg", - "arg", - }, - { - command.Plan, - "arg arg2", - "arg arg2", - }, - { - command.Plan, - "-d . arg -w kjj arg2", - "arg arg2", - }, - { - command.Apply, - "-d . arg", - "arg", - }, - { - command.Apply, - "arg arg2", - "arg arg2", - }, - { - command.Apply, - "arg arg2 -- useful", - "arg arg2", - }, - { - command.Apply, - "arg arg2 --", - "arg arg2", - }, - } - for _, c := range cases { - comment := fmt.Sprintf("atlantis %s %s", c.Command.String(), c.Args) - t.Run(comment, func(t *testing.T) { - r := commentParser.Parse(comment, models.Github) - var usage string - switch c.Command { - case command.Plan: - usage = PlanUsage - case command.Apply: - usage = ApplyUsage - } - Equals(t, fmt.Sprintf("```\nError: unknown argument(s) – %s.\n%s```", c.Unused, usage), r.CommentResponse) - }) - } -} - -func TestParse_UnknownShorthandFlag(t *testing.T) { - comment := "atlantis unlock -d ." - r := commentParser.Parse(comment, models.Github) - - Equals(t, UnlockUsage, r.CommentResponse) -} - -func TestParse_DidYouMeanAtlantis(t *testing.T) { - t.Log("given a comment that should result in a 'did you mean atlantis'" + - "response, should set CommentParseResult.CommentResult") - comments := []string{ - "terraform", - "terraform help", - "terraform --help", - "terraform -h", - "terraform plan", - "terraform apply", - "terraform plan -w workspace -d . 
-- test", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - Assert(t, r.CommentResponse == events.DidYouMeanAtlantisComment, - "For comment %q expected CommentResponse==%q but got %q", c, events.DidYouMeanAtlantisComment, r.CommentResponse) - } -} - -func TestParse_InvalidCommand(t *testing.T) { - t.Log("given a comment with an invalid atlantis command, should return " + - "a warning.") - comments := []string{ - "atlantis paln", - "atlantis Plan", - "atlantis appely apply", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - exp := fmt.Sprintf("```\nError: unknown command %q.\nRun 'atlantis --help' for usage.\n```", strings.Fields(c)[1]) - Assert(t, r.CommentResponse == exp, - "For comment %q expected CommentResponse==%q but got %q", c, exp, r.CommentResponse) - } -} - -func TestParse_InvalidLogLevel(t *testing.T) { - comments := []string{ - "atlantis plan -l warnz", - "atlantis plan --log-level strace", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - exp := fmt.Sprintf("```\nError: invalid log level: %q.\n", strings.Fields(c)[3]) + PlanUsage + "```" - for i, c := range exp { - if string(c) != string(r.CommentResponse[i]) { - fmt.Println(i, string(c)) - } - } - Assert(t, r.CommentResponse == exp, - "For comment %q expected CommentResponse==%q but got %q", c, exp, r.CommentResponse) - } -} - -func TestParse_SubcommandUsage(t *testing.T) { - t.Log("given a comment asking for the usage of a subcommand should " + - "return help") - comments := []string{ - "atlantis plan -h", - "atlantis plan --help", - "atlantis apply -h", - "atlantis apply --help", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - exp := "Usage of " + strings.Fields(c)[1] - Assert(t, strings.Contains(r.CommentResponse, exp), - "For comment %q expected CommentResponse %q to contain %q", c, r.CommentResponse, exp) - Assert(t, !strings.Contains(r.CommentResponse, "Error:"), - "For comment %q expected CommentResponse %q to not contain %q", c, r.CommentResponse, "Error: ") - } -} - -func TestParse_InvalidFlags(t *testing.T) { - t.Log("given a comment with a valid atlantis command but invalid" + - " flags, should return a warning and the proper usage") - cases := []struct { - comment string - exp string - }{ - { - "atlantis plan -e", - "Error: unknown shorthand flag: 'e' in -e", - }, - { - "atlantis plan --abc", - "Error: unknown flag: --abc", - }, - { - "atlantis apply -e", - "Error: unknown shorthand flag: 'e' in -e", - }, - { - "atlantis apply --abc", - "Error: unknown flag: --abc", - }, - } - for _, c := range cases { - r := commentParser.Parse(c.comment, models.Github) - Assert(t, strings.Contains(r.CommentResponse, c.exp), - "For comment %q expected CommentResponse %q to contain %q", c.comment, r.CommentResponse, c.exp) - Assert(t, strings.Contains(r.CommentResponse, "Usage of "), - "For comment %q expected CommentResponse %q to contain %q", c.comment, r.CommentResponse, "Usage of ") - } -} - -func TestParse_RelativeDirPath(t *testing.T) { - t.Log("if -d is used with a relative path, should return an error") - comments := []string{ - "atlantis plan -d ..", - "atlantis apply -d ..", - // These won't return an error because we prepend with . when parsing. 
- //"atlantis plan -d /..", - //"atlantis apply -d /..", - "atlantis plan -d ./..", - "atlantis apply -d ./..", - "atlantis plan -d a/b/../../..", - "atlantis apply -d a/../..", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - exp := "Error: using a relative path" - Assert(t, strings.Contains(r.CommentResponse, exp), - "For comment %q expected CommentResponse %q to contain %q", c, r.CommentResponse, exp) - } -} - -// If there's multiple lines but it's whitespace, allow the command. This -// occurs when you copy and paste via GitHub. -func TestParse_Multiline(t *testing.T) { - comments := []string{ - "atlantis plan\n", - "atlantis plan\n\n", - "atlantis plan\r\n", - "atlantis plan\r\n\r\n", - } - for _, comment := range comments { - t.Run(comment, func(t *testing.T) { - r := commentParser.Parse(comment, models.Github) - Equals(t, "", r.CommentResponse) - Equals(t, &command.Comment{ - RepoRelDir: "", - Flags: nil, - Name: command.Plan, - Workspace: "", - ProjectName: "", - }, r.Command) - }) - } -} - -func TestParse_InvalidWorkspace(t *testing.T) { - t.Log("if -w is used with '..' or '/', should return an error") - comments := []string{ - "atlantis plan -w ..", - "atlantis apply -w ..", - "atlantis plan -w /", - "atlantis apply -w /", - "atlantis plan -w ..abc", - "atlantis apply -w abc..", - "atlantis plan -w abc..abc", - "atlantis apply -w ../../../etc/passwd", - } - for _, c := range comments { - r := commentParser.Parse(c, models.Github) - exp := "Error: invalid workspace" - Assert(t, strings.Contains(r.CommentResponse, exp), - "For comment %q expected CommentResponse %q to contain %q", c, r.CommentResponse, exp) - } -} - -func TestParse_UsingProjectAtSameTimeAsWorkspaceOrDir(t *testing.T) { - cases := []string{ - "atlantis plan -w workspace -p project", - "atlantis plan -d dir -p project", - "atlantis plan -d dir -w workspace -p project", - } - for _, c := range cases { - t.Run(c, func(t *testing.T) { - r := commentParser.Parse(c, models.Github) - exp := "Error: cannot use -p/--project at same time as -d/--dir or -w/--workspace" - Assert(t, strings.Contains(r.CommentResponse, exp), - "For comment %q expected CommentResponse %q to contain %q", c, r.CommentResponse, exp) - }) - } -} - -func TestParse_Parsing(t *testing.T) { - cases := []struct { - flags string - expWorkspace string - expDir string - expExtraArgs string - expProject string - expLogLevel string - }{ - // Test defaults. - { - "", - "", - "", - "", - "", - "", - }, - // Test each short flag individually. - { - "-w workspace", - "workspace", - "", - "", - "", - "", - }, - { - "-d dir", - "", - "dir", - "", - "", - "", - }, - { - "-p project", - "", - "", - "", - "project", - "", - }, - { - "-l trace", - "", - "", - "", - "", - "trace", - }, - // Test each long flag individually. - { - "--workspace workspace", - "workspace", - "", - "", - "", - "", - }, - { - "--dir dir", - "", - "dir", - "", - "", - "", - }, - { - "--project project", - "", - "", - "", - "project", - "", - }, - { - "--log-level TRACE", - "", - "", - "", - "", - "trace", - }, - // Test all of them with different permutations. - { - "-w workspace -d dir", - "workspace", - "dir", - "", - "", - "", - }, - { - "-w workspace -d dir -l warn", - "workspace", - "dir", - "", - "", - "warn", - }, - { - "-w workspace -- -d dir", - "workspace", - "", - "-d dir", - "", - "", - }, - // Test the extra args parsing. 
- { - "--", - "", - "", - "", - "", - "", - }, - { - "-w workspace -d dir -- arg one -two --three &&", - "workspace", - "dir", - "arg one -two --three &&", - "", - "", - }, - // Test whitespace. - { - "\t-w\tworkspace\t-d\tdir\t--\targ\tone\t-two\t--three\t&&", - "workspace", - "dir", - "arg one -two --three &&", - "", - "", - }, - { - " -w workspace -d dir -- arg one -two --three &&", - "workspace", - "dir", - "arg one -two --three &&", - "", - "", - }, - // Test that the dir string is normalized. - { - "-d /", - "", - ".", - "", - "", - "", - }, - { - "-d /adir", - "", - "adir", - "", - "", - "", - }, - { - "-d .", - "", - ".", - "", - "", - "", - }, - { - "-d ./", - "", - ".", - "", - "", - "", - }, - { - "-d ./adir", - "", - "adir", - "", - "", - "", - }, - { - "-d \"dir with space\"", - "", - "dir with space", - "", - "", - "", - }, - } - - for _, test := range cases { - for _, cmdName := range []string{"plan", "apply"} { - comment := fmt.Sprintf("atlantis %s %s", cmdName, test.flags) - t.Run(comment, func(t *testing.T) { - r := commentParser.Parse(comment, models.Github) - Assert(t, r.CommentResponse == "", "CommentResponse should have been empty but was %q for comment %q", r.CommentResponse, comment) - Assert(t, test.expDir == r.Command.RepoRelDir, "exp dir to equal %q but was %q for comment %q", test.expDir, r.Command.RepoRelDir, comment) - Assert(t, test.expWorkspace == r.Command.Workspace, "exp workspace to equal %q but was %q for comment %q", test.expWorkspace, r.Command.Workspace, comment) - Assert(t, test.expLogLevel == r.Command.LogLevel, "exp log level to equal %q but was %q for comment %q", test.expLogLevel, r.Command.LogLevel, comment) - actExtraArgs := strings.Join(r.Command.Flags, " ") - Assert(t, test.expExtraArgs == actExtraArgs, "exp extra args to equal %v but got %v for comment %q", test.expExtraArgs, actExtraArgs, comment) - if cmdName == "plan" { - Assert(t, r.Command.Name == command.Plan, "did not parse comment %q as plan command", comment) - } - if cmdName == "apply" { - Assert(t, r.Command.Name == command.Apply, "did not parse comment %q as apply command", comment) - } - }) - } - } -} - -func TestBuildPlanApplyVersionComment(t *testing.T) { - cases := []struct { - repoRelDir string - workspace string - project string - commentArgs []string - expPlanFlags string - expApplyFlags string - expVersionFlags string - }{ - { - repoRelDir: ".", - workspace: "default", - project: "", - commentArgs: nil, - expPlanFlags: "-d .", - expApplyFlags: "-d .", - expVersionFlags: "-d .", - }, - { - repoRelDir: "dir", - workspace: "default", - project: "", - commentArgs: nil, - expPlanFlags: "-d dir", - expApplyFlags: "-d dir", - expVersionFlags: "-d dir", - }, - { - repoRelDir: ".", - workspace: "workspace", - project: "", - commentArgs: nil, - expPlanFlags: "-w workspace", - expApplyFlags: "-w workspace", - expVersionFlags: "-w workspace", - }, - { - repoRelDir: "dir", - workspace: "workspace", - project: "", - commentArgs: nil, - expPlanFlags: "-d dir -w workspace", - expApplyFlags: "-d dir -w workspace", - expVersionFlags: "-d dir -w workspace", - }, - { - repoRelDir: ".", - workspace: "default", - project: "project", - commentArgs: nil, - expPlanFlags: "-p project", - expApplyFlags: "-p project", - expVersionFlags: "-p project", - }, - { - repoRelDir: "dir", - workspace: "workspace", - project: "project", - commentArgs: nil, - expPlanFlags: "-p project", - expApplyFlags: "-p project", - expVersionFlags: "-p project", - }, - { - repoRelDir: ".", - workspace: "default", - 
project: "", - commentArgs: []string{`"arg1"`, `"arg2"`}, - expPlanFlags: "-d . -- arg1 arg2", - expApplyFlags: "-d .", - expVersionFlags: "-d .", - }, - { - repoRelDir: "dir", - workspace: "workspace", - project: "", - commentArgs: []string{`"arg1"`, `"arg2"`, `arg3`}, - expPlanFlags: "-d dir -w workspace -- arg1 arg2 arg3", - expApplyFlags: "-d dir -w workspace", - expVersionFlags: "-d dir -w workspace", - }, - { - repoRelDir: "dir with spaces", - workspace: "default", - project: "", - expPlanFlags: "-d \"dir with spaces\"", - expApplyFlags: "-d \"dir with spaces\"", - expVersionFlags: "-d \"dir with spaces\"", - }, - } - - for _, c := range cases { - t.Run(c.expPlanFlags, func(t *testing.T) { - for _, cmd := range []command.Name{command.Plan, command.Apply, command.Version} { - switch cmd { - case command.Plan: - actComment := commentParser.BuildPlanComment(c.repoRelDir, c.workspace, c.project, c.commentArgs) - Equals(t, fmt.Sprintf("atlantis plan %s", c.expPlanFlags), actComment) - case command.Apply: - actComment := commentParser.BuildApplyComment(c.repoRelDir, c.workspace, c.project) - Equals(t, fmt.Sprintf("atlantis apply %s", c.expApplyFlags), actComment) - case command.Version: - actComment := commentParser.BuildVersionComment(c.repoRelDir, c.workspace, c.project) - Equals(t, fmt.Sprintf("atlantis version %s", c.expVersionFlags), actComment) - } - } - }) - } -} - -func TestCommentParser_HelpComment(t *testing.T) { - cases := []struct { - applyDisabled bool - expectResult string - }{ - { - applyDisabled: false, - expectResult: "```cmake\n" + - `atlantis -Terraform Pull Request Automation - -Usage: - atlantis [options] -- [terraform options] - -Examples: - # run plan in the root directory passing the -target flag to terraform - atlantis plan -d . -- -target=resource - - # apply all unapplied plans from this pull request - atlantis apply - - # apply the plan for the root directory and staging workspace - atlantis apply -d . -w staging - -Commands: - plan Runs 'terraform plan' for the changes in this pull request. - To plan a specific project, use the -d, -w and -p flags. - apply Runs 'terraform apply' on all unapplied plans from this pull request. - To only apply a specific plan, use the -d, -w and -p flags. - unlock Removes all atlantis locks and discards all plans for this PR. - To unlock a specific plan you can use the Atlantis UI. - version Print the output of 'terraform version' - help View help. - -Flags: - -h, --help help for atlantis - -Use "atlantis [command] --help" for more information about a command.` + - "\n```", - }, - { - applyDisabled: true, - expectResult: "```cmake\n" + - `atlantis -Terraform Pull Request Automation - -Usage: - atlantis [options] -- [terraform options] - -Examples: - # run plan in the root directory passing the -target flag to terraform - atlantis plan -d . -- -target=resource - -Commands: - plan Runs 'terraform plan' for the changes in this pull request. - To plan a specific project, use the -d, -w and -p flags. - unlock Removes all atlantis locks and discards all plans for this PR. - To unlock a specific plan you can use the Atlantis UI. - version Print the output of 'terraform version' - help View help. 
- -Flags: - -h, --help help for atlantis - -Use "atlantis [command] --help" for more information about a command.` + - "\n```", - }, - } - - for _, c := range cases { - t.Run(fmt.Sprintf("ApplyDisabled: %v", c.applyDisabled), func(t *testing.T) { - Equals(t, commentParser.HelpComment(c.applyDisabled), c.expectResult) - }) - } -} - -func TestParse_VCSUsername(t *testing.T) { - cp := events.CommentParser{ - GithubUser: "gh", - } - cases := []struct { - vcs models.VCSHostType - user string - }{ - { - vcs: models.Github, - user: "gh", - }, - } - - for _, c := range cases { - t.Run(c.vcs.String(), func(t *testing.T) { - r := cp.Parse(fmt.Sprintf("@%s %s", c.user, "help"), c.vcs) - Equals(t, commentParser.HelpComment(false), r.CommentResponse) - }) - } -} - -var PlanUsage = `Usage of plan: - -d, --dir string Which directory to run plan in relative to root of repo, - ex. 'child/dir'. - -l, --log-level string Which log level to use when emitting terraform results, - ex. 'trace'. - -p, --project string Which project to run plan for. Refers to the name of the - project configured in atlantis.yaml. Cannot be used at - same time as workspace or dir flags. - -w, --workspace string Switch to this Terraform workspace before planning. -` - -var ApplyUsage = `Usage of apply: - -d, --dir string Apply the plan for this directory, relative to root of - repo, ex. 'child/dir'. - -f, --force Force Atlantis to ignore apply requirements. - -l, --log-level string Which log level to use when emitting terraform results, - ex. 'trace'. - -p, --project string Apply the plan for this project. Refers to the name of - the project configured in atlantis.yaml. Cannot be used - at same time as workspace or dir flags. - -w, --workspace string Apply the plan for this Terraform workspace. -` - -var UnlockUsage = "`Usage of unlock:`\n\n ```cmake\n" + - `atlantis unlock - - Unlocks the entire PR and discards all plans in this PR. - Arguments or flags are not supported at the moment. - If you need to unlock a specific project please use the atlantis UI.` + - "\n```" diff --git a/server/legacy/events/commit_status_updater.go b/server/legacy/events/commit_status_updater.go deleted file mode 100644 index 5a1e80f98..000000000 --- a/server/legacy/events/commit_status_updater.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "context" - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_vcs_status_updater.go VCSStatusUpdater -type VCSStatusUpdater interface { - // UpdateCombined updates the combined status of the head commit of pull. - // A combined status represents all the projects modified in the pull. 
- UpdateCombined(ctx context.Context, repo models.Repo, pull models.PullRequest, status models.VCSStatus, cmdName fmt.Stringer, statusID string, output string) (string, error) - // UpdateCombinedCount updates the combined status to reflect the - // numSuccess out of numTotal. - UpdateCombinedCount(ctx context.Context, repo models.Repo, pull models.PullRequest, status models.VCSStatus, cmdName fmt.Stringer, numSuccess int, numTotal int, statusID string) (string, error) - // UpdateProject sets the commit status for the project represented by - // ctx. - UpdateProject(ctx context.Context, projectCtx command.ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) -} diff --git a/server/legacy/events/db_updater.go b/server/legacy/events/db_updater.go deleted file mode 100644 index 235374288..000000000 --- a/server/legacy/events/db_updater.go +++ /dev/null @@ -1,25 +0,0 @@ -package events - -import ( - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -type DBUpdater struct { - DB *db.BoltDB -} - -func (c *DBUpdater) updateDB(_ *command.Context, pull models.PullRequest, results []command.ProjectResult) (models.PullStatus, error) { - // Filter out results that errored due to the directory not existing. We - // don't store these in the database because they would never be "apply-able" - // and so the pull request would always have errors. - var filtered []command.ProjectResult - for _, r := range results { - if _, ok := r.Error.(DirNotExistErr); ok { - continue - } - filtered = append(filtered, r) - } - return c.DB.UpdatePullWithResults(pull, filtered) -} diff --git a/server/legacy/events/delete_lock_command.go b/server/legacy/events/delete_lock_command.go deleted file mode 100644 index 3a011d85a..000000000 --- a/server/legacy/events/delete_lock_command.go +++ /dev/null @@ -1,90 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand - -// DeleteLockCommand is the first step after a command request has been parsed. -type DeleteLockCommand interface { - DeleteLock(id string) (*models.ProjectLock, error) - DeleteLocksByPull(repoFullName string, pullNum int) (int, error) -} - -// DefaultDeleteLockCommand deletes a specific lock after a request from the LocksController. 
-type DefaultDeleteLockCommand struct { - Locker locking.Locker - Logger logging.Logger - WorkingDir WorkingDir - WorkingDirLocker WorkingDirLocker - DB *db.BoltDB -} - -// DeleteLock handles deleting the lock at id -func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) { - lock, err := l.Locker.Unlock(id) - if err != nil { - return nil, err - } - if lock == nil { - return nil, nil - } - - l.deleteWorkingDir(*lock) - return lock, nil -} - -// DeleteLocksByPull handles deleting all locks for the pull request -func (l *DefaultDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) { - locks, err := l.Locker.UnlockByPull(repoFullName, pullNum) - numLocks := len(locks) - if err != nil { - return numLocks, err - } - if numLocks == 0 { - return numLocks, nil - } - - for i := 0; i < numLocks; i++ { - lock := locks[i] - l.deleteWorkingDir(lock) - } - - return numLocks, nil -} - -func (l *DefaultDeleteLockCommand) deleteWorkingDir(lock models.ProjectLock) { - // NOTE: Because BaseRepo was added to the PullRequest model later, previous - // installations of Atlantis will have locks in their DB that do not have - // this field on PullRequest. We skip deleting the working dir in this case. - if lock.Pull.BaseRepo == (models.Repo{}) { - return - } - unlock, err := l.WorkingDirLocker.TryLock(lock.Pull.BaseRepo.FullName, lock.Pull.Num, lock.Workspace) - logFields := map[string]interface{}{ - "repository": lock.Pull.BaseRepo.FullName, - "pull-num": lock.Pull.Num, - "workspace": lock.Workspace, - } - if err != nil { - l.Logger.Error( - fmt.Sprintf("unable to obtain working dir lock when trying to delete old plans: %s", err), - logFields, - ) - } else { - defer unlock() - // nolint: vetshadow - if err := l.WorkingDir.DeleteForWorkspace(lock.Pull.BaseRepo, lock.Pull, lock.Workspace); err != nil { - l.Logger.Error(fmt.Sprintf("unable to delete workspace: %s", err), logFields) - } - } - if err := l.DB.UpdateProjectStatus(lock.Pull, lock.Workspace, lock.Project.Path, models.DiscardedPlanStatus); err != nil { - l.Logger.Error(fmt.Sprintf("unable to delete project status: %s", err), logFields) - } -} diff --git a/server/legacy/events/delete_lock_command_test.go b/server/legacy/events/delete_lock_command_test.go deleted file mode 100644 index 724b6b53b..000000000 --- a/server/legacy/events/delete_lock_command_test.go +++ /dev/null @@ -1,135 +0,0 @@ -package events_test - -import ( - "errors" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/db" - lockmocks "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestDeleteLock_LockerErrorf(t *testing.T) { - t.Log("If there is an error retrieving the lock, we return the error") - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.Unlock("id")).ThenReturn(nil, errors.New("err")) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - } - _, err := dlc.DeleteLock("id") - ErrEquals(t, "err", err) -} - -func TestDeleteLock_None(t *testing.T) { - t.Log("If there is no lock at that ID we return nil") - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.Unlock("id")).ThenReturn(nil, nil) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - } - lock, err := dlc.DeleteLock("id") - Ok(t, err) - Assert(t, lock == nil, "lock was not nil") -} - -func TestDeleteLock_OldFormat(t *testing.T) { - t.Log("If the lock doesn't have BaseRepo set it is deleted successfully") - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.Unlock("id")).ThenReturn(&models.ProjectLock{}, nil) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - } - lock, err := dlc.DeleteLock("id") - Ok(t, err) - Assert(t, lock != nil, "lock was nil") -} - -func TestDeleteLock_Success(t *testing.T) { - t.Log("Delete lock deletes successfully the working dir") - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.Unlock("id")).ThenReturn(&models.ProjectLock{}, nil) - workingDir := events.NewMockWorkingDir() - workingDirLocker := events.NewDefaultWorkingDirLocker() - pull := models.PullRequest{ - BaseRepo: models.Repo{FullName: "owner/repo"}, - } - When(l.Unlock("id")).ThenReturn(&models.ProjectLock{ - Pull: pull, - Workspace: "workspace", - Project: models.Project{ - Path: "path", - RepoFullName: "owner/repo", - }, - }, nil) - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - DB: db, - WorkingDirLocker: workingDirLocker, - WorkingDir: workingDir, - } - lock, err := dlc.DeleteLock("id") - Ok(t, err) - Assert(t, lock != nil, "lock was nil") - workingDir.VerifyWasCalledOnce().DeleteForWorkspace(pull.BaseRepo, pull, "workspace") -} - -func TestDeleteLocksByPull_LockerErrorf(t *testing.T) { - t.Log("If there is an error retrieving the lock, returned a failed status") - repoName := "reponame" - pullNum := 2 - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.UnlockByPull(repoName, pullNum)).ThenReturn(nil, errors.New("err")) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) - ErrEquals(t, "err", err) -} - -func TestDeleteLocksByPull_None(t *testing.T) { - t.Log("If there is no lock at that ID there is no error") - repoName := "reponame" - pullNum := 2 - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.UnlockByPull(repoName, pullNum)).ThenReturn([]models.ProjectLock{}, nil) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) - Ok(t, err) -} - -func TestDeleteLocksByPull_OldFormat(t *testing.T) { - t.Log("If the lock doesn't have BaseRepo set it is deleted successfully") - repoName := "reponame" - pullNum := 2 - RegisterMockTestingT(t) - l := lockmocks.NewMockLocker() - When(l.UnlockByPull(repoName, 
pullNum)).ThenReturn([]models.ProjectLock{{}}, nil) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopCtxLogger(t), - } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) - Ok(t, err) -} diff --git a/server/legacy/events/drainer.go b/server/legacy/events/drainer.go deleted file mode 100644 index 3ac4238fa..000000000 --- a/server/legacy/events/drainer.go +++ /dev/null @@ -1,63 +0,0 @@ -package events - -import ( - "sync" -) - -// Drainer is used to gracefully shut down atlantis by waiting for in-progress -// operations to complete. -type Drainer struct { - status DrainStatus - mutex sync.Mutex - wg sync.WaitGroup -} - -type DrainStatus struct { - // ShuttingDown is whether we are in the progress of shutting down. - ShuttingDown bool - // InProgressOps is the number of operations currently in progress. - InProgressOps int -} - -// StartOp tries to start a new operation. It returns false if Atlantis is -// shutting down. -func (d *Drainer) StartOp() bool { - d.mutex.Lock() - defer d.mutex.Unlock() - - if d.status.ShuttingDown { - return false - } - d.status.InProgressOps++ - d.wg.Add(1) - return true -} - -// OpDone marks an operation as complete. -func (d *Drainer) OpDone() { - d.mutex.Lock() - defer d.mutex.Unlock() - - d.status.InProgressOps-- - d.wg.Done() - if d.status.InProgressOps < 0 { - // This would be a bug. - d.status.InProgressOps = 0 - } -} - -// ShutdownBlocking sets "shutting down" to true and blocks until there are no -// in progress operations. -func (d *Drainer) ShutdownBlocking() { - // Set the shutdown status. - d.mutex.Lock() - d.status.ShuttingDown = true - d.mutex.Unlock() - - // Block until there are no in-progress ops. - d.wg.Wait() -} - -func (d *Drainer) GetStatus() DrainStatus { - return d.status -} diff --git a/server/legacy/events/drainer_test.go b/server/legacy/events/drainer_test.go deleted file mode 100644 index 98f95f48c..000000000 --- a/server/legacy/events/drainer_test.go +++ /dev/null @@ -1,68 +0,0 @@ -package events_test - -import ( - "context" - "testing" - "time" - - "github.com/runatlantis/atlantis/server/legacy/events" - . "github.com/runatlantis/atlantis/testing" -) - -// Test starting and completing ops. -func TestDrainer(t *testing.T) { - d := events.Drainer{} - - // Starts at 0. - Equals(t, 0, d.GetStatus().InProgressOps) - - // Add 1. - d.StartOp() - Equals(t, 1, d.GetStatus().InProgressOps) - - // Remove 1. - d.OpDone() - Equals(t, 0, d.GetStatus().InProgressOps) - - // Add 2. - d.StartOp() - d.StartOp() - Equals(t, 2, d.GetStatus().InProgressOps) - - // Remove 1. - d.OpDone() - Equals(t, 1, d.GetStatus().InProgressOps) -} - -func TestDrainer_Shutdown(t *testing.T) { - d := events.Drainer{} - d.StartOp() - - shutdown := make(chan bool) - go func() { - d.ShutdownBlocking() - close(shutdown) - }() - - // Sleep to ensure that ShutdownBlocking has been called. - time.Sleep(300 * time.Millisecond) - - // Starting another op should fail. - Equals(t, false, d.StartOp()) - - // Status should be shutting down. - Equals(t, events.DrainStatus{ - ShuttingDown: true, - InProgressOps: 1, - }, d.GetStatus()) - - // Stop the final operation and wait for shutdown to exit. 
- d.OpDone() - timer, cancel := context.WithTimeout(context.Background(), 1*time.Second) - defer cancel() - select { - case <-shutdown: - case <-timer.Done(): - Assert(t, false, "Timer reached without shutdown") - } -} diff --git a/server/legacy/events/event_parser.go b/server/legacy/events/event_parser.go deleted file mode 100644 index 9910e7cff..000000000 --- a/server/legacy/events/event_parser.go +++ /dev/null @@ -1,322 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "fmt" - "path" - "strings" - - "github.com/google/go-github/v45/github" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -const ( - usagesCols = 90 -) - -// PullCommand is a command to run on a pull request. -type PullCommand interface { - // CommandName is the name of the command we're running. - CommandName() command.Name - // IsAutoplan is true if this is an autoplan command vs. a comment command. - IsAutoplan() bool -} - -// PolicyCheckCommand is a policy_check command that is automatically triggered -// after a successful plan command. -type PolicyCheckCommand struct{} - -// CommandName is policy_check. -func (c PolicyCheckCommand) CommandName() command.Name { - return command.PolicyCheck -} - -// IsAutoplan is false for policy_check commands. -func (c PolicyCheckCommand) IsAutoplan() bool { - return false -} - -// AutoplanCommand is a plan command that is automatically triggered when a -// pull request is opened or updated. -type AutoplanCommand struct{} - -// CommandName is plan. -func (c AutoplanCommand) CommandName() command.Name { - return command.Plan -} - -// IsAutoplan is true for autoplan commands (obviously). -func (c AutoplanCommand) IsAutoplan() bool { - return true -} - -// CommentCommand is a command that was triggered by a pull request comment. -type CommentCommand struct { - // RepoRelDir is the path relative to the repo root to run the command in. - // Will never end in "/". If empty then the comment specified no directory. - RepoRelDir string - // Flags are the extra arguments appended to the comment, - // ex. atlantis plan -- -target=resource - Flags []string - // Name is the name of the command the comment specified. - Name command.Name - // ForceApply is true if the command should ignore apply_requirements. - ForceApply bool - // Workspace is the name of the Terraform workspace to run the command in. - // If empty then the comment specified no workspace. - Workspace string - // ProjectName is the name of a project to run the command on. It refers to a - // project specified in an atlantis.yaml file. - // If empty then the comment specified no project. - ProjectName string -} - -// IsForSpecificProject returns true if the command is for a specific dir, workspace -// or project name. Otherwise it's a command like "atlantis plan" or "atlantis -// apply".
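-// For example, "atlantis plan -d dir" or "atlantis plan -p project" targets a specific project, while a bare "atlantis plan" does not.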
-func (c CommentCommand) IsForSpecificProject() bool { - return c.RepoRelDir != "" || c.Workspace != "" || c.ProjectName != "" -} - -// CommandName returns the name of this command. -func (c CommentCommand) CommandName() command.Name { - return c.Name -} - -// IsAutoplan will be false for comment commands. -func (c CommentCommand) IsAutoplan() bool { - return false -} - -// String returns a string representation of the command. -func (c CommentCommand) String() string { - return fmt.Sprintf("command=%q dir=%q workspace=%q project=%q flags=%q", c.Name.String(), c.RepoRelDir, c.Workspace, c.ProjectName, strings.Join(c.Flags, ",")) -} - -// NewCommentCommand constructs a CommentCommand, setting all missing fields to defaults. -func NewCommentCommand(repoRelDir string, flags []string, name command.Name, forceApply bool, workspace string, project string) *command.Comment { - // If repoRelDir was empty we want to keep it that way to indicate that it - // wasn't specified in the comment. - if repoRelDir != "" { - repoRelDir = path.Clean(repoRelDir) - if repoRelDir == "/" { - repoRelDir = "." - } - } - return &command.Comment{ - RepoRelDir: repoRelDir, - Flags: flags, - Name: name, - Workspace: workspace, - ProjectName: project, - ForceApply: forceApply, - } -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_event_parsing.go EventParsing - -// EventParsing parses webhook events from different VCS hosts into their -// respective Atlantis models. -// todo: rename to VCSParsing or the like because this also parses API responses #refactor -// -//nolint:interfacebloat -type EventParsing interface { - // ParseGithubIssueCommentEvent parses GitHub pull request comment events. - // baseRepo is the repo that the pull request will be merged into. - // user is the pull request author. - // pullNum is the number of the pull request that triggered the webhook. - // Deprecated: see events/controllers/github/parser.go - ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) ( - baseRepo models.Repo, user models.User, pullNum int, err error) - - // ParseGithubPull parses the response from the GitHub API endpoint (not - // from a webhook) that returns a pull request. - // pull is the parsed pull request. - // baseRepo is the repo the pull request will be merged into. - // headRepo is the repo the pull request branch is from. - // Deprecated: see converters/github.go - ParseGithubPull(ghPull *github.PullRequest) ( - pull models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) - - // ParseGithubPullEvent parses GitHub pull request events. - // pull is the parsed pull request. - // pullEventType is the type of event, for example opened/closed. - // baseRepo is the repo the pull request will be merged into. - // headRepo is the repo the pull request branch is from. - // user is the pull request author. - // Deprecated: see events/controllers/github/parser.go - ParseGithubPullEvent(pullEvent *github.PullRequestEvent) ( - pull models.PullRequest, pullEventType models.PullRequestEventType, - baseRepo models.Repo, headRepo models.Repo, user models.User, err error) - - // ParseGithubRepo parses the response from the GitHub API endpoint that - // returns a repo into the Atlantis model. - // Deprecated: see converters/github.go - ParseGithubRepo(ghRepo *github.Repository) (models.Repo, error) -} - -// EventParser parses VCS events. 
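-// GithubUser and GithubToken are forwarded to models.NewRepo when parsing repositories; AllowDraftPRs controls whether draft PR events are considered for autoplanning.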
-type EventParser struct { - GithubUser string - GithubToken string - AllowDraftPRs bool -} - -// ParseGithubIssueCommentEvent parses GitHub pull request comment events. -// See EventParsing for return value docs. -func (e *EventParser) ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) (baseRepo models.Repo, user models.User, pullNum int, err error) { - baseRepo, err = e.ParseGithubRepo(comment.Repo) - if err != nil { - return - } - if comment.Comment == nil || comment.Comment.User.GetLogin() == "" { - err = errors.New("comment.user.login is null") - return - } - commenterUsername := comment.Comment.User.GetLogin() - user = models.User{ - Username: commenterUsername, - } - pullNum = comment.Issue.GetNumber() - if pullNum == 0 { - err = errors.New("issue.number is null") - return - } - return -} - -// ParseGithubPullEvent parses GitHub pull request events. -// See EventParsing for return value docs. -func (e *EventParser) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (pull models.PullRequest, pullEventType models.PullRequestEventType, baseRepo models.Repo, headRepo models.Repo, user models.User, err error) { - if pullEvent.PullRequest == nil { - err = errors.New("pull_request is null") - return - } - pull, baseRepo, headRepo, err = e.ParseGithubPull(pullEvent.PullRequest) - if err != nil { - return - } - if pullEvent.Sender == nil { - err = errors.New("sender is null") - return - } - senderUsername := pullEvent.Sender.GetLogin() - if senderUsername == "" { - err = errors.New("sender.login is null") - return - } - - action := pullEvent.GetAction() - // If it's a draft PR we ignore it for auto-planning if configured to do so - // however it's still possible for users to run plan on it manually via a - // comment so if any draft PR is closed we still need to check if we need - // to delete its locks. - if pullEvent.GetPullRequest().GetDraft() && pullEvent.GetAction() != "closed" && !e.AllowDraftPRs { - action = "other" - } - - switch action { - case "opened": - pullEventType = models.OpenedPullEvent - case "ready_for_review": - // when an author takes a PR out of 'draft' state a 'ready_for_review' - // event is triggered. We want atlantis to treat this as a freshly opened PR - pullEventType = models.OpenedPullEvent - case "synchronize": - pullEventType = models.UpdatedPullEvent - case "closed": - pullEventType = models.ClosedPullEvent - default: - pullEventType = models.OtherPullEvent - } - user = models.User{Username: senderUsername} - return -} - -// ParseGithubPull parses the response from the GitHub API endpoint (not -// from a webhook) that returns a pull request. -// See EventParsing for return value docs. 
-func (e *EventParser) ParseGithubPull(pull *github.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) { - commit := pull.Head.GetSHA() - if commit == "" { - err = errors.New("head.sha is null") - return - } - url := pull.GetHTMLURL() - if url == "" { - err = errors.New("html_url is null") - return - } - headBranch := pull.Head.GetRef() - if headBranch == "" { - err = errors.New("head.ref is null") - return - } - baseBranch := pull.Base.GetRef() - if baseBranch == "" { - err = errors.New("base.ref is null") - return - } - - authorUsername := pull.User.GetLogin() - if authorUsername == "" { - err = errors.New("user.login is null") - return - } - num := pull.GetNumber() - if num == 0 { - err = errors.New("number is null") - return - } - - baseRepo, err = e.ParseGithubRepo(pull.Base.Repo) - if err != nil { - return - } - headRepo, err = e.ParseGithubRepo(pull.Head.Repo) - if err != nil { - return - } - - pullState := models.ClosedPullState - closedAt := pull.GetClosedAt() - updatedAt := pull.GetUpdatedAt() - if pull.GetState() == "open" { - pullState = models.OpenPullState - } - - pullModel = models.PullRequest{ - Author: authorUsername, - HeadBranch: headBranch, - HeadCommit: commit, - URL: url, - Num: num, - State: pullState, - BaseRepo: baseRepo, - BaseBranch: baseBranch, - ClosedAt: closedAt, - UpdatedAt: updatedAt, - } - return -} - -// ParseGithubRepo parses the response from the GitHub API endpoint that -// returns a repo into the Atlantis model. -// See EventParsing for return value docs. -func (e *EventParser) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, error) { - return models.NewRepo(models.Github, ghRepo.GetFullName(), ghRepo.GetCloneURL(), e.GithubUser, e.GithubToken) -} diff --git a/server/legacy/events/event_parser_test.go b/server/legacy/events/event_parser_test.go deleted file mode 100644 index a1a5aee80..000000000 --- a/server/legacy/events/event_parser_test.go +++ /dev/null @@ -1,110 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - . "github.com/runatlantis/atlantis/testing" -) - -func TestNewCommand_CleansDir(t *testing.T) { - cases := []struct { - RepoRelDir string - ExpDir string - }{ - { - "", - "", - }, - { - "/", - ".", - }, - { - "./", - ".", - }, - // We rely on our callers to not pass in relative dirs. 
- { - "..", - "..", - }, - } - - for _, c := range cases { - t.Run(c.RepoRelDir, func(t *testing.T) { - cmd := events.NewCommentCommand(c.RepoRelDir, nil, command.Plan, false, "workspace", "") - Equals(t, c.ExpDir, cmd.RepoRelDir) - }) - } -} - -func TestNewCommand_EmptyDirWorkspaceProject(t *testing.T) { - cmd := events.NewCommentCommand("", nil, command.Plan, false, "", "") - Equals(t, command.Comment{ - RepoRelDir: "", - Flags: nil, - Name: command.Plan, - Workspace: "", - ProjectName: "", - }, *cmd) -} - -func TestNewCommand_AllFieldsSet(t *testing.T) { - cmd := events.NewCommentCommand("dir", []string{"a", "b"}, command.Plan, true, "workspace", "project") - Equals(t, command.Comment{ - Workspace: "workspace", - RepoRelDir: "dir", - Flags: []string{"a", "b"}, - ForceApply: true, - Name: command.Plan, - ProjectName: "project", - }, *cmd) -} - -func TestAutoplanCommand_CommandName(t *testing.T) { - Equals(t, command.Plan, (events.AutoplanCommand{}).CommandName()) -} - -func TestAutoplanCommand_IsAutoplan(t *testing.T) { - Equals(t, true, (events.AutoplanCommand{}).IsAutoplan()) -} - -func TestCommentCommand_CommandName(t *testing.T) { - Equals(t, command.Plan, (command.Comment{ - Name: command.Plan, - }).CommandName()) - Equals(t, command.Apply, (command.Comment{ - Name: command.Apply, - }).CommandName()) -} - -func TestCommentCommand_IsAutoplan(t *testing.T) { - Equals(t, false, (command.Comment{}).IsAutoplan()) -} - -func TestCommentCommand_String(t *testing.T) { - exp := `command="plan" dir="mydir" workspace="myworkspace" project="myproject" loglevel="trace" flags="flag1,flag2"` - Equals(t, exp, (command.Comment{ - RepoRelDir: "mydir", - Flags: []string{"flag1", "flag2"}, - Name: command.Plan, - Workspace: "myworkspace", - ProjectName: "myproject", - LogLevel: "trace", - }).String()) -} diff --git a/server/legacy/events/github_app_working_dir.go b/server/legacy/events/github_app_working_dir.go deleted file mode 100644 index 705957a8d..000000000 --- a/server/legacy/events/github_app_working_dir.go +++ /dev/null @@ -1,60 +0,0 @@ -package events - -import ( - "fmt" - "strings" - - "github.com/runatlantis/atlantis/server/vcs/provider/github" - - "github.com/mitchellh/go-homedir" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -// GithubAppWorkingDir implements WorkingDir. 
-// It acts as a proxy to an instance of WorkingDir that refreshes the app's token -// before every clone, given Github App tokens expire quickly -type GithubAppWorkingDir struct { - WorkingDir - Credentials vcs.GithubCredentials - GithubHostname string -} - -// Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(log logging.Logger, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { - log.Info("Refreshing git tokens for Github App", map[string]interface{}{ - "repository": headRepo.FullName, - "pull-num": p.Num, - "workspace": workspace, - }) - - token, err := g.Credentials.GetToken() - if err != nil { - return "", false, errors.Wrap(err, "getting github token") - } - - home, err := homedir.Dir() - if err != nil { - return "", false, errors.Wrap(err, "getting home dir to write ~/.git-credentials file") - } - - // https://developer.github.com/apps/building-github-apps/authenticating-with-github-apps/#http-based-git-access-by-an-installation - if err := github.WriteGitCreds("x-access-token", token, g.GithubHostname, home, log, true); err != nil { - return "", false, err - } - - baseRepo := &p.BaseRepo - - // Realistically, this is a super brittle way of supporting clones using gh app installation tokens - // This URL should be built during Repo creation and the struct should be immutable going forward. - // Doing this requires a larger refactor however, and can probably be coupled with supporting > 1 installation - authURL := fmt.Sprintf("://x-access-token:%s", token) - baseRepo.CloneURL = strings.Replace(baseRepo.CloneURL, "://:", authURL, 1) - baseRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, "://:", "://x-access-token:", 1) - headRepo.CloneURL = strings.Replace(headRepo.CloneURL, "://:", authURL, 1) - headRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, "://:", "://x-access-token:", 1) - - return g.WorkingDir.Clone(log, headRepo, p, workspace) -} diff --git a/server/legacy/events/github_app_working_dir_test.go b/server/legacy/events/github_app_working_dir_test.go deleted file mode 100644 index 7cc46f8e3..000000000 --- a/server/legacy/events/github_app_working_dir_test.go +++ /dev/null @@ -1,52 +0,0 @@ -package events_test - -import ( - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events" - eventMocks "github.com/runatlantis/atlantis/server/legacy/events/mocks" - vcsMocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { - workingDir := eventMocks.NewMockWorkingDir() - - credentials := vcsMocks.NewMockGithubCredentials() - - ghAppWorkingDir := events.GithubAppWorkingDir{ - WorkingDir: workingDir, - Credentials: credentials, - GithubHostname: "some-host", - } - - baseRepo, _ := models.NewRepo( - models.Github, - "runatlantis/atlantis", - "https://github.com/runatlantis/atlantis.git", - - // user and token have to be blank otherwise this proxy wouldn't be invoked to begin with - "", - "", - ) - - logger := logging.NewNoopCtxLogger(t) - - headRepo := baseRepo - - modifiedBaseRepo := baseRepo - modifiedBaseRepo.CloneURL = "https://x-access-token:token@github.com/runatlantis/atlantis.git" - modifiedBaseRepo.SanitizedCloneURL = "https://x-access-token:@github.com/runatlantis/atlantis.git" - - When(credentials.GetToken()).ThenReturn("token", nil) - When(workingDir.Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")).ThenReturn( - "", true, nil, - ) - - _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") - - Assert(t, success == true, "clone url mutation error") -} diff --git a/server/legacy/events/instrumented_project_command_builder.go b/server/legacy/events/instrumented_project_command_builder.go deleted file mode 100644 index 074c1e437..000000000 --- a/server/legacy/events/instrumented_project_command_builder.go +++ /dev/null @@ -1,75 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" -) - -type InstrumentedProjectCommandBuilder struct { - ProjectCommandBuilder - Logger logging.Logger -} - -func (b *InstrumentedProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - scope := ctx.Scope.SubScope("builder") - - timer := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer timer.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - projectCmds, err := b.ProjectCommandBuilder.BuildApplyCommands(ctx, comment) - - if err != nil { - executionError.Inc(1) - b.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("Error building apply commands: %s", err)) - } else { - executionSuccess.Inc(1) - } - - return projectCmds, err -} -func (b *InstrumentedProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) { - scope := ctx.Scope.SubScope("builder") - - timer := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer timer.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - projectCmds, err := b.ProjectCommandBuilder.BuildAutoplanCommands(ctx) - - if err != nil { - executionError.Inc(1) - b.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("Error building auto plan commands: %s", err)) - } else { - executionSuccess.Inc(1) - } - - return projectCmds, err -} -func (b *InstrumentedProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - scope := ctx.Scope.SubScope("builder") - - timer := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer timer.Stop() - - executionSuccess := 
scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - projectCmds, err := b.ProjectCommandBuilder.BuildPlanCommands(ctx, comment) - - if err != nil { - executionError.Inc(1) - b.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("Error building plan commands: %s", err)) - } else { - executionSuccess.Inc(1) - } - - return projectCmds, err -} diff --git a/server/legacy/events/instrumented_project_command_context_builder.go b/server/legacy/events/instrumented_project_command_context_builder.go deleted file mode 100644 index d00dd7623..000000000 --- a/server/legacy/events/instrumented_project_command_context_builder.go +++ /dev/null @@ -1,43 +0,0 @@ -package events - -import ( - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/uber-go/tally/v4" -) - -// InstrumentedProjectCommandContextBuilder ensures that project command context contains a scoped stats -// object relevant to the command it applies to. -type InstrumentedProjectCommandContextBuilder struct { - ProjectCommandContextBuilder - // Consciously making this global since it gets flushed periodically anyways - ProjectCounter tally.Counter -} - -// BuildProjectContext builds the context and injects the appropriate command level scope after the fact. -func (cb *InstrumentedProjectCommandContextBuilder) BuildProjectContext( - ctx *command.Context, - cmdName command.Name, - prjCfg valid.MergedProjectCfg, - commentFlags []string, - repoDir string, - contextFlags *command.ContextFlags, -) (projectCmds []command.ProjectContext) { - cb.ProjectCounter.Inc(1) - - cmds := cb.ProjectCommandContextBuilder.BuildProjectContext( - ctx, cmdName, prjCfg, commentFlags, repoDir, contextFlags, - ) - - projectCmds = []command.ProjectContext{} - - for _, cmd := range cmds { - // specifically use the command name in the context instead of the arg - // since we can return multiple commands worth of contexts for a given command name arg - // to effectively pipeline them. 
- cmd.SetScope(cmd.CommandName.String()) - projectCmds = append(projectCmds, cmd) - } - - return -} diff --git a/server/legacy/events/instrumented_project_command_runner.go b/server/legacy/events/instrumented_project_command_runner.go deleted file mode 100644 index 5a3c7afcc..000000000 --- a/server/legacy/events/instrumented_project_command_runner.go +++ /dev/null @@ -1,68 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/metrics" -) - -const ( - Plan = "plan" - PolicyCheck = "policy check" - Apply = "apply" -) - -type InstrumentedProjectCommandRunner struct { - ProjectCommandRunner -} - -func (p *InstrumentedProjectCommandRunner) Plan(ctx command.ProjectContext) command.ProjectResult { - return RunAndEmitStats(Plan, ctx, p.ProjectCommandRunner.Plan) -} - -func (p *InstrumentedProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - return RunAndEmitStats(PolicyCheck, ctx, p.ProjectCommandRunner.PolicyCheck) -} - -func (p *InstrumentedProjectCommandRunner) Apply(ctx command.ProjectContext) command.ProjectResult { - return RunAndEmitStats(Apply, ctx, p.ProjectCommandRunner.Apply) -} - -func RunAndEmitStats(commandName string, ctx command.ProjectContext, execute func(ctx command.ProjectContext) command.ProjectResult) command.ProjectResult { - // ensures we are differentiating between project level command and overall command - ctx.SetScope("project") - - scope := ctx.Scope - logger := ctx.Log - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - executionFailure := scope.Counter(metrics.ExecutionFailureMetric) - - result := execute(ctx) - - if result.Error != nil { - executionError.Inc(1) - logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("Error running %s operation: %s", commandName, result.Error.Error()), map[string]interface{}{"project": ctx.ProjectName}) - return result - } - - if result.Failure != "" { - executionFailure.Inc(1) - logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("Failure running %s operation: %s", commandName, result.Failure), map[string]interface{}{"project": ctx.ProjectName}) - return result - } - - // Log successful policy check results - if commandName == PolicyCheck && result.PolicyCheckSuccess != nil { - logger.InfoContext(ctx.RequestCtx, fmt.Sprintf("Success running %s operation: %s", commandName, result.PolicyCheckSuccess.PolicyCheckOutput), map[string]interface{}{"project": ctx.ProjectName}) - } - logger.InfoContext(ctx.RequestCtx, fmt.Sprintf("%s success. 
output available at: %s", commandName, ctx.Pull.URL), map[string]interface{}{"project": ctx.ProjectName}) - - executionSuccess.Inc(1) - return result -} diff --git a/server/legacy/events/instrumented_pull_closed_executor.go b/server/legacy/events/instrumented_pull_closed_executor.go deleted file mode 100644 index e966d50b3..000000000 --- a/server/legacy/events/instrumented_pull_closed_executor.go +++ /dev/null @@ -1,54 +0,0 @@ -package events - -import ( - "strconv" - - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - "github.com/uber-go/tally/v4" -) - -type InstrumentedPullClosedExecutor struct { - scope tally.Scope - log logging.Logger - cleaner PullCleaner -} - -func NewInstrumentedPullClosedExecutor( - scope tally.Scope, log logging.Logger, cleaner PullCleaner, -) PullCleaner { - return &InstrumentedPullClosedExecutor{ - scope: scope.SubScope("pullclosed.cleanup"), - log: log, - cleaner: cleaner, - } -} - -func (e *InstrumentedPullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullRequest) error { - executionSuccess := e.scope.Counter(metrics.ExecutionSuccessMetric) - executionError := e.scope.Counter(metrics.ExecutionErrorMetric) - executionTime := e.scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - e.log.Info("Initiating cleanup of pull data.", map[string]interface{}{ - "repository": repo.FullName, - "pull-num": strconv.Itoa(pull.Num), - }) - - err := e.cleaner.CleanUpPull(repo, pull) - - if err != nil { - executionError.Inc(1) - e.log.Error("error during cleanup of pull data", map[string]interface{}{ - "repository": repo.FullName, - "pull-num": strconv.Itoa(pull.Num), - "err": err, - }) - return err - } - - executionSuccess.Inc(1) - - return nil -} diff --git a/server/legacy/events/matchers/logging_logger.go b/server/legacy/events/matchers/logging_logger.go deleted file mode 100644 index d43fd90e9..000000000 --- a/server/legacy/events/matchers/logging_logger.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingLogger() logging.Logger { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.Logger))(nil)).Elem())) - var nullValue logging.Logger - return nullValue -} - -func EqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func NotEqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func LoggingLoggerThat(matcher pegomock.ArgumentMatcher) logging.Logger { - pegomock.RegisterMatcher(matcher) - var nullValue logging.Logger - return nullValue -} diff --git a/server/legacy/events/matchers/logging_simplelogging.go b/server/legacy/events/matchers/logging_simplelogging.go deleted file mode 100644 index c3b96f61f..000000000 --- a/server/legacy/events/matchers/logging_simplelogging.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingSimpleLogging() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func NotEqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func LoggingSimpleLoggingThat(matcher pegomock.ArgumentMatcher) logging.SimpleLogging { - pegomock.RegisterMatcher(matcher) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/events/matchers/models_pullrequest.go b/server/legacy/events/matchers/models_pullrequest.go deleted file mode 100644 index 94e36a1ab..000000000 --- a/server/legacy/events/matchers/models_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsPullRequest() models.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequest))(nil)).Elem())) - var nullValue models.PullRequest - return nullValue -} - -func EqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func NotEqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func ModelsPullRequestThat(matcher pegomock.ArgumentMatcher) models.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue models.PullRequest - return nullValue -} diff --git a/server/legacy/events/matchers/models_repo.go b/server/legacy/events/matchers/models_repo.go deleted file mode 100644 index b36c3ee7c..000000000 --- a/server/legacy/events/matchers/models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func NotEqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func ModelsRepoThat(matcher pegomock.ArgumentMatcher) models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/events/matchers/ptr_to_logging_simplelogger.go b/server/legacy/events/matchers/ptr_to_logging_simplelogger.go deleted file mode 100644 index e7c8b942f..000000000 --- a/server/legacy/events/matchers/ptr_to_logging_simplelogger.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyPtrToLoggingSimpleLogger() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqPtrToLoggingSimpleLogger(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/events/mock_workingdir_test.go b/server/legacy/events/mock_workingdir_test.go deleted file mode 100644 index c0a932289..000000000 --- a/server/legacy/events/mock_workingdir_test.go +++ /dev/null @@ -1,376 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: WorkingDir) - -package events - -import ( - pegomock "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockWorkingDir struct { - fail func(message string, callerSkip ...int) -} - -func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { - mock := &MockWorkingDir{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockWorkingDir) Clone(log logging.Logger, headRepo models.Repo, p models.PullRequest, projectCloneDir string) (string, bool, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{log, headRepo, p, projectCloneDir} - result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 bool - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(bool) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetWorkingDir", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockWorkingDir) HasDiverged(log logging.Logger, cloneDir string, baseRepo models.Repo) bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{log, cloneDir, baseRepo} - result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullDir", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p} - result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockWorkingDir) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockWorkingDir) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockWorkingDir) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockWorkingDir struct { - mock *MockWorkingDir - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockWorkingDir) Clone(log logging.Logger, headRepo models.Repo, p models.PullRequest, projectCloneDir string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{log, headRepo, p, projectCloneDir} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) - return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_Clone_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.Logger, models.Repo, models.PullRequest, string) { - log, headRepo, p, projectCloneDir := c.GetAllCapturedArguments() - return log[len(log)-1], headRepo[len(headRepo)-1], p[len(p)-1], projectCloneDir[len(projectCloneDir)-1] -} - -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - _param2 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.PullRequest) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetWorkingDir_OngoingVerification { - params := 
[]pegomock.Param{r, p, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetWorkingDir", params, verifier.timeout) - return &MockWorkingDir_GetWorkingDir_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_GetWorkingDir_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] -} - -func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) HasDiverged(log logging.Logger, cloneDir string, baseRepo models.Repo) *MockWorkingDir_HasDiverged_OngoingVerification { - params := []pegomock.Param{log, cloneDir, baseRepo} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) - return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_HasDiverged_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.Logger, string, models.Repo) { - log, cloneDir, baseRepo := c.GetAllCapturedArguments() - return log[len(log)-1], cloneDir[len(cloneDir)-1], baseRepo[len(baseRepo)-1] -} - -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []string, _param2 []models.Repo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.Repo) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) *MockWorkingDir_GetPullDir_OngoingVerification { - params := []pegomock.Param{r, p} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullDir", params, verifier.timeout) - return &MockWorkingDir_GetPullDir_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_GetPullDir_OngoingVerification struct { - mock *MockWorkingDir - 
methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_GetPullDir_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] -} - -func (c *MockWorkingDir_GetPullDir_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { - params := []pegomock.Param{r, p} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout) - return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_Delete_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] -} - -func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { - params := []pegomock.Param{r, p, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout) - return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] -} - -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = 
param.(models.PullRequest) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/mocks/matchers/command_name.go b/server/legacy/events/mocks/matchers/command_name.go deleted file mode 100644 index 35fdfcc41..000000000 --- a/server/legacy/events/mocks/matchers/command_name.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyCommandName() command.Name { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Name))(nil)).Elem())) - var nullValue command.Name - return nullValue -} - -func EqCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func NotEqCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func CommandNameThat(matcher pegomock.ArgumentMatcher) command.Name { - pegomock.RegisterMatcher(matcher) - var nullValue command.Name - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/command_projectcontext.go b/server/legacy/events/mocks/matchers/command_projectcontext.go deleted file mode 100644 index 8722b7ba3..000000000 --- a/server/legacy/events/mocks/matchers/command_projectcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyCommandProjectContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func CommandProjectContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/context_context.go b/server/legacy/events/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/events/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/events_commentparseresult.go b/server/legacy/events/mocks/matchers/events_commentparseresult.go deleted file mode 100644 index 3ce8aafae..000000000 --- a/server/legacy/events/mocks/matchers/events_commentparseresult.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - events "github.com/runatlantis/atlantis/server/legacy/events" -) - -func AnyEventsCommentParseResult() events.CommentParseResult { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.CommentParseResult))(nil)).Elem())) - var nullValue events.CommentParseResult - return nullValue -} - -func EqEventsCommentParseResult(value events.CommentParseResult) events.CommentParseResult { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue events.CommentParseResult - return nullValue -} - -func NotEqEventsCommentParseResult(value events.CommentParseResult) events.CommentParseResult { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue events.CommentParseResult - return nullValue -} - -func EventsCommentParseResultThat(matcher pegomock.ArgumentMatcher) events.CommentParseResult { - pegomock.RegisterMatcher(matcher) - var nullValue events.CommentParseResult - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/fmt_stringer.go b/server/legacy/events/mocks/matchers/fmt_stringer.go deleted file mode 100644 index f68c94720..000000000 --- a/server/legacy/events/mocks/matchers/fmt_stringer.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - fmt "fmt" -) - -func AnyFmtStringer() fmt.Stringer { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(fmt.Stringer))(nil)).Elem())) - var nullValue fmt.Stringer - return nullValue -} - -func EqFmtStringer(value fmt.Stringer) fmt.Stringer { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue fmt.Stringer - return nullValue -} - -func NotEqFmtStringer(value fmt.Stringer) fmt.Stringer { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue fmt.Stringer - return nullValue -} - -func FmtStringerThat(matcher pegomock.ArgumentMatcher) fmt.Stringer { - pegomock.RegisterMatcher(matcher) - var nullValue fmt.Stringer - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/jobs_pullinfo.go b/server/legacy/events/mocks/matchers/jobs_pullinfo.go deleted file mode 100644 index 27e6b0086..000000000 --- a/server/legacy/events/mocks/matchers/jobs_pullinfo.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -func AnyJobsPullInfof() jobs.PullInfo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(jobs.PullInfo))(nil)).Elem())) - var nullValue jobs.PullInfo - return nullValue -} - -func EqJobsPullInfof(value jobs.PullInfo) jobs.PullInfo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue jobs.PullInfo - return nullValue -} - -func NotEqJobsPullInfof(value jobs.PullInfo) jobs.PullInfo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue jobs.PullInfo - return nullValue -} - -func JobsPullInfoThat(matcher pegomock.ArgumentMatcher) jobs.PullInfo { - pegomock.RegisterMatcher(matcher) - var nullValue jobs.PullInfo - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/logging_logger.go b/server/legacy/events/mocks/matchers/logging_logger.go deleted file mode 100644 index d43fd90e9..000000000 --- a/server/legacy/events/mocks/matchers/logging_logger.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingLogger() logging.Logger { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.Logger))(nil)).Elem())) - var nullValue logging.Logger - return nullValue -} - -func EqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func NotEqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func LoggingLoggerThat(matcher pegomock.ArgumentMatcher) logging.Logger { - pegomock.RegisterMatcher(matcher) - var nullValue logging.Logger - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/logging_simplelogging.go b/server/legacy/events/mocks/matchers/logging_simplelogging.go deleted file mode 100644 index 502456e7c..000000000 --- a/server/legacy/events/mocks/matchers/logging_simplelogging.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingSimpleLogging() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func NotEqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func LoggingSimpleLoggingThat(matcher pegomock.ArgumentMatcher) logging.SimpleLogging { - pegomock.RegisterMatcher(matcher) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/map_of_string_to_string.go b/server/legacy/events/mocks/matchers/map_of_string_to_string.go deleted file mode 100644 index e1683b5df..000000000 --- a/server/legacy/events/mocks/matchers/map_of_string_to_string.go +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" -) - -func AnyMapOfStringToString() map[string]string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(map[string]string))(nil)).Elem())) - var nullValue map[string]string - return nullValue -} - -func EqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} - -func NotEqMapOfStringToString(value map[string]string) map[string]string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue map[string]string - return nullValue -} - -func MapOfStringToStringThat(matcher pegomock.ArgumentMatcher) map[string]string { - pegomock.RegisterMatcher(matcher) - var nullValue map[string]string - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_commandname.go b/server/legacy/events/mocks/matchers/models_commandname.go deleted file mode 100644 index db61aecdc..000000000 --- a/server/legacy/events/mocks/matchers/models_commandname.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsCommandName() command.Name { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Name))(nil)).Elem())) - var nullValue command.Name - return nullValue -} - -func EqModelsCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func NotEqModelsCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func ModelsCommandNameThat(matcher pegomock.ArgumentMatcher) command.Name { - pegomock.RegisterMatcher(matcher) - var nullValue command.Name - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_project.go b/server/legacy/events/mocks/matchers/models_project.go deleted file mode 100644 index 8a78aae18..000000000 --- a/server/legacy/events/mocks/matchers/models_project.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsProject() models.Project { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Project))(nil)).Elem())) - var nullValue models.Project - return nullValue -} - -func EqModelsProject(value models.Project) models.Project { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Project - return nullValue -} - -func NotEqModelsProject(value models.Project) models.Project { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Project - return nullValue -} - -func ModelsProjectThat(matcher pegomock.ArgumentMatcher) models.Project { - pegomock.RegisterMatcher(matcher) - var nullValue models.Project - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_projectcommandcontext.go b/server/legacy/events/mocks/matchers/models_projectcommandcontext.go deleted file mode 100644 index dbde44f15..000000000 --- a/server/legacy/events/mocks/matchers/models_projectcommandcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsProjectCommandContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func ModelsProjectCommandContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_projectresult.go b/server/legacy/events/mocks/matchers/models_projectresult.go deleted file mode 100644 index ccec380ea..000000000 --- a/server/legacy/events/mocks/matchers/models_projectresult.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsProjectResult() command.ProjectResult { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectResult))(nil)).Elem())) - var nullValue command.ProjectResult - return nullValue -} - -func EqModelsProjectResult(value command.ProjectResult) command.ProjectResult { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectResult - return nullValue -} - -func NotEqModelsProjectResult(value command.ProjectResult) command.ProjectResult { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectResult - return nullValue -} - -func ModelsProjectResultThat(matcher pegomock.ArgumentMatcher) command.ProjectResult { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectResult - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_pullrequest.go b/server/legacy/events/mocks/matchers/models_pullrequest.go deleted file mode 100644 index 94e36a1ab..000000000 --- a/server/legacy/events/mocks/matchers/models_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsPullRequest() models.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequest))(nil)).Elem())) - var nullValue models.PullRequest - return nullValue -} - -func EqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func NotEqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func ModelsPullRequestThat(matcher pegomock.ArgumentMatcher) models.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue models.PullRequest - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_pullrequesteventtype.go b/server/legacy/events/mocks/matchers/models_pullrequesteventtype.go deleted file mode 100644 index f4fefd505..000000000 --- a/server/legacy/events/mocks/matchers/models_pullrequesteventtype.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsPullRequestEventType() models.PullRequestEventType { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequestEventType))(nil)).Elem())) - var nullValue models.PullRequestEventType - return nullValue -} - -func EqModelsPullRequestEventType(value models.PullRequestEventType) models.PullRequestEventType { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PullRequestEventType - return nullValue -} - -func NotEqModelsPullRequestEventType(value models.PullRequestEventType) models.PullRequestEventType { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.PullRequestEventType - return nullValue -} - -func ModelsPullRequestEventTypeThat(matcher pegomock.ArgumentMatcher) models.PullRequestEventType { - pegomock.RegisterMatcher(matcher) - var nullValue models.PullRequestEventType - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_repo.go b/server/legacy/events/mocks/matchers/models_repo.go deleted file mode 100644 index b36c3ee7c..000000000 --- a/server/legacy/events/mocks/matchers/models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func NotEqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func ModelsRepoThat(matcher pegomock.ArgumentMatcher) models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_user.go b/server/legacy/events/mocks/matchers/models_user.go deleted file mode 100644 index 8e552bb1c..000000000 --- a/server/legacy/events/mocks/matchers/models_user.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsUser() models.User { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.User))(nil)).Elem())) - var nullValue models.User - return nullValue -} - -func EqModelsUser(value models.User) models.User { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.User - return nullValue -} - -func NotEqModelsUser(value models.User) models.User { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.User - return nullValue -} - -func ModelsUserThat(matcher pegomock.ArgumentMatcher) models.User { - pegomock.RegisterMatcher(matcher) - var nullValue models.User - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_vcshosttype.go b/server/legacy/events/mocks/matchers/models_vcshosttype.go deleted file mode 100644 index 9c27be16b..000000000 --- a/server/legacy/events/mocks/matchers/models_vcshosttype.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsVCSHostType() models.VCSHostType { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.VCSHostType))(nil)).Elem())) - var nullValue models.VCSHostType - return nullValue -} - -func EqModelsVCSHostType(value models.VCSHostType) models.VCSHostType { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.VCSHostType - return nullValue -} - -func NotEqModelsVCSHostType(value models.VCSHostType) models.VCSHostType { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.VCSHostType - return nullValue -} - -func ModelsVCSHostTypeThat(matcher pegomock.ArgumentMatcher) models.VCSHostType { - pegomock.RegisterMatcher(matcher) - var nullValue models.VCSHostType - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/models_vcsstatus.go b/server/legacy/events/mocks/matchers/models_vcsstatus.go deleted file mode 100644 index f3d0a55fa..000000000 --- a/server/legacy/events/mocks/matchers/models_vcsstatus.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsVcsStatus() models.VCSStatus { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.VCSStatus))(nil)).Elem())) - var nullValue models.VCSStatus - return nullValue -} - -func EqModelsVcsStatus(value models.VCSStatus) models.VCSStatus { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.VCSStatus - return nullValue -} - -func NotEqModelsVcsStatus(value models.VCSStatus) models.VCSStatus { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.VCSStatus - return nullValue -} - -func ModelsVcsStatusThat(matcher pegomock.ArgumentMatcher) models.VCSStatus { - pegomock.RegisterMatcher(matcher) - var nullValue models.VCSStatus - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_command_comment.go b/server/legacy/events/mocks/matchers/ptr_to_command_comment.go deleted file mode 100644 index 85321450c..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_command_comment.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyPtrToCommandComment() *command.Comment { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Comment))(nil)).Elem())) - var nullValue *command.Comment - return nullValue -} - -func EqPtrToCommandComment(value *command.Comment) *command.Comment { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Comment - return nullValue -} - -func NotEqPtrToCommandComment(value *command.Comment) *command.Comment { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *command.Comment - return nullValue -} - -func PtrToCommandCommentThat(matcher pegomock.ArgumentMatcher) *command.Comment { - pegomock.RegisterMatcher(matcher) - var nullValue *command.Comment - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_command_context.go b/server/legacy/events/mocks/matchers/ptr_to_command_context.go deleted file mode 100644 index 5ea47f0a1..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_command_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyPtrToCommandContext() *command.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Context))(nil)).Elem())) - var nullValue *command.Context - return nullValue -} - -func EqPtrToCommandContext(value *command.Context) *command.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Context - return nullValue -} - -func NotEqPtrToCommandContext(value *command.Context) *command.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *command.Context - return nullValue -} - -func PtrToCommandContextThat(matcher pegomock.ArgumentMatcher) *command.Context { - pegomock.RegisterMatcher(matcher) - var nullValue *command.Context - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_events_commandcontext.go b/server/legacy/events/mocks/matchers/ptr_to_events_commandcontext.go deleted file mode 100644 index 601d9d828..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_events_commandcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyPtrToEventsCommandContext() *command.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Context))(nil)).Elem())) - var nullValue *command.Context - return nullValue -} - -func EqPtrToEventsCommandContext(value *command.Context) *command.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Context - return nullValue -} - -func NotEqPtrToEventsCommandContext(value *command.Context) *command.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *command.Context - return nullValue -} - -func PtrToEventsCommandContextThat(matcher pegomock.ArgumentMatcher) *command.Context { - pegomock.RegisterMatcher(matcher) - var nullValue *command.Context - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_events_commentcommand.go b/server/legacy/events/mocks/matchers/ptr_to_events_commentcommand.go deleted file mode 100644 index f3d640dea..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_events_commentcommand.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyPtrToEventsCommentCommand() *command.Comment { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Comment))(nil)).Elem())) - var nullValue *command.Comment - return nullValue -} - -func EqPtrToEventsCommentCommand(value *command.Comment) *command.Comment { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Comment - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_events_trylockresponse.go b/server/legacy/events/mocks/matchers/ptr_to_events_trylockresponse.go deleted file mode 100644 index 1d9993d51..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_events_trylockresponse.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - events "github.com/runatlantis/atlantis/server/legacy/events" -) - -func AnyPtrToEventsTryLockResponse() *events.TryLockResponse { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*events.TryLockResponse))(nil)).Elem())) - var nullValue *events.TryLockResponse - return nullValue -} - -func EqPtrToEventsTryLockResponse(value *events.TryLockResponse) *events.TryLockResponse { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *events.TryLockResponse - return nullValue -} - -func NotEqPtrToEventsTryLockResponse(value *events.TryLockResponse) *events.TryLockResponse { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *events.TryLockResponse - return nullValue -} - -func PtrToEventsTryLockResponseThat(matcher pegomock.ArgumentMatcher) *events.TryLockResponse { - pegomock.RegisterMatcher(matcher) - var nullValue *events.TryLockResponse - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_github_issuecommentevent.go b/server/legacy/events/mocks/matchers/ptr_to_github_issuecommentevent.go deleted file mode 100644 index fd79e7822..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_github_issuecommentevent.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - github "github.com/google/go-github/v45/github" -) - -func AnyPtrToGithubIssueCommentEvent() *github.IssueCommentEvent { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*github.IssueCommentEvent))(nil)).Elem())) - var nullValue *github.IssueCommentEvent - return nullValue -} - -func EqPtrToGithubIssueCommentEvent(value *github.IssueCommentEvent) *github.IssueCommentEvent { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *github.IssueCommentEvent - return nullValue -} - -func NotEqPtrToGithubIssueCommentEvent(value *github.IssueCommentEvent) *github.IssueCommentEvent { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *github.IssueCommentEvent - return nullValue -} - -func PtrToGithubIssueCommentEventThat(matcher pegomock.ArgumentMatcher) *github.IssueCommentEvent { - pegomock.RegisterMatcher(matcher) - var nullValue *github.IssueCommentEvent - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_github_pullrequest.go b/server/legacy/events/mocks/matchers/ptr_to_github_pullrequest.go deleted file mode 100644 index 4f93e0cee..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_github_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - github "github.com/google/go-github/v45/github" -) - -func AnyPtrToGithubPullRequest() *github.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*github.PullRequest))(nil)).Elem())) - var nullValue *github.PullRequest - return nullValue -} - -func EqPtrToGithubPullRequest(value *github.PullRequest) *github.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *github.PullRequest - return nullValue -} - -func NotEqPtrToGithubPullRequest(value *github.PullRequest) *github.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *github.PullRequest - return nullValue -} - -func PtrToGithubPullRequestThat(matcher pegomock.ArgumentMatcher) *github.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue *github.PullRequest - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_github_pullrequestevent.go b/server/legacy/events/mocks/matchers/ptr_to_github_pullrequestevent.go deleted file mode 100644 index 57f6ede38..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_github_pullrequestevent.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - github "github.com/google/go-github/v45/github" -) - -func AnyPtrToGithubPullRequestEvent() *github.PullRequestEvent { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*github.PullRequestEvent))(nil)).Elem())) - var nullValue *github.PullRequestEvent - return nullValue -} - -func EqPtrToGithubPullRequestEvent(value *github.PullRequestEvent) *github.PullRequestEvent { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *github.PullRequestEvent - return nullValue -} - -func NotEqPtrToGithubPullRequestEvent(value *github.PullRequestEvent) *github.PullRequestEvent { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *github.PullRequestEvent - return nullValue -} - -func PtrToGithubPullRequestEventThat(matcher pegomock.ArgumentMatcher) *github.PullRequestEvent { - pegomock.RegisterMatcher(matcher) - var nullValue *github.PullRequestEvent - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_github_repository.go b/server/legacy/events/mocks/matchers/ptr_to_github_repository.go deleted file mode 100644 index e3d9e3234..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_github_repository.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - github "github.com/google/go-github/v45/github" -) - -func AnyPtrToGithubRepository() *github.Repository { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*github.Repository))(nil)).Elem())) - var nullValue *github.Repository - return nullValue -} - -func EqPtrToGithubRepository(value *github.Repository) *github.Repository { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *github.Repository - return nullValue -} - -func NotEqPtrToGithubRepository(value *github.Repository) *github.Repository { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *github.Repository - return nullValue -} - -func PtrToGithubRepositoryThat(matcher pegomock.ArgumentMatcher) *github.Repository { - pegomock.RegisterMatcher(matcher) - var nullValue *github.Repository - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_logging_simplelogger.go b/server/legacy/events/mocks/matchers/ptr_to_logging_simplelogger.go deleted file mode 100644 index e7c8b942f..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_logging_simplelogger.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyPtrToLoggingSimpleLogger() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqPtrToLoggingSimpleLogger(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_models_commandcontext.go b/server/legacy/events/mocks/matchers/ptr_to_models_commandcontext.go deleted file mode 100644 index e95681c7a..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_models_commandcontext.go +++ /dev/null @@ -1,21 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyPtrToModelsCommandContext() *command.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*command.Context))(nil)).Elem())) - var nullValue *command.Context - return nullValue -} - -func EqPtrToModelsCommandContext(value *command.Context) *command.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *command.Context - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_models_projectlock.go b/server/legacy/events/mocks/matchers/ptr_to_models_projectlock.go deleted file mode 100644 index ec87b4970..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_models_projectlock.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyPtrToModelsProjectLock() *models.ProjectLock { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*models.ProjectLock))(nil)).Elem())) - var nullValue *models.ProjectLock - return nullValue -} - -func EqPtrToModelsProjectLock(value *models.ProjectLock) *models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *models.ProjectLock - return nullValue -} - -func NotEqPtrToModelsProjectLock(value *models.ProjectLock) *models.ProjectLock { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *models.ProjectLock - return nullValue -} - -func PtrToModelsProjectLockThat(matcher pegomock.ArgumentMatcher) *models.ProjectLock { - pegomock.RegisterMatcher(matcher) - var nullValue *models.ProjectLock - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_models_pullrequest.go b/server/legacy/events/mocks/matchers/ptr_to_models_pullrequest.go deleted file mode 100644 index 732cc76ad..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_models_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyPtrToModelsPullRequest() *models.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*models.PullRequest))(nil)).Elem())) - var nullValue *models.PullRequest - return nullValue -} - -func EqPtrToModelsPullRequest(value *models.PullRequest) *models.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *models.PullRequest - return nullValue -} - -func NotEqPtrToModelsPullRequest(value *models.PullRequest) *models.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *models.PullRequest - return nullValue -} - -func PtrToModelsPullRequestThat(matcher pegomock.ArgumentMatcher) *models.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue *models.PullRequest - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/ptr_to_models_repo.go b/server/legacy/events/mocks/matchers/ptr_to_models_repo.go deleted file mode 100644 index 35e10bff0..000000000 --- a/server/legacy/events/mocks/matchers/ptr_to_models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyPtrToModelsRepo() *models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*models.Repo))(nil)).Elem())) - var nullValue *models.Repo - return nullValue -} - -func EqPtrToModelsRepo(value *models.Repo) *models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *models.Repo - return nullValue -} - -func NotEqPtrToModelsRepo(value *models.Repo) *models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *models.Repo - return nullValue -} - -func PtrToModelsRepoThat(matcher pegomock.ArgumentMatcher) *models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue *models.Repo - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/slice_of_byte.go b/server/legacy/events/mocks/matchers/slice_of_byte.go deleted file mode 100644 index 7ff2e45ca..000000000 --- a/server/legacy/events/mocks/matchers/slice_of_byte.go +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" -) - -func AnySliceOfByte() []byte { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]byte))(nil)).Elem())) - var nullValue []byte - return nullValue -} - -func EqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func NotEqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func SliceOfByteThat(matcher pegomock.ArgumentMatcher) []byte { - pegomock.RegisterMatcher(matcher) - var nullValue []byte - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/slice_of_events_pendingplan.go b/server/legacy/events/mocks/matchers/slice_of_events_pendingplan.go deleted file mode 100644 index b25a540b4..000000000 --- a/server/legacy/events/mocks/matchers/slice_of_events_pendingplan.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - events "github.com/runatlantis/atlantis/server/legacy/events" -) - -func AnySliceOfEventsPendingPlan() []events.PendingPlan { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]events.PendingPlan))(nil)).Elem())) - var nullValue []events.PendingPlan - return nullValue -} - -func EqSliceOfEventsPendingPlan(value []events.PendingPlan) []events.PendingPlan { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []events.PendingPlan - return nullValue -} - -func NotEqSliceOfEventsPendingPlan(value []events.PendingPlan) []events.PendingPlan { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []events.PendingPlan - return nullValue -} - -func SliceOfEventsPendingPlanThat(matcher pegomock.ArgumentMatcher) []events.PendingPlan { - pegomock.RegisterMatcher(matcher) - var nullValue []events.PendingPlan - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/slice_of_models_projectcommandcontext.go b/server/legacy/events/mocks/matchers/slice_of_models_projectcommandcontext.go deleted file mode 100644 index 91ecd9f93..000000000 --- a/server/legacy/events/mocks/matchers/slice_of_models_projectcommandcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnySliceOfModelsProjectCommandContext() []command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]command.ProjectContext))(nil)).Elem())) - var nullValue []command.ProjectContext - return nullValue -} - -func EqSliceOfModelsProjectCommandContext(value []command.ProjectContext) []command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []command.ProjectContext - return nullValue -} - -func NotEqSliceOfModelsProjectCommandContext(value []command.ProjectContext) []command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []command.ProjectContext - return nullValue -} - -func SliceOfModelsProjectCommandContextThat(matcher pegomock.ArgumentMatcher) []command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue []command.ProjectContext - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/slice_of_string.go b/server/legacy/events/mocks/matchers/slice_of_string.go deleted file mode 100644 index 8bfc2792f..000000000 --- a/server/legacy/events/mocks/matchers/slice_of_string.go +++ /dev/null @@ -1,32 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" -) - -func AnySliceOfString() []string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) - var nullValue []string - return nullValue -} - -func EqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func NotEqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { - pegomock.RegisterMatcher(matcher) - var nullValue []string - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/time_time.go b/server/legacy/events/mocks/matchers/time_time.go deleted file mode 100644 index 461e1dd6d..000000000 --- a/server/legacy/events/mocks/matchers/time_time.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - time "time" -) - -func AnyTimeTime() time.Time { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(time.Time))(nil)).Elem())) - var nullValue time.Time - return nullValue -} - -func EqTimeTime(value time.Time) time.Time { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue time.Time - return nullValue -} - -func NotEqTimeTime(value time.Time) time.Time { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue time.Time - return nullValue -} - -func TimeTimeThat(matcher pegomock.ArgumentMatcher) time.Time { - pegomock.RegisterMatcher(matcher) - var nullValue time.Time - return nullValue -} diff --git a/server/legacy/events/mocks/matchers/webhooks_applyresult.go b/server/legacy/events/mocks/matchers/webhooks_applyresult.go deleted file mode 100644 index ad2b8776d..000000000 --- a/server/legacy/events/mocks/matchers/webhooks_applyresult.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - webhooks "github.com/runatlantis/atlantis/server/legacy/events/webhooks" -) - -func AnyWebhooksApplyResult() webhooks.ApplyResult { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(webhooks.ApplyResult))(nil)).Elem())) - var nullValue webhooks.ApplyResult - return nullValue -} - -func EqWebhooksApplyResult(value webhooks.ApplyResult) webhooks.ApplyResult { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue webhooks.ApplyResult - return nullValue -} - -func NotEqWebhooksApplyResult(value webhooks.ApplyResult) webhooks.ApplyResult { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue webhooks.ApplyResult - return nullValue -} - -func WebhooksApplyResultThat(matcher pegomock.ArgumentMatcher) webhooks.ApplyResult { - pegomock.RegisterMatcher(matcher) - var nullValue webhooks.ApplyResult - return nullValue -} diff --git a/server/legacy/events/mocks/mock_apply_command_locker.go b/server/legacy/events/mocks/mock_apply_command_locker.go deleted file mode 100644 index 92a1daff0..000000000 --- a/server/legacy/events/mocks/mock_apply_command_locker.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: ApplyCommandLocker) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type MockApplyCommandLocker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockApplyCommandLocker(options ...pegomock.Option) *MockApplyCommandLocker { - mock := &MockApplyCommandLocker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockApplyCommandLocker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockApplyCommandLocker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockApplyCommandLocker) IsDisabled(ctx *command.Context) bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockApplyCommandLocker().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("IsDisabled", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockApplyCommandLocker) VerifyWasCalledOnce() *VerifierMockApplyCommandLocker { - return &VerifierMockApplyCommandLocker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockApplyCommandLocker) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockApplyCommandLocker { - return &VerifierMockApplyCommandLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockApplyCommandLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockApplyCommandLocker { - return &VerifierMockApplyCommandLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockApplyCommandLocker) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockApplyCommandLocker { - return &VerifierMockApplyCommandLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockApplyCommandLocker struct { - mock *MockApplyCommandLocker - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockApplyCommandLocker) IsDisabled(ctx *command.Context) *MockApplyCommandLocker_IsDisabled_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "IsDisabled", params, verifier.timeout) - return &MockApplyCommandLocker_IsDisabled_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockApplyCommandLocker_IsDisabled_OngoingVerification struct { - mock *MockApplyCommandLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyCommandLocker_IsDisabled_OngoingVerification) GetCapturedArguments() *command.Context { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockApplyCommandLocker_IsDisabled_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - 
for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_apply_handler.go b/server/legacy/events/mocks/mock_apply_handler.go deleted file mode 100644 index 44c64a753..000000000 --- a/server/legacy/events/mocks/mock_apply_handler.go +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: ApplyRequirement) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type MockApplyRequirement struct { - fail func(message string, callerSkip ...int) -} - -func NewMockApplyRequirement(options ...pegomock.Option) *MockApplyRequirement { - mock := &MockApplyRequirement{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockApplyRequirement) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockApplyRequirement) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockApplyRequirement) ValidateProject(_param0 string, _param1 command.ProjectContext) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockApplyRequirement().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("ValidateProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockApplyRequirement) VerifyWasCalledOnce() *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockApplyRequirement) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockApplyRequirement) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockApplyRequirement) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockApplyRequirement { - return &VerifierMockApplyRequirement{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockApplyRequirement struct { - mock *MockApplyRequirement - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockApplyRequirement) ValidateProject(_param0 string, _param1 command.ProjectContext) *MockApplyRequirement_ValidateProject_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ValidateProject", params, verifier.timeout) - return &MockApplyRequirement_ValidateProject_OngoingVerification{mock: verifier.mock, methodInvocations: 
methodInvocations} -} - -type MockApplyRequirement_ValidateProject_OngoingVerification struct { - mock *MockApplyRequirement - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockApplyRequirement_ValidateProject_OngoingVerification) GetCapturedArguments() (string, command.ProjectContext) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockApplyRequirement_ValidateProject_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_command_runner.go b/server/legacy/events/mocks/mock_command_runner.go deleted file mode 100644 index aa464ce8f..000000000 --- a/server/legacy/events/mocks/mock_command_runner.go +++ /dev/null @@ -1,183 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: CommandRunner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockCommandRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockCommandRunner(options ...pegomock.Option) *MockCommandRunner { - mock := &MockCommandRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockCommandRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockCommandRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockCommandRunner) RunAutoplanCommand(_param0 context.Context, _param1 models.Repo, _param2 models.Repo, _param3 models.PullRequest, _param4 models.User, _param5 time.Time) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockCommandRunner().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} - pegomock.GetGenericMockFrom(mock).Invoke("RunAutoplanCommand", params, []reflect.Type{}) -} - -func (mock *MockCommandRunner) RunCommentCommand(_param0 context.Context, _param1 models.Repo, _param2 *models.Repo, _param3 *models.PullRequest, _param4 models.User, _param5 int, _param6 *command.Comment, _param7 time.Time) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockCommandRunner().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5, _param6, _param7} - pegomock.GetGenericMockFrom(mock).Invoke("RunCommentCommand", params, []reflect.Type{}) -} - -func (mock *MockCommandRunner) VerifyWasCalledOnce() *VerifierMockCommandRunner { - return &VerifierMockCommandRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockCommandRunner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockCommandRunner { - return &VerifierMockCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockCommandRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockCommandRunner { - return &VerifierMockCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockCommandRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockCommandRunner { - return &VerifierMockCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockCommandRunner struct { - mock *MockCommandRunner - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockCommandRunner) RunAutoplanCommand(_param0 context.Context, _param1 models.Repo, _param2 models.Repo, _param3 models.PullRequest, _param4 models.User, _param5 time.Time) *MockCommandRunner_RunAutoplanCommand_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunAutoplanCommand", params, verifier.timeout) - return &MockCommandRunner_RunAutoplanCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommandRunner_RunAutoplanCommand_OngoingVerification struct { - mock *MockCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommandRunner_RunAutoplanCommand_OngoingVerification) GetCapturedArguments() (context.Context, models.Repo, models.Repo, models.PullRequest, models.User, time.Time) { - _param0, _param1, _param2, _param3, _param4, _param5 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1] -} - -func (c *MockCommandRunner_RunAutoplanCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []models.Repo, _param2 []models.Repo, _param3 []models.PullRequest, _param4 []models.User, _param5 []time.Time) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - _param2 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.Repo) - } - _param3 = make([]models.PullRequest, len(c.methodInvocations)) - for 
u, param := range params[3] { - _param3[u] = param.(models.PullRequest) - } - _param4 = make([]models.User, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(models.User) - } - _param5 = make([]time.Time, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(time.Time) - } - } - return -} - -func (verifier *VerifierMockCommandRunner) RunCommentCommand(_param0 context.Context, _param1 models.Repo, _param2 *models.Repo, _param3 *models.PullRequest, _param4 models.User, _param5 int, _param6 *command.Comment, _param7 time.Time) *MockCommandRunner_RunCommentCommand_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5, _param6, _param7} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommentCommand", params, verifier.timeout) - return &MockCommandRunner_RunCommentCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommandRunner_RunCommentCommand_OngoingVerification struct { - mock *MockCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommandRunner_RunCommentCommand_OngoingVerification) GetCapturedArguments() (context.Context, models.Repo, *models.Repo, *models.PullRequest, models.User, int, *command.Comment, time.Time) { - _param0, _param1, _param2, _param3, _param4, _param5, _param6, _param7 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1], _param6[len(_param6)-1], _param7[len(_param7)-1] -} - -func (c *MockCommandRunner_RunCommentCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []models.Repo, _param2 []*models.Repo, _param3 []*models.PullRequest, _param4 []models.User, _param5 []int, _param6 []*command.Comment, _param7 []time.Time) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - _param2 = make([]*models.Repo, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(*models.Repo) - } - _param3 = make([]*models.PullRequest, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(*models.PullRequest) - } - _param4 = make([]models.User, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(models.User) - } - _param5 = make([]int, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(int) - } - _param6 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[6] { - _param6[u] = param.(*command.Comment) - } - _param7 = make([]time.Time, len(c.methodInvocations)) - for u, param := range params[7] { - _param7[u] = param.(time.Time) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_comment_building.go b/server/legacy/events/mocks/mock_comment_building.go deleted file mode 100644 index f3ed8beba..000000000 --- a/server/legacy/events/mocks/mock_comment_building.go +++ /dev/null @@ -1,217 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: CommentBuilder) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockCommentBuilder struct { - fail func(message string, callerSkip ...int) -} - -func NewMockCommentBuilder(options ...pegomock.Option) *MockCommentBuilder { - mock := &MockCommentBuilder{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockCommentBuilder) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockCommentBuilder) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockCommentBuilder) BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) string { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockCommentBuilder().") - } - params := []pegomock.Param{repoRelDir, workspace, project, commentArgs} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildPlanComment", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockCommentBuilder) BuildApplyComment(repoRelDir string, workspace string, project string) string { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockCommentBuilder().") - } - params := []pegomock.Param{repoRelDir, workspace, project} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApplyComment", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockCommentBuilder) BuildVersionComment(repoRelDir string, workspace string, project string) string { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockCommentBuilder().") - } - params := []pegomock.Param{repoRelDir, workspace, project} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildVersionComment", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockCommentBuilder) VerifyWasCalledOnce() *VerifierMockCommentBuilder { - return &VerifierMockCommentBuilder{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockCommentBuilder) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockCommentBuilder { - return &VerifierMockCommentBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockCommentBuilder) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockCommentBuilder { - return &VerifierMockCommentBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockCommentBuilder) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockCommentBuilder { - return &VerifierMockCommentBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockCommentBuilder struct { - mock *MockCommentBuilder - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockCommentBuilder) BuildPlanComment(repoRelDir string, workspace string, project string, commentArgs []string) *MockCommentBuilder_BuildPlanComment_OngoingVerification { - params := []pegomock.Param{repoRelDir, workspace, project, commentArgs} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildPlanComment", params, verifier.timeout) - return &MockCommentBuilder_BuildPlanComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommentBuilder_BuildPlanComment_OngoingVerification struct { - mock *MockCommentBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommentBuilder_BuildPlanComment_OngoingVerification) GetCapturedArguments() (string, string, string, []string) { - repoRelDir, workspace, project, commentArgs := c.GetAllCapturedArguments() - return repoRelDir[len(repoRelDir)-1], workspace[len(workspace)-1], project[len(project)-1], commentArgs[len(commentArgs)-1] -} - -func (c *MockCommentBuilder_BuildPlanComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string, _param3 [][]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([][]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.([]string) - } - } - return -} - -func (verifier *VerifierMockCommentBuilder) 
BuildApplyComment(repoRelDir string, workspace string, project string) *MockCommentBuilder_BuildApplyComment_OngoingVerification { - params := []pegomock.Param{repoRelDir, workspace, project} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApplyComment", params, verifier.timeout) - return &MockCommentBuilder_BuildApplyComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommentBuilder_BuildApplyComment_OngoingVerification struct { - mock *MockCommentBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommentBuilder_BuildApplyComment_OngoingVerification) GetCapturedArguments() (string, string, string) { - repoRelDir, workspace, project := c.GetAllCapturedArguments() - return repoRelDir[len(repoRelDir)-1], workspace[len(workspace)-1], project[len(project)-1] -} - -func (c *MockCommentBuilder_BuildApplyComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockCommentBuilder) BuildVersionComment(repoRelDir string, workspace string, project string) *MockCommentBuilder_BuildVersionComment_OngoingVerification { - params := []pegomock.Param{repoRelDir, workspace, project} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildVersionComment", params, verifier.timeout) - return &MockCommentBuilder_BuildVersionComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommentBuilder_BuildVersionComment_OngoingVerification struct { - mock *MockCommentBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommentBuilder_BuildVersionComment_OngoingVerification) GetCapturedArguments() (string, string, string) { - repoRelDir, workspace, project := c.GetAllCapturedArguments() - return repoRelDir[len(repoRelDir)-1], workspace[len(workspace)-1], project[len(project)-1] -} - -func (c *MockCommentBuilder_BuildVersionComment_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_comment_command_runner.go b/server/legacy/events/mocks/mock_comment_command_runner.go deleted file mode 100644 index 1e790a19c..000000000 --- a/server/legacy/events/mocks/mock_comment_command_runner.go +++ /dev/null @@ -1,102 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: CommentCommandRunner) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockCommentCommandRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockCommentCommandRunner(options ...pegomock.Option) *MockCommentCommandRunner { - mock := &MockCommentCommandRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockCommentCommandRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockCommentCommandRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockCommentCommandRunner) Run(_param0 *command.Context, _param1 *command.Comment) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockCommentCommandRunner().") - } - params := []pegomock.Param{_param0, _param1} - pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{}) -} - -func (mock *MockCommentCommandRunner) VerifyWasCalledOnce() *VerifierMockCommentCommandRunner { - return &VerifierMockCommentCommandRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockCommentCommandRunner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockCommentCommandRunner { - return &VerifierMockCommentCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockCommentCommandRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockCommentCommandRunner { - return &VerifierMockCommentCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockCommentCommandRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockCommentCommandRunner { - return &VerifierMockCommentCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockCommentCommandRunner struct { - mock *MockCommentCommandRunner - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockCommentCommandRunner) Run(_param0 *command.Context, _param1 *command.Comment) *MockCommentCommandRunner_Run_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params, verifier.timeout) - return &MockCommentCommandRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommentCommandRunner_Run_OngoingVerification struct { - mock *MockCommentCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommentCommandRunner_Run_OngoingVerification) GetCapturedArguments() (*command.Context, *command.Comment) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockCommentCommandRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*command.Comment) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if 
len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - _param1 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Comment) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_comment_parsing.go b/server/legacy/events/mocks/mock_comment_parsing.go deleted file mode 100644 index dd52c8072..000000000 --- a/server/legacy/events/mocks/mock_comment_parsing.go +++ /dev/null @@ -1,111 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: CommentParsing) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - events "github.com/runatlantis/atlantis/server/legacy/events" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockCommentParsing struct { - fail func(message string, callerSkip ...int) -} - -func NewMockCommentParsing(options ...pegomock.Option) *MockCommentParsing { - mock := &MockCommentParsing{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockCommentParsing) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockCommentParsing) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockCommentParsing) Parse(comment string, vcsHost models.VCSHostType) events.CommentParseResult { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockCommentParsing().") - } - params := []pegomock.Param{comment, vcsHost} - result := pegomock.GetGenericMockFrom(mock).Invoke("Parse", params, []reflect.Type{reflect.TypeOf((*events.CommentParseResult)(nil)).Elem()}) - var ret0 events.CommentParseResult - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(events.CommentParseResult) - } - } - return ret0 -} - -func (mock *MockCommentParsing) VerifyWasCalledOnce() *VerifierMockCommentParsing { - return &VerifierMockCommentParsing{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockCommentParsing) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockCommentParsing { - return &VerifierMockCommentParsing{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockCommentParsing) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockCommentParsing { - return &VerifierMockCommentParsing{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockCommentParsing) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockCommentParsing { - return &VerifierMockCommentParsing{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockCommentParsing struct { - mock *MockCommentParsing - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockCommentParsing) Parse(comment string, vcsHost models.VCSHostType) *MockCommentParsing_Parse_OngoingVerification { - params := []pegomock.Param{comment, vcsHost} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Parse", params, 
verifier.timeout) - return &MockCommentParsing_Parse_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockCommentParsing_Parse_OngoingVerification struct { - mock *MockCommentParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockCommentParsing_Parse_OngoingVerification) GetCapturedArguments() (string, models.VCSHostType) { - comment, vcsHost := c.GetAllCapturedArguments() - return comment[len(comment)-1], vcsHost[len(vcsHost)-1] -} - -func (c *MockCommentParsing_Parse_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []models.VCSHostType) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]models.VCSHostType, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.VCSHostType) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_delete_lock_command.go b/server/legacy/events/mocks/mock_delete_lock_command.go deleted file mode 100644 index e9923e540..000000000 --- a/server/legacy/events/mocks/mock_delete_lock_command.go +++ /dev/null @@ -1,160 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: DeleteLockCommand) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockDeleteLockCommand struct { - fail func(message string, callerSkip ...int) -} - -func NewMockDeleteLockCommand(options ...pegomock.Option) *MockDeleteLockCommand { - mock := &MockDeleteLockCommand{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockDeleteLockCommand) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockDeleteLockCommand) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().") - } - params := []pegomock.Param{id} - result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *models.ProjectLock - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*models.ProjectLock) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockDeleteLockCommand().") - } - params := []pegomock.Param{repoFullName, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLocksByPull", params, []reflect.Type{reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 int - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(int) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockDeleteLockCommand) VerifyWasCalledOnce() *VerifierMockDeleteLockCommand { - return &VerifierMockDeleteLockCommand{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockDeleteLockCommand) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockDeleteLockCommand { - return &VerifierMockDeleteLockCommand{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockDeleteLockCommand) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockDeleteLockCommand { - return &VerifierMockDeleteLockCommand{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockDeleteLockCommand) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockDeleteLockCommand { - return &VerifierMockDeleteLockCommand{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockDeleteLockCommand struct { - mock *MockDeleteLockCommand - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockDeleteLockCommand) DeleteLock(id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification { - params := []pegomock.Param{id} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLock", params, verifier.timeout) - return &MockDeleteLockCommand_DeleteLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockDeleteLockCommand_DeleteLock_OngoingVerification struct { - mock *MockDeleteLockCommand - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() string { - id := c.GetAllCapturedArguments() - return id[len(id)-1] -} - -func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification { - params := []pegomock.Param{repoFullName, pullNum} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLocksByPull", params, verifier.timeout) - return &MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification struct { - mock *MockDeleteLockCommand - methodInvocations 
[]pegomock.MethodInvocation -} - -func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (string, int) { - repoFullName, pullNum := c.GetAllCapturedArguments() - return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1] -} - -func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_event_parsing.go b/server/legacy/events/mocks/mock_event_parsing.go deleted file mode 100644 index bfa8fc072..000000000 --- a/server/legacy/events/mocks/mock_event_parsing.go +++ /dev/null @@ -1,931 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: EventParsing) - -package mocks - -import ( - "reflect" - "time" - - github "github.com/google/go-github/v45/github" - azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops" - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - go_gitlab "github.com/xanzy/go-gitlab" -) - -type MockEventParsing struct { - fail func(message string, callerSkip ...int) -} - -func NewMockEventParsing(options ...pegomock.Option) *MockEventParsing { - mock := &MockEventParsing{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockEventParsing) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockEventParsing) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockEventParsing) ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) (models.Repo, models.User, int, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{comment} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubIssueCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.Repo - var ret1 models.User - var ret2 int - var ret3 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.Repo) - } - if result[1] != nil { - ret1 = result[1].(models.User) - } - if result[2] != nil { - ret2 = result[2].(int) - } - if result[3] != nil { - ret3 = result[3].(error) - } - } - return ret0, ret1, ret2, ret3 -} - -func (mock *MockEventParsing) ParseGithubPull(ghPull *github.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{ghPull} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.Repo - var ret2 models.Repo - var ret3 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(error) - } - } - return ret0, ret1, ret2, ret3 -} - -func (mock *MockEventParsing) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{pullEvent} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPullEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.PullRequestEventType - var ret2 models.Repo - var ret3 models.Repo - var ret4 models.User - var ret5 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.PullRequestEventType) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.Repo) - } - if result[4] != nil { - ret4 = result[4].(models.User) - } - if result[5] != nil { - ret5 = result[5].(error) - } - } - return ret0, ret1, ret2, ret3, ret4, ret5 -} - -func (mock *MockEventParsing) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{ghRepo} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubRepo", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.Repo - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.Repo) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockEventParsing) ParseGitlabMergeRequestEvent(event go_gitlab.MergeEvent) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{event} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGitlabMergeRequestEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.PullRequestEventType - var ret2 models.Repo - var ret3 models.Repo - var ret4 models.User - var ret5 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.PullRequestEventType) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.Repo) - } - if result[4] != nil { - ret4 = result[4].(models.User) - } - if result[5] != nil { - ret5 = result[5].(error) - } - } - return ret0, ret1, ret2, ret3, ret4, ret5 -} - -func (mock *MockEventParsing) ParseGitlabMergeRequestCommentEvent(event go_gitlab.MergeCommentEvent) (models.Repo, models.Repo, models.User, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{event} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGitlabMergeRequestCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.Repo - var ret1 models.Repo - var ret2 models.User - var ret3 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.Repo) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.User) - } - if result[3] != nil { - ret3 = result[3].(error) - } - } - return ret0, ret1, ret2, ret3 -} - -func (mock *MockEventParsing) ParseGitlabMergeRequest(mr *go_gitlab.MergeRequest, baseRepo models.Repo) models.PullRequest { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{mr, baseRepo} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGitlabMergeRequest", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem()}) - var ret0 models.PullRequest - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - } - return ret0 -} - -func (mock *MockEventParsing) ParseBitbucketCloudPullEvent(body []byte) (models.PullRequest, models.Repo, models.Repo, models.User, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{body} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseBitbucketCloudPullEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.Repo - var ret2 models.Repo - var ret3 models.User - var ret4 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.User) - } - if result[4] != nil { - ret4 = result[4].(error) - } - } - return ret0, ret1, ret2, ret3, ret4 -} - -func (mock *MockEventParsing) ParseBitbucketCloudPullCommentEvent(body []byte) (models.PullRequest, models.Repo, models.Repo, models.User, string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{body} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseBitbucketCloudPullCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.Repo - var ret2 models.Repo - var ret3 models.User - var ret4 string - var ret5 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.User) - } - if result[4] != nil { - ret4 = result[4].(string) - } - if result[5] != nil { - ret5 = result[5].(error) - } - } - return ret0, ret1, ret2, ret3, ret4, ret5 -} - -func (mock *MockEventParsing) GetBitbucketCloudPullEventType(eventTypeHeader string) models.PullRequestEventType { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{eventTypeHeader} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetBitbucketCloudPullEventType", params, []reflect.Type{reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem()}) - var ret0 models.PullRequestEventType - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequestEventType) - } - } - return ret0 -} - -func (mock *MockEventParsing) ParseBitbucketServerPullEvent(body []byte) (models.PullRequest, models.Repo, models.Repo, models.User, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{body} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseBitbucketServerPullEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.Repo - var ret2 models.Repo - var ret3 models.User - var ret4 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.User) - } - if result[4] != nil { - ret4 = result[4].(error) - } - } - return ret0, ret1, ret2, ret3, ret4 -} - -func (mock *MockEventParsing) ParseBitbucketServerPullCommentEvent(body []byte) (models.PullRequest, models.Repo, models.Repo, models.User, string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{body} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseBitbucketServerPullCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.Repo - var ret2 models.Repo - var ret3 models.User - var ret4 string - var ret5 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.User) - } - if result[4] != nil { - ret4 = result[4].(string) - } - if result[5] != nil { - ret5 = result[5].(error) - } - } - return ret0, ret1, ret2, ret3, ret4, ret5 -} - -func (mock *MockEventParsing) GetBitbucketServerPullEventType(eventTypeHeader string) models.PullRequestEventType { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{eventTypeHeader} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetBitbucketServerPullEventType", params, []reflect.Type{reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem()}) - var ret0 models.PullRequestEventType - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequestEventType) - } - } - return ret0 -} - -func (mock *MockEventParsing) ParseAzureDevopsPull(adPull *azuredevops.GitPullRequest) (models.PullRequest, models.Repo, models.Repo, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{adPull} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseAzureDevopsPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.Repo - var ret2 models.Repo - var ret3 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.Repo) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(error) - } - } - return ret0, ret1, ret2, ret3 -} - -func (mock *MockEventParsing) ParseAzureDevopsPullEvent(pullEvent azuredevops.Event) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{pullEvent} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseAzureDevopsPullEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.PullRequest - var ret1 models.PullRequestEventType - var ret2 models.Repo - var ret3 models.Repo - var ret4 models.User - var ret5 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(models.PullRequestEventType) - } - if result[2] != nil { - ret2 = result[2].(models.Repo) - } - if result[3] != nil { - ret3 = result[3].(models.Repo) - } - if result[4] != nil { - ret4 = result[4].(models.User) - } - if result[5] != nil { - ret5 = result[5].(error) - } - } - return ret0, ret1, ret2, ret3, ret4, ret5 -} - -func (mock *MockEventParsing) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (models.Repo, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") - } - params := []pegomock.Param{adRepo} - result := pegomock.GetGenericMockFrom(mock).Invoke("ParseAzureDevopsRepo", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.Repo - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.Repo) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockEventParsing) VerifyWasCalledOnce() *VerifierMockEventParsing { - return &VerifierMockEventParsing{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockEventParsing) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockEventParsing { - return &VerifierMockEventParsing{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockEventParsing) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockEventParsing { - return &VerifierMockEventParsing{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockEventParsing) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockEventParsing { - return &VerifierMockEventParsing{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockEventParsing struct { - mock *MockEventParsing - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockEventParsing) ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification { - params := []pegomock.Param{comment} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubIssueCommentEvent", params, verifier.timeout) - return &MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification) GetCapturedArguments() *github.IssueCommentEvent { - comment := c.GetAllCapturedArguments() - return comment[len(comment)-1] -} - -func (c *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.IssueCommentEvent) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*github.IssueCommentEvent, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*github.IssueCommentEvent) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseGithubPull(ghPull *github.PullRequest) *MockEventParsing_ParseGithubPull_OngoingVerification { - params := []pegomock.Param{ghPull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubPull", params, verifier.timeout) - return &MockEventParsing_ParseGithubPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type 
MockEventParsing_ParseGithubPull_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGithubPull_OngoingVerification) GetCapturedArguments() *github.PullRequest { - ghPull := c.GetAllCapturedArguments() - return ghPull[len(ghPull)-1] -} - -func (c *MockEventParsing_ParseGithubPull_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*github.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*github.PullRequest) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) *MockEventParsing_ParseGithubPullEvent_OngoingVerification { - params := []pegomock.Param{pullEvent} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubPullEvent", params, verifier.timeout) - return &MockEventParsing_ParseGithubPullEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseGithubPullEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGithubPullEvent_OngoingVerification) GetCapturedArguments() *github.PullRequestEvent { - pullEvent := c.GetAllCapturedArguments() - return pullEvent[len(pullEvent)-1] -} - -func (c *MockEventParsing_ParseGithubPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.PullRequestEvent) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*github.PullRequestEvent, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*github.PullRequestEvent) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseGithubRepo(ghRepo *github.Repository) *MockEventParsing_ParseGithubRepo_OngoingVerification { - params := []pegomock.Param{ghRepo} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubRepo", params, verifier.timeout) - return &MockEventParsing_ParseGithubRepo_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseGithubRepo_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGithubRepo_OngoingVerification) GetCapturedArguments() *github.Repository { - ghRepo := c.GetAllCapturedArguments() - return ghRepo[len(ghRepo)-1] -} - -func (c *MockEventParsing_ParseGithubRepo_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.Repository) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*github.Repository, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*github.Repository) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseGitlabMergeRequestEvent(event go_gitlab.MergeEvent) *MockEventParsing_ParseGitlabMergeRequestEvent_OngoingVerification { - params := []pegomock.Param{event} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, 
verifier.invocationCountMatcher, "ParseGitlabMergeRequestEvent", params, verifier.timeout) - return &MockEventParsing_ParseGitlabMergeRequestEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseGitlabMergeRequestEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGitlabMergeRequestEvent_OngoingVerification) GetCapturedArguments() go_gitlab.MergeEvent { - event := c.GetAllCapturedArguments() - return event[len(event)-1] -} - -func (c *MockEventParsing_ParseGitlabMergeRequestEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []go_gitlab.MergeEvent) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]go_gitlab.MergeEvent, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(go_gitlab.MergeEvent) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseGitlabMergeRequestCommentEvent(event go_gitlab.MergeCommentEvent) *MockEventParsing_ParseGitlabMergeRequestCommentEvent_OngoingVerification { - params := []pegomock.Param{event} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGitlabMergeRequestCommentEvent", params, verifier.timeout) - return &MockEventParsing_ParseGitlabMergeRequestCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseGitlabMergeRequestCommentEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGitlabMergeRequestCommentEvent_OngoingVerification) GetCapturedArguments() go_gitlab.MergeCommentEvent { - event := c.GetAllCapturedArguments() - return event[len(event)-1] -} - -func (c *MockEventParsing_ParseGitlabMergeRequestCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []go_gitlab.MergeCommentEvent) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]go_gitlab.MergeCommentEvent, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(go_gitlab.MergeCommentEvent) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseGitlabMergeRequest(mr *go_gitlab.MergeRequest, baseRepo models.Repo) *MockEventParsing_ParseGitlabMergeRequest_OngoingVerification { - params := []pegomock.Param{mr, baseRepo} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGitlabMergeRequest", params, verifier.timeout) - return &MockEventParsing_ParseGitlabMergeRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseGitlabMergeRequest_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseGitlabMergeRequest_OngoingVerification) GetCapturedArguments() (*go_gitlab.MergeRequest, models.Repo) { - mr, baseRepo := c.GetAllCapturedArguments() - return mr[len(mr)-1], baseRepo[len(baseRepo)-1] -} - -func (c *MockEventParsing_ParseGitlabMergeRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []*go_gitlab.MergeRequest, _param1 []models.Repo) { - params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*go_gitlab.MergeRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*go_gitlab.MergeRequest) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseBitbucketCloudPullEvent(body []byte) *MockEventParsing_ParseBitbucketCloudPullEvent_OngoingVerification { - params := []pegomock.Param{body} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseBitbucketCloudPullEvent", params, verifier.timeout) - return &MockEventParsing_ParseBitbucketCloudPullEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseBitbucketCloudPullEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseBitbucketCloudPullEvent_OngoingVerification) GetCapturedArguments() []byte { - body := c.GetAllCapturedArguments() - return body[len(body)-1] -} - -func (c *MockEventParsing_ParseBitbucketCloudPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 [][]byte) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.([]byte) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseBitbucketCloudPullCommentEvent(body []byte) *MockEventParsing_ParseBitbucketCloudPullCommentEvent_OngoingVerification { - params := []pegomock.Param{body} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseBitbucketCloudPullCommentEvent", params, verifier.timeout) - return &MockEventParsing_ParseBitbucketCloudPullCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseBitbucketCloudPullCommentEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseBitbucketCloudPullCommentEvent_OngoingVerification) GetCapturedArguments() []byte { - body := c.GetAllCapturedArguments() - return body[len(body)-1] -} - -func (c *MockEventParsing_ParseBitbucketCloudPullCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 [][]byte) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.([]byte) - } - } - return -} - -func (verifier *VerifierMockEventParsing) GetBitbucketCloudPullEventType(eventTypeHeader string) *MockEventParsing_GetBitbucketCloudPullEventType_OngoingVerification { - params := []pegomock.Param{eventTypeHeader} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetBitbucketCloudPullEventType", params, verifier.timeout) - return &MockEventParsing_GetBitbucketCloudPullEventType_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_GetBitbucketCloudPullEventType_OngoingVerification struct { 
- mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_GetBitbucketCloudPullEventType_OngoingVerification) GetCapturedArguments() string { - eventTypeHeader := c.GetAllCapturedArguments() - return eventTypeHeader[len(eventTypeHeader)-1] -} - -func (c *MockEventParsing_GetBitbucketCloudPullEventType_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseBitbucketServerPullEvent(body []byte) *MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification { - params := []pegomock.Param{body} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseBitbucketServerPullEvent", params, verifier.timeout) - return &MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification) GetCapturedArguments() []byte { - body := c.GetAllCapturedArguments() - return body[len(body)-1] -} - -func (c *MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 [][]byte) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.([]byte) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseBitbucketServerPullCommentEvent(body []byte) *MockEventParsing_ParseBitbucketServerPullCommentEvent_OngoingVerification { - params := []pegomock.Param{body} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseBitbucketServerPullCommentEvent", params, verifier.timeout) - return &MockEventParsing_ParseBitbucketServerPullCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseBitbucketServerPullCommentEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseBitbucketServerPullCommentEvent_OngoingVerification) GetCapturedArguments() []byte { - body := c.GetAllCapturedArguments() - return body[len(body)-1] -} - -func (c *MockEventParsing_ParseBitbucketServerPullCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 [][]byte) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.([]byte) - } - } - return -} - -func (verifier *VerifierMockEventParsing) GetBitbucketServerPullEventType(eventTypeHeader string) *MockEventParsing_GetBitbucketServerPullEventType_OngoingVerification { - params := []pegomock.Param{eventTypeHeader} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, 
"GetBitbucketServerPullEventType", params, verifier.timeout) - return &MockEventParsing_GetBitbucketServerPullEventType_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_GetBitbucketServerPullEventType_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_GetBitbucketServerPullEventType_OngoingVerification) GetCapturedArguments() string { - eventTypeHeader := c.GetAllCapturedArguments() - return eventTypeHeader[len(eventTypeHeader)-1] -} - -func (c *MockEventParsing_GetBitbucketServerPullEventType_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseAzureDevopsPull(adPull *azuredevops.GitPullRequest) *MockEventParsing_ParseAzureDevopsPull_OngoingVerification { - params := []pegomock.Param{adPull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseAzureDevopsPull", params, verifier.timeout) - return &MockEventParsing_ParseAzureDevopsPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseAzureDevopsPull_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseAzureDevopsPull_OngoingVerification) GetCapturedArguments() *azuredevops.GitPullRequest { - adPull := c.GetAllCapturedArguments() - return adPull[len(adPull)-1] -} - -func (c *MockEventParsing_ParseAzureDevopsPull_OngoingVerification) GetAllCapturedArguments() (_param0 []*azuredevops.GitPullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*azuredevops.GitPullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*azuredevops.GitPullRequest) - } - } - return -} - -func (verifier *VerifierMockEventParsing) ParseAzureDevopsPullEvent(pullEvent azuredevops.Event) *MockEventParsing_ParseAzureDevopsPullEvent_OngoingVerification { - params := []pegomock.Param{pullEvent} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseAzureDevopsPullEvent", params, verifier.timeout) - return &MockEventParsing_ParseAzureDevopsPullEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseAzureDevopsPullEvent_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseAzureDevopsPullEvent_OngoingVerification) GetCapturedArguments() azuredevops.Event { - pullEvent := c.GetAllCapturedArguments() - return pullEvent[len(pullEvent)-1] -} - -func (c *MockEventParsing_ParseAzureDevopsPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []azuredevops.Event) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]azuredevops.Event, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(azuredevops.Event) - } - } - return -} - -func 
(verifier *VerifierMockEventParsing) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) *MockEventParsing_ParseAzureDevopsRepo_OngoingVerification { - params := []pegomock.Param{adRepo} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseAzureDevopsRepo", params, verifier.timeout) - return &MockEventParsing_ParseAzureDevopsRepo_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockEventParsing_ParseAzureDevopsRepo_OngoingVerification struct { - mock *MockEventParsing - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockEventParsing_ParseAzureDevopsRepo_OngoingVerification) GetCapturedArguments() *azuredevops.GitRepository { - adRepo := c.GetAllCapturedArguments() - return adRepo[len(adRepo)-1] -} - -func (c *MockEventParsing_ParseAzureDevopsRepo_OngoingVerification) GetAllCapturedArguments() (_param0 []*azuredevops.GitRepository) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*azuredevops.GitRepository, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*azuredevops.GitRepository) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_github_pull_getter.go b/server/legacy/events/mocks/mock_github_pull_getter.go deleted file mode 100644 index 965d7d2fe..000000000 --- a/server/legacy/events/mocks/mock_github_pull_getter.go +++ /dev/null @@ -1,114 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: GithubPullGetter) - -package mocks - -import ( - github "github.com/google/go-github/v45/github" - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockGithubPullGetter struct { - fail func(message string, callerSkip ...int) -} - -func NewMockGithubPullGetter(options ...pegomock.Option) *MockGithubPullGetter { - mock := &MockGithubPullGetter{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockGithubPullGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockGithubPullGetter) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockGithubPullGetter) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockGithubPullGetter().") - } - params := []pegomock.Param{repo, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *github.PullRequest - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*github.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubPullGetter) VerifyWasCalledOnce() *VerifierMockGithubPullGetter { - return &VerifierMockGithubPullGetter{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockGithubPullGetter) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockGithubPullGetter { - return &VerifierMockGithubPullGetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockGithubPullGetter) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockGithubPullGetter { - return &VerifierMockGithubPullGetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockGithubPullGetter) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockGithubPullGetter { - return &VerifierMockGithubPullGetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockGithubPullGetter struct { - mock *MockGithubPullGetter - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockGithubPullGetter) GetPullRequest(repo models.Repo, pullNum int) *MockGithubPullGetter_GetPullRequest_OngoingVerification { - params := []pegomock.Param{repo, pullNum} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) - return &MockGithubPullGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockGithubPullGetter_GetPullRequest_OngoingVerification struct { - mock *MockGithubPullGetter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubPullGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - repo, pullNum := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1] -} - -func (c *MockGithubPullGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_job_closer.go b/server/legacy/events/mocks/mock_job_closer.go deleted file mode 100644 index c0fe097bf..000000000 --- a/server/legacy/events/mocks/mock_job_closer.go +++ /dev/null @@ -1,102 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: JobCloser) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockJobCloser struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobCloser(options ...pegomock.Option) *MockJobCloser { - mock := &MockJobCloser{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobCloser) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobCloser) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobCloser) CloseJob(_param0 string, _param1 models.Repo) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobCloser().") - } - params := []pegomock.Param{_param0, _param1} - pegomock.GetGenericMockFrom(mock).Invoke("CloseJob", params, []reflect.Type{}) -} - -func (mock *MockJobCloser) VerifyWasCalledOnce() *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobCloser) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobCloser) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobCloser) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobCloser { - return &VerifierMockJobCloser{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobCloser struct { - mock *MockJobCloser - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobCloser) CloseJob(_param0 string, _param1 models.Repo) *MockJobCloser_CloseJob_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CloseJob", params, verifier.timeout) - return &MockJobCloser_CloseJob_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobCloser_CloseJob_OngoingVerification struct { - mock *MockJobCloser - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobCloser_CloseJob_OngoingVerification) GetCapturedArguments() (string, models.Repo) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockJobCloser_CloseJob_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []models.Repo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_job_id_generator.go 
b/server/legacy/events/mocks/mock_job_id_generator.go deleted file mode 100644 index f47d9898a..000000000 --- a/server/legacy/events/mocks/mock_job_id_generator.go +++ /dev/null @@ -1,95 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: JobIDGenerator) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockJobIDGenerator struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobIDGenerator(options ...pegomock.Option) *MockJobIDGenerator { - mock := &MockJobIDGenerator{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobIDGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobIDGenerator) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobIDGenerator) GenerateJobID() string { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobIDGenerator().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateJobID", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockJobIDGenerator) VerifyWasCalledOnce() *VerifierMockJobIDGenerator { - return &VerifierMockJobIDGenerator{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobIDGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobIDGenerator { - return &VerifierMockJobIDGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobIDGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobIDGenerator { - return &VerifierMockJobIDGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobIDGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobIDGenerator { - return &VerifierMockJobIDGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobIDGenerator struct { - mock *MockJobIDGenerator - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobIDGenerator) GenerateJobID() *MockJobIDGenerator_GenerateJobID_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateJobID", params, verifier.timeout) - return &MockJobIDGenerator_GenerateJobID_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobIDGenerator_GenerateJobID_OngoingVerification struct { - mock *MockJobIDGenerator - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobIDGenerator_GenerateJobID_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockJobIDGenerator_GenerateJobID_OngoingVerification) GetAllCapturedArguments() { -} diff --git a/server/legacy/events/mocks/mock_job_message_sender.go b/server/legacy/events/mocks/mock_job_message_sender.go deleted file mode 100644 index 1503b3616..000000000 --- 
a/server/legacy/events/mocks/mock_job_message_sender.go +++ /dev/null @@ -1,107 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: JobMessageSender) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type MockJobMessageSender struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobMessageSender(options ...pegomock.Option) *MockJobMessageSender { - mock := &MockJobMessageSender{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobMessageSender) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobMessageSender) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobMessageSender) Send(_param0 command.ProjectContext, _param1 string, _param2 bool) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobMessageSender().") - } - params := []pegomock.Param{_param0, _param1, _param2} - pegomock.GetGenericMockFrom(mock).Invoke("Send", params, []reflect.Type{}) -} - -func (mock *MockJobMessageSender) VerifyWasCalledOnce() *VerifierMockJobMessageSender { - return &VerifierMockJobMessageSender{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobMessageSender) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobMessageSender { - return &VerifierMockJobMessageSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobMessageSender) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobMessageSender { - return &VerifierMockJobMessageSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobMessageSender) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobMessageSender { - return &VerifierMockJobMessageSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobMessageSender struct { - mock *MockJobMessageSender - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobMessageSender) Send(_param0 command.ProjectContext, _param1 string, _param2 bool) *MockJobMessageSender_Send_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Send", params, verifier.timeout) - return &MockJobMessageSender_Send_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobMessageSender_Send_OngoingVerification struct { - mock *MockJobMessageSender - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobMessageSender_Send_OngoingVerification) GetCapturedArguments() (command.ProjectContext, string, bool) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockJobMessageSender_Send_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext, _param1 []string, _param2 
[]bool) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]bool, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(bool) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_job_url_setter.go b/server/legacy/events/mocks/mock_job_url_setter.go deleted file mode 100644 index 747cbba0e..000000000 --- a/server/legacy/events/mocks/mock_job_url_setter.go +++ /dev/null @@ -1,115 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: JobURLSetter) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockJobURLSetter struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobURLSetter(options ...pegomock.Option) *MockJobURLSetter { - mock := &MockJobURLSetter{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobURLSetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobURLSetter) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobURLSetter) SetJobURLWithStatus(_param0 command.ProjectContext, _param1 command.Name, _param2 models.VCSStatus) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockJobURLSetter().") - } - params := []pegomock.Param{_param0, _param1, _param2} - result := pegomock.GetGenericMockFrom(mock).Invoke("SetJobURLWithStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockJobURLSetter) VerifyWasCalledOnce() *VerifierMockJobURLSetter { - return &VerifierMockJobURLSetter{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobURLSetter) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobURLSetter { - return &VerifierMockJobURLSetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobURLSetter) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobURLSetter { - return &VerifierMockJobURLSetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobURLSetter) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobURLSetter { - return &VerifierMockJobURLSetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobURLSetter struct { - mock *MockJobURLSetter - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobURLSetter) SetJobURLWithStatus(_param0 command.ProjectContext, _param1 command.Name, _param2 models.VCSStatus) *MockJobURLSetter_SetJobURLWithStatus_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetJobURLWithStatus", params, verifier.timeout) - return &MockJobURLSetter_SetJobURLWithStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobURLSetter_SetJobURLWithStatus_OngoingVerification struct { - mock *MockJobURLSetter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobURLSetter_SetJobURLWithStatus_OngoingVerification) GetCapturedArguments() (command.ProjectContext, command.Name, models.VCSStatus) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockJobURLSetter_SetJobURLWithStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext, _param1 []command.Name, _param2 []models.VCSStatus) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - _param1 = make([]command.Name, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.Name) - } - _param2 = make([]models.VCSStatus, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.VCSStatus) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_lock_url_generator.go b/server/legacy/events/mocks/mock_lock_url_generator.go deleted file mode 100644 index 68ac35703..000000000 --- 
a/server/legacy/events/mocks/mock_lock_url_generator.go +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: LockURLGenerator) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockLockURLGenerator struct { - fail func(message string, callerSkip ...int) -} - -func NewMockLockURLGenerator(options ...pegomock.Option) *MockLockURLGenerator { - mock := &MockLockURLGenerator{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockLockURLGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockLockURLGenerator) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockLockURLGenerator) GenerateLockURL(lockID string) string { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockLockURLGenerator().") - } - params := []pegomock.Param{lockID} - result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateLockURL", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockLockURLGenerator) VerifyWasCalledOnce() *VerifierMockLockURLGenerator { - return &VerifierMockLockURLGenerator{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockLockURLGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockLockURLGenerator { - return &VerifierMockLockURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockLockURLGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockLockURLGenerator { - return &VerifierMockLockURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockLockURLGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockLockURLGenerator { - return &VerifierMockLockURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockLockURLGenerator struct { - mock *MockLockURLGenerator - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockLockURLGenerator) GenerateLockURL(lockID string) *MockLockURLGenerator_GenerateLockURL_OngoingVerification { - params := []pegomock.Param{lockID} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateLockURL", params, verifier.timeout) - return &MockLockURLGenerator_GenerateLockURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockLockURLGenerator_GenerateLockURL_OngoingVerification struct { - mock *MockLockURLGenerator - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockLockURLGenerator_GenerateLockURL_OngoingVerification) GetCapturedArguments() string { - lockID := c.GetAllCapturedArguments() - return lockID[len(lockID)-1] -} - -func (c *MockLockURLGenerator_GenerateLockURL_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_log_stream_url_generator.go b/server/legacy/events/mocks/mock_log_stream_url_generator.go deleted file mode 100644 index ad5ed94d5..000000000 --- a/server/legacy/events/mocks/mock_log_stream_url_generator.go +++ /dev/null @@ -1,111 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: JobsUrlGenerator) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockJobsUrlGenerator struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobsUrlGenerator(options ...pegomock.Option) *MockJobsUrlGenerator { - mock := &MockJobsUrlGenerator{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobsUrlGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobsUrlGenerator) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobsUrlGenerator) GenerateProjectJobsUrl(pull models.PullRequest, p command.ProjectContext) string { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobsUrlGenerator().") - } - params := []pegomock.Param{pull, p} - result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateProjectJobsUrl", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()}) - var ret0 string - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - } - return ret0 -} - -func (mock *MockJobsUrlGenerator) VerifyWasCalledOnce() *VerifierMockJobsUrlGenerator { - return &VerifierMockJobsUrlGenerator{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobsUrlGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobsUrlGenerator { - return &VerifierMockJobsUrlGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobsUrlGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobsUrlGenerator { - return &VerifierMockJobsUrlGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobsUrlGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobsUrlGenerator { - return &VerifierMockJobsUrlGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobsUrlGenerator struct { - mock *MockJobsUrlGenerator - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobsUrlGenerator) GenerateProjectJobsUrl(pull models.PullRequest, p command.ProjectContext) *MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification { - params := []pegomock.Param{pull, p} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateProjectJobsUrl", params, 
verifier.timeout) - return &MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification struct { - mock *MockJobsUrlGenerator - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification) GetCapturedArguments() (models.PullRequest, command.ProjectContext) { - pull, p := c.GetAllCapturedArguments() - return pull[len(pull)-1], p[len(p)-1] -} - -func (c *MockJobsUrlGenerator_GenerateProjectJobsUrl_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest, _param1 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_pending_plan_finder.go b/server/legacy/events/mocks/mock_pending_plan_finder.go deleted file mode 100644 index 9b349f9bd..000000000 --- a/server/legacy/events/mocks/mock_pending_plan_finder.go +++ /dev/null @@ -1,152 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: PendingPlanFinder) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - events "github.com/runatlantis/atlantis/server/legacy/events" -) - -type MockPendingPlanFinder struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPendingPlanFinder(options ...pegomock.Option) *MockPendingPlanFinder { - mock := &MockPendingPlanFinder{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPendingPlanFinder) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockPendingPlanFinder) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPendingPlanFinder) Find(pullDir string) ([]events.PendingPlan, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockPendingPlanFinder().") - } - params := []pegomock.Param{pullDir} - result := pegomock.GetGenericMockFrom(mock).Invoke("Find", params, []reflect.Type{reflect.TypeOf((*[]events.PendingPlan)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []events.PendingPlan - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]events.PendingPlan) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockPendingPlanFinder) DeletePlans(pullDir string) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockPendingPlanFinder().") - } - params := []pegomock.Param{pullDir} - result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlans", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockPendingPlanFinder) VerifyWasCalledOnce() *VerifierMockPendingPlanFinder { - return &VerifierMockPendingPlanFinder{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPendingPlanFinder) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockPendingPlanFinder { - return &VerifierMockPendingPlanFinder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPendingPlanFinder) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPendingPlanFinder { - return &VerifierMockPendingPlanFinder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPendingPlanFinder) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockPendingPlanFinder { - return &VerifierMockPendingPlanFinder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPendingPlanFinder struct { - mock *MockPendingPlanFinder - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockPendingPlanFinder) Find(pullDir string) *MockPendingPlanFinder_Find_OngoingVerification { - params := []pegomock.Param{pullDir} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Find", params, verifier.timeout) - return &MockPendingPlanFinder_Find_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPendingPlanFinder_Find_OngoingVerification struct { - mock *MockPendingPlanFinder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPendingPlanFinder_Find_OngoingVerification) GetCapturedArguments() string { - pullDir := c.GetAllCapturedArguments() - return pullDir[len(pullDir)-1] -} - -func (c *MockPendingPlanFinder_Find_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockPendingPlanFinder) DeletePlans(pullDir string) *MockPendingPlanFinder_DeletePlans_OngoingVerification { - params := []pegomock.Param{pullDir} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlans", params, verifier.timeout) - return &MockPendingPlanFinder_DeletePlans_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPendingPlanFinder_DeletePlans_OngoingVerification struct { - mock *MockPendingPlanFinder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPendingPlanFinder_DeletePlans_OngoingVerification) GetCapturedArguments() string { - pullDir := c.GetAllCapturedArguments() - return pullDir[len(pullDir)-1] -} - 
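For context only (this is not part of the diff itself): the mocks being removed here all follow the same pegomock stub-and-verify pattern, and the legacy tests consumed them roughly as sketched below. The test name, the pull directory path, and the stubbed return values are illustrative assumptions, not taken from the repository; only the mock constructors, Find/VerifyWasCalledOnce/GetCapturedArguments methods, and import paths come from the deleted files above.

package mocks_test

import (
	"testing"

	pegomock "github.com/petergtz/pegomock"
	events "github.com/runatlantis/atlantis/server/legacy/events"
	"github.com/runatlantis/atlantis/server/legacy/events/mocks"
)

// A minimal sketch of driving a pegomock-generated mock such as
// MockPendingPlanFinder: register the fail handler, stub Find with
// When/ThenReturn, call the mock, then verify and capture arguments
// through the generated verifier shown in the deleted file above.
func TestPendingPlanFinderMockSketch(t *testing.T) {
	pegomock.RegisterMockTestingT(t) // report mock failures via *testing.T

	finder := mocks.NewMockPendingPlanFinder()

	// Stub: a call to Find with this exact pullDir returns no pending plans.
	// The path is a made-up example value.
	pegomock.When(finder.Find("/tmp/pulls/1")).ThenReturn([]events.PendingPlan{}, nil)

	plans, err := finder.Find("/tmp/pulls/1")
	if err != nil || len(plans) != 0 {
		t.Fatalf("unexpected stub result: %v, %v", plans, err)
	}

	// Verify the invocation and read back the captured argument via the
	// generated OngoingVerification type.
	pullDir := finder.VerifyWasCalledOnce().Find("/tmp/pulls/1").GetCapturedArguments()
	if pullDir != "/tmp/pulls/1" {
		t.Fatalf("expected captured pull dir %q, got %q", "/tmp/pulls/1", pullDir)
	}
}

With this pattern in mind, the remaining deletions below read the same way: each MockX exposes the interface methods plus VerifyWasCalled* helpers, and each VerifierMockX returns an OngoingVerification whose GetCapturedArguments mirrors the mocked method's parameter list.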
-func (c *MockPendingPlanFinder_DeletePlans_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_pre_workflows_hooks_command_runner.go b/server/legacy/events/mocks/mock_pre_workflows_hooks_command_runner.go deleted file mode 100644 index 5faefa0c3..000000000 --- a/server/legacy/events/mocks/mock_pre_workflows_hooks_command_runner.go +++ /dev/null @@ -1,112 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: PreWorkflowHooksCommandRunner) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockPreWorkflowHooksCommandRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPreWorkflowHooksCommandRunner(options ...pegomock.Option) *MockPreWorkflowHooksCommandRunner { - mock := &MockPreWorkflowHooksCommandRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPreWorkflowHooksCommandRunner) SetFailHandler(fh pegomock.FailHandler) { - mock.fail = fh -} -func (mock *MockPreWorkflowHooksCommandRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPreWorkflowHooksCommandRunner) RunPreHooks(ctx context.Context, cmdCtx *command.Context) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockPreWorkflowHooksCommandRunner().") - } - params := []pegomock.Param{ctx, cmdCtx} - result := pegomock.GetGenericMockFrom(mock).Invoke("RunPreHooks", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockPreWorkflowHooksCommandRunner) VerifyWasCalledOnce() *VerifierMockPreWorkflowHooksCommandRunner { - return &VerifierMockPreWorkflowHooksCommandRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPreWorkflowHooksCommandRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierMockPreWorkflowHooksCommandRunner { - return &VerifierMockPreWorkflowHooksCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPreWorkflowHooksCommandRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPreWorkflowHooksCommandRunner { - return &VerifierMockPreWorkflowHooksCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPreWorkflowHooksCommandRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.Matcher, timeout time.Duration) *VerifierMockPreWorkflowHooksCommandRunner { - return &VerifierMockPreWorkflowHooksCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPreWorkflowHooksCommandRunner struct { - mock *MockPreWorkflowHooksCommandRunner - invocationCountMatcher pegomock.Matcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier 
*VerifierMockPreWorkflowHooksCommandRunner) RunPreHooks(ctx context.Context, cmdCtx *command.Context) *MockPreWorkflowHooksCommandRunner_RunPreHooks_OngoingVerification { - params := []pegomock.Param{ctx, cmdCtx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunPreHooks", params, verifier.timeout) - return &MockPreWorkflowHooksCommandRunner_RunPreHooks_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPreWorkflowHooksCommandRunner_RunPreHooks_OngoingVerification struct { - mock *MockPreWorkflowHooksCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPreWorkflowHooksCommandRunner_RunPreHooks_OngoingVerification) GetCapturedArguments() (context.Context, *command.Context) { - ctx, cmdCtx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], cmdCtx[len(cmdCtx)-1] -} - -func (c *MockPreWorkflowHooksCommandRunner_RunPreHooks_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []*command.Context) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Context) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_project_command_builder.go b/server/legacy/events/mocks/mock_project_command_builder.go deleted file mode 100644 index 928b0df58..000000000 --- a/server/legacy/events/mocks/mock_project_command_builder.go +++ /dev/null @@ -1,310 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: ProjectCommandBuilder) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type MockProjectCommandBuilder struct { - fail func(message string, callerSkip ...int) -} - -func NewMockProjectCommandBuilder(options ...pegomock.Option) *MockProjectCommandBuilder { - mock := &MockProjectCommandBuilder{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockProjectCommandBuilder) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockProjectCommandBuilder) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandBuilder().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildAutoplanCommands", params, []reflect.Type{reflect.TypeOf((*[]command.ProjectContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []command.ProjectContext - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]command.ProjectContext) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectCommandBuilder().") - } - params := []pegomock.Param{ctx, comment} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildPlanCommands", params, []reflect.Type{reflect.TypeOf((*[]command.ProjectContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []command.ProjectContext - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]command.ProjectContext) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandBuilder().") - } - params := []pegomock.Param{ctx, comment} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApplyCommands", params, []reflect.Type{reflect.TypeOf((*[]command.ProjectContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []command.ProjectContext - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]command.ProjectContext) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandBuilder().") - } - params := []pegomock.Param{ctx, comment} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApprovePoliciesCommands", params, []reflect.Type{reflect.TypeOf((*[]command.ProjectContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []command.ProjectContext - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]command.ProjectContext) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectCommandBuilder().") - } - params := []pegomock.Param{ctx, comment} - result := pegomock.GetGenericMockFrom(mock).Invoke("BuildVersionCommands", params, []reflect.Type{reflect.TypeOf((*[]command.ProjectContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []command.ProjectContext - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]command.ProjectContext) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectCommandBuilder) VerifyWasCalledOnce() *VerifierMockProjectCommandBuilder { - return &VerifierMockProjectCommandBuilder{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockProjectCommandBuilder) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectCommandBuilder { - return &VerifierMockProjectCommandBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockProjectCommandBuilder) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectCommandBuilder { - return &VerifierMockProjectCommandBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockProjectCommandBuilder) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectCommandBuilder { - return &VerifierMockProjectCommandBuilder{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockProjectCommandBuilder struct { - mock *MockProjectCommandBuilder - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) *MockProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildAutoplanCommands", params, verifier.timeout) - return &MockProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification struct { - mock *MockProjectCommandBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification) GetCapturedArguments() *command.Context { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - } - return -} - -func (verifier *VerifierMockProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, comment *command.Comment) *MockProjectCommandBuilder_BuildPlanCommands_OngoingVerification { - params := []pegomock.Param{ctx, comment} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, 
"BuildPlanCommands", params, verifier.timeout) - return &MockProjectCommandBuilder_BuildPlanCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandBuilder_BuildPlanCommands_OngoingVerification struct { - mock *MockProjectCommandBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandBuilder_BuildPlanCommands_OngoingVerification) GetCapturedArguments() (*command.Context, *command.Comment) { - ctx, comment := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], comment[len(comment)-1] -} - -func (c *MockProjectCommandBuilder_BuildPlanCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*command.Comment) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - _param1 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Comment) - } - } - return -} - -func (verifier *VerifierMockProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, comment *command.Comment) *MockProjectCommandBuilder_BuildApplyCommands_OngoingVerification { - params := []pegomock.Param{ctx, comment} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApplyCommands", params, verifier.timeout) - return &MockProjectCommandBuilder_BuildApplyCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandBuilder_BuildApplyCommands_OngoingVerification struct { - mock *MockProjectCommandBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandBuilder_BuildApplyCommands_OngoingVerification) GetCapturedArguments() (*command.Context, *command.Comment) { - ctx, comment := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], comment[len(comment)-1] -} - -func (c *MockProjectCommandBuilder_BuildApplyCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*command.Comment) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - _param1 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Comment) - } - } - return -} - -func (verifier *VerifierMockProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, comment *command.Comment) *MockProjectCommandBuilder_BuildApprovePoliciesCommands_OngoingVerification { - params := []pegomock.Param{ctx, comment} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApprovePoliciesCommands", params, verifier.timeout) - return &MockProjectCommandBuilder_BuildApprovePoliciesCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandBuilder_BuildApprovePoliciesCommands_OngoingVerification struct { - mock *MockProjectCommandBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandBuilder_BuildApprovePoliciesCommands_OngoingVerification) 
GetCapturedArguments() (*command.Context, *command.Comment) { - ctx, comment := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], comment[len(comment)-1] -} - -func (c *MockProjectCommandBuilder_BuildApprovePoliciesCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*command.Comment) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - _param1 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Comment) - } - } - return -} - -func (verifier *VerifierMockProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, comment *command.Comment) *MockProjectCommandBuilder_BuildVersionCommands_OngoingVerification { - params := []pegomock.Param{ctx, comment} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildVersionCommands", params, verifier.timeout) - return &MockProjectCommandBuilder_BuildVersionCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandBuilder_BuildVersionCommands_OngoingVerification struct { - mock *MockProjectCommandBuilder - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandBuilder_BuildVersionCommands_OngoingVerification) GetCapturedArguments() (*command.Context, *command.Comment) { - ctx, comment := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], comment[len(comment)-1] -} - -func (c *MockProjectCommandBuilder_BuildVersionCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context, _param1 []*command.Comment) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - _param1 = make([]*command.Comment, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(*command.Comment) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_project_command_runner.go b/server/legacy/events/mocks/mock_project_command_runner.go deleted file mode 100644 index c7681a509..000000000 --- a/server/legacy/events/mocks/mock_project_command_runner.go +++ /dev/null @@ -1,274 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: ProjectCommandRunner) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type MockProjectCommandRunner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockProjectCommandRunner(options ...pegomock.Option) *MockProjectCommandRunner { - mock := &MockProjectCommandRunner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockProjectCommandRunner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockProjectCommandRunner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockProjectCommandRunner) Plan(ctx command.ProjectContext) command.ProjectResult { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectCommandRunner().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("Plan", params, []reflect.Type{reflect.TypeOf((*command.ProjectResult)(nil)).Elem()}) - var ret0 command.ProjectResult - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(command.ProjectResult) - } - } - return ret0 -} - -func (mock *MockProjectCommandRunner) Apply(ctx command.ProjectContext) command.ProjectResult { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandRunner().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("Apply", params, []reflect.Type{reflect.TypeOf((*command.ProjectResult)(nil)).Elem()}) - var ret0 command.ProjectResult - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(command.ProjectResult) - } - } - return ret0 -} - -func (mock *MockProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandRunner().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("PolicyCheck", params, []reflect.Type{reflect.TypeOf((*command.ProjectResult)(nil)).Elem()}) - var ret0 command.ProjectResult - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(command.ProjectResult) - } - } - return ret0 -} - -func (mock *MockProjectCommandRunner) ApprovePolicies(ctx command.ProjectContext) command.ProjectResult { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandRunner().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("ApprovePolicies", params, []reflect.Type{reflect.TypeOf((*command.ProjectResult)(nil)).Elem()}) - var ret0 command.ProjectResult - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(command.ProjectResult) - } - } - return ret0 -} - -func (mock *MockProjectCommandRunner) Version(ctx command.ProjectContext) command.ProjectResult { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectCommandRunner().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("Version", params, []reflect.Type{reflect.TypeOf((*command.ProjectResult)(nil)).Elem()}) - var ret0 command.ProjectResult - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(command.ProjectResult) - } - } - return ret0 -} - -func (mock *MockProjectCommandRunner) VerifyWasCalledOnce() *VerifierMockProjectCommandRunner { - return &VerifierMockProjectCommandRunner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockProjectCommandRunner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectCommandRunner { - return &VerifierMockProjectCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockProjectCommandRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectCommandRunner { - return &VerifierMockProjectCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockProjectCommandRunner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectCommandRunner { - return &VerifierMockProjectCommandRunner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockProjectCommandRunner struct { - mock *MockProjectCommandRunner - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockProjectCommandRunner) Plan(ctx command.ProjectContext) *MockProjectCommandRunner_Plan_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Plan", params, verifier.timeout) - return &MockProjectCommandRunner_Plan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandRunner_Plan_OngoingVerification struct { - mock *MockProjectCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandRunner_Plan_OngoingVerification) GetCapturedArguments() command.ProjectContext { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockProjectCommandRunner_Plan_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - } - return -} - -func (verifier *VerifierMockProjectCommandRunner) Apply(ctx command.ProjectContext) *MockProjectCommandRunner_Apply_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Apply", params, verifier.timeout) - return &MockProjectCommandRunner_Apply_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandRunner_Apply_OngoingVerification struct { - mock *MockProjectCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c 
*MockProjectCommandRunner_Apply_OngoingVerification) GetCapturedArguments() command.ProjectContext { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockProjectCommandRunner_Apply_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - } - return -} - -func (verifier *VerifierMockProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) *MockProjectCommandRunner_PolicyCheck_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PolicyCheck", params, verifier.timeout) - return &MockProjectCommandRunner_PolicyCheck_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandRunner_PolicyCheck_OngoingVerification struct { - mock *MockProjectCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandRunner_PolicyCheck_OngoingVerification) GetCapturedArguments() command.ProjectContext { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockProjectCommandRunner_PolicyCheck_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - } - return -} - -func (verifier *VerifierMockProjectCommandRunner) ApprovePolicies(ctx command.ProjectContext) *MockProjectCommandRunner_ApprovePolicies_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ApprovePolicies", params, verifier.timeout) - return &MockProjectCommandRunner_ApprovePolicies_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandRunner_ApprovePolicies_OngoingVerification struct { - mock *MockProjectCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandRunner_ApprovePolicies_OngoingVerification) GetCapturedArguments() command.ProjectContext { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockProjectCommandRunner_ApprovePolicies_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - } - return -} - -func (verifier *VerifierMockProjectCommandRunner) Version(ctx command.ProjectContext) *MockProjectCommandRunner_Version_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Version", params, verifier.timeout) - return &MockProjectCommandRunner_Version_OngoingVerification{mock: verifier.mock, methodInvocations: 
methodInvocations} -} - -type MockProjectCommandRunner_Version_OngoingVerification struct { - mock *MockProjectCommandRunner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandRunner_Version_OngoingVerification) GetCapturedArguments() command.ProjectContext { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockProjectCommandRunner_Version_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_project_lock.go b/server/legacy/events/mocks/mock_project_lock.go deleted file mode 100644 index 81b2a620d..000000000 --- a/server/legacy/events/mocks/mock_project_lock.go +++ /dev/null @@ -1,132 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: ProjectLocker) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - events "github.com/runatlantis/atlantis/server/legacy/events" - logging "github.com/runatlantis/atlantis/server/logging" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockProjectLocker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockProjectLocker(options ...pegomock.Option) *MockProjectLocker { - mock := &MockProjectLocker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockProjectLocker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockProjectLocker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockProjectLocker) TryLock(requestCtx context.Context, log logging.Logger, pull models.PullRequest, user models.User, workspace string, project models.Project) (*events.TryLockResponse, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectLocker().") - } - params := []pegomock.Param{requestCtx, log, pull, user, workspace, project} - result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((**events.TryLockResponse)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *events.TryLockResponse - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*events.TryLockResponse) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectLocker) VerifyWasCalledOnce() *VerifierMockProjectLocker { - return &VerifierMockProjectLocker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockProjectLocker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectLocker { - return &VerifierMockProjectLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockProjectLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectLocker { - return &VerifierMockProjectLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockProjectLocker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectLocker { - return &VerifierMockProjectLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockProjectLocker struct { - mock *MockProjectLocker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockProjectLocker) TryLock(log logging.Logger, requestCtx context.Context, pull models.PullRequest, user models.User, workspace string, project models.Project) *MockProjectLocker_TryLock_OngoingVerification { - params := []pegomock.Param{log, requestCtx, pull, user, workspace, project} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params, verifier.timeout) - return &MockProjectLocker_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectLocker_TryLock_OngoingVerification struct { - mock *MockProjectLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectLocker_TryLock_OngoingVerification) GetCapturedArguments() (logging.Logger, context.Context, models.PullRequest, models.User, string, models.Project) { - log, requestCtx, pull, user, workspace, project := c.GetAllCapturedArguments() - return log[len(log)-1], requestCtx[len(requestCtx)-1], pull[len(pull)-1], user[len(user)-1], workspace[len(workspace)-1], project[len(project)-1] -} - -func (c *MockProjectLocker_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []context.Context, _param2 []models.PullRequest, _param3 []models.User, _param4 []string, _param5 []models.Project) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(context.Context) 
- } - _param2 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.PullRequest) - } - _param3 = make([]models.User, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(models.User) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) - } - _param5 = make([]models.Project, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(models.Project) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_pull_cleaner.go b/server/legacy/events/mocks/mock_pull_cleaner.go deleted file mode 100644 index 2c9067267..000000000 --- a/server/legacy/events/mocks/mock_pull_cleaner.go +++ /dev/null @@ -1,110 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: PullCleaner) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockPullCleaner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPullCleaner(options ...pegomock.Option) *MockPullCleaner { - mock := &MockPullCleaner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPullCleaner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockPullCleaner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPullCleaner) CleanUpPull(_param0 models.Repo, _param1 models.PullRequest) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockPullCleaner().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("CleanUpPull", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockPullCleaner) VerifyWasCalledOnce() *VerifierMockPullCleaner { - return &VerifierMockPullCleaner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPullCleaner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockPullCleaner { - return &VerifierMockPullCleaner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPullCleaner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPullCleaner { - return &VerifierMockPullCleaner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPullCleaner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockPullCleaner { - return &VerifierMockPullCleaner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPullCleaner struct { - mock *MockPullCleaner - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockPullCleaner) CleanUpPull(_param0 models.Repo, _param1 models.PullRequest) *MockPullCleaner_CleanUpPull_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, 
verifier.invocationCountMatcher, "CleanUpPull", params, verifier.timeout) - return &MockPullCleaner_CleanUpPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPullCleaner_CleanUpPull_OngoingVerification struct { - mock *MockPullCleaner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPullCleaner_CleanUpPull_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockPullCleaner_CleanUpPull_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_pull_status_fetcher.go b/server/legacy/events/mocks/mock_pull_status_fetcher.go deleted file mode 100644 index ae8d405d0..000000000 --- a/server/legacy/events/mocks/mock_pull_status_fetcher.go +++ /dev/null @@ -1,110 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: PullStatusFetcher) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockPullStatusFetcher struct { - fail func(message string, callerSkip ...int) -} - -func NewMockPullStatusFetcher(options ...pegomock.Option) *MockPullStatusFetcher { - mock := &MockPullStatusFetcher{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockPullStatusFetcher) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockPullStatusFetcher) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockPullStatusFetcher) GetPullStatus(pull models.PullRequest) (*models.PullStatus, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockPullStatusFetcher().") - } - params := []pegomock.Param{pull} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullStatus", params, []reflect.Type{reflect.TypeOf((**models.PullStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *models.PullStatus - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*models.PullStatus) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockPullStatusFetcher) VerifyWasCalledOnce() *VerifierMockPullStatusFetcher { - return &VerifierMockPullStatusFetcher{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockPullStatusFetcher) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockPullStatusFetcher { - return &VerifierMockPullStatusFetcher{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockPullStatusFetcher) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockPullStatusFetcher { - return &VerifierMockPullStatusFetcher{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockPullStatusFetcher) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockPullStatusFetcher { - return &VerifierMockPullStatusFetcher{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockPullStatusFetcher struct { - mock *MockPullStatusFetcher - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockPullStatusFetcher) GetPullStatus(pull models.PullRequest) *MockPullStatusFetcher_GetPullStatus_OngoingVerification { - params := []pegomock.Param{pull} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullStatus", params, verifier.timeout) - return &MockPullStatusFetcher_GetPullStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockPullStatusFetcher_GetPullStatus_OngoingVerification struct { - mock *MockPullStatusFetcher - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockPullStatusFetcher_GetPullStatus_OngoingVerification) GetCapturedArguments() models.PullRequest { - pull := c.GetAllCapturedArguments() - return pull[len(pull)-1] -} - -func (c *MockPullStatusFetcher_GetPullStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_resource_cleaner.go b/server/legacy/events/mocks/mock_resource_cleaner.go deleted file mode 100644 index 4e9a81226..000000000 --- a/server/legacy/events/mocks/mock_resource_cleaner.go +++ /dev/null @@ -1,99 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: ResourceCleaner) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -type MockResourceCleaner struct { - fail func(message string, callerSkip ...int) -} - -func NewMockResourceCleaner(options ...pegomock.Option) *MockResourceCleaner { - mock := &MockResourceCleaner{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockResourceCleaner) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockResourceCleaner) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockResourceCleaner) CleanUp(_param0 jobs.PullInfo) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockResourceCleaner().") - } - params := []pegomock.Param{_param0} - pegomock.GetGenericMockFrom(mock).Invoke("CleanUp", params, []reflect.Type{}) -} - -func (mock *MockResourceCleaner) VerifyWasCalledOnce() *VerifierMockResourceCleaner { - return &VerifierMockResourceCleaner{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockResourceCleaner) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockResourceCleaner { - return &VerifierMockResourceCleaner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockResourceCleaner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockResourceCleaner { - return &VerifierMockResourceCleaner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockResourceCleaner) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockResourceCleaner { - return &VerifierMockResourceCleaner{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockResourceCleaner struct { - mock *MockResourceCleaner - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockResourceCleaner) CleanUp(_param0 jobs.PullInfo) *MockResourceCleaner_CleanUp_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CleanUp", params, verifier.timeout) - return &MockResourceCleaner_CleanUp_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockResourceCleaner_CleanUp_OngoingVerification struct { - mock *MockResourceCleaner - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockResourceCleaner_CleanUp_OngoingVerification) GetCapturedArguments() jobs.PullInfo { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockResourceCleaner_CleanUp_OngoingVerification) GetAllCapturedArguments() (_param0 []jobs.PullInfo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]jobs.PullInfo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(jobs.PullInfo) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_stale_command_checker.go 
b/server/legacy/events/mocks/mock_stale_command_checker.go deleted file mode 100644 index 5f3107686..000000000 --- a/server/legacy/events/mocks/mock_stale_command_checker.go +++ /dev/null @@ -1,105 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: StaleCommandChecker) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - "reflect" - "time" -) - -type MockStaleCommandChecker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockStaleCommandChecker(options ...pegomock.Option) *MockStaleCommandChecker { - mock := &MockStaleCommandChecker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockStaleCommandChecker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockStaleCommandChecker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockStaleCommandChecker) CommandIsStale(ctx *command.Context) bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockStaleCommandChecker().") - } - params := []pegomock.Param{ctx} - result := pegomock.GetGenericMockFrom(mock).Invoke("CommandIsStale", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockStaleCommandChecker) VerifyWasCalledOnce() *VerifierMockStaleCommandChecker { - return &VerifierMockStaleCommandChecker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockStaleCommandChecker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockStaleCommandChecker { - return &VerifierMockStaleCommandChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockStaleCommandChecker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockStaleCommandChecker { - return &VerifierMockStaleCommandChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockStaleCommandChecker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockStaleCommandChecker { - return &VerifierMockStaleCommandChecker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockStaleCommandChecker struct { - mock *MockStaleCommandChecker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockStaleCommandChecker) CommandIsStale(ctx *command.Context) *MockStaleCommandChecker_CommandIsStale_OngoingVerification { - params := []pegomock.Param{ctx} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CommandIsStale", params, verifier.timeout) - return &MockStaleCommandChecker_CommandIsStale_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockStaleCommandChecker_CommandIsStale_OngoingVerification struct { - mock *MockStaleCommandChecker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockStaleCommandChecker_CommandIsStale_OngoingVerification) GetCapturedArguments() 
*command.Context { - ctx := c.GetAllCapturedArguments() - return ctx[len(ctx)-1] -} - -func (c *MockStaleCommandChecker_CommandIsStale_OngoingVerification) GetAllCapturedArguments() (_param0 []*command.Context) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*command.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*command.Context) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_vcs_status_updater.go b/server/legacy/events/mocks/mock_vcs_status_updater.go deleted file mode 100644 index 0e9e0cd32..000000000 --- a/server/legacy/events/mocks/mock_vcs_status_updater.go +++ /dev/null @@ -1,276 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: VCSStatusUpdater) - -package mocks - -import ( - context "context" - fmt "fmt" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockVCSStatusUpdater struct { - fail func(message string, callerSkip ...int) -} - -func NewMockVCSStatusUpdater(options ...pegomock.Option) *MockVCSStatusUpdater { - mock := &MockVCSStatusUpdater{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockVCSStatusUpdater) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockVCSStatusUpdater) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockVCSStatusUpdater) UpdateCombined(_param0 context.Context, _param1 models.Repo, _param2 models.PullRequest, _param3 models.VCSStatus, _param4 fmt.Stringer, _param5 string, _param6 string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockVCSStatusUpdater().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5, _param6} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateCombined", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockVCSStatusUpdater) UpdateCombinedCount(_param0 context.Context, _param1 models.Repo, _param2 models.PullRequest, _param3 models.VCSStatus, _param4 fmt.Stringer, _param5 int, _param6 int, _param7 string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockVCSStatusUpdater().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5, _param6, _param7} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateCombinedCount", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockVCSStatusUpdater) UpdateProject(_param0 context.Context, _param1 command.ProjectContext, _param2 fmt.Stringer, _param3 models.VCSStatus, _param4 string, _param5 string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockVCSStatusUpdater().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockVCSStatusUpdater) VerifyWasCalledOnce() *VerifierMockVCSStatusUpdater { - return &VerifierMockVCSStatusUpdater{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockVCSStatusUpdater) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockVCSStatusUpdater { - return &VerifierMockVCSStatusUpdater{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockVCSStatusUpdater) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockVCSStatusUpdater { - return &VerifierMockVCSStatusUpdater{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockVCSStatusUpdater) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockVCSStatusUpdater { - return &VerifierMockVCSStatusUpdater{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockVCSStatusUpdater struct { - mock *MockVCSStatusUpdater - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockVCSStatusUpdater) UpdateCombined(_param0 context.Context, _param1 models.Repo, _param2 models.PullRequest, _param3 models.VCSStatus, _param4 fmt.Stringer, _param5 string, _param6 string) *MockVCSStatusUpdater_UpdateCombined_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5, _param6} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateCombined", params, verifier.timeout) - return &MockVCSStatusUpdater_UpdateCombined_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockVCSStatusUpdater_UpdateCombined_OngoingVerification struct { - mock *MockVCSStatusUpdater - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockVCSStatusUpdater_UpdateCombined_OngoingVerification) GetCapturedArguments() (context.Context, models.Repo, models.PullRequest, models.VCSStatus, fmt.Stringer, string, string) { - _param0, _param1, _param2, _param3, _param4, _param5, _param6 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1], _param6[len(_param6)-1] -} - -func (c *MockVCSStatusUpdater_UpdateCombined_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []models.VCSStatus, _param4 []fmt.Stringer, _param5 []string, _param6 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range 
params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - _param2 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.PullRequest) - } - _param3 = make([]models.VCSStatus, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(models.VCSStatus) - } - _param4 = make([]fmt.Stringer, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(fmt.Stringer) - } - _param5 = make([]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(string) - } - _param6 = make([]string, len(c.methodInvocations)) - for u, param := range params[6] { - _param6[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockVCSStatusUpdater) UpdateCombinedCount(_param0 context.Context, _param1 models.Repo, _param2 models.PullRequest, _param3 models.VCSStatus, _param4 fmt.Stringer, _param5 int, _param6 int, _param7 string) *MockVCSStatusUpdater_UpdateCombinedCount_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5, _param6, _param7} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateCombinedCount", params, verifier.timeout) - return &MockVCSStatusUpdater_UpdateCombinedCount_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockVCSStatusUpdater_UpdateCombinedCount_OngoingVerification struct { - mock *MockVCSStatusUpdater - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockVCSStatusUpdater_UpdateCombinedCount_OngoingVerification) GetCapturedArguments() (context.Context, models.Repo, models.PullRequest, models.VCSStatus, fmt.Stringer, int, int, string) { - _param0, _param1, _param2, _param3, _param4, _param5, _param6, _param7 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1], _param6[len(_param6)-1], _param7[len(_param7)-1] -} - -func (c *MockVCSStatusUpdater_UpdateCombinedCount_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []models.VCSStatus, _param4 []fmt.Stringer, _param5 []int, _param6 []int, _param7 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - _param2 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.PullRequest) - } - _param3 = make([]models.VCSStatus, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(models.VCSStatus) - } - _param4 = make([]fmt.Stringer, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(fmt.Stringer) - } - _param5 = make([]int, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(int) - } - _param6 = make([]int, len(c.methodInvocations)) - for u, param := range params[6] 
{ - _param6[u] = param.(int) - } - _param7 = make([]string, len(c.methodInvocations)) - for u, param := range params[7] { - _param7[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockVCSStatusUpdater) UpdateProject(_param0 context.Context, _param1 command.ProjectContext, _param2 fmt.Stringer, _param3 models.VCSStatus, _param4 string, _param5 string) *MockVCSStatusUpdater_UpdateProject_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateProject", params, verifier.timeout) - return &MockVCSStatusUpdater_UpdateProject_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockVCSStatusUpdater_UpdateProject_OngoingVerification struct { - mock *MockVCSStatusUpdater - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockVCSStatusUpdater_UpdateProject_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, fmt.Stringer, models.VCSStatus, string, string) { - _param0, _param1, _param2, _param3, _param4, _param5 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1] -} - -func (c *MockVCSStatusUpdater_UpdateProject_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []fmt.Stringer, _param3 []models.VCSStatus, _param4 []string, _param5 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]fmt.Stringer, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(fmt.Stringer) - } - _param3 = make([]models.VCSStatus, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(models.VCSStatus) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) - } - _param5 = make([]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_webhooks_sender.go b/server/legacy/events/mocks/mock_webhooks_sender.go deleted file mode 100644 index 77e3caff6..000000000 --- a/server/legacy/events/mocks/mock_webhooks_sender.go +++ /dev/null @@ -1,110 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: WebhooksSender) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - webhooks "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - logging "github.com/runatlantis/atlantis/server/logging" - "reflect" - "time" -) - -type MockWebhooksSender struct { - fail func(message string, callerSkip ...int) -} - -func NewMockWebhooksSender(options ...pegomock.Option) *MockWebhooksSender { - mock := &MockWebhooksSender{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockWebhooksSender) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockWebhooksSender) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockWebhooksSender) Send(log logging.Logger, res webhooks.ApplyResult) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWebhooksSender().") - } - params := []pegomock.Param{log, res} - result := pegomock.GetGenericMockFrom(mock).Invoke("Send", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWebhooksSender) VerifyWasCalledOnce() *VerifierMockWebhooksSender { - return &VerifierMockWebhooksSender{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockWebhooksSender) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockWebhooksSender { - return &VerifierMockWebhooksSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockWebhooksSender) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockWebhooksSender { - return &VerifierMockWebhooksSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockWebhooksSender) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockWebhooksSender { - return &VerifierMockWebhooksSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockWebhooksSender struct { - mock *MockWebhooksSender - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockWebhooksSender) Send(log logging.Logger, res webhooks.ApplyResult) *MockWebhooksSender_Send_OngoingVerification { - params := []pegomock.Param{log, res} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Send", params, verifier.timeout) - return &MockWebhooksSender_Send_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWebhooksSender_Send_OngoingVerification struct { - mock *MockWebhooksSender - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWebhooksSender_Send_OngoingVerification) GetCapturedArguments() (logging.Logger, webhooks.ApplyResult) { - log, res := c.GetAllCapturedArguments() - return log[len(log)-1], res[len(res)-1] -} - -func (c *MockWebhooksSender_Send_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []webhooks.ApplyResult) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) 
- if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]webhooks.ApplyResult, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(webhooks.ApplyResult) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_working_dir.go b/server/legacy/events/mocks/mock_working_dir.go deleted file mode 100644 index dde0f66f0..000000000 --- a/server/legacy/events/mocks/mock_working_dir.go +++ /dev/null @@ -1,376 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: WorkingDir) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - logging "github.com/runatlantis/atlantis/server/logging" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockWorkingDir struct { - fail func(message string, callerSkip ...int) -} - -func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { - mock := &MockWorkingDir{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockWorkingDir) Clone(log logging.Logger, headRepo models.Repo, p models.PullRequest, projectCloneDir string) (string, bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{log, headRepo, p, projectCloneDir} - result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 bool - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(bool) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetWorkingDir", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockWorkingDir) HasDiverged(log logging.Logger, cloneDir string, baseRepo models.Repo) bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{log, cloneDir, baseRepo} - result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullDir", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p} - result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDir().") - } - params := []pegomock.Param{r, p, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockWorkingDir) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockWorkingDir) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockWorkingDir) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockWorkingDir { - return &VerifierMockWorkingDir{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockWorkingDir struct { - mock *MockWorkingDir - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockWorkingDir) Clone(log logging.Logger, headRepo models.Repo, p models.PullRequest, projectCloneDir string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{log, headRepo, p, projectCloneDir} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) - return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_Clone_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.Logger, models.Repo, models.PullRequest, string) { - log, headRepo, p, projectCloneDir := c.GetAllCapturedArguments() - return log[len(log)-1], headRepo[len(headRepo)-1], p[len(p)-1], 
projectCloneDir[len(projectCloneDir)-1] -} - -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.Repo) - } - _param2 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.PullRequest) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetWorkingDir_OngoingVerification { - params := []pegomock.Param{r, p, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetWorkingDir", params, verifier.timeout) - return &MockWorkingDir_GetWorkingDir_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_GetWorkingDir_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] -} - -func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) HasDiverged(log logging.Logger, cloneDir string, baseRepo models.Repo) *MockWorkingDir_HasDiverged_OngoingVerification { - params := []pegomock.Param{log, cloneDir, baseRepo} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) - return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_HasDiverged_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.Logger, string, models.Repo) { - log, cloneDir, baseRepo := c.GetAllCapturedArguments() - return log[len(log)-1], cloneDir[len(cloneDir)-1], baseRepo[len(baseRepo)-1] -} - -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []string, _param2 []models.Repo) { - params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.Repo) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) GetPullDir(r models.Repo, p models.PullRequest) *MockWorkingDir_GetPullDir_OngoingVerification { - params := []pegomock.Param{r, p} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullDir", params, verifier.timeout) - return &MockWorkingDir_GetPullDir_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_GetPullDir_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_GetPullDir_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] -} - -func (c *MockWorkingDir_GetPullDir_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { - params := []pegomock.Param{r, p} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout) - return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_Delete_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] -} - -func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { - params := []pegomock.Param{r, p, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, 
verifier.timeout) - return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct { - mock *MockWorkingDir - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] -} - -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} diff --git a/server/legacy/events/mocks/mock_working_dir_locker.go b/server/legacy/events/mocks/mock_working_dir_locker.go deleted file mode 100644 index a580789a7..000000000 --- a/server/legacy/events/mocks/mock_working_dir_locker.go +++ /dev/null @@ -1,167 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events (interfaces: WorkingDirLocker) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockWorkingDirLocker struct { - fail func(message string, callerSkip ...int) -} - -func NewMockWorkingDirLocker(options ...pegomock.Option) *MockWorkingDirLocker { - mock := &MockWorkingDirLocker{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockWorkingDirLocker) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockWorkingDirLocker) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockWorkingDirLocker) TryLock(repoFullName string, pullNum int, workspace string) (func(), error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWorkingDirLocker().") - } - params := []pegomock.Param{repoFullName, pullNum, workspace} - result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((*func())(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 func() - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(func()) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockWorkingDirLocker) TryLockPull(repoFullName string, pullNum int) (func(), error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockWorkingDirLocker().") - } - params := []pegomock.Param{repoFullName, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("TryLockPull", params, []reflect.Type{reflect.TypeOf((*func())(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 func() - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(func()) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockWorkingDirLocker) VerifyWasCalledOnce() *VerifierMockWorkingDirLocker { - return &VerifierMockWorkingDirLocker{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockWorkingDirLocker) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockWorkingDirLocker { - return &VerifierMockWorkingDirLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockWorkingDirLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockWorkingDirLocker { - return &VerifierMockWorkingDirLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockWorkingDirLocker) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockWorkingDirLocker { - return &VerifierMockWorkingDirLocker{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockWorkingDirLocker struct { - mock *MockWorkingDirLocker - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockWorkingDirLocker) TryLock(repoFullName string, pullNum int, workspace string) *MockWorkingDirLocker_TryLock_OngoingVerification { - params := []pegomock.Param{repoFullName, pullNum, workspace} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params, verifier.timeout) - return &MockWorkingDirLocker_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDirLocker_TryLock_OngoingVerification struct { - mock *MockWorkingDirLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDirLocker_TryLock_OngoingVerification) GetCapturedArguments() (string, int, string) { - repoFullName, pullNum, workspace := c.GetAllCapturedArguments() - return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1], workspace[len(workspace)-1] -} - -func (c *MockWorkingDirLocker_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockWorkingDirLocker) TryLockPull(repoFullName string, pullNum int) *MockWorkingDirLocker_TryLockPull_OngoingVerification { - params := []pegomock.Param{repoFullName, pullNum} - methodInvocations := 
pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLockPull", params, verifier.timeout) - return &MockWorkingDirLocker_TryLockPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWorkingDirLocker_TryLockPull_OngoingVerification struct { - mock *MockWorkingDirLocker - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWorkingDirLocker_TryLockPull_OngoingVerification) GetCapturedArguments() (string, int) { - repoFullName, pullNum := c.GetAllCapturedArguments() - return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1] -} - -func (c *MockWorkingDirLocker_TryLockPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} diff --git a/server/legacy/events/output_updater.go b/server/legacy/events/output_updater.go deleted file mode 100644 index 2c5bb4203..000000000 --- a/server/legacy/events/output_updater.go +++ /dev/null @@ -1,169 +0,0 @@ -package events - -import ( - "context" - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/vcs/markdown" -) - -type OutputUpdater interface { - UpdateOutput(ctx *command.Context, cmd PullCommand, res command.Result) -} - -// JobURLGenerator generates urls to view project's progress. 
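// Illustrative sketch (hypothetical test, not part of the original change set): how the
// pegomock-generated MockWorkingDirLocker deleted above was typically stubbed and verified.
// The test name, repo name, and error value here are made up; the same
// When/ThenReturn/VerifyWasCalledOnce pattern appears in the pre-workflow-hook tests later
// in this diff.
package mocks_test

import (
	"errors"
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/legacy/events/mocks"
)

func TestMockWorkingDirLockerUsage(t *testing.T) {
	RegisterMockTestingT(t)
	locker := mocks.NewMockWorkingDirLocker()

	// Stub: TryLock returns an error for this repo/pull/workspace combination.
	When(locker.TryLock("owner/repo", 1, "default")).ThenReturn(func() {}, errors.New("already locked"))

	if _, err := locker.TryLock("owner/repo", 1, "default"); err == nil {
		t.Fatal("expected the stubbed error")
	}

	// Verify the call happened exactly once and inspect the captured arguments.
	repo, pullNum, workspace := locker.VerifyWasCalledOnce().TryLock("owner/repo", 1, "default").GetCapturedArguments()
	if repo != "owner/repo" || pullNum != 1 || workspace != "default" {
		t.Fatalf("unexpected captured arguments: %s %d %s", repo, pullNum, workspace)
	}
}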
-type jobURLGenerator interface { - GenerateProjectJobURL(jobID string) (string, error) -} - -type renderer interface { - Render(res command.Result, cmdName command.Name, baseRepo models.Repo) string - RenderProject(prjRes command.ProjectResult, cmdName fmt.Stringer, baseRepo models.Repo) string -} - -type checksClient interface { - UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) -} - -// Used to support checks type output (Github checks for example) -type ChecksOutputUpdater struct { - VCSClient checksClient - MarkdownRenderer renderer - TitleBuilder vcs.StatusTitleBuilder - JobURLGenerator jobURLGenerator -} - -func (c *ChecksOutputUpdater) UpdateOutput(ctx *command.Context, cmd PullCommand, res command.Result) { - if res.Error != nil || res.Failure != "" { - c.handleCommandFailure(ctx, cmd, res) - return - } - - // iterate through all project results and the update the github check - for _, projectResult := range res.ProjectResults { - updateStatusReq := types.UpdateStatusRequest{ - Repo: ctx.HeadRepo, - Ref: ctx.Pull.HeadCommit, - PullNum: ctx.Pull.Num, - PullCreationTime: ctx.Pull.CreatedAt, - StatusID: projectResult.StatusID, - DetailsURL: c.buildJobURL(ctx, projectResult.Command, projectResult.JobID), - Output: c.MarkdownRenderer.RenderProject(projectResult, projectResult.Command, ctx.HeadRepo), - State: c.resolveState(projectResult), - - StatusName: c.buildStatusName(cmd, vcs.StatusTitleOptions{ProjectName: projectResult.ProjectName}), - - // Additional fields to support templating for project level checkruns - CommandName: projectResult.Command.TitleString(), - Project: projectResult.ProjectName, - Workspace: projectResult.Workspace, - Directory: projectResult.RepoRelDir, - } - - if _, err := c.VCSClient.UpdateStatus(ctx.RequestCtx, updateStatusReq); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, "unable to update check run", map[string]interface{}{ - "error": err.Error(), - }) - } - } -} - -func (c *ChecksOutputUpdater) handleCommandFailure(ctx *command.Context, cmd PullCommand, res command.Result) { - updateStatusReq := types.UpdateStatusRequest{ - Repo: ctx.HeadRepo, - Ref: ctx.Pull.HeadCommit, - PullNum: ctx.Pull.Num, - PullCreationTime: ctx.Pull.CreatedAt, - State: models.FailedVCSStatus, - StatusName: c.buildStatusName(cmd, vcs.StatusTitleOptions{}), - CommandName: cmd.CommandName().TitleString(), - Output: c.buildOutput(res), - } - - if _, err := c.VCSClient.UpdateStatus(ctx.RequestCtx, updateStatusReq); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, "unable to update check run", map[string]interface{}{ - "error": err.Error(), - }) - } -} - -func (c *ChecksOutputUpdater) buildJobURL(ctx *command.Context, cmd command.Name, jobID string) string { - if jobID == "" { - return "" - } - - // Only support streaming logs for plan and apply operation for now - if cmd == command.Plan || cmd == command.Apply { - jobURL, err := c.JobURLGenerator.GenerateProjectJobURL(jobID) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("generating job URL %v", err)) - } - - return jobURL - } - return "" -} - -func (c *ChecksOutputUpdater) buildOutput(res command.Result) string { - if res.Error != nil { - return res.Error.Error() - } else if res.Failure != "" { - return res.Failure - } - return "" -} - -func (c *ChecksOutputUpdater) buildStatusName(cmd PullCommand, options vcs.StatusTitleOptions) string { - commandName := cmd.CommandName() - return c.TitleBuilder.Build(commandName.String(), options) -} - -func (c *ChecksOutputUpdater) 
resolveState(result command.ProjectResult) models.VCSStatus { - if result.Error != nil || result.Failure != "" { - return models.FailedVCSStatus - } - - return models.SuccessVCSStatus -} - -// Default prj output updater which writes to the pull req comment -type PullOutputUpdater struct { - HidePrevPlanComments bool - VCSClient vcs.Client - MarkdownRenderer *markdown.Renderer -} - -func (c *PullOutputUpdater) UpdateOutput(ctx *command.Context, cmd PullCommand, res command.Result) { - // Log if we got any errors or failures. - if res.Error != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, "", map[string]interface{}{ - "error": res.Error.Error(), - }) - } else if res.Failure != "" { - ctx.Log.WarnContext(ctx.RequestCtx, "", map[string]interface{}{ - "failiure": res.Failure, - }) - } - - // HidePrevCommandComments will hide old comments left from previous runs to reduce - // clutter in a pull/merge request. This will not delete the comment, since the - // comment trail may be useful in auditing or backtracing problems. - if c.HidePrevPlanComments { - if err := c.VCSClient.HidePrevCommandComments(ctx.Pull.BaseRepo, ctx.Pull.Num, cmd.CommandName().TitleString()); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, "unable to hide old comments", map[string]interface{}{ - "error": err.Error(), - }) - } - } - - comment := c.MarkdownRenderer.Render(res, cmd.CommandName(), ctx.Pull.BaseRepo) - if err := c.VCSClient.CreateComment(ctx.Pull.BaseRepo, ctx.Pull.Num, comment, cmd.CommandName().String()); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, "unable to comment", map[string]interface{}{ - "error": err.Error(), - }) - } -} diff --git a/server/legacy/events/output_updater_test.go b/server/legacy/events/output_updater_test.go deleted file mode 100644 index 867a8f946..000000000 --- a/server/legacy/events/output_updater_test.go +++ /dev/null @@ -1,368 +0,0 @@ -package events_test - -import ( - "context" - "errors" - "fmt" - "testing" - "time" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" - "github.com/stretchr/testify/assert" -) - -type testJobURLGenerator struct { - t *testing.T - expJobID string - url string - err error -} - -func (t *testJobURLGenerator) GenerateProjectJobURL(jobID string) (string, error) { - assert.Equal(t.t, t.expJobID, jobID) - return t.url, t.err -} - -type testRenderer struct { - t *testing.T - expectedResult command.Result - expectedCmdName command.Name - expectedRepo models.Repo - expectedProjectResult command.ProjectResult - - expectedOutput string -} - -func (t *testRenderer) Render(res command.Result, cmdName command.Name, baseRepo models.Repo) string { - assert.Equal(t.t, t.expectedResult, res) - assert.Equal(t.t, t.expectedCmdName, cmdName) - assert.Equal(t.t, t.expectedRepo, baseRepo) - - return t.expectedOutput -} -func (t *testRenderer) RenderProject(prjRes command.ProjectResult, cmdName fmt.Stringer, baseRepo models.Repo) string { - assert.Equal(t.t, t.expectedProjectResult, prjRes) - assert.Equal(t.t, t.expectedCmdName, cmdName) - assert.Equal(t.t, t.expectedRepo, baseRepo) - - return t.expectedOutput -} - -// this allows you to make multiple calls to the checks client with different results/requests -type strictTestChecksClient struct { - clients []*testChecksClient - - count int -} - -func (c 
*strictTestChecksClient) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - if c.count > (len(c.clients) - 1) { - return "", errors.New("more calls than expected") - } - _, err := c.clients[c.count].UpdateStatus(ctx, request) - c.count++ - return "", err -} - -type testChecksClient struct { - t *testing.T - expectedRequest types.UpdateStatusRequest - expectedError error - - called bool -} - -func (c *testChecksClient) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - c.called = true - assert.Equal(c.t, c.expectedRequest, request) - - return "", c.expectedError -} - -func TestChecksOutputUpdater_ProjectResults(t *testing.T) { - repo := models.Repo{ - FullName: "nish/repo", - } - - createdAt := time.Now() - sha := "12345" - - pull := models.PullRequest{ - HeadCommit: sha, - Num: 1, - CreatedAt: createdAt, - BaseRepo: repo, - } - - cmdCtx := &command.Context{ - Pull: pull, - RequestCtx: context.Background(), - HeadRepo: repo, - } - - output := "some output" - - t.Run("project result success", func(t *testing.T) { - projectResult := command.ProjectResult{ - ProjectName: "project1", - RepoRelDir: "somedir", - Workspace: "default", - Command: command.Plan, - } - commandResult := command.Result{ - ProjectResults: []command.ProjectResult{ - projectResult, - }, - } - - client := &testChecksClient{ - t: t, - expectedRequest: types.UpdateStatusRequest{ - Repo: repo, - Ref: sha, - StatusName: "nish/plan: project1", - State: models.SuccessVCSStatus, - PullCreationTime: createdAt, - Output: output, - PullNum: 1, - - CommandName: "Plan", - Project: "project1", - Workspace: "default", - Directory: "somedir", - }, - } - subject := events.ChecksOutputUpdater{ - VCSClient: client, - MarkdownRenderer: &testRenderer{ - t: t, - expectedCmdName: command.Plan, - expectedResult: commandResult, - expectedRepo: repo, - expectedOutput: output, - expectedProjectResult: projectResult, - }, - TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "nish"}, - JobURLGenerator: &testJobURLGenerator{ - t: t, - expJobID: "", - url: "", - err: nil, - }, - } - - subject.UpdateOutput(cmdCtx, events.AutoplanCommand{}, commandResult) - - assert.True(t, client.called) - }) - - t.Run("project result error", func(t *testing.T) { - projectResult := command.ProjectResult{ - ProjectName: "project1", - RepoRelDir: "somedir", - Workspace: "default", - Error: assert.AnError, - Command: command.Plan, - } - commandResult := command.Result{ - ProjectResults: []command.ProjectResult{ - projectResult, - }, - } - - client := &testChecksClient{ - t: t, - expectedRequest: types.UpdateStatusRequest{ - Repo: repo, - Ref: sha, - StatusName: "nish/plan: project1", - State: models.FailedVCSStatus, - PullCreationTime: createdAt, - Output: output, - PullNum: 1, - - CommandName: "Plan", - Project: "project1", - Workspace: "default", - Directory: "somedir", - }, - } - subject := events.ChecksOutputUpdater{ - VCSClient: client, - MarkdownRenderer: &testRenderer{ - t: t, - expectedCmdName: command.Plan, - expectedResult: commandResult, - expectedRepo: repo, - expectedOutput: output, - expectedProjectResult: projectResult, - }, - TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "nish"}, - JobURLGenerator: &testJobURLGenerator{ - t: t, - expJobID: "", - url: "", - err: nil, - }, - } - - subject.UpdateOutput(cmdCtx, events.AutoplanCommand{}, commandResult) - - assert.True(t, client.called) - }) - - t.Run("project result failure", func(t *testing.T) { - projectResult := 
command.ProjectResult{ - ProjectName: "project1", - RepoRelDir: "somedir", - Workspace: "default", - Failure: "failure", - Command: command.Plan, - } - commandResult := command.Result{ - ProjectResults: []command.ProjectResult{ - projectResult, - }, - } - - client := &testChecksClient{ - t: t, - expectedRequest: types.UpdateStatusRequest{ - Repo: repo, - Ref: sha, - StatusName: "nish/plan: project1", - State: models.FailedVCSStatus, - PullCreationTime: createdAt, - Output: output, - PullNum: 1, - - CommandName: "Plan", - Project: "project1", - Workspace: "default", - Directory: "somedir", - }, - } - subject := events.ChecksOutputUpdater{ - VCSClient: client, - MarkdownRenderer: &testRenderer{ - t: t, - expectedCmdName: command.Plan, - expectedResult: commandResult, - expectedRepo: repo, - expectedOutput: output, - expectedProjectResult: projectResult, - }, - TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "nish"}, - JobURLGenerator: &testJobURLGenerator{ - t: t, - expJobID: "", - url: "", - err: nil, - }, - } - - subject.UpdateOutput(cmdCtx, events.AutoplanCommand{}, commandResult) - - assert.True(t, client.called) - }) -} - -func TestChecksOutputUpdater_CommandFailure(t *testing.T) { - repo := models.Repo{ - FullName: "nish/repo", - } - - createdAt := time.Now() - sha := "12345" - - pull := models.PullRequest{ - HeadCommit: sha, - Num: 1, - CreatedAt: createdAt, - BaseRepo: repo, - } - - cmdCtx := &command.Context{ - Pull: pull, - RequestCtx: context.Background(), - HeadRepo: repo, - } - - t.Run("error", func(t *testing.T) { - errorString := "error" - commandResult := command.Result{ - Error: errors.New(errorString), - } - - client := &strictTestChecksClient{ - clients: []*testChecksClient{ - { - t: t, - expectedRequest: types.UpdateStatusRequest{ - Repo: repo, - Ref: sha, - StatusName: "nish/plan", - Output: errorString, - State: models.FailedVCSStatus, - PullCreationTime: createdAt, - PullNum: 1, - CommandName: "Plan", - }, - }, - }, - } - subject := events.ChecksOutputUpdater{ - VCSClient: client, - TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "nish"}, - JobURLGenerator: &testJobURLGenerator{ - t: t, - expJobID: "", - url: "", - err: nil, - }, - } - - subject.UpdateOutput(cmdCtx, events.AutoplanCommand{}, commandResult) - }) - - t.Run("failure", func(t *testing.T) { - failureString := "error" - commandResult := command.Result{ - Failure: failureString, - } - - client := &strictTestChecksClient{ - clients: []*testChecksClient{ - { - t: t, - expectedRequest: types.UpdateStatusRequest{ - Repo: repo, - Ref: sha, - StatusName: "nish/plan", - Output: failureString, - State: models.FailedVCSStatus, - PullCreationTime: createdAt, - PullNum: 1, - CommandName: "Plan", - }, - }, - }, - } - subject := events.ChecksOutputUpdater{ - VCSClient: client, - TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: "nish"}, - JobURLGenerator: &testJobURLGenerator{ - t: t, - expJobID: "", - url: "", - err: nil, - }, - } - - subject.UpdateOutput(cmdCtx, events.AutoplanCommand{}, commandResult) - }) -} diff --git a/server/legacy/events/pending_plan_finder.go b/server/legacy/events/pending_plan_finder.go deleted file mode 100644 index a9145ce93..000000000 --- a/server/legacy/events/pending_plan_finder.go +++ /dev/null @@ -1,100 +0,0 @@ -package events - -import ( - "os" - "os/exec" - "path/filepath" - "strings" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o 
mocks/mock_pending_plan_finder.go PendingPlanFinder - -type PendingPlanFinder interface { - Find(pullDir string) ([]PendingPlan, error) - DeletePlans(pullDir string) error -} - -// DefaultPendingPlanFinder finds unapplied plans. -type DefaultPendingPlanFinder struct{} - -// PendingPlan is a plan that has not been applied. -type PendingPlan struct { - // RepoDir is the absolute path to the root of the repo that holds this - // plan. - RepoDir string - // RepoRelDir is the relative path from the repo to the project that - // the plan is for. - RepoRelDir string - // Workspace is the workspace this plan should execute in. - Workspace string - ProjectName string -} - -// Find finds all pending plans in pullDir. pullDir should be the working -// directory where Atlantis will operate on this pull request. It's one level -// up from where Atlantis clones the repo for each workspace. -func (p *DefaultPendingPlanFinder) Find(pullDir string) ([]PendingPlan, error) { - plans, _, err := p.findWithAbsPaths(pullDir) - return plans, err -} - -func (p *DefaultPendingPlanFinder) findWithAbsPaths(pullDir string) ([]PendingPlan, []string, error) { - workspaceDirs, err := os.ReadDir(pullDir) - if err != nil { - return nil, nil, err - } - var plans []PendingPlan - var absPaths []string - for _, workspaceDir := range workspaceDirs { - workspace := workspaceDir.Name() - repoDir := filepath.Join(pullDir, workspace) - - // Any generated plans should be untracked by git since Atlantis created - // them. - lsCmd := exec.Command("git", "ls-files", ".", "--others") // nolint: gosec - lsCmd.Dir = repoDir - lsOut, err := lsCmd.CombinedOutput() - if err != nil { - return nil, nil, errors.Wrapf(err, "running git ls-files . "+ - "--others: %s", string(lsOut)) - } - for _, file := range strings.Split(string(lsOut), "\n") { - if filepath.Ext(file) == ".tfplan" { - // Ignore .terragrunt-cache dirs (#487) - if strings.Contains(file, ".terragrunt-cache/") { - continue - } - - projectName, err := runtime.ProjectNameFromPlanfile(workspace, filepath.Base(file)) - if err != nil { - return nil, nil, err - } - plans = append(plans, PendingPlan{ - RepoDir: repoDir, - RepoRelDir: filepath.Dir(file), - Workspace: workspace, - ProjectName: projectName, - }) - absPaths = append(absPaths, filepath.Join(repoDir, file)) - } - } - } - return plans, absPaths, nil -} - -// deletePlans deletes all plans in pullDir. -func (p *DefaultPendingPlanFinder) DeletePlans(pullDir string) error { - _, absPaths, err := p.findWithAbsPaths(pullDir) - if err != nil { - return err - } - for _, path := range absPaths { - if err := os.Remove(path); err != nil { - return errors.Wrapf(err, "delete plan at %s", path) - } - } - return nil -} diff --git a/server/legacy/events/pending_plan_finder_test.go b/server/legacy/events/pending_plan_finder_test.go deleted file mode 100644 index bafadfad0..000000000 --- a/server/legacy/events/pending_plan_finder_test.go +++ /dev/null @@ -1,286 +0,0 @@ -package events_test - -import ( - "os" - "os/exec" - "path/filepath" - "strings" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events" - . "github.com/runatlantis/atlantis/testing" -) - -// If the dir doesn't exist should get an error. -func TestPendingPlanFinder_FindNoDir(t *testing.T) { - pf := &events.DefaultPendingPlanFinder{} - _, err := pf.Find("/doesntexist") - ErrEquals(t, "open /doesntexist: no such file or directory", err) -} - -// Test different directory structures. 
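// Illustrative sketch (hypothetical, not part of the original change set): a simplified
// version of the planfile naming convention that DefaultPendingPlanFinder above relies on
// and that the table-driven test below exercises — "<project>-<workspace>.tfplan", with
// "::" standing in for "/" in project names, and a bare "<workspace>.tfplan" when no
// project name is set. The real parsing (with validation and error handling) lives in
// runtime.ProjectNameFromPlanfile; this helper only mirrors the convention.
package main

import (
	"fmt"
	"strings"
)

// projectNameFromPlanfile returns the project name encoded in a planfile name,
// or "" for the workspace's default plan.
func projectNameFromPlanfile(workspace, filename string) string {
	base := strings.TrimSuffix(filename, ".tfplan")
	if base == workspace {
		return "" // "<workspace>.tfplan": default plan, no project name
	}
	name := strings.TrimSuffix(base, "-"+workspace)
	return strings.ReplaceAll(name, "::", "/")
}

func main() {
	fmt.Println(projectNameFromPlanfile("default", "default.tfplan"))               // ""
	fmt.Println(projectNameFromPlanfile("default", "projectname-default.tfplan"))   // "projectname"
	fmt.Println(projectNameFromPlanfile("default", "project::name-default.tfplan")) // "project/name"
}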
-func TestPendingPlanFinder_Find(t *testing.T) { - cases := []struct { - description string - files map[string]interface{} - expPlans []events.PendingPlan - }{ - { - "no plans", - nil, - nil, - }, - { - "root directory", - map[string]interface{}{ - "default": map[string]interface{}{ - "default.tfplan": nil, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: ".", - Workspace: "default", - }, - }, - }, - { - "root dir project plan", - map[string]interface{}{ - "default": map[string]interface{}{ - "projectname-default.tfplan": nil, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: ".", - Workspace: "default", - ProjectName: "projectname", - }, - }, - }, - { - "root dir project plan with slashes", - map[string]interface{}{ - "default": map[string]interface{}{ - "project::name-default.tfplan": nil, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: ".", - Workspace: "default", - ProjectName: "project/name", - }, - }, - }, - { - "multiple directories in single workspace", - map[string]interface{}{ - "default": map[string]interface{}{ - "dir1": map[string]interface{}{ - "default.tfplan": nil, - }, - "dir2": map[string]interface{}{ - "default.tfplan": nil, - }, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: "dir1", - Workspace: "default", - }, - { - RepoDir: "???/default", - RepoRelDir: "dir2", - Workspace: "default", - }, - }, - }, - { - "multiple directories nested within each other", - map[string]interface{}{ - "default": map[string]interface{}{ - "dir1": map[string]interface{}{ - "default.tfplan": nil, - }, - "default.tfplan": nil, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: ".", - Workspace: "default", - }, - { - RepoDir: "???/default", - RepoRelDir: "dir1", - Workspace: "default", - }, - }, - }, - { - "multiple workspaces", - map[string]interface{}{ - "default": map[string]interface{}{ - "default.tfplan": nil, - }, - "staging": map[string]interface{}{ - "staging.tfplan": nil, - }, - "production": map[string]interface{}{ - "production.tfplan": nil, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: ".", - Workspace: "default", - }, - { - RepoDir: "???/production", - RepoRelDir: ".", - Workspace: "production", - }, - { - RepoDir: "???/staging", - RepoRelDir: ".", - Workspace: "staging", - }, - }, - }, - { - ".terragrunt-cache", - map[string]interface{}{ - "default": map[string]interface{}{ - ".terragrunt-cache": map[string]interface{}{ - "N6lY9xk7PivbOAzdsjDL6VUFVYk": map[string]interface{}{ - "K4xpUZI6HgUF-ip6E1eib4L8mwQ": map[string]interface{}{ - "app": map[string]interface{}{ - "default.tfplan": nil, - }, - }, - }, - }, - "default.tfplan": nil, - }, - }, - []events.PendingPlan{ - { - RepoDir: "???/default", - RepoRelDir: ".", - Workspace: "default", - }, - }, - }, - } - - pf := &events.DefaultPendingPlanFinder{} - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - tmpDir, cleanup := DirStructure(t, c.files) - defer cleanup() - - // Create a git repo in each workspace directory. - for dirname, contents := range c.files { - // If contents is nil then this isn't a directory. - if contents != nil { - runCmd(t, filepath.Join(tmpDir, dirname), "git", "init") - } - } - - actPlans, err := pf.Find(tmpDir) - Ok(t, err) - - // Replace the actual dir with ??? to allow for comparison. 
- var actPlansComparable []events.PendingPlan - for _, p := range actPlans { - p.RepoDir = strings.Replace(p.RepoDir, tmpDir, "???", -1) - actPlansComparable = append(actPlansComparable, p) - } - Equals(t, c.expPlans, actPlansComparable) - }) - } -} - -// If a planfile is checked in to git, we shouldn't use it. -func TestPendingPlanFinder_FindPlanCheckedIn(t *testing.T) { - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "default": map[string]interface{}{ - "default.tfplan": nil, - }, - }) - defer cleanup() - - // Add that file to git. - repoDir := filepath.Join(tmpDir, "default") - runCmd(t, repoDir, "git", "init") - runCmd(t, repoDir, "touch", ".gitkeep") - runCmd(t, repoDir, "git", "add", ".") - runCmd(t, repoDir, "git", "config", "--local", "user.email", "atlantisbot@runatlantis.io") - runCmd(t, repoDir, "git", "config", "--local", "user.name", "atlantisbot") - runCmd(t, repoDir, "git", "commit", "--no-gpg-sign", "-m", "initial commit") - - pf := &events.DefaultPendingPlanFinder{} - actPlans, err := pf.Find(tmpDir) - Ok(t, err) - Equals(t, 0, len(actPlans)) -} - -// Test that it deletes pending plans. -func TestPendingPlanFinder_DeletePlans(t *testing.T) { - files := map[string]interface{}{ - "default": map[string]interface{}{ - "dir1": map[string]interface{}{ - "default.tfplan": nil, - }, - "dir2": map[string]interface{}{ - "default.tfplan": nil, - }, - }, - } - tmp, cleanup := DirStructure(t, - files) - defer cleanup() - - // Create a git repo in each workspace directory. - for dirname, contents := range files { - // If contents is nil then this isn't a directory. - if contents != nil { - runCmd(t, filepath.Join(tmp, dirname), "git", "init") - } - } - - pf := &events.DefaultPendingPlanFinder{} - Ok(t, pf.DeletePlans(tmp)) - - // First, check the files were deleted. - for _, plan := range []string{ - "default/dir1/default.tfplan", - "default/dir2/default.tfplan", - } { - absPath := filepath.Join(tmp, plan) - _, err := os.Stat(absPath) - ErrContains(t, "no such file or directory", err) - } - - // Double check by using Find(). - foundPlans, err := pf.Find(tmp) - Ok(t, err) - Equals(t, 0, len(foundPlans)) -} - -func runCmd(t *testing.T, dir string, name string, args ...string) { - t.Helper() - cpCmd := exec.Command(name, args...) 
- cpCmd.Dir = dir - cpOut, err := cpCmd.CombinedOutput() - Assert(t, err == nil, "err running %q: %s", strings.Join(append([]string{name}, args...), " "), cpOut) -} diff --git a/server/legacy/events/plan_command_runner.go b/server/legacy/events/plan_command_runner.go deleted file mode 100644 index daf00df8f..000000000 --- a/server/legacy/events/plan_command_runner.go +++ /dev/null @@ -1,232 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/models" -) - -func NewPlanCommandRunner( - vcsClient vcs.Client, - pendingPlanFinder PendingPlanFinder, - workingDir WorkingDir, - vcsStatusUpdater VCSStatusUpdater, - projectCommandBuilder ProjectPlanCommandBuilder, - projectCommandRunner ProjectPlanCommandRunner, - dbUpdater *DBUpdater, - outputUpdater OutputUpdater, - policyCheckCommandRunner *PolicyCheckCommandRunner, - parallelPoolSize int, -) *PlanCommandRunner { - return &PlanCommandRunner{ - vcsClient: vcsClient, - pendingPlanFinder: pendingPlanFinder, - workingDir: workingDir, - vcsStatusUpdater: vcsStatusUpdater, - prjCmdBuilder: projectCommandBuilder, - prjCmdRunner: projectCommandRunner, - dbUpdater: dbUpdater, - outputUpdater: outputUpdater, - policyCheckCommandRunner: policyCheckCommandRunner, - parallelPoolSize: parallelPoolSize, - } -} - -type PlanCommandRunner struct { - vcsClient vcs.Client - vcsStatusUpdater VCSStatusUpdater - pendingPlanFinder PendingPlanFinder - workingDir WorkingDir - prjCmdBuilder ProjectPlanCommandBuilder - prjCmdRunner ProjectPlanCommandRunner - dbUpdater *DBUpdater - outputUpdater OutputUpdater - policyCheckCommandRunner *PolicyCheckCommandRunner - parallelPoolSize int -} - -func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { - baseRepo := ctx.Pull.BaseRepo - pull := ctx.Pull - - projectCmds, err := p.prjCmdBuilder.BuildAutoplanCommands(ctx) - if err != nil { - if _, statusErr := p.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, baseRepo, pull, models.FailedVCSStatus, command.Plan, "", ""); statusErr != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", statusErr)) - } - p.outputUpdater.UpdateOutput(ctx, AutoplanCommand{}, command.Result{Error: err}) - return - } - - projectCmds, policyCheckCmds := p.partitionProjectCmds(ctx, projectCmds) - - if len(projectCmds) == 0 { - ctx.Log.InfoContext(ctx.RequestCtx, "determined there was no project to run plan in") - // If there were no projects modified, we set successful commit statuses - // with 0/0 projects planned/policy_checked/applied successfully because some users require - // the Atlantis status to be passing for all pull requests. 
- if _, err := p.vcsStatusUpdater.UpdateCombinedCount(ctx.RequestCtx, baseRepo, pull, models.SuccessVCSStatus, command.Plan, 0, 0, ""); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - if _, err := p.vcsStatusUpdater.UpdateCombinedCount(ctx.RequestCtx, baseRepo, pull, models.SuccessVCSStatus, command.PolicyCheck, 0, 0, ""); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - if _, err := p.vcsStatusUpdater.UpdateCombinedCount(ctx.RequestCtx, baseRepo, pull, models.SuccessVCSStatus, command.Apply, 0, 0, ""); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - return - } - - // At this point we are sure Atlantis has work to do, so set commit status to pending - statusID, err := p.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, models.PendingVCSStatus, command.Plan, "", "") - if err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - - // Only run commands in parallel if enabled - var result command.Result - if p.isParallelEnabled(projectCmds) { - ctx.Log.InfoContext(ctx.RequestCtx, "Running plans in parallel") - result = runProjectCmdsParallel(projectCmds, p.prjCmdRunner.Plan, p.parallelPoolSize) - } else { - result = runProjectCmds(projectCmds, p.prjCmdRunner.Plan) - } - - p.outputUpdater.UpdateOutput(ctx, AutoplanCommand{}, result) - - pullStatus, err := p.dbUpdater.updateDB(ctx, ctx.Pull, result.ProjectResults) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("writing results: %s", err)) - } - - p.updateVcsStatus(ctx, pullStatus, statusID) - - // Check if there are any planned projects and if there are any errors or if plans are being deleted - if len(policyCheckCmds) > 0 && !result.HasErrors() { - // Run policy_check command - ctx.Log.InfoContext(ctx.RequestCtx, "Running policy_checks for all plans") - - // refresh ctx's view of pull status since we just wrote to it. - // realistically each command should refresh this at the start, - // however, policy checking is weird since it's called within the plan command itself - // we need to better structure how this command works. 
- ctx.PullStatus = &pullStatus - - p.policyCheckCommandRunner.Run(ctx, policyCheckCmds) - } -} - -func (p *PlanCommandRunner) run(ctx *command.Context, cmd *command.Comment) { - var err error - baseRepo := ctx.Pull.BaseRepo - pull := ctx.Pull - - // creating status for the first time - statusID, err := p.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, baseRepo, pull, models.PendingVCSStatus, command.Plan, "", "") - if err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - - projectCmds, err := p.prjCmdBuilder.BuildPlanCommands(ctx, cmd) - if err != nil { - if _, statusErr := p.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, models.FailedVCSStatus, command.Plan, statusID, ""); statusErr != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", statusErr)) - } - p.outputUpdater.UpdateOutput(ctx, cmd, command.Result{Error: err}) - return - } - - projectCmds, policyCheckCmds := p.partitionProjectCmds(ctx, projectCmds) - - // Only run commands in parallel if enabled - var result command.Result - if p.isParallelEnabled(projectCmds) { - ctx.Log.InfoContext(ctx.RequestCtx, "Running applies in parallel") - result = runProjectCmdsParallel(projectCmds, p.prjCmdRunner.Plan, p.parallelPoolSize) - } else { - result = runProjectCmds(projectCmds, p.prjCmdRunner.Plan) - } - - p.outputUpdater.UpdateOutput( - ctx, - cmd, - result) - - pullStatus, err := p.dbUpdater.updateDB(ctx, pull, result.ProjectResults) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("writing results: %s", err)) - return - } - - p.updateVcsStatus(ctx, pullStatus, statusID) - - // Runs policy checks step after all plans are successful. - // This step does not approve any policies that require approval. - if len(result.ProjectResults) > 0 && !result.HasErrors() { - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("Running policy check for %s", cmd.String())) - p.policyCheckCommandRunner.Run(ctx, policyCheckCmds) - } -} - -func (p *PlanCommandRunner) Run(ctx *command.Context, cmd *command.Comment) { - if ctx.Trigger == command.AutoTrigger { - p.runAutoplan(ctx) - } else { - p.run(ctx, cmd) - } -} - -func (p *PlanCommandRunner) updateVcsStatus(ctx *command.Context, pullStatus models.PullStatus, statusID string) { - var numSuccess int - var numErrored int - status := models.SuccessVCSStatus - - numErrored = pullStatus.StatusCount(models.ErroredPlanStatus) - // We consider anything that isn't a plan error as a plan success. - // For example, if there is an apply error, that means that at least a - // plan was generated successfully. 
- numSuccess = len(pullStatus.Projects) - numErrored - - if numErrored > 0 { - status = models.FailedVCSStatus - } - - if _, err := p.vcsStatusUpdater.UpdateCombinedCount( - ctx.RequestCtx, - ctx.Pull.BaseRepo, - ctx.Pull, - status, - command.Plan, - numSuccess, - len(pullStatus.Projects), - statusID, - ); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } -} - -func (p *PlanCommandRunner) partitionProjectCmds( - ctx *command.Context, - cmds []command.ProjectContext, -) ( - projectCmds []command.ProjectContext, - policyCheckCmds []command.ProjectContext, -) { - for _, cmd := range cmds { - switch cmd.CommandName { - case command.Plan: - projectCmds = append(projectCmds, cmd) - case command.PolicyCheck: - policyCheckCmds = append(policyCheckCmds, cmd) - default: - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("%s is not supported", cmd.CommandName)) - } - } - return -} - -func (p *PlanCommandRunner) isParallelEnabled(projectCmds []command.ProjectContext) bool { - return len(projectCmds) > 0 && projectCmds[0].ParallelPlanEnabled -} diff --git a/server/legacy/events/policy_check_command_runner.go b/server/legacy/events/policy_check_command_runner.go deleted file mode 100644 index 3c75068e5..000000000 --- a/server/legacy/events/policy_check_command_runner.go +++ /dev/null @@ -1,89 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -func NewPolicyCheckCommandRunner( - dbUpdater *DBUpdater, - outputUpdater OutputUpdater, - vcsStatusUpdater VCSStatusUpdater, - projectCommandRunner ProjectPolicyCheckCommandRunner, - parallelPoolSize int, -) *PolicyCheckCommandRunner { - return &PolicyCheckCommandRunner{ - dbUpdater: dbUpdater, - outputUpdater: outputUpdater, - vcsStatusUpdater: vcsStatusUpdater, - prjCmdRunner: projectCommandRunner, - parallelPoolSize: parallelPoolSize, - } -} - -type PolicyCheckCommandRunner struct { - dbUpdater *DBUpdater - outputUpdater OutputUpdater - vcsStatusUpdater VCSStatusUpdater - prjCmdRunner ProjectPolicyCheckCommandRunner - parallelPoolSize int -} - -func (p *PolicyCheckCommandRunner) Run(ctx *command.Context, cmds []command.ProjectContext) { - if len(cmds) == 0 { - ctx.Log.InfoContext(ctx.RequestCtx, "no projects to run policy_check in") - // If there were no projects modified, we set successful commit statuses - // with 0/0 projects policy_checked successfully because some users require - // the Atlantis status to be passing for all pull requests. 
- if _, err := p.vcsStatusUpdater.UpdateCombinedCount(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, models.SuccessVCSStatus, command.PolicyCheck, 0, 0, ""); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - return - } - - // So set policy_check commit status to pending - statusID, err := p.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, models.PendingVCSStatus, command.PolicyCheck, "", "") - if err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } - - var result command.Result - if p.isParallelEnabled(cmds) { - ctx.Log.InfoContext(ctx.RequestCtx, "Running policy_checks in parallel") - result = runProjectCmdsParallel(cmds, p.prjCmdRunner.PolicyCheck, p.parallelPoolSize) - } else { - result = runProjectCmds(cmds, p.prjCmdRunner.PolicyCheck) - } - - p.outputUpdater.UpdateOutput(ctx, PolicyCheckCommand{}, result) - - pullStatus, err := p.dbUpdater.updateDB(ctx, ctx.Pull, result.ProjectResults) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("writing results: %s", err)) - } - - p.updateVcsStatus(ctx, pullStatus, statusID) -} - -func (p *PolicyCheckCommandRunner) updateVcsStatus(ctx *command.Context, pullStatus models.PullStatus, statusID string) { - var numSuccess int - var numErrored int - status := models.SuccessVCSStatus - - numSuccess = pullStatus.StatusCount(models.PassedPolicyCheckStatus) - numErrored = pullStatus.StatusCount(models.ErroredPolicyCheckStatus) - - if numErrored > 0 { - status = models.FailedVCSStatus - } - - if _, err := p.vcsStatusUpdater.UpdateCombinedCount(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, status, command.PolicyCheck, numSuccess, len(pullStatus.Projects), statusID); err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", err)) - } -} - -func (p *PolicyCheckCommandRunner) isParallelEnabled(cmds []command.ProjectContext) bool { - return len(cmds) > 0 && cmds[0].ParallelPolicyCheckEnabled -} diff --git a/server/legacy/events/policy_check_project_context_builder.go b/server/legacy/events/policy_check_project_context_builder.go deleted file mode 100644 index f5dc131d0..000000000 --- a/server/legacy/events/policy_check_project_context_builder.go +++ /dev/null @@ -1,38 +0,0 @@ -package events - -import ( - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type PolicyCheckProjectContextBuilder struct { - ProjectCommandContextBuilder - CommentBuilder CommentBuilder -} - -func (p *PolicyCheckProjectContextBuilder) BuildProjectContext( - ctx *command.Context, - cmdName command.Name, - prjCfg valid.MergedProjectCfg, - commentArgs []string, - repoDir string, - contextFlags *command.ContextFlags, -) []command.ProjectContext { - prjCmds := p.ProjectCommandContextBuilder.BuildProjectContext(ctx, cmdName, prjCfg, commentArgs, repoDir, contextFlags) - if cmdName == command.Plan { - prjCmds = append(prjCmds, - buildContext( - ctx, - command.PolicyCheck, - getSteps(command.PolicyCheck, prjCfg.PullRequestWorkflow, contextFlags.LogLevel), - p.CommentBuilder, - prjCfg, - commentArgs, - repoDir, - contextFlags, - )..., - ) - } - - return prjCmds -} diff --git a/server/legacy/events/policy_check_prr_command_runner.go b/server/legacy/events/policy_check_prr_command_runner.go deleted file mode 100644 index 62163f8aa..000000000 --- a/server/legacy/events/policy_check_prr_command_runner.go +++ /dev/null 
@@ -1,29 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -type ProjectPolicyCheckCommandBuilder interface { - BuildPolicyCheckCommands(ctx *command.Context) ([]command.ProjectContext, error) -} - -type PRRPolicyCheckCommandRunner struct { - PrjCmdBuilder ProjectPolicyCheckCommandBuilder - *PolicyCheckCommandRunner -} - -func (p *PRRPolicyCheckCommandRunner) Run(ctx *command.Context) { - projectCmds, err := p.PrjCmdBuilder.BuildPolicyCheckCommands(ctx) - if err != nil { - if _, statusErr := p.vcsStatusUpdater.UpdateCombined(ctx.RequestCtx, ctx.Pull.BaseRepo, ctx.Pull, models.FailedVCSStatus, command.PolicyCheck, "", ""); statusErr != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("unable to update commit status: %s", statusErr)) - } - p.outputUpdater.UpdateOutput(ctx, PolicyCheckCommand{}, command.Result{Error: err}) - return - } - p.PolicyCheckCommandRunner.Run(ctx, projectCmds) -} diff --git a/server/legacy/events/policy_filter.go b/server/legacy/events/policy_filter.go deleted file mode 100644 index 85ea132f6..000000000 --- a/server/legacy/events/policy_filter.go +++ /dev/null @@ -1,156 +0,0 @@ -package events - -import ( - "context" - - gh "github.com/google/go-github/v45/github" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" -) - -type prReviewFetcher interface { - ListLatestApprovalUsernames(ctx context.Context, installationToken int64, repo models.Repo, prNum int) ([]string, error) - ListApprovalReviews(ctx context.Context, installationToken int64, repo models.Repo, prNum int) ([]*gh.PullRequestReview, error) -} - -type prReviewDismisser interface { - Dismiss(ctx context.Context, installationToken int64, repo models.Repo, prNum int, reviewID int64) error -} - -type teamMemberFetcher interface { - ListTeamMembers(ctx context.Context, installationToken int64, teamSlug string) ([]string, error) -} - -type ApprovedPolicyFilter struct { - prReviewDismisser prReviewDismisser - prReviewFetcher prReviewFetcher - teamMemberFetcher teamMemberFetcher - allocator feature.Allocator - policies []valid.PolicySet - logger logging.Logger -} - -func NewApprovedPolicyFilter( - prReviewFetcher prReviewFetcher, - prReviewDismisser prReviewDismisser, - teamMemberFetcher teamMemberFetcher, - allocator feature.Allocator, - policySets []valid.PolicySet, - logger logging.Logger) *ApprovedPolicyFilter { - return &ApprovedPolicyFilter{ - prReviewFetcher: prReviewFetcher, - prReviewDismisser: prReviewDismisser, - teamMemberFetcher: teamMemberFetcher, - policies: policySets, - allocator: allocator, - logger: logger, - } -} - -// Filter will remove failed policies if the underlying PR has been approved by a policy owner -func (p *ApprovedPolicyFilter) Filter(ctx context.Context, installationToken int64, repo models.Repo, prNum int, trigger command.CommandTrigger, failedPolicies []valid.PolicySet) ([]valid.PolicySet, error) { - // Skip GH API calls if no policies failed - if len(failedPolicies) == 0 { - return failedPolicies, nil - } - - // Dismiss PR reviews when event came from pull request change/atlantis plan comment - if trigger == command.AutoTrigger || trigger == command.CommentTrigger { - err := 
p.dismissStalePRReviews(ctx, installationToken, repo, prNum) - if err != nil { - return failedPolicies, errors.Wrap(err, "failed to dismiss stale PR reviews") - } - return failedPolicies, nil - } - - // Fetch reviewers who approved the PR - approvedReviewers, err := p.prReviewFetcher.ListLatestApprovalUsernames(ctx, installationToken, repo, prNum) - if err != nil { - return failedPolicies, errors.Wrap(err, "failed to fetch GH PR reviews") - } - - // Filter out policies that already have been approved within GH - var filteredFailedPolicies []valid.PolicySet - for _, failedPolicy := range failedPolicies { - approved, err := p.reviewersContainsPolicyOwner(ctx, installationToken, approvedReviewers, failedPolicy) - if err != nil { - return failedPolicies, errors.Wrap(err, "validating policy approval") - } - if !approved { - filteredFailedPolicies = append(filteredFailedPolicies, failedPolicy) - } - } - return filteredFailedPolicies, nil -} - -func (p *ApprovedPolicyFilter) dismissStalePRReviews(ctx context.Context, installationToken int64, repo models.Repo, prNum int) error { - shouldAllocate, err := p.allocator.ShouldAllocate(feature.LegacyDeprecation, feature.FeatureContext{ - RepoName: repo.FullName, - }) - if err != nil { - return errors.Wrap(err, "unable to allocate legacy deprecation feature flag") - } - // if legacy deprecation is enabled, don't dismiss stale PR reviews in legacy workflow - if shouldAllocate { - p.logger.InfoContext(ctx, "legacy deprecation feature flag enabled, not dismissing stale PR reviews") - return nil - } - - approvalReviews, err := p.prReviewFetcher.ListApprovalReviews(ctx, installationToken, repo, prNum) - if err != nil { - return errors.Wrap(err, "failed to fetch GH PR reviews") - } - - for _, approval := range approvalReviews { - isOwner, err := p.approverIsOwner(ctx, installationToken, approval) - if err != nil { - return errors.Wrap(err, "failed to validate approver is owner") - } - if isOwner { - err = p.prReviewDismisser.Dismiss(ctx, installationToken, repo, prNum, approval.GetID()) - if err != nil { - return errors.Wrap(err, "failed to dismiss GH PR reviews") - } - } - } - return nil -} - -func (p *ApprovedPolicyFilter) approverIsOwner(ctx context.Context, installationToken int64, approval *gh.PullRequestReview) (bool, error) { - if approval.GetUser() == nil { - return false, errors.New("failed to identify approver") - } - reviewers := []string{approval.GetUser().GetLogin()} - for _, policy := range p.policies { - isOwner, err := p.reviewersContainsPolicyOwner(ctx, installationToken, reviewers, policy) - if err != nil { - return false, errors.Wrap(err, "validating policy approval") - } - if isOwner { - return true, nil - } - } - return false, nil -} - -func (p *ApprovedPolicyFilter) reviewersContainsPolicyOwner(ctx context.Context, installationToken int64, reviewers []string, policy valid.PolicySet) (bool, error) { - // fetch owners from GH team - owners, err := p.teamMemberFetcher.ListTeamMembers(ctx, installationToken, policy.Owner) - if err != nil { - return false, errors.Wrap(err, "failed to fetch GH team members") - } - - // Check if any reviewer is an owner of the failed policy set - for _, owner := range owners { - for _, reviewer := range reviewers { - if reviewer == owner { - return true, nil - } - } - } - return false, nil -} diff --git a/server/legacy/events/policy_filter_test.go b/server/legacy/events/policy_filter_test.go deleted file mode 100644 index f95f9f155..000000000 --- a/server/legacy/events/policy_filter_test.go +++ /dev/null @@ 
-1,286 +0,0 @@ -package events - -// not using a separate test package to be able to test some private fields in struct ApprovedPolicyFilter - -import ( - "context" - "testing" - - "github.com/google/go-github/v45/github" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - "github.com/stretchr/testify/assert" -) - -const ( - ownerA = "A" - ownerB = "B" - ownerC = "C" - policyName = "some-policy" - policyOwner = "team" -) - -func TestFilter_Approved(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - approvers: []string{ownerB}, - } - reviewDismisser := &mockReviewDismisser{} - teamFetcher := &mockTeamMemberFetcher{ - members: []string{ownerA, ownerB, ownerC}, - } - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.PRReviewTrigger, failedPolicies) - assert.NoError(t, err) - assert.True(t, reviewFetcher.listUsernamesIsCalled) - assert.False(t, reviewFetcher.listApprovalsIsCalled) - assert.True(t, teamFetcher.isCalled) - assert.False(t, reviewDismisser.isCalled) - assert.Empty(t, filteredPolicies) -} - -func TestFilter_NotApproved(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - reviews: []*github.PullRequestReview{ - { - User: &github.User{Login: github.String(ownerA)}, - }, - { - User: &github.User{Login: github.String(ownerB)}, - }, - }, - } - teamFetcher := &mockTeamMemberFetcher{ - members: []string{ownerC}, - } - reviewDismisser := &mockReviewDismisser{} - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.AutoTrigger, failedPolicies) - assert.NoError(t, err) - assert.False(t, reviewFetcher.listUsernamesIsCalled) - assert.True(t, reviewFetcher.listApprovalsIsCalled) - assert.True(t, teamFetcher.isCalled) - assert.False(t, reviewDismisser.isCalled) - assert.Equal(t, failedPolicies, filteredPolicies) -} - -func TestFilter_DismissalBlockedByFeatureAllocator(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - reviews: []*github.PullRequestReview{ - { - User: &github.User{Login: github.String(ownerA)}, - }, - }, - } - teamFetcher := &mockTeamMemberFetcher{ - members: []string{ownerA}, - } - reviewDismisser := &mockReviewDismisser{} - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{Enabled: true}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.AutoTrigger, failedPolicies) - assert.NoError(t, err) - assert.False(t, reviewFetcher.listUsernamesIsCalled) - assert.False(t, reviewFetcher.listApprovalsIsCalled) - assert.False(t, teamFetcher.isCalled) - assert.False(t, reviewDismisser.isCalled) - assert.Equal(t, failedPolicies, filteredPolicies) 
-} - -func TestFilter_NotApproved_Dismissal(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - reviews: []*github.PullRequestReview{ - { - User: &github.User{Login: github.String(ownerA)}, - }, - }, - } - teamFetcher := &mockTeamMemberFetcher{ - members: []string{ownerA}, - } - reviewDismisser := &mockReviewDismisser{} - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.AutoTrigger, failedPolicies) - assert.NoError(t, err) - assert.False(t, reviewFetcher.listUsernamesIsCalled) - assert.True(t, reviewFetcher.listApprovalsIsCalled) - assert.True(t, teamFetcher.isCalled) - assert.True(t, reviewDismisser.isCalled) - assert.Equal(t, failedPolicies, filteredPolicies) -} - -func TestFilter_NoFailedPolicies(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - approvers: []string{ownerB}, - } - teamFetcher := &mockTeamMemberFetcher{ - members: []string{ownerA, ownerB, ownerC}, - } - reviewDismisser := &mockReviewDismisser{} - - var failedPolicies []valid.PolicySet - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.PRReviewTrigger, failedPolicies) - assert.NoError(t, err) - assert.False(t, reviewFetcher.listUsernamesIsCalled) - assert.False(t, reviewFetcher.listApprovalsIsCalled) - assert.False(t, teamFetcher.isCalled) - assert.False(t, reviewDismisser.isCalled) - assert.Empty(t, filteredPolicies) -} - -func TestFilter_FailedListLatestApprovalUsernames(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - listUsernamesError: assert.AnError, - } - teamFetcher := &mockTeamMemberFetcher{} - reviewDismisser := &mockReviewDismisser{} - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.PRReviewTrigger, failedPolicies) - assert.Error(t, err) - assert.True(t, reviewFetcher.listUsernamesIsCalled) - assert.False(t, reviewFetcher.listApprovalsIsCalled) - assert.False(t, reviewDismisser.isCalled) - assert.False(t, teamFetcher.isCalled) - assert.Equal(t, failedPolicies, filteredPolicies) -} - -func TestFilter_FailedListApprovalReviews(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - listApprovalsError: assert.AnError, - } - teamFetcher := &mockTeamMemberFetcher{} - reviewDismisser := &mockReviewDismisser{} - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.CommentTrigger, failedPolicies) - assert.Error(t, err) - assert.False(t, reviewFetcher.listUsernamesIsCalled) - assert.True(t, reviewFetcher.listApprovalsIsCalled) - assert.False(t, reviewDismisser.isCalled) - assert.False(t, teamFetcher.isCalled) - assert.Equal(t, failedPolicies, 
filteredPolicies) -} - -func TestFilter_FailedTeamMemberFetch(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - approvers: []string{ownerB}, - } - teamFetcher := &mockTeamMemberFetcher{ - error: assert.AnError, - } - reviewDismisser := &mockReviewDismisser{} - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.PRReviewTrigger, failedPolicies) - assert.Error(t, err) - assert.True(t, reviewFetcher.listUsernamesIsCalled) - assert.False(t, reviewFetcher.listApprovalsIsCalled) - assert.True(t, teamFetcher.isCalled) - assert.False(t, reviewDismisser.isCalled) - assert.Equal(t, failedPolicies, filteredPolicies) -} - -func TestFilter_FailedDismiss(t *testing.T) { - reviewFetcher := &mockReviewFetcher{ - reviews: []*github.PullRequestReview{ - { - User: &github.User{Login: github.String(ownerB)}, - }, - }, - } - reviewDismisser := &mockReviewDismisser{ - error: assert.AnError, - } - teamFetcher := &mockTeamMemberFetcher{ - members: []string{ownerB}, - } - failedPolicies := []valid.PolicySet{ - {Name: policyName, Owner: policyOwner}, - } - - policyFilter := NewApprovedPolicyFilter(reviewFetcher, reviewDismisser, teamFetcher, &testFeatureAllocator{}, failedPolicies, logging.NewNoopCtxLogger(t)) - filteredPolicies, err := policyFilter.Filter(context.Background(), 0, models.Repo{}, 0, command.AutoTrigger, failedPolicies) - assert.Error(t, err) - assert.False(t, reviewFetcher.listUsernamesIsCalled) - assert.True(t, reviewFetcher.listApprovalsIsCalled) - assert.True(t, teamFetcher.isCalled) - assert.True(t, reviewDismisser.isCalled) - assert.Equal(t, failedPolicies, filteredPolicies) -} - -type mockReviewFetcher struct { - approvers []string - listUsernamesIsCalled bool - listUsernamesError error - reviews []*github.PullRequestReview - listApprovalsIsCalled bool - listApprovalsError error -} - -func (f *mockReviewFetcher) ListLatestApprovalUsernames(_ context.Context, _ int64, _ models.Repo, _ int) ([]string, error) { - f.listUsernamesIsCalled = true - return f.approvers, f.listUsernamesError -} - -func (f *mockReviewFetcher) ListApprovalReviews(_ context.Context, _ int64, _ models.Repo, _ int) ([]*github.PullRequestReview, error) { - f.listApprovalsIsCalled = true - return f.reviews, f.listApprovalsError -} - -type mockReviewDismisser struct { - error error - isCalled bool -} - -func (d *mockReviewDismisser) Dismiss(_ context.Context, _ int64, _ models.Repo, _ int, _ int64) error { - d.isCalled = true - return d.error -} - -type mockTeamMemberFetcher struct { - members []string - error error - isCalled bool -} - -func (m *mockTeamMemberFetcher) ListTeamMembers(_ context.Context, _ int64, _ string) ([]string, error) { - m.isCalled = true - return m.members, m.error -} - -type testFeatureAllocator struct { - Enabled bool - Err error -} - -func (t *testFeatureAllocator) ShouldAllocate(featureID feature.Name, featureCtx feature.FeatureContext) (bool, error) { - return t.Enabled, t.Err -} diff --git a/server/legacy/events/pr_project_context_builder.go b/server/legacy/events/pr_project_context_builder.go deleted file mode 100644 index d339c28d9..000000000 --- a/server/legacy/events/pr_project_context_builder.go +++ /dev/null @@ -1,60 +0,0 @@ -package events - -import ( - "fmt" - - 
"github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" -) - -func NewPlatformModeProjectCommandContextBuilder( - commentBuilder CommentBuilder, - delegate ProjectCommandContextBuilder, - logger logging.Logger, - allocator feature.Allocator, -) *PlatformModeProjectContextBuilder { - return &PlatformModeProjectContextBuilder{ - CommentBuilder: commentBuilder, - delegate: delegate, - Logger: logger, - allocator: allocator, - } -} - -type PlatformModeProjectContextBuilder struct { - delegate ProjectCommandContextBuilder - allocator feature.Allocator - CommentBuilder CommentBuilder - Logger logging.Logger -} - -func (p *PlatformModeProjectContextBuilder) BuildProjectContext( - ctx *command.Context, - cmdName command.Name, - prjCfg valid.MergedProjectCfg, - commentArgs []string, - repoDir string, - contextFlags *command.ContextFlags, -) []command.ProjectContext { - shouldAllocate, err := p.allocator.ShouldAllocate(feature.PlatformMode, feature.FeatureContext{RepoName: ctx.HeadRepo.FullName}) - if err != nil { - p.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to allocate for feature: %s, error: %s", feature.PlatformMode, err)) - } - - if shouldAllocate { - return buildContext( - ctx, - cmdName, - getSteps(cmdName, prjCfg.PullRequestWorkflow, contextFlags.LogLevel), - p.CommentBuilder, - prjCfg, - commentArgs, - repoDir, - contextFlags, - ) - } - - return p.delegate.BuildProjectContext(ctx, cmdName, prjCfg, commentArgs, repoDir, contextFlags) -} diff --git a/server/legacy/events/pre_workflow_hooks_command_runner.go b/server/legacy/events/pre_workflow_hooks_command_runner.go deleted file mode 100644 index f84c23237..000000000 --- a/server/legacy/events/pre_workflow_hooks_command_runner.go +++ /dev/null @@ -1,96 +0,0 @@ -package events - -import ( - "context" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pre_workflows_hooks_command_runner.go PreWorkflowHooksCommandRunner - -type PreWorkflowHooksCommandRunner interface { - RunPreHooks(ctx context.Context, cmdCtx *command.Context) error -} - -// DefaultPreWorkflowHooksCommandRunner is the first step when processing a workflow hook commands. -type DefaultPreWorkflowHooksCommandRunner struct { - VCSClient vcs.Client - WorkingDirLocker WorkingDirLocker - WorkingDir WorkingDir - GlobalCfg valid.GlobalCfg - PreWorkflowHookRunner runtime.PreWorkflowHookRunner -} - -// RunPreHooks runs pre_workflow_hooks when PR is opened or updated. -func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks( - ctx context.Context, - cmdCtx *command.Context, -) error { - pull := cmdCtx.Pull - baseRepo := pull.BaseRepo - headRepo := cmdCtx.HeadRepo - user := cmdCtx.User - log := cmdCtx.Log - - preWorkflowHooks := make([]*valid.PreWorkflowHook, 0) - for _, repo := range w.GlobalCfg.Repos { - if repo.IDMatches(baseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 { - preWorkflowHooks = append(preWorkflowHooks, repo.PreWorkflowHooks...) 
- } - } - - // short circuit any other calls if there are no pre-hooks configured - if len(preWorkflowHooks) == 0 { - return nil - } - - unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace) - if err != nil { - return errors.Wrap(err, "locking working dir") - } - defer unlockFn() - - repoDir, _, err := w.WorkingDir.Clone(log, headRepo, pull, DefaultWorkspace) - if err != nil { - return errors.Wrap(err, "cloning repository") - } - - err = w.runHooks( - ctx, - models.PreWorkflowHookCommandContext{ - BaseRepo: baseRepo, - HeadRepo: headRepo, - Log: log, - Pull: pull, - User: user, - }, - preWorkflowHooks, repoDir) - - if err != nil { - return errors.Wrap(err, "running pre workflow hooks") - } - - return nil -} - -func (w *DefaultPreWorkflowHooksCommandRunner) runHooks( - ctx context.Context, - cmdCtx models.PreWorkflowHookCommandContext, - preWorkflowHooks []*valid.PreWorkflowHook, - repoDir string, -) error { - for _, hook := range preWorkflowHooks { - _, err := w.PreWorkflowHookRunner.Run(ctx, cmdCtx, hook.RunCommand, repoDir) - - if err != nil { - return err - } - } - - return nil -} diff --git a/server/legacy/events/pre_workflow_hooks_command_runner_test.go b/server/legacy/events/pre_workflow_hooks_command_runner_test.go deleted file mode 100644 index 9d5a20993..000000000 --- a/server/legacy/events/pre_workflow_hooks_command_runner_test.go +++ /dev/null @@ -1,218 +0,0 @@ -package events_test - -import ( - "context" - "errors" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config/valid" - runtime_mocks "github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/models/fixtures" - . 
"github.com/runatlantis/atlantis/testing" -) - -var wh events.DefaultPreWorkflowHooksCommandRunner -var whWorkingDir *mocks.MockWorkingDir -var whWorkingDirLocker *mocks.MockWorkingDirLocker -var whPreWorkflowHookRunner *runtime_mocks.MockPreWorkflowHookRunner - -func preWorkflowHooksSetup(t *testing.T) { - RegisterMockTestingT(t) - vcsClient := vcsmocks.NewMockClient() - whWorkingDir = mocks.NewMockWorkingDir() - whWorkingDirLocker = mocks.NewMockWorkingDirLocker() - whPreWorkflowHookRunner = runtime_mocks.NewMockPreWorkflowHookRunner() - - wh = events.DefaultPreWorkflowHooksCommandRunner{ - VCSClient: vcsClient, - WorkingDirLocker: whWorkingDirLocker, - WorkingDir: whWorkingDir, - PreWorkflowHookRunner: whPreWorkflowHookRunner, - } -} - -func TestRunPreHooks_Clone(t *testing.T) { - log := logging.NewNoopCtxLogger(t) - - var newPull = fixtures.Pull - newPull.BaseRepo = fixtures.GithubRepo - - ctx := context.Background() - cmdCtx := &command.Context{ - Pull: newPull, - HeadRepo: fixtures.GithubRepo, - User: fixtures.User, - Log: log, - } - - testHook := valid.PreWorkflowHook{ - StepName: "test", - RunCommand: "some command", - } - - pCtx := models.PreWorkflowHookCommandContext{ - BaseRepo: fixtures.GithubRepo, - HeadRepo: fixtures.GithubRepo, - Pull: newPull, - Log: log, - User: fixtures.User, - } - - repoDir := "path/to/repo" - result := "some result" - - t.Run("success hooks in cfg", func(t *testing.T) { - preWorkflowHooksSetup(t) - - var unlockCalled *bool = Bool(false) - unlockFn := func() { - unlockCalled = Bool(true) - } - - globalCfg := valid.GlobalCfg{ - Repos: []valid.Repo{ - { - ID: fixtures.GithubRepo.ID(), - PreWorkflowHooks: []*valid.PreWorkflowHook{ - &testHook, - }, - }, - }, - } - - wh.GlobalCfg = globalCfg - - When(whWorkingDirLocker.TryLock(fixtures.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace)).ThenReturn(unlockFn, nil) - When(whWorkingDir.Clone(log, fixtures.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(ctx, pCtx, testHook.RunCommand, repoDir)).ThenReturn(result, nil) - - err := wh.RunPreHooks(ctx, cmdCtx) - - Ok(t, err) - whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(ctx, pCtx, testHook.RunCommand, repoDir) - Assert(t, *unlockCalled == true, "unlock function called") - }) - t.Run("success hooks not in cfg", func(t *testing.T) { - preWorkflowHooksSetup(t) - globalCfg := valid.GlobalCfg{ - Repos: []valid.Repo{ - // one with hooks but mismatched id - { - ID: "id1", - PreWorkflowHooks: []*valid.PreWorkflowHook{ - &testHook, - }, - }, - // one with the correct id but no hooks - { - ID: fixtures.GithubRepo.ID(), - PreWorkflowHooks: []*valid.PreWorkflowHook{}, - }, - }, - } - - wh.GlobalCfg = globalCfg - - err := wh.RunPreHooks(ctx, cmdCtx) - - Ok(t, err) - - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(ctx, pCtx, testHook.RunCommand, repoDir) - whWorkingDirLocker.VerifyWasCalled(Never()).TryLock(fixtures.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace) - whWorkingDir.VerifyWasCalled(Never()).Clone(log, fixtures.GithubRepo, newPull, events.DefaultWorkspace) - }) - t.Run("error locking work dir", func(t *testing.T) { - preWorkflowHooksSetup(t) - - globalCfg := valid.GlobalCfg{ - Repos: []valid.Repo{ - { - ID: fixtures.GithubRepo.ID(), - PreWorkflowHooks: []*valid.PreWorkflowHook{ - &testHook, - }, - }, - }, - } - - wh.GlobalCfg = globalCfg - - When(whWorkingDirLocker.TryLock(fixtures.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace)).ThenReturn(func() {}, 
errors.New("some error")) - - err := wh.RunPreHooks(context.TODO(), cmdCtx) - - Assert(t, err != nil, "error not nil") - whWorkingDir.VerifyWasCalled(Never()).Clone(log, fixtures.GithubRepo, newPull, events.DefaultWorkspace) - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(ctx, pCtx, testHook.RunCommand, repoDir) - }) - - t.Run("error cloning", func(t *testing.T) { - preWorkflowHooksSetup(t) - - var unlockCalled *bool = Bool(false) - unlockFn := func() { - unlockCalled = Bool(true) - } - - globalCfg := valid.GlobalCfg{ - Repos: []valid.Repo{ - { - ID: fixtures.GithubRepo.ID(), - PreWorkflowHooks: []*valid.PreWorkflowHook{ - &testHook, - }, - }, - }, - } - - wh.GlobalCfg = globalCfg - - When(whWorkingDirLocker.TryLock(fixtures.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace)).ThenReturn(unlockFn, nil) - When(whWorkingDir.Clone(log, fixtures.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) - - err := wh.RunPreHooks(context.TODO(), cmdCtx) - - Assert(t, err != nil, "error not nil") - - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(ctx, pCtx, testHook.RunCommand, repoDir) - Assert(t, *unlockCalled == true, "unlock function called") - }) - - t.Run("error running pre hook", func(t *testing.T) { - preWorkflowHooksSetup(t) - - var unlockCalled *bool = Bool(false) - unlockFn := func() { - unlockCalled = Bool(true) - } - - globalCfg := valid.GlobalCfg{ - Repos: []valid.Repo{ - { - ID: fixtures.GithubRepo.ID(), - PreWorkflowHooks: []*valid.PreWorkflowHook{ - &testHook, - }, - }, - }, - } - - wh.GlobalCfg = globalCfg - - When(whWorkingDirLocker.TryLock(fixtures.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace)).ThenReturn(unlockFn, nil) - When(whWorkingDir.Clone(log, fixtures.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(ctx, pCtx, testHook.RunCommand, repoDir)).ThenReturn(result, errors.New("some error")) - - err := wh.RunPreHooks(ctx, cmdCtx) - - Assert(t, err != nil, "error not nil") - Assert(t, *unlockCalled == true, "unlock function called") - }) -} diff --git a/server/legacy/events/project_command_builder.go b/server/legacy/events/project_command_builder.go deleted file mode 100644 index 58669e3b0..000000000 --- a/server/legacy/events/project_command_builder.go +++ /dev/null @@ -1,605 +0,0 @@ -package events - -import ( - "fmt" - "os" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/logging" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" -) - -const ( - // DefaultRepoRelDir is the default directory we run commands in, relative - // to the root of the repo. - DefaultRepoRelDir = "." - // DefaultWorkspace is the default Terraform workspace we run commands in. - // This is also Terraform's default workspace. - DefaultWorkspace = "default" - // DefaultParallelApplyEnabled is the default for the parallel apply setting. - DefaultParallelApplyEnabled = false - // DefaultParallelPlanEnabled is the default for the parallel plan setting. - DefaultParallelPlanEnabled = false - // InfiniteProjectLimitPerPR is the default setting for number of projects per PR. - // this is set to -1 to signify no limit. 
- InfiniteProjectsPerPR = -1 -) - -func NewProjectCommandBuilder( - projectContextBuilder ProjectCommandContextBuilder, - parserValidator *config.ParserValidator, - projectFinder ProjectFinder, - vcsClient vcs.Client, - workingDir WorkingDir, - workingDirLocker WorkingDirLocker, - globalCfg valid.GlobalCfg, - pendingPlanFinder *DefaultPendingPlanFinder, - EnableRegExpCmd bool, - AutoplanFileList string, - logger logging.Logger, - limit int, -) ProjectCommandBuilder { - var projectCommandBuilder ProjectCommandBuilder = &DefaultProjectCommandBuilder{ - ParserValidator: parserValidator, - ProjectFinder: projectFinder, - VCSClient: vcsClient, - WorkingDir: workingDir, - WorkingDirLocker: workingDirLocker, - GlobalCfg: globalCfg, - PendingPlanFinder: pendingPlanFinder, - EnableRegExpCmd: EnableRegExpCmd, - AutoplanFileList: AutoplanFileList, - ProjectCommandContextBuilder: projectContextBuilder, - } - - projectCommandBuilder = &SizeLimitedProjectCommandBuilder{ - Limit: limit, - ProjectCommandBuilder: projectCommandBuilder, - } - - return &InstrumentedProjectCommandBuilder{ - ProjectCommandBuilder: projectCommandBuilder, - Logger: logger, - } -} - -type ProjectPlanCommandBuilder interface { - // BuildAutoplanCommands builds project commands that will run plan on - // the projects determined to be modified. - BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) - // BuildPlanCommands builds project plan commands for this ctx and comment. If - // comment doesn't specify one project then there may be multiple commands - // to be run. - BuildPlanCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) -} - -type ProjectApplyCommandBuilder interface { - // BuildApplyCommands builds project Apply commands for this ctx and comment. If - // comment doesn't specify one project then there may be multiple commands - // to be run. - BuildApplyCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) -} - -type ProjectVersionCommandBuilder interface { - // BuildVersionCommands builds project Version commands for this ctx and comment. If - // comment doesn't specify one project then there may be multiple commands - // to be run. - BuildVersionCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_builder.go ProjectCommandBuilder - -// ProjectCommandBuilder builds commands that run on individual projects. -type ProjectCommandBuilder interface { - ProjectPlanCommandBuilder - ProjectPolicyCheckCommandBuilder - ProjectApplyCommandBuilder - ProjectVersionCommandBuilder -} - -// DefaultProjectCommandBuilder implements ProjectCommandBuilder. -// This class combines the data from the comment and any atlantis.yaml file or -// Atlantis server config and then generates a set of contexts. -type DefaultProjectCommandBuilder struct { - ParserValidator *config.ParserValidator - ProjectFinder ProjectFinder - VCSClient vcs.Client - WorkingDir WorkingDir - WorkingDirLocker WorkingDirLocker - GlobalCfg valid.GlobalCfg - PendingPlanFinder *DefaultPendingPlanFinder - ProjectCommandContextBuilder ProjectCommandContextBuilder - EnableRegExpCmd bool - AutoplanFileList string - EnableDiffMarkdownFormat bool -} - -// See ProjectCommandBuilder.BuildAutoplanCommands. 
-func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) { - projCtxs, err := p.buildPlanAllCommands(ctx, nil, false, "") - if err != nil { - return nil, err - } - var autoplanEnabled []command.ProjectContext - for _, projCtx := range projCtxs { - if !projCtx.AutoplanEnabled { - continue - } - autoplanEnabled = append(autoplanEnabled, projCtx) - } - return autoplanEnabled, nil -} - -// See ProjectCommandBuilder.BuildPlanCommands. -func (p *DefaultProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - if !cmd.IsForSpecificProject() { - return p.buildPlanAllCommands(ctx, cmd.Flags, cmd.ForceApply, cmd.LogLevel) - } - pcc, err := p.buildProjectPlanCommand(ctx, cmd) - return pcc, err -} - -func (p *DefaultProjectCommandBuilder) BuildPolicyCheckCommands(ctx *command.Context) ([]command.ProjectContext, error) { - unlockFn, err := p.WorkingDirLocker.TryLockPull(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num) - if err != nil { - return nil, err - } - defer unlockFn() - - pullDir, err := p.WorkingDir.GetPullDir(ctx.Pull.BaseRepo, ctx.Pull) - if err != nil { - return nil, err - } - - plans, err := p.PendingPlanFinder.Find(pullDir) - if err != nil { - return nil, err - } - - // use the default repository workspace because it is the only one guaranteed to have an atlantis.yaml, - // other workspaces will not have the file if they are using pre_workflow_hooks to generate it dynamically - defaultRepoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, DefaultWorkspace) - if err != nil { - return nil, err - } - - var cmds []command.ProjectContext - for _, plan := range plans { - commands, err := p.buildProjectCommandCtx(ctx, command.PolicyCheck, plan.ProjectName, []string{}, defaultRepoDir, plan.RepoRelDir, plan.Workspace, false, "") - if err != nil { - return nil, errors.Wrapf(err, "building command for dir %q", plan.RepoRelDir) - } - cmds = append(cmds, commands...) - } - return cmds, nil -} - -// See ProjectCommandBuilder.BuildApplyCommands. -func (p *DefaultProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - if !cmd.IsForSpecificProject() { - return p.buildAllProjectCommands(ctx, cmd) - } - pac, err := p.buildProjectApplyCommand(ctx, cmd) - return pac, err -} - -func (p *DefaultProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - return p.buildAllProjectCommands(ctx, cmd) -} - -func (p *DefaultProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - if !cmd.IsForSpecificProject() { - return p.buildAllProjectCommands(ctx, cmd) - } - pac, err := p.buildProjectVersionCommand(ctx, cmd) - return pac, err -} - -// buildPlanAllCommands builds plan contexts for all projects we determine were -// modified in this ctx. -func (p *DefaultProjectCommandBuilder) buildPlanAllCommands(ctx *command.Context, commentFlags []string, forceApply bool, logLevel string) ([]command.ProjectContext, error) { - // We'll need the list of modified files. - modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.Pull.BaseRepo, ctx.Pull) - if err != nil { - return nil, err - } - - // Need to lock the workspace we're about to clone to. 
- workspace := DefaultWorkspace - - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) - if err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, "workspace was locked") - return nil, err - } - defer unlockFn() - - repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) - if err != nil { - return nil, err - } - - // Parse config file if it exists. - hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir) - if err != nil { - return nil, errors.Wrapf(err, "looking for %s file in %q", config.AtlantisYAMLFilename, repoDir) - } - - var projCtxs []command.ProjectContext - if hasRepoCfg { - // If there's a repo cfg then we'll use it to figure out which projects - // should be planed. - repoCfg, err := p.ParserValidator.ParseRepoCfg(repoDir, p.GlobalCfg, ctx.Pull.BaseRepo.ID()) - if err != nil { - return nil, errors.Wrapf(err, "parsing %s", config.AtlantisYAMLFilename) - } - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("successfully parsed %s file", config.AtlantisYAMLFilename)) - matchingProjects, err := p.ProjectFinder.DetermineProjectsViaConfig(ctx.Log, modifiedFiles, repoCfg, repoDir) - if err != nil { - return nil, err - } - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("%d projects are to be planned based on their when_modified config", len(matchingProjects))) - - for _, mp := range matchingProjects { - mergedCfg := p.GlobalCfg.MergeProjectCfg(ctx.Pull.BaseRepo.ID(), mp, repoCfg) - contextFlags := &command.ContextFlags{ - ForceApply: forceApply, - ParallelApply: repoCfg.ParallelApply, - ParallelPlan: repoCfg.ParallelPlan, - LogLevel: logLevel, - } - projCtxs = append(projCtxs, - p.ProjectCommandContextBuilder.BuildProjectContext( - ctx, - command.Plan, - mergedCfg, - commentFlags, - repoDir, - contextFlags, - )...) - } - } else { - // If there is no config file, then we'll plan each project that - // our algorithm determines was modified. - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("found no %s file", config.AtlantisYAMLFilename)) - modifiedProjects := p.ProjectFinder.DetermineProjects(ctx.RequestCtx, ctx.Log, modifiedFiles, ctx.Pull.BaseRepo.FullName, repoDir, p.AutoplanFileList) - if err != nil { - return nil, errors.Wrapf(err, "finding modified projects: %s", modifiedFiles) - } - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("automatically determined that there were %d projects modified in this pull request: %s", len(modifiedProjects), modifiedProjects)) - for _, mp := range modifiedProjects { - pCfg := p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp.Path, DefaultWorkspace) - - contextFlags := &command.ContextFlags{ - ForceApply: forceApply, - ParallelApply: DefaultParallelApplyEnabled, - ParallelPlan: DefaultParallelPlanEnabled, - LogLevel: logLevel, - } - projCtxs = append(projCtxs, - p.ProjectCommandContextBuilder.BuildProjectContext( - ctx, - command.Plan, - pCfg, - commentFlags, - repoDir, - contextFlags, - )...) - } - } - - return projCtxs, nil -} - -// buildProjectPlanCommand builds a plan context for a single project. -// cmd must be for only one project. 
-func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - workspace := DefaultWorkspace - if cmd.Workspace != "" { - workspace = cmd.Workspace - } - - var pcc []command.ProjectContext - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) - if err != nil { - return pcc, err - } - defer unlockFn() - - _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) - if err != nil { - return pcc, err - } - - repoRelDir := DefaultRepoRelDir - if cmd.RepoRelDir != "" { - repoRelDir = cmd.RepoRelDir - } - - // use the default repository workspace because it is the only one guaranteed to have an atlantis.yaml, - // other workspaces will not have the file if they are using pre_workflow_hooks to generate it dynamically - defaultRepoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, DefaultWorkspace) - if err != nil { - return pcc, err - } - - return p.buildProjectCommandCtx( - ctx, - command.Plan, - cmd.ProjectName, - cmd.Flags, - defaultRepoDir, - repoRelDir, - workspace, - cmd.ForceApply, - cmd.LogLevel, - ) -} - -// getCfg returns the atlantis.yaml config (if it exists) for this project. If -// there is no config, then projectCfg and repoCfg will be nil. -func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName string, dir string, workspace string, repoDir string) (projectsCfg []valid.Project, repoCfg *valid.RepoCfg, err error) { - hasConfigFile, err := p.ParserValidator.HasRepoCfg(repoDir) - if err != nil { - err = errors.Wrapf(err, "looking for %s file in %q", config.AtlantisYAMLFilename, repoDir) - return - } - if !hasConfigFile { - if projectName != "" { - err = fmt.Errorf("cannot specify a project name unless an %s file exists to configure projects", config.AtlantisYAMLFilename) - return - } - return - } - - var repoConfig valid.RepoCfg - repoConfig, err = p.ParserValidator.ParseRepoCfg(repoDir, p.GlobalCfg, ctx.Pull.BaseRepo.ID()) - if err != nil { - return - } - repoCfg = &repoConfig - - // If they've specified a project by name we look it up. Otherwise we - // use the dir and workspace. - if projectName != "" { - if p.EnableRegExpCmd { - projectsCfg = repoCfg.FindProjectsByName(projectName) - } else { - if p := repoCfg.FindProjectByName(projectName); p != nil { - projectsCfg = append(projectsCfg, *p) - } - } - if len(projectsCfg) == 0 { - err = fmt.Errorf("no project with name %q is defined in %s", projectName, config.AtlantisYAMLFilename) - return - } - return - } - - projCfgs := repoCfg.FindProjectsByDirWorkspace(dir, workspace) - if len(projCfgs) == 0 { - return - } - if len(projCfgs) > 1 { - err = fmt.Errorf("must specify project name: more than one project defined in %s matched dir: %q workspace: %q", config.AtlantisYAMLFilename, dir, workspace) - return - } - projectsCfg = projCfgs - return -} - -// buildAllProjectCommands builds contexts for a command for every project that has -// pending plans in this ctx. -func (p *DefaultProjectCommandBuilder) buildAllProjectCommands(ctx *command.Context, commentCmd *command.Comment) ([]command.ProjectContext, error) { - // Lock all dirs in this pull request (instead of a single dir) because we - // don't know how many dirs we'll need to run the command in. 
- unlockFn, err := p.WorkingDirLocker.TryLockPull(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num) - if err != nil { - return nil, err - } - defer unlockFn() - - pullDir, err := p.WorkingDir.GetPullDir(ctx.Pull.BaseRepo, ctx.Pull) - if err != nil { - return nil, err - } - - plans, err := p.PendingPlanFinder.Find(pullDir) - if err != nil { - return nil, err - } - - // use the default repository workspace because it is the only one guaranteed to have an atlantis.yaml, - // other workspaces will not have the file if they are using pre_workflow_hooks to generate it dynamically - defaultRepoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, DefaultWorkspace) - if err != nil { - return nil, err - } - - var cmds []command.ProjectContext - for _, plan := range plans { - commentCmds, err := p.buildProjectCommandCtx(ctx, commentCmd.CommandName(), plan.ProjectName, commentCmd.Flags, defaultRepoDir, plan.RepoRelDir, plan.Workspace, commentCmd.ForceApply, commentCmd.LogLevel) - if err != nil { - return nil, errors.Wrapf(err, "building command for dir %q", plan.RepoRelDir) - } - cmds = append(cmds, commentCmds...) - } - return cmds, nil -} - -// buildProjectApplyCommand builds an apply command for the single project -// identified by cmd. -func (p *DefaultProjectCommandBuilder) buildProjectApplyCommand(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - workspace := DefaultWorkspace - if cmd.Workspace != "" { - workspace = cmd.Workspace - } - - var projCtx []command.ProjectContext - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) - if err != nil { - return projCtx, err - } - defer unlockFn() - - // use the default repository workspace because it is the only one guaranteed to have an atlantis.yaml, - // other workspaces will not have the file if they are using pre_workflow_hooks to generate it dynamically - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, DefaultWorkspace) - if os.IsNotExist(errors.Cause(err)) { - return projCtx, errors.New("no working directory found–did you run plan?") - } else if err != nil { - return projCtx, err - } - - repoRelDir := DefaultRepoRelDir - if cmd.RepoRelDir != "" { - repoRelDir = cmd.RepoRelDir - } - - return p.buildProjectCommandCtx( - ctx, - command.Apply, - cmd.ProjectName, - cmd.Flags, - repoDir, - repoRelDir, - workspace, - cmd.ForceApply, - cmd.LogLevel, - ) -} - -// buildProjectVersionCommand builds a version command for the single project -// identified by cmd. 
-func (p *DefaultProjectCommandBuilder) buildProjectVersionCommand(ctx *command.Context, cmd *command.Comment) ([]command.ProjectContext, error) { - workspace := DefaultWorkspace - if cmd.Workspace != "" { - workspace = cmd.Workspace - } - - var projCtx []command.ProjectContext - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, workspace) - if err != nil { - return projCtx, err - } - defer unlockFn() - - // use the default repository workspace because it is the only one guaranteed to have an atlantis.yaml, - // other workspaces will not have the file if they are using pre_workflow_hooks to generate it dynamically - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, DefaultWorkspace) - if os.IsNotExist(errors.Cause(err)) { - return projCtx, errors.New("no working directory found–did you run plan?") - } else if err != nil { - return projCtx, err - } - - repoRelDir := DefaultRepoRelDir - if cmd.RepoRelDir != "" { - repoRelDir = cmd.RepoRelDir - } - - return p.buildProjectCommandCtx( - ctx, - command.Version, - cmd.ProjectName, - cmd.Flags, - repoDir, - repoRelDir, - workspace, - cmd.ForceApply, - cmd.LogLevel, - ) -} - -// buildProjectCommandCtx builds a context for a single or several projects identified -// by the parameters. -func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *command.Context, - cmd command.Name, - projectName string, - commentFlags []string, - repoDir string, - repoRelDir string, - workspace string, - forceApply bool, - logLevel string) ([]command.ProjectContext, error) { - matchingProjects, repoCfgPtr, err := p.getCfg(ctx, projectName, repoRelDir, workspace, repoDir) - if err != nil { - return []command.ProjectContext{}, err - } - var projCtxs []command.ProjectContext - var projCfg valid.MergedProjectCfg - parallelApply := DefaultParallelApplyEnabled - parallelPlan := DefaultParallelPlanEnabled - if repoCfgPtr != nil { - parallelApply = repoCfgPtr.ParallelApply - parallelPlan = repoCfgPtr.ParallelPlan - } - - contextFlags := &command.ContextFlags{ - ForceApply: forceApply, - ParallelApply: parallelApply, - ParallelPlan: parallelPlan, - LogLevel: logLevel, - } - - if len(matchingProjects) > 0 { - // Override any dir/workspace defined on the comment with what was - // defined in config. This shouldn't matter since we don't allow comments - // with both project name and dir/workspace. - repoRelDir = projCfg.RepoRelDir - workspace = projCfg.Workspace - for _, mp := range matchingProjects { - projCfg = p.GlobalCfg.MergeProjectCfg(ctx.Pull.BaseRepo.ID(), mp, *repoCfgPtr) - - projCtxs = append(projCtxs, - p.ProjectCommandContextBuilder.BuildProjectContext( - ctx, - cmd, - projCfg, - commentFlags, - repoDir, - contextFlags, - )...) - } - } else { - projCfg = p.GlobalCfg.DefaultProjCfg( - ctx.Log, - ctx.Pull.BaseRepo.ID(), - repoRelDir, - workspace, - ) - - projCtxs = append(projCtxs, - p.ProjectCommandContextBuilder.BuildProjectContext( - ctx, - cmd, - projCfg, - commentFlags, - repoDir, - contextFlags, - )...) - } - - if err := p.validateWorkspaceAllowed(repoCfgPtr, repoRelDir, workspace); err != nil { - return []command.ProjectContext{}, err - } - - return projCtxs, nil -} - -// validateWorkspaceAllowed returns an error if repoCfg defines projects in -// repoRelDir but none of them use workspace. 
We want this to be an error -// because if users have gone to the trouble of defining projects in repoRelDir -// then it's likely that if we're running a command for a workspace that isn't -// defined then they probably just typed the workspace name wrong. -func (p *DefaultProjectCommandBuilder) validateWorkspaceAllowed(repoCfg *valid.RepoCfg, repoRelDir string, workspace string) error { - if repoCfg == nil { - return nil - } - - return repoCfg.ValidateWorkspaceAllowed(repoRelDir, workspace) -} diff --git a/server/legacy/events/project_command_builder_internal_test.go b/server/legacy/events/project_command_builder_internal_test.go deleted file mode 100644 index 1657cb918..000000000 --- a/server/legacy/events/project_command_builder_internal_test.go +++ /dev/null @@ -1,726 +0,0 @@ -package events - -import ( - "context" - "os" - "path/filepath" - "testing" - - version "github.com/hashicorp/go-version" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/matchers" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -// Test different permutations of global and repo config. -func TestBuildProjectCmdCtx(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - emptyPolicySets := valid.PolicySets{ - Version: nil, - PolicySets: []valid.PolicySet{}, - } - baseRepo := models.Repo{ - FullName: "owner/repo", - VCSHost: models.VCSHost{ - Hostname: "github.com", - }, - } - pull := models.PullRequest{ - BaseRepo: baseRepo, - } - cases := map[string]struct { - globalCfg string - repoCfg string - expErr string - expCtx command.ProjectContext - expPlanSteps []string - }{ - // Test that if we've set global defaults and no project config - // that the global defaults are used. - "global defaults": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default -pull_request_workflows: - default: - plan: - steps: - - init - - plan`, - repoCfg: "", - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"init", "plan"}, - }, - - // Test that if we've set global defaults, that they are used but the - // allowed project config values also come through. 
- "global defaults with repo cfg": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default -pull_request_workflows: - default: - plan: - steps: - - init - - plan`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 - `, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"init", "plan"}, - }, - - // Set a global apply req that should be used. - "global apply_requirements": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default - apply_requirements: [approved, mergeable] -pull_request_workflows: - default: - plan: - steps: - - init - - plan`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 -`, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved", "mergeable"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"init", "plan"}, - }, - - // If we have global config that matches a specific repo, it should be used. - "specific repo": { - globalCfg: ` -repos: -- id: /.*/ - pull_request_workflow: default -- id: github.com/owner/repo - pull_request_workflow: specific - apply_requirements: [approved] -pull_request_workflows: - default: - plan: - steps: - - init - - plan - specific: - plan: - steps: - - plan`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 -`, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"plan"}, - }, - - // We should get an error if the repo sets an apply req when its - // not allowed. 
- "repo defines apply_requirements": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default - apply_requirements: [approved, mergeable] -pull_request_workflows: - default: - plan: - steps: - - init - - plan`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - apply_requirements: [] -`, - expErr: "repo config not allowed to set 'apply_requirements' key: server-side config needs 'allowed_overrides: [apply_requirements]'", - }, - - // We should get an error if a repo sets a workflow when it's not allowed. - "repo sets its own workflow": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default - apply_requirements: [approved, mergeable] -pull_request_workflows: - default: - plan: - steps: - - init - - plan`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - pull_request_workflow: default -`, - expErr: "repo config not allowed to set 'pull_request_workflow' key: server-side config needs 'allowed_overrides: [pull_request_workflow]'", - }, - - // If the repos are allowed to set everything then their config should - // come through. - "full repo permissions": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default - apply_requirements: [approved] - allowed_overrides: [apply_requirements, pull_request_workflow] - allow_custom_workflows: true -pull_request_workflows: - default: - plan: - steps: [] - custom: - plan: - steps: - - plan -`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 - apply_requirements: [] - pull_request_workflow: custom -`, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"plan"}, - }, - - // Repos can choose server-side workflows. - "repos choose server-side workflow": { - globalCfg: ` -repos: -- id: /.*/ - workflow: default - allowed_overrides: [pull_request_workflow] -pull_request_workflows: - default: - plan: - steps: [] - custom: - plan: - steps: [plan] -`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 - pull_request_workflow: custom -`, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"plan"}, - }, - - // Test that if we leave keys undefined, that they don't override. 
- "cascading matches": { - globalCfg: ` -repos: -- id: /.*/ - apply_requirements: [approved] -- id: github.com/owner/repo - pull_request_workflow: custom -pull_request_workflows: - custom: - plan: - steps: [plan] -`, - repoCfg: ` -version: 3 -projects: -- dir: project1 - workspace: myworkspace -`, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -d project1 -w myworkspace", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logger, - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "", - ApplyRequirements: []string{"approved"}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -d project1 -w myworkspace -- flag", - RepoRelDir: "project1", - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"plan"}, - }, - } - - for name, c := range cases { - t.Run(name, func(t *testing.T) { - tmp, cleanup := DirStructure(t, map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - }, - "modules": map[string]interface{}{ - "module": map[string]interface{}{ - "main.tf": nil, - }, - }, - }) - defer cleanup() - - workingDir := NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) - - // Write and parse the global config file. - globalCfgPath := filepath.Join(tmp, "global.yaml") - Ok(t, os.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) - parser := &config.ParserValidator{} - globalCfg, err := parser.ParseGlobalCfg(globalCfgPath, valid.NewGlobalCfg("somedir")) - Ok(t, err) - - if c.repoCfg != "" { - Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) - } - - builder := &DefaultProjectCommandBuilder{ - ParserValidator: &config.ParserValidator{}, - ProjectFinder: &DefaultProjectFinder{}, - VCSClient: vcsClient, - WorkingDir: workingDir, - WorkingDirLocker: NewDefaultWorkingDirLocker(), - GlobalCfg: globalCfg, - PendingPlanFinder: &DefaultPendingPlanFinder{}, - ProjectCommandContextBuilder: &projectCommandContextBuilder{ - CommentBuilder: &CommentParser{}, - }, - AutoplanFileList: "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - EnableRegExpCmd: false, - } - - // We run a test for each type of command. - for _, cmd := range []command.Name{command.Plan, command.Apply} { - t.Run(cmd.String(), func(t *testing.T) { - ctxs, err := builder.buildProjectCommandCtx(&command.Context{ - RequestCtx: context.TODO(), - Log: logger, - Pull: models.PullRequest{ - BaseRepo: baseRepo, - }, - PullRequestStatus: models.PullReqStatus{ - Mergeable: true, - }, - }, cmd, "", []string{"flag"}, tmp, "project1", "myworkspace", false, "") - - if c.expErr != "" { - ErrEquals(t, c.expErr, err) - return - } - ctx := ctxs[0] - - Ok(t, err) - - // Construct expected steps. - var stepNames []string - switch cmd { - case command.Plan: - stepNames = c.expPlanSteps - } - var expSteps []valid.Step - for _, stepName := range stepNames { - expSteps = append(expSteps, valid.Step{ - StepName: stepName, - }) - } - - c.expCtx.CommandName = cmd - // Init fields we couldn't in our cases map. 
- c.expCtx.Steps = expSteps - ctx.PolicySets = emptyPolicySets - - // Job ID cannot be compared since its generated at random - ctx.JobID = "" - - Equals(t, c.expCtx, ctx) - // Equals() doesn't compare TF version properly so have to - // use .String(). - if c.expCtx.TerraformVersion != nil { - Equals(t, c.expCtx.TerraformVersion.String(), ctx.TerraformVersion.String()) - } - }) - } - }) - } -} - -func TestBuildProjectCmdCtx_WithRegExpCmdEnabled(t *testing.T) { - emptyPolicySets := valid.PolicySets{ - Version: nil, - PolicySets: []valid.PolicySet{}, - } - baseRepo := models.Repo{ - FullName: "owner/repo", - VCSHost: models.VCSHost{ - Hostname: "github.com", - }, - } - pull := models.PullRequest{ - BaseRepo: baseRepo, - } - cases := map[string]struct { - globalCfg string - repoCfg string - expErr string - expCtx command.ProjectContext - expPlanSteps []string - expApplySteps []string - }{ - - // Test that if we've set global defaults, that they are used but the - // allowed project config values also come through. - "global defaults with repo cfg": { - globalCfg: ` -repos: -- id: /.*/ - pull_request_workflow: default -pull_request_workflows: - default: - plan: - steps: - - init - - plan`, - repoCfg: ` -version: 3 -projects: -- name: myproject_1 - dir: project1 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 -- name: myproject_2 - dir: project2 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 -- name: myproject_3 - dir: project3 - workspace: myworkspace - autoplan: - enabled: true - when_modified: [../modules/**/*.tf] - terraform_version: v10.0 - `, - expCtx: command.ProjectContext{ - ApplyCmd: "atlantis apply -p myproject_1", - BaseRepo: baseRepo, - EscapedCommentArgs: []string{`\f\l\a\g`}, - AutoplanEnabled: true, - HeadRepo: models.Repo{}, - Log: logging.NewNoopCtxLogger(t), - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - Pull: pull, - ProjectName: "myproject_1", - ApplyRequirements: []string{}, - RepoConfigVersion: 3, - RePlanCmd: "atlantis plan -p myproject_1 -- flag", - RepoRelDir: "project1", - TerraformVersion: mustVersion("10.0"), - User: models.User{}, - Workspace: "myworkspace", - PolicySets: emptyPolicySets, - RequestCtx: context.TODO(), - }, - expPlanSteps: []string{"init", "plan"}, - }, - } - - for name, c := range cases { - t.Run(name, func(t *testing.T) { - tmp, cleanup := DirStructure(t, map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - }, - "modules": map[string]interface{}{ - "module": map[string]interface{}{ - "main.tf": nil, - }, - }, - }) - defer cleanup() - - workingDir := NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmp, false, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"modules/module/main.tf"}, nil) - - // Write and parse the global config file. 
- globalCfgPath := filepath.Join(tmp, "global.yaml") - Ok(t, os.WriteFile(globalCfgPath, []byte(c.globalCfg), 0600)) - parser := &config.ParserValidator{} - globalCfg, err := parser.ParseGlobalCfg(globalCfgPath, valid.NewGlobalCfg("somedir")) - Ok(t, err) - - if c.repoCfg != "" { - Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) - } - - builder := &DefaultProjectCommandBuilder{ - ParserValidator: &config.ParserValidator{}, - ProjectFinder: &DefaultProjectFinder{}, - VCSClient: vcsClient, - WorkingDir: workingDir, - WorkingDirLocker: NewDefaultWorkingDirLocker(), - GlobalCfg: globalCfg, - PendingPlanFinder: &DefaultPendingPlanFinder{}, - ProjectCommandContextBuilder: &projectCommandContextBuilder{ - CommentBuilder: &CommentParser{}, - }, - AutoplanFileList: "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - EnableRegExpCmd: true, - } - - // We run a test for each type of command, again specific projects - for _, cmd := range []command.Name{command.Plan, command.Apply} { - t.Run(cmd.String(), func(t *testing.T) { - ctxs, err := builder.buildProjectCommandCtx(&command.Context{ - Pull: models.PullRequest{ - BaseRepo: baseRepo, - }, - Log: logging.NewNoopCtxLogger(t), - PullRequestStatus: models.PullReqStatus{ - Mergeable: true, - }, - RequestCtx: context.TODO(), - }, cmd, "myproject_[1-2]", []string{"flag"}, tmp, "project1", "myworkspace", false, "") - - if c.expErr != "" { - ErrEquals(t, c.expErr, err) - return - } - ctx := ctxs[0] - - Ok(t, err) - - Equals(t, 2, len(ctxs)) - // Construct expected steps. - var stepNames []string - switch cmd { - case command.Plan: - stepNames = c.expPlanSteps - case command.Apply: - stepNames = c.expApplySteps - } - var expSteps []valid.Step - for _, stepName := range stepNames { - expSteps = append(expSteps, valid.Step{ - StepName: stepName, - }) - } - - c.expCtx.CommandName = cmd - // Init fields we couldn't in our cases map. - c.expCtx.Steps = expSteps - ctx.PolicySets = emptyPolicySets - - // Job ID cannot be compared since its generated at random - ctx.JobID = "" - - Equals(t, c.expCtx, ctx) - // Equals() doesn't compare TF version properly so have to - // use .String(). - if c.expCtx.TerraformVersion != nil { - Equals(t, c.expCtx.TerraformVersion.String(), ctx.TerraformVersion.String()) - } - }) - } - }) - } -} - -//nolint:unparam -func mustVersion(v string) *version.Version { - vers, err := version.NewVersion(v) - if err != nil { - panic(err) - } - return vers -} diff --git a/server/legacy/events/project_command_builder_test.go b/server/legacy/events/project_command_builder_test.go deleted file mode 100644 index 412555569..000000000 --- a/server/legacy/events/project_command_builder_test.go +++ /dev/null @@ -1,1323 +0,0 @@ -package events_test - -import ( - "context" - "fmt" - "os" - "path/filepath" - "strings" - "testing" - - "github.com/stretchr/testify/assert" - - . 
"github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/matchers" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/legacy/wrappers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" - "github.com/uber-go/tally/v4" -) - -func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) { - // expCtxFields define the ctx fields we're going to assert on. - // Since we're focused on autoplanning here, we don't validate all the - // fields so the tests are more obvious and targeted. - type expCtxFields struct { - ProjectName string - RepoRelDir string - Workspace string - } - cases := []struct { - Description string - AtlantisYAML string - ServerSideYAML string - exp []expCtxFields - }{ - { - Description: "simple atlantis.yaml", - AtlantisYAML: ` -version: 3 -projects: -- dir: . -`, - exp: []expCtxFields{ - { - ProjectName: "", - RepoRelDir: ".", - Workspace: "default", - }, - }, - }, - { - Description: "some projects disabled", - AtlantisYAML: ` -version: 3 -projects: -- dir: . - autoplan: - enabled: false -- dir: . - workspace: myworkspace - autoplan: - when_modified: ["main.tf"] -- dir: . - name: myname - workspace: myworkspace2 -`, - exp: []expCtxFields{ - { - ProjectName: "", - RepoRelDir: ".", - Workspace: "myworkspace", - }, - { - ProjectName: "myname", - RepoRelDir: ".", - Workspace: "myworkspace2", - }, - }, - }, - { - Description: "some projects disabled", - AtlantisYAML: ` -version: 3 -projects: -- dir: . - autoplan: - enabled: false -- dir: . - workspace: myworkspace - autoplan: - when_modified: ["main.tf"] -- dir: . 
- workspace: myworkspace2 -`, - exp: []expCtxFields{ - { - ProjectName: "", - RepoRelDir: ".", - Workspace: "myworkspace", - }, - { - ProjectName: "", - RepoRelDir: ".", - Workspace: "myworkspace2", - }, - }, - }, - { - Description: "no projects modified", - AtlantisYAML: ` -version: 3 -projects: -- dir: mydir -`, - exp: nil, - }, - } - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - }) - defer cleanup() - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) - if c.AtlantisYAML != "" { - err := os.WriteFile(filepath.Join(tmpDir, config.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) - Ok(t, err) - } - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - ctxs, err := builder.BuildAutoplanCommands(&command.Context{ - RequestCtx: context.TODO(), - PullRequestStatus: models.PullReqStatus{ - Mergeable: true, - }, - Log: logger, - Scope: scope, - }) - Ok(t, err) - Equals(t, len(c.exp), len(ctxs)) - for i, actCtx := range ctxs { - expCtx := c.exp[i] - Equals(t, expCtx.ProjectName, actCtx.ProjectName) - Equals(t, expCtx.RepoRelDir, actCtx.RepoRelDir) - Equals(t, expCtx.Workspace, actCtx.Workspace) - } - }) - } -} - -// Test building a plan and apply command for one project. -func TestDefaultProjectCommandBuilder_BuildSinglePlanApplyCommand(t *testing.T) { - cases := []struct { - Description string - AtlantisYAML string - Cmd command.Comment - ExpCommentArgs []string - ExpWorkspace string - ExpDir string - ExpProjectName string - ExpErr string - ExpApplyReqs []string - ExpParallelApply bool - ExpParallelPlan bool - }{ - { - Description: "no atlantis.yaml", - Cmd: command.Comment{ - RepoRelDir: ".", - Flags: []string{"commentarg"}, - Name: command.Plan, - Workspace: "myworkspace", - }, - AtlantisYAML: "", - ExpCommentArgs: []string{`\c\o\m\m\e\n\t\a\r\g`}, - ExpWorkspace: "myworkspace", - ExpDir: ".", - ExpApplyReqs: []string{}, - }, - { - Description: "no atlantis.yaml with project flag", - Cmd: command.Comment{ - RepoRelDir: ".", - Name: command.Plan, - ProjectName: "myproject", - }, - AtlantisYAML: "", - ExpErr: "cannot specify a project name unless an atlantis.yaml file exists to configure projects", - }, - { - Description: "simple atlantis.yaml", - Cmd: command.Comment{ - RepoRelDir: ".", - Name: command.Plan, - Workspace: "myworkspace", - }, - AtlantisYAML: ` -version: 3 -projects: -- dir: . 
- workspace: myworkspace - apply_requirements: [approved]`, - ExpApplyReqs: []string{"approved"}, - ExpWorkspace: "myworkspace", - ExpDir: ".", - }, - { - Description: "atlantis.yaml wrong dir", - Cmd: command.Comment{ - RepoRelDir: ".", - Name: command.Plan, - Workspace: "myworkspace", - }, - AtlantisYAML: ` -version: 3 -projects: -- dir: notroot - workspace: myworkspace - apply_requirements: [approved]`, - ExpWorkspace: "myworkspace", - ExpDir: ".", - ExpApplyReqs: []string{}, - }, - { - Description: "atlantis.yaml wrong workspace", - Cmd: command.Comment{ - RepoRelDir: ".", - Name: command.Plan, - Workspace: "myworkspace", - }, - AtlantisYAML: ` -version: 3 -projects: -- dir: . - workspace: notmyworkspace - apply_requirements: [approved]`, - ExpErr: "running commands in workspace \"myworkspace\" is not allowed because this directory is only configured for the following workspaces: notmyworkspace", - }, - { - Description: "atlantis.yaml with projectname", - Cmd: command.Comment{ - Name: command.Plan, - ProjectName: "myproject", - }, - AtlantisYAML: ` -version: 3 -projects: -- name: myproject - dir: . - workspace: myworkspace - apply_requirements: [approved]`, - ExpApplyReqs: []string{"approved"}, - ExpProjectName: "myproject", - ExpWorkspace: "myworkspace", - ExpDir: ".", - }, - { - Description: "atlantis.yaml with mergeable apply requirement", - Cmd: command.Comment{ - Name: command.Plan, - ProjectName: "myproject", - }, - AtlantisYAML: ` -version: 3 -projects: -- name: myproject - dir: . - workspace: myworkspace - apply_requirements: [mergeable]`, - ExpApplyReqs: []string{"mergeable"}, - ExpProjectName: "myproject", - ExpWorkspace: "myworkspace", - ExpDir: ".", - }, - { - Description: "atlantis.yaml with mergeable and approved apply requirements", - Cmd: command.Comment{ - Name: command.Plan, - ProjectName: "myproject", - }, - AtlantisYAML: ` -version: 3 -projects: -- name: myproject - dir: . - workspace: myworkspace - apply_requirements: [mergeable, approved]`, - ExpApplyReqs: []string{"mergeable", "approved"}, - ExpProjectName: "myproject", - ExpWorkspace: "myworkspace", - ExpDir: ".", - }, - { - Description: "atlantis.yaml with multiple dir/workspaces matching", - Cmd: command.Comment{ - Name: command.Plan, - RepoRelDir: ".", - Workspace: "myworkspace", - }, - AtlantisYAML: ` -version: 3 -projects: -- name: myproject - dir: . - workspace: myworkspace - apply_requirements: [approved] -- name: myproject2 - dir: . - workspace: myworkspace -`, - ExpErr: "must specify project name: more than one project defined in atlantis.yaml matched dir: \".\" workspace: \"myworkspace\"", - }, - { - Description: "atlantis.yaml with project flag not matching", - Cmd: command.Comment{ - Name: command.Plan, - RepoRelDir: ".", - Workspace: "default", - ProjectName: "notconfigured", - }, - AtlantisYAML: ` -version: 3 -projects: -- dir: . -`, - ExpErr: "no project with name \"notconfigured\" is defined in atlantis.yaml", - }, - { - Description: "atlantis.yaml with ParallelPlan Set to true", - Cmd: command.Comment{ - Name: command.Plan, - RepoRelDir: ".", - Workspace: "default", - ProjectName: "myproject", - }, - AtlantisYAML: ` -version: 3 -parallel_plan: true -projects: -- name: myproject - dir: . 
- workspace: myworkspace -`, - ExpParallelPlan: true, - ExpParallelApply: false, - ExpDir: ".", - ExpWorkspace: "myworkspace", - ExpProjectName: "myproject", - ExpApplyReqs: []string{}, - }, - } - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - for _, c := range cases { - // NOTE: we're testing both plan and apply here. - for _, cmdName := range []command.Name{command.Plan, command.Apply} { - t.Run(c.Description+"_"+cmdName.String(), func(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - }) - defer cleanup() - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) - When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) - if c.AtlantisYAML != "" { - err := os.WriteFile(filepath.Join(tmpDir, config.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) - Ok(t, err) - } - - globalCfg := valid.NewGlobalCfg("somedir") - globalCfg.Repos[0].AllowedOverrides = []string{"apply_requirements"} - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - events.NewDefaultWorkingDirLocker(), - globalCfg, - &events.DefaultPendingPlanFinder{}, - true, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - var actCtxs []command.ProjectContext - var err error - if cmdName == command.Plan { - cmd := c.Cmd - actCtxs, err = builder.BuildPlanCommands(&command.Context{ - RequestCtx: context.TODO(), - Log: logger, - Scope: scope, - }, &cmd) - } else { - cmd := c.Cmd - actCtxs, err = builder.BuildApplyCommands(&command.Context{Log: logger, Scope: scope, RequestCtx: context.TODO()}, &cmd) - } - - if c.ExpErr != "" { - ErrEquals(t, c.ExpErr, err) - return - } - Ok(t, err) - Equals(t, 1, len(actCtxs)) - actCtx := actCtxs[0] - Equals(t, c.ExpDir, actCtx.RepoRelDir) - Equals(t, c.ExpWorkspace, actCtx.Workspace) - Equals(t, c.ExpCommentArgs, actCtx.EscapedCommentArgs) - Equals(t, c.ExpProjectName, actCtx.ProjectName) - Equals(t, c.ExpApplyReqs, actCtx.ApplyRequirements) - Equals(t, c.ExpParallelApply, actCtx.ParallelApplyEnabled) - Equals(t, c.ExpParallelPlan, actCtx.ParallelPlanEnabled) - }) - } - } -} - -func TestDefaultProjectCommandBuilder_BuildPlanCommands(t *testing.T) { - // expCtxFields define the ctx fields we're going to assert on. - // Since we're focused on autoplanning here, we don't validate all the - // fields so the tests are more obvious and targeted. 
- type expCtxFields struct { - ProjectName string - RepoRelDir string - Workspace string - } - cases := map[string]struct { - DirStructure map[string]interface{} - AtlantisYAML string - ModifiedFiles []string - Exp []expCtxFields - }{ - "no atlantis.yaml": { - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - }, - }, - ModifiedFiles: []string{"project1/main.tf", "project2/main.tf"}, - Exp: []expCtxFields{ - { - ProjectName: "", - RepoRelDir: "project1", - Workspace: "default", - }, - { - ProjectName: "", - RepoRelDir: "project2", - Workspace: "default", - }, - }, - }, - "no modified files": { - DirStructure: map[string]interface{}{ - "main.tf": nil, - }, - ModifiedFiles: []string{}, - Exp: []expCtxFields{}, - }, - "follow when_modified config": { - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - }, - "project3": map[string]interface{}{ - "main.tf": nil, - }, - }, - AtlantisYAML: `version: 3 -projects: -- dir: project1 # project1 uses the defaults -- dir: project2 # project2 has autoplan disabled but should use default when_modified - autoplan: - enabled: false -- dir: project3 # project3 has an empty when_modified - autoplan: - enabled: false - when_modified: []`, - ModifiedFiles: []string{"project1/main.tf", "project2/main.tf", "project3/main.tf"}, - Exp: []expCtxFields{ - { - ProjectName: "", - RepoRelDir: "project1", - Workspace: "default", - }, - { - ProjectName: "", - RepoRelDir: "project2", - Workspace: "default", - }, - }, - }, - } - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - for name, c := range cases { - t.Run(name, func(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, c.DirStructure) - defer cleanup() - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) - When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil) - if c.AtlantisYAML != "" { - err := os.WriteFile(filepath.Join(tmpDir, config.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600) - Ok(t, err) - } - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - ctxs, err := builder.BuildPlanCommands( - &command.Context{ - Log: logger, - Scope: scope, - RequestCtx: context.TODO(), - }, - &command.Comment{ - RepoRelDir: "", - Flags: nil, - Name: command.Plan, - Workspace: "", - ProjectName: "", - }) - Ok(t, err) - Equals(t, len(c.Exp), len(ctxs)) - for i, actCtx := range ctxs { - expCtx := c.Exp[i] - Equals(t, expCtx.ProjectName, actCtx.ProjectName) - Equals(t, expCtx.RepoRelDir, actCtx.RepoRelDir) - Equals(t, expCtx.Workspace, actCtx.Workspace) - } - }) - } -} - -// Test building 
apply command for multiple projects when the comment -// isn't for a specific project, i.e. atlantis apply. -// In this case we should apply all outstanding plans. -func TestDefaultProjectCommandBuilder_BuildMultiApply(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "workspace1": map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - }, - "workspace2": map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - }, - }) - defer cleanup() - // Initialize git repos in each workspace so that the .tfplan files get - // picked up. - runCmd(t, filepath.Join(tmpDir, "workspace1"), "git", "init") - runCmd(t, filepath.Join(tmpDir, "workspace2"), "git", "init") - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.GetPullDir( - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest())). - ThenReturn(tmpDir, nil) - - logger := logging.NewNoopCtxLogger(t) - - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - nil, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - ctxs, err := builder.BuildApplyCommands( - &command.Context{ - Log: logger, - Scope: scope, - RequestCtx: context.TODO(), - }, - &command.Comment{ - RepoRelDir: "", - Flags: nil, - Name: command.Apply, - Workspace: "", - ProjectName: "", - }) - Ok(t, err) - Equals(t, 4, len(ctxs)) - Equals(t, "project1", ctxs[0].RepoRelDir) - Equals(t, "workspace1", ctxs[0].Workspace) - Equals(t, "project2", ctxs[1].RepoRelDir) - Equals(t, "workspace1", ctxs[1].Workspace) - Equals(t, "project1", ctxs[2].RepoRelDir) - Equals(t, "workspace2", ctxs[2].Workspace) - Equals(t, "project2", ctxs[3].RepoRelDir) - Equals(t, "workspace2", ctxs[3].Workspace) -} - -// Test that if a directory has a list of workspaces configured then we don't -// allow plans for other workspace names. -func TestDefaultProjectCommandBuilder_WrongWorkspaceName(t *testing.T) { - RegisterMockTestingT(t) - workingDir := mocks.NewMockWorkingDir() - - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "pulldir": map[string]interface{}{ - "notconfigured": map[string]interface{}{}, - }, - }) - defer cleanup() - repoDir := filepath.Join(tmpDir, "pulldir/notconfigured") - - yamlCfg := `version: 3 -projects: -- dir: . - workspace: default -- dir: . 
- workspace: staging -` - err := os.WriteFile(filepath.Join(repoDir, config.AtlantisYAMLFilename), []byte(yamlCfg), 0600) - Ok(t, err) - - When(workingDir.Clone( - matchers.AnyLoggingLogger(), - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString())).ThenReturn(repoDir, false, nil) - When(workingDir.GetWorkingDir( - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString())).ThenReturn(repoDir, nil) - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - nil, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - ctx := &command.Context{ - RequestCtx: context.TODO(), - HeadRepo: models.Repo{}, - Pull: models.PullRequest{}, - User: models.User{}, - Log: logging.NewNoopCtxLogger(t), - Scope: scope, - } - _, err = builder.BuildPlanCommands(ctx, &command.Comment{ - RepoRelDir: ".", - Flags: nil, - Name: command.Plan, - Workspace: "notconfigured", - ProjectName: "", - }) - ErrEquals(t, "running commands in workspace \"notconfigured\" is not allowed because this directory is only configured for the following workspaces: default, staging", err) -} - -// Test that extra comment args are escaped. -func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { - cases := []struct { - ExtraArgs []string - ExpEscapedArgs []string - }{ - { - ExtraArgs: []string{"arg1", "arg2"}, - ExpEscapedArgs: []string{`\a\r\g\1`, `\a\r\g\2`}, - }, - { - ExtraArgs: []string{"-var=$(touch bad)"}, - ExpEscapedArgs: []string{`\-\v\a\r\=\$\(\t\o\u\c\h\ \b\a\d\)`}, - }, - { - ExtraArgs: []string{"-- ;echo bad"}, - ExpEscapedArgs: []string{`\-\-\ \;\e\c\h\o\ \b\a\d`}, - }, - } - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - for _, c := range cases { - t.Run(strings.Join(c.ExtraArgs, " "), func(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - }) - defer cleanup() - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) - When(workingDir.GetWorkingDir(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - var actCtxs []command.ProjectContext - var err error - actCtxs, err = builder.BuildPlanCommands(&command.Context{ - RequestCtx: context.TODO(), - Log: logger, - Scope: scope, - }, &command.Comment{ - RepoRelDir: ".", - Flags: c.ExtraArgs, - Name: command.Plan, - 
Workspace: "default", - }) - Ok(t, err) - Equals(t, 1, len(actCtxs)) - actCtx := actCtxs[0] - Equals(t, c.ExpEscapedArgs, actCtx.EscapedCommentArgs) - }) - } -} - -// Test that terraform version is used when specified in terraform configuration -func TestDefaultProjectCommandBuilder_TerraformVersion(t *testing.T) { - // For the following tests: - // If terraform configuration is used, result should be `0.12.8`. - // If project configuration is used, result should be `0.12.6`. - // If default is to be used, result should be `nil`. - baseVersionConfig := ` -terraform { - required_version = "%s0.12.8" -} -` - - atlantisYamlContent := ` -version: 3 -projects: -- dir: project1 # project1 uses the defaults - terraform_version: v0.12.6 -` - - exactSymbols := []string{"", "="} - nonExactSymbols := []string{">", ">=", "<", "<=", "~="} - - type testCase struct { - DirStructure map[string]interface{} - AtlantisYAML string - ModifiedFiles []string - Exp map[string][]int - } - - testCases := make(map[string]testCase) - - for _, exactSymbol := range exactSymbols { - testCases[fmt.Sprintf("exact version in terraform config using \"%s\"", exactSymbol)] = testCase{ - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": fmt.Sprintf(baseVersionConfig, exactSymbol), - }, - }, - ModifiedFiles: []string{"project1/main.tf"}, - Exp: map[string][]int{ - "project1": {0, 12, 8}, - }, - } - } - - for _, nonExactSymbol := range nonExactSymbols { - testCases[fmt.Sprintf("non-exact version in terraform config using \"%s\"", nonExactSymbol)] = testCase{ - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": fmt.Sprintf(baseVersionConfig, nonExactSymbol), - }, - }, - ModifiedFiles: []string{"project1/main.tf"}, - Exp: map[string][]int{ - "project1": nil, - }, - } - } - - // atlantis.yaml should take precedence over terraform config - testCases["with project config and terraform config"] = testCase{ - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": fmt.Sprintf(baseVersionConfig, exactSymbols[0]), - }, - config.AtlantisYAMLFilename: atlantisYamlContent, - }, - ModifiedFiles: []string{"project1/main.tf", "project2/main.tf"}, - Exp: map[string][]int{ - "project1": {0, 12, 6}, - }, - } - - testCases["with project config only"] = testCase{ - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - }, - config.AtlantisYAMLFilename: atlantisYamlContent, - }, - ModifiedFiles: []string{"project1/main.tf"}, - Exp: map[string][]int{ - "project1": {0, 12, 6}, - }, - } - - testCases["neither project config or terraform config"] = testCase{ - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - }, - }, - ModifiedFiles: []string{"project1/main.tf", "project2/main.tf"}, - Exp: map[string][]int{ - "project1": nil, - }, - } - - testCases["project with different terraform config"] = testCase{ - DirStructure: map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": fmt.Sprintf(baseVersionConfig, exactSymbols[0]), - }, - "project2": map[string]interface{}{ - "main.tf": strings.Replace(fmt.Sprintf(baseVersionConfig, exactSymbols[0]), "0.12.8", "0.12.9", -1), - }, - }, - ModifiedFiles: []string{"project1/main.tf", "project2/main.tf"}, - Exp: map[string][]int{ - "project1": {0, 12, 8}, - "project2": {0, 12, 9}, - }, - } - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - for name, 
testCase := range testCases { - t.Run(name, func(t *testing.T) { - RegisterMockTestingT(t) - - tmpDir, cleanup := DirStructure(t, testCase.DirStructure) - - defer cleanup() - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn(testCase.ModifiedFiles, nil) - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone( - matchers.AnyLoggingLogger(), - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString())).ThenReturn(tmpDir, false, nil) - - When(workingDir.GetWorkingDir( - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString())).ThenReturn(tmpDir, nil) - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - actCtxs, err := builder.BuildPlanCommands( - &command.Context{ - RequestCtx: context.TODO(), - Log: logger, - Scope: scope, - }, - &command.Comment{ - RepoRelDir: "", - Flags: nil, - Name: command.Plan, - }) - - Ok(t, err) - Equals(t, len(testCase.Exp), len(actCtxs)) - for _, actCtx := range actCtxs { - if testCase.Exp[actCtx.RepoRelDir] != nil { - Assert(t, actCtx.TerraformVersion != nil, "TerraformVersion is nil.") - Equals(t, testCase.Exp[actCtx.RepoRelDir], actCtx.TerraformVersion.Segments()) - } else { - Assert(t, actCtx.TerraformVersion == nil, "TerraformVersion is supposed to be nil.") - } - } - }) - } -} - -func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanCommand(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - }) - defer cleanup() - - logger := logging.NewNoopCtxLogger(t) - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(matchers.AnyLoggingLogger(), matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest(), AnyString())).ThenReturn(tmpDir, false, nil) - vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"main.tf"}, nil) - - globalCfg := valid.NewGlobalCfg("somedir") - commentParser := &events.CommentParser{} - contextBuilder := wrappers. - WrapProjectContext(events.NewProjectCommandContextBuilder(commentParser)). 
- EnablePolicyChecks(commentParser) - - builder := events.NewProjectCommandBuilder( - contextBuilder, - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - events.NewDefaultWorkingDirLocker(), - globalCfg, - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - ctxs, err := builder.BuildAutoplanCommands(&command.Context{ - PullRequestStatus: models.PullReqStatus{ - Mergeable: true, - }, - RequestCtx: context.TODO(), - Log: logger, - Scope: scope, - }) - - Ok(t, err) - Equals(t, 2, len(ctxs)) - planCtx := ctxs[0] - policyCheckCtx := ctxs[1] - Equals(t, command.Plan, planCtx.CommandName) - Equals(t, globalCfg.PullRequestWorkflows["default"].Plan.Steps, planCtx.Steps) - Equals(t, command.PolicyCheck, policyCheckCtx.CommandName) - Equals(t, globalCfg.PullRequestWorkflows["default"].PolicyCheck.Steps, policyCheckCtx.Steps) -} - -// Test building version command for multiple projects -func TestDefaultProjectCommandBuilder_BuildVersionCommand(t *testing.T) { - RegisterMockTestingT(t) - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "workspace1": map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - }, - "workspace2": map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - }, - }) - defer cleanup() - // Initialize git repos in each workspace so that the .tfplan files get - // picked up. - runCmd(t, filepath.Join(tmpDir, "workspace1"), "git", "init") - runCmd(t, filepath.Join(tmpDir, "workspace2"), "git", "init") - - workingDir := mocks.NewMockWorkingDir() - When(workingDir.GetPullDir( - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest())). 
- ThenReturn(tmpDir, nil) - - logger := logging.NewNoopCtxLogger(t) - scope := tally.NewTestScope("test", nil) - - builder := events.NewProjectCommandBuilder( - events.NewProjectCommandContextBuilder(&events.CommentParser{}), - &config.ParserValidator{}, - &events.DefaultProjectFinder{}, - nil, - workingDir, - events.NewDefaultWorkingDirLocker(), - valid.NewGlobalCfg("somedir"), - &events.DefaultPendingPlanFinder{}, - false, - "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl", - logger, - events.InfiniteProjectsPerPR, - ) - - ctxs, err := builder.BuildVersionCommands( - &command.Context{ - RequestCtx: context.TODO(), - Log: logger, - Scope: scope, - }, - &command.Comment{ - RepoRelDir: "", - Flags: nil, - Name: command.Version, - Workspace: "", - ProjectName: "", - }) - Ok(t, err) - Equals(t, 4, len(ctxs)) - Equals(t, "project1", ctxs[0].RepoRelDir) - Equals(t, "workspace1", ctxs[0].Workspace) - Equals(t, "project2", ctxs[1].RepoRelDir) - Equals(t, "workspace1", ctxs[1].Workspace) - Equals(t, "project1", ctxs[2].RepoRelDir) - Equals(t, "workspace2", ctxs[2].Workspace) - Equals(t, "project2", ctxs[3].RepoRelDir) - Equals(t, "workspace2", ctxs[3].Workspace) -} - -func TestDefaultProjectCommandBuilder_BuildPolicyCheckCommands(t *testing.T) { - testWorkingDirLocker := mockWorkingDirLocker{} - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "workspace1": map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - }, - }) - defer cleanup() - // Initialize git repos in each workspace so that the .tfplan files get - // picked up. - runCmd(t, filepath.Join(tmpDir, "workspace1"), "git", "init") - testWorkingDir := mockWorkingDir{ - pullDir: tmpDir, - workingDir: tmpDir, - } - expectedProjects := []command.ProjectContext{ - { - CommandName: command.PolicyCheck, - }, - } - testContextBuilder := mockContextBuilder{ - projects: expectedProjects, - } - builder := events.DefaultProjectCommandBuilder{ - ParserValidator: &config.ParserValidator{}, - WorkingDir: testWorkingDir, - WorkingDirLocker: testWorkingDirLocker, - GlobalCfg: valid.NewGlobalCfg("somedir"), - PendingPlanFinder: &events.DefaultPendingPlanFinder{}, - ProjectCommandContextBuilder: testContextBuilder, - } - commandCtx := &command.Context{ - Log: logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("atlantis", map[string]string{}), - RequestCtx: context.Background(), - } - projects, err := builder.BuildPolicyCheckCommands(commandCtx) - assert.NoError(t, err) - assert.Equal(t, expectedProjects, projects) -} - -func TestDefaultProjectCommandBuilder_BuildPolicyCheckCommands_TryLockPullError(t *testing.T) { - testWorkingDirLocker := mockWorkingDirLocker{ - error: assert.AnError, - } - builder := events.DefaultProjectCommandBuilder{ - WorkingDirLocker: testWorkingDirLocker, - } - commandCtx := &command.Context{ - Log: logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("atlantis", map[string]string{}), - RequestCtx: context.Background(), - } - projects, err := builder.BuildPolicyCheckCommands(commandCtx) - assert.ErrorIs(t, err, assert.AnError) - assert.Empty(t, projects) -} - -func TestDefaultProjectCommandBuilder_BuildPolicyCheckCommands_GetPullDirError(t *testing.T) { - testWorkingDir := mockWorkingDir{ - pullErr: assert.AnError, - } - testWorkingDirLocker := mockWorkingDirLocker{} - builder := events.DefaultProjectCommandBuilder{ - WorkingDir: testWorkingDir, - WorkingDirLocker: testWorkingDirLocker, - } - commandCtx := &command.Context{ - Log: 
logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("atlantis", map[string]string{}), - RequestCtx: context.Background(), - } - projects, err := builder.BuildPolicyCheckCommands(commandCtx) - assert.ErrorIs(t, err, assert.AnError) - assert.Empty(t, projects) -} - -func TestDefaultProjectCommandBuilder_BuildPolicyCheckCommands_FindError(t *testing.T) { - testWorkingDir := mockWorkingDir{} - testWorkingDirLocker := mockWorkingDirLocker{} - builder := events.DefaultProjectCommandBuilder{ - WorkingDir: testWorkingDir, - WorkingDirLocker: testWorkingDirLocker, - PendingPlanFinder: &events.DefaultPendingPlanFinder{}, - } - commandCtx := &command.Context{ - Log: logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("atlantis", map[string]string{}), - RequestCtx: context.Background(), - } - projects, err := builder.BuildPolicyCheckCommands(commandCtx) - assert.ErrorIs(t, err, os.ErrNotExist) - assert.Empty(t, projects) -} - -func TestDefaultProjectCommandBuilder_BuildPolicyCheckCommands_GetWorkingDirErr(t *testing.T) { - testWorkingDirLocker := mockWorkingDirLocker{} - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "workspace1": map[string]interface{}{ - "project1": map[string]interface{}{ - "main.tf": nil, - "workspace.tfplan": nil, - }, - }, - }) - defer cleanup() - // Initialize git repos in each workspace so that the .tfplan files get - // picked up. - runCmd(t, filepath.Join(tmpDir, "workspace1"), "git", "init") - testWorkingDir := mockWorkingDir{ - pullDir: tmpDir, - workingDirErr: assert.AnError, - } - builder := events.DefaultProjectCommandBuilder{ - WorkingDir: testWorkingDir, - WorkingDirLocker: testWorkingDirLocker, - GlobalCfg: valid.NewGlobalCfg("somedir"), - PendingPlanFinder: &events.DefaultPendingPlanFinder{}, - } - commandCtx := &command.Context{ - Log: logging.NewNoopCtxLogger(t), - Scope: tally.NewTestScope("atlantis", map[string]string{}), - RequestCtx: context.Background(), - } - projects, err := builder.BuildPolicyCheckCommands(commandCtx) - assert.ErrorIs(t, err, assert.AnError) - assert.Empty(t, projects) -} - -type mockWorkingDirLocker struct { - error error -} - -func (l mockWorkingDirLocker) TryLock(_ string, _ int, _ string) (func(), error) { - return func() {}, nil -} - -func (l mockWorkingDirLocker) TryLockPull(_ string, _ int) (func(), error) { - if l.error != nil { - return func() {}, l.error - } - return func() {}, nil -} - -type mockContextBuilder struct { - projects []command.ProjectContext -} - -func (b mockContextBuilder) BuildProjectContext(_ *command.Context, _ command.Name, _ valid.MergedProjectCfg, _ []string, _ string, _ *command.ContextFlags) []command.ProjectContext { - return b.projects -} - -type mockWorkingDir struct { - pullDir string - workingDir string - pullErr error - workingDirErr error -} - -func (w mockWorkingDir) GetPullDir(_ models.Repo, _ models.PullRequest) (string, error) { - return w.pullDir, w.pullErr -} - -func (w mockWorkingDir) GetWorkingDir(models.Repo, models.PullRequest, string) (string, error) { - return w.workingDir, w.workingDirErr -} - -func (w mockWorkingDir) HasDiverged(logging.Logger, string, models.Repo) bool { - return false -} - -func (w mockWorkingDir) Delete(models.Repo, models.PullRequest) error { - return nil -} - -func (w mockWorkingDir) DeleteForWorkspace(_ models.Repo, _ models.PullRequest, _ string) error { - return nil -} - -func (w mockWorkingDir) Clone(_ logging.Logger, _ models.Repo, _ models.PullRequest, _ string) (string, bool, error) { - return "", false, nil -} diff --git 
a/server/legacy/events/project_command_context_builder.go b/server/legacy/events/project_command_context_builder.go deleted file mode 100644 index adb4d1ded..000000000 --- a/server/legacy/events/project_command_context_builder.go +++ /dev/null @@ -1,161 +0,0 @@ -package events - -import ( - "fmt" - "path/filepath" - "regexp" - - "github.com/hashicorp/go-version" - "github.com/hashicorp/terraform-config-inspect/tfconfig" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type ProjectCommandContextBuilder interface { - // BuildProjectContext builds project command contexts for atlantis commands - BuildProjectContext( - ctx *command.Context, - cmdName command.Name, - prjCfg valid.MergedProjectCfg, - commentArgs []string, - repoDir string, - contextFlags *command.ContextFlags, - ) []command.ProjectContext -} - -func NewProjectCommandContextBuilder( - commentBuilder CommentBuilder, -) ProjectCommandContextBuilder { - return &projectCommandContextBuilder{ - CommentBuilder: commentBuilder, - } -} - -type projectCommandContextBuilder struct { - CommentBuilder CommentBuilder -} - -func (cb *projectCommandContextBuilder) BuildProjectContext( - ctx *command.Context, - cmdName command.Name, - prjCfg valid.MergedProjectCfg, - commentArgs []string, - repoDir string, - contextFlags *command.ContextFlags, -) []command.ProjectContext { - return buildContext( - ctx, - cmdName, - getSteps(cmdName, prjCfg.PullRequestWorkflow, contextFlags.LogLevel), - cb.CommentBuilder, - prjCfg, - commentArgs, - repoDir, - contextFlags, - ) -} - -func buildContext( - ctx *command.Context, - cmdName command.Name, - steps []valid.Step, - commentBuilder CommentBuilder, - prjCfg valid.MergedProjectCfg, - commentArgs []string, - repoDir string, - contextFlags *command.ContextFlags, -) []command.ProjectContext { - projectCmds := make([]command.ProjectContext, 0) - - // If TerraformVersion not defined in config file look for a - // terraform.require_version block. - if prjCfg.TerraformVersion == nil { - prjCfg.TerraformVersion = getTfVersion(ctx, filepath.Join(repoDir, prjCfg.RepoRelDir)) - } - - projectCmds = append(projectCmds, - command.NewProjectContext( - ctx, - cmdName, - commentBuilder.BuildApplyComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name), - commentBuilder.BuildPlanComment(prjCfg.RepoRelDir, prjCfg.Workspace, prjCfg.Name, commentArgs), - prjCfg, - steps, - prjCfg.PolicySets, - escapeArgs(commentArgs), - contextFlags, - ctx.Scope, - ctx.PullRequestStatus, - ), - ) - - return projectCmds -} - -func getSteps( - cmdName command.Name, - workflow valid.Workflow, - logLevel string, -) (steps []valid.Step) { - switch cmdName { - case command.Plan: - steps = workflow.Plan.Steps - if logLevel != "" { - steps = valid.PrependLogEnvStep(steps, logLevel) - } - case command.Apply: - steps = workflow.Apply.Steps - if logLevel != "" { - steps = valid.PrependLogEnvStep(steps, logLevel) - } - case command.Version: - steps = []valid.Step{{ - StepName: "version", - }} - case command.PolicyCheck: - steps = workflow.PolicyCheck.Steps - } - return steps -} - -func escapeArgs(args []string) []string { - var escaped []string - for _, arg := range args { - var escapedArg string - for i := range arg { - escapedArg += "\\" + string(arg[i]) - } - escaped = append(escaped, escapedArg) - } - return escaped -} - -// Extracts required_version from Terraform configuration. -// Returns nil if unable to determine version from configuration. 
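Editor's note: the escapeArgs helper above (and the EscapeArgs test earlier in this diff) guards against shell injection by backslash-escaping every character of the extra comment arguments before they are appended to a terraform command. A standalone, stdlib-only sketch of that behavior:

package main

import "fmt"

// escapeArgs mirrors the character-by-character escaping used by the deleted
// projectCommandContextBuilder: every byte of every extra comment argument is
// prefixed with a backslash so the shell cannot interpret it (for example,
// command substitution in "-var=$(touch bad)").
func escapeArgs(args []string) []string {
	escaped := make([]string, 0, len(args))
	for _, arg := range args {
		var escapedArg string
		for i := range arg {
			escapedArg += "\\" + string(arg[i])
		}
		escaped = append(escaped, escapedArg)
	}
	return escaped
}

func main() {
	fmt.Println(escapeArgs([]string{"-var=$(touch bad)"}))
	// prints: [\-\v\a\r\=\$\(\t\o\u\c\h\ \b\a\d\)]
}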
-func getTfVersion(ctx *command.Context, absProjDir string) *version.Version { - module, diags := tfconfig.LoadModule(absProjDir) - if diags.HasErrors() { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("trying to detect required version: %s", diags.Error())) - return nil - } - if len(module.RequiredCore) != 1 { - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("cannot determine which version to use from terraform configuration, detected %d possibilities.", len(module.RequiredCore))) - return nil - } - requiredVersionSetting := module.RequiredCore[0] - - // We allow `= x.y.z`, `=x.y.z` or `x.y.z` where `x`, `y` and `z` are integers. - re := regexp.MustCompile(`^=?\s*([^\s]+)\s*$`) - matched := re.FindStringSubmatch(requiredVersionSetting) - if len(matched) == 0 { - return nil - } - version, err := version.NewVersion(matched[1]) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, err.Error()) - return nil - } - - ctx.Log.InfoContext(ctx.RequestCtx, fmt.Sprintf("detected module requires version: %q", version.String())) - return version -} diff --git a/server/legacy/events/project_command_context_builder_test.go b/server/legacy/events/project_command_context_builder_test.go deleted file mode 100644 index 4f956f149..000000000 --- a/server/legacy/events/project_command_context_builder_test.go +++ /dev/null @@ -1,105 +0,0 @@ -package events_test - -import ( - "context" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/stretchr/testify/assert" - "github.com/uber-go/tally/v4" -) - -func TestProjectCommandContextBuilder_PullStatus(t *testing.T) { - scope := tally.NewTestScope("test", nil) - mockCommentBuilder := mocks.NewMockCommentBuilder() - subject := events.NewProjectCommandContextBuilder(mockCommentBuilder) - - projRepoRelDir := "dir1" - projWorkspace := "default" - projName := "project1" - - projCfg := valid.MergedProjectCfg{ - RepoRelDir: projRepoRelDir, - Workspace: projWorkspace, - Name: projName, - PullRequestWorkflow: valid.Workflow{ - Name: valid.DefaultWorkflowName, - Plan: valid.DefaultLocklessPlanStage, - }, - } - - pullStatus := &models.PullStatus{ - Projects: []models.ProjectStatus{}, - } - - commandCtx := &command.Context{ - Log: logging.NewNoopCtxLogger(t), - PullStatus: pullStatus, - Scope: scope, - RequestCtx: context.TODO(), - } - - expectedApplyCmt := "Apply Comment" - expectedPlanCmt := "Plan Comment" - - t.Run("with project name defined", func(t *testing.T) { - When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, projName, []string{})).ThenReturn(expectedPlanCmt) - When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, projName)).ThenReturn(expectedApplyCmt) - - pullStatus.Projects = []models.ProjectStatus{ - { - Status: models.ErroredPolicyCheckStatus, - ProjectName: "project1", - RepoRelDir: "dir1", - }, - } - contextFlags := &command.ContextFlags{ - ParallelApply: false, - ParallelPlan: false, - ForceApply: false, - } - result := subject.BuildProjectContext(commandCtx, command.Plan, projCfg, []string{}, "some/dir", contextFlags) - - assert.Equal(t, models.ErroredPolicyCheckStatus, result[0].ProjectPlanStatus) - }) - - t.Run("when ParallelApply is set to true", func(t *testing.T) 
{ - projCfg.Name = "Apply Comment" - When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, "", []string{})).ThenReturn(expectedPlanCmt) - When(mockCommentBuilder.BuildApplyComment(projRepoRelDir, projWorkspace, "")).ThenReturn(expectedApplyCmt) - pullStatus.Projects = []models.ProjectStatus{ - { - Status: models.ErroredPlanStatus, - RepoRelDir: "dir2", - }, - { - Status: models.ErroredPolicyCheckStatus, - RepoRelDir: "dir1", - }, - } - contextFlags := &command.ContextFlags{ - ParallelApply: true, - ParallelPlan: false, - ForceApply: false, - } - result := subject.BuildProjectContext(commandCtx, command.Plan, projCfg, []string{}, "some/dir", contextFlags) - - assert.True(t, result[0].ParallelApplyEnabled) - assert.False(t, result[0].ParallelPlanEnabled) - }) - - t.Run("when log level is set to warn", func(t *testing.T) { - result := subject.BuildProjectContext(commandCtx, command.Plan, projCfg, []string{}, "some/dir", &command.ContextFlags{LogLevel: "warn"}) - assert.Contains(t, result[0].Steps, valid.Step{ - StepName: "env", - EnvVarName: valid.TfLogEnvVar, - EnvVarValue: "warn", - }) - }) -} diff --git a/server/legacy/events/project_command_output_wrapper.go b/server/legacy/events/project_command_output_wrapper.go deleted file mode 100644 index 4610af030..000000000 --- a/server/legacy/events/project_command_output_wrapper.go +++ /dev/null @@ -1,71 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -type projectStatusUpdater interface { - UpdateProjectStatus(ctx command.ProjectContext, status models.VCSStatus) (string, error) -} - -// ProjectOutputWrapper is a decorator that creates a new PR status check per project. -// The status contains a url that outputs current progress of the terraform plan/apply command. 
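Editor's note: ProjectOutputWrapper (below) is a textbook decorator: it reports a pending status, delegates to the wrapped runner, then reports success or failure. A generic sketch of that shape, using hypothetical Runner and StatusSink types that are not Atlantis APIs:

package main

import "fmt"

// Runner and StatusSink are illustrative stand-ins for the wrapped
// ProjectCommandRunner and the project status updater.
type Runner interface {
	Run(project string) error
}

type StatusSink interface {
	Update(project, state string)
}

// StatusDecorator reports "pending" before delegating and "success" or
// "failed" afterwards, the same shape as the ProjectOutputWrapper below.
type StatusDecorator struct {
	Next   Runner
	Status StatusSink
}

func (d *StatusDecorator) Run(project string) error {
	d.Status.Update(project, "pending")
	if err := d.Next.Run(project); err != nil {
		d.Status.Update(project, "failed")
		return err
	}
	d.Status.Update(project, "success")
	return nil
}

type noopRunner struct{}

func (noopRunner) Run(string) error { return nil }

type printSink struct{}

func (printSink) Update(project, state string) { fmt.Println(project, "->", state) }

func main() {
	r := &StatusDecorator{Next: noopRunner{}, Status: printSink{}}
	_ = r.Run("project1")
}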
-type ProjectOutputWrapper struct { - ProjectCommandRunner - - ProjectStatusUpdater projectStatusUpdater -} - -func (p *ProjectOutputWrapper) Plan(ctx command.ProjectContext) command.ProjectResult { - statusID, err := p.ProjectStatusUpdater.UpdateProjectStatus(ctx, models.PendingVCSStatus) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - - // Write the statusID to project context which is used by the command runners when making consecutive status updates - // Noop when checks is not enabled - ctx.StatusID = statusID - - result := p.ProjectCommandRunner.Plan(ctx) - if result.Error != nil || result.Failure != "" { - if _, err := p.ProjectStatusUpdater.UpdateProjectStatus(ctx, models.FailedVCSStatus); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - - return result - } - - if _, err := p.ProjectStatusUpdater.UpdateProjectStatus(ctx, models.SuccessVCSStatus); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - return result -} - -func (p *ProjectOutputWrapper) Apply(ctx command.ProjectContext) command.ProjectResult { - statusID, err := p.ProjectStatusUpdater.UpdateProjectStatus(ctx, models.PendingVCSStatus) - if err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - - // Write the statusID to project context which is used by the command runners when making consecutive status updates - // Noop when checks is not enabled - ctx.StatusID = statusID - - result := p.ProjectCommandRunner.Apply(ctx) - if result.Error != nil || result.Failure != "" { - if _, err := p.ProjectStatusUpdater.UpdateProjectStatus(ctx, models.FailedVCSStatus); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - - return result - } - - if _, err := p.ProjectStatusUpdater.UpdateProjectStatus(ctx, models.SuccessVCSStatus); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("updating project PR status %v", err)) - } - - return result -} diff --git a/server/legacy/events/project_command_pool_executor.go b/server/legacy/events/project_command_pool_executor.go deleted file mode 100644 index d4f3f1f55..000000000 --- a/server/legacy/events/project_command_pool_executor.go +++ /dev/null @@ -1,52 +0,0 @@ -package events - -import ( - "sync" - - "github.com/remeh/sizedwaitgroup" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type prjCmdRunnerFunc func(ctx command.ProjectContext) command.ProjectResult - -func runProjectCmdsParallel( - cmds []command.ProjectContext, - runnerFunc prjCmdRunnerFunc, - poolSize int, -) command.Result { - var results []command.ProjectResult - mux := &sync.Mutex{} - - wg := sizedwaitgroup.New(poolSize) - for _, pCmd := range cmds { - pCmd := pCmd - var execute func() - wg.Add() - - execute = func() { - defer wg.Done() - res := runnerFunc(pCmd) - mux.Lock() - results = append(results, res) - mux.Unlock() - } - - go execute() - } - - wg.Wait() - return command.Result{ProjectResults: results} -} - -func runProjectCmds( - cmds []command.ProjectContext, - runnerFunc prjCmdRunnerFunc, -) command.Result { - var results []command.ProjectResult - for _, pCmd := range cmds { - res := runnerFunc(pCmd) - - results = append(results, res) - } - return command.Result{ProjectResults: results} -} diff --git a/server/legacy/events/project_command_runner.go b/server/legacy/events/project_command_runner.go 
deleted file mode 100644 index d91fb254f..000000000 --- a/server/legacy/events/project_command_runner.go +++ /dev/null @@ -1,336 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "fmt" - "os" - "path/filepath" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -const OperationComplete = true - -// DirNotExistErr is an error caused by the directory not existing. -type DirNotExistErr struct { - RepoRelDir string -} - -// Error implements the error interface. -func (d DirNotExistErr) Error() string { - return fmt.Sprintf("dir %q does not exist", d.RepoRelDir) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_lock_url_generator.go LockURLGenerator - -// LockURLGenerator generates urls to locks. -type LockURLGenerator interface { - // GenerateLockURL returns the full URL to the lock at lockID. - GenerateLockURL(lockID string) string -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_webhooks_sender.go WebhooksSender - -// WebhooksSender sends webhook. -type WebhooksSender interface { - // Send sends the webhook. - Send(log logging.Logger, res webhooks.ApplyResult) error -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_runner.go ProjectCommandRunner - -type ProjectPlanCommandRunner interface { - // Plan runs terraform plan for the project described by ctx. - Plan(ctx command.ProjectContext) command.ProjectResult -} - -type ProjectApplyCommandRunner interface { - // Apply runs terraform apply for the project described by ctx. - Apply(ctx command.ProjectContext) command.ProjectResult -} - -type ProjectPolicyCheckCommandRunner interface { - // PolicyCheck runs OPA defined policies for the project desribed by ctx. - PolicyCheck(ctx command.ProjectContext) command.ProjectResult -} - -type ProjectVersionCommandRunner interface { - // Version runs terraform version for the project described by ctx. - Version(ctx command.ProjectContext) command.ProjectResult -} - -// ProjectCommandRunner runs project commands. A project command is a command -// for a specific TF project. 
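Editor's note: runProjectCmdsParallel above caps concurrency with sizedwaitgroup and guards the shared results slice with a mutex. The stdlib-only sketch below shows the same bounded-parallelism pattern with a buffered channel acting as the semaphore; runParallel and its parameters are illustrative assumptions, not the deleted API.

package main

import (
	"fmt"
	"sync"
)

// runParallel runs fn over every item with at most poolSize goroutines in
// flight, collecting results under a mutex, the same pattern as the deleted
// runProjectCmdsParallel but with a channel semaphore instead of
// sizedwaitgroup.
func runParallel(items []string, poolSize int, fn func(string) string) []string {
	var (
		mu      sync.Mutex
		wg      sync.WaitGroup
		results []string
	)
	sem := make(chan struct{}, poolSize)
	for _, item := range items {
		item := item
		wg.Add(1)
		sem <- struct{}{} // blocks once poolSize goroutines are running
		go func() {
			defer wg.Done()
			defer func() { <-sem }()
			res := fn(item)
			mu.Lock()
			results = append(results, res)
			mu.Unlock()
		}()
	}
	wg.Wait()
	return results
}

func main() {
	out := runParallel([]string{"project1", "project2", "project3"}, 2, func(p string) string {
		return "planned " + p
	})
	fmt.Println(out)
}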
-type ProjectCommandRunner interface { - ProjectPlanCommandRunner - ProjectApplyCommandRunner - ProjectPolicyCheckCommandRunner - ProjectVersionCommandRunner -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_job_closer.go JobCloser - -// Job Closer closes a job by marking op complete and clearing up buffers if logs are successfully persisted -type JobCloser interface { - CloseJob(jobID string, repo models.Repo) -} - -func NewProjectCommandRunner( - stepsRunner runtime.StepsRunner, - workingDir WorkingDir, - webhooks WebhooksSender, - workingDirLocker WorkingDirLocker, - aggregateApplyRequirements ApplyRequirement, -) *DefaultProjectCommandRunner { - return &DefaultProjectCommandRunner{ - StepsRunner: stepsRunner, - WorkingDir: workingDir, - Webhooks: webhooks, - WorkingDirLocker: workingDirLocker, - AggregateApplyRequirements: aggregateApplyRequirements, - } -} - -// DefaultProjectCommandRunner implements ProjectCommandRunner. -type DefaultProjectCommandRunner struct { //create object and test - StepsRunner runtime.StepsRunner - WorkingDir WorkingDir - Webhooks WebhooksSender - WorkingDirLocker WorkingDirLocker - AggregateApplyRequirements ApplyRequirement -} - -// Plan runs terraform plan for the project described by ctx. -func (p *DefaultProjectCommandRunner) Plan(ctx command.ProjectContext) command.ProjectResult { - planSuccess, err := p.doPlan(ctx) - return command.ProjectResult{ - Command: command.Plan, - PlanSuccess: planSuccess, - Error: err, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - StatusID: ctx.StatusID, - JobID: ctx.JobID, - } -} - -// PolicyCheck evaluates policies defined with Rego for the project described by ctx. -func (p *DefaultProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - policySuccess, failure, err := p.doPolicyCheck(ctx) - return command.ProjectResult{ - Command: command.PolicyCheck, - PolicyCheckSuccess: policySuccess, - Error: err, - Failure: failure, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - StatusID: ctx.StatusID, - } -} - -// Apply runs terraform apply for the project described by ctx. 
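Editor's note: ProjectCommandRunner above is built by embedding the four single-method runner interfaces, which lets callers depend on only the slice of behavior they need. A minimal illustration of that composition, using hypothetical Planner, Applier, and Commander names rather than the Atlantis types:

package main

import "fmt"

// Single-method interfaces compose into a wider one, mirroring how
// ProjectCommandRunner embeds the plan/apply/policy-check/version runners.
type Planner interface{ Plan(project string) string }

type Applier interface{ Apply(project string) string }

type Commander interface {
	Planner
	Applier
}

// runner satisfies Commander by implementing both embedded interfaces.
type runner struct{}

func (runner) Plan(p string) string  { return "plan for " + p }
func (runner) Apply(p string) string { return "apply for " + p }

func main() {
	var c Commander = runner{}
	fmt.Println(c.Plan("project1"))
	fmt.Println(c.Apply("project1"))
}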
-func (p *DefaultProjectCommandRunner) Apply(ctx command.ProjectContext) command.ProjectResult { - applyOut, failure, err := p.doApply(ctx) - return command.ProjectResult{ - Command: command.Apply, - Failure: failure, - Error: err, - ApplySuccess: applyOut, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - StatusID: ctx.StatusID, - JobID: ctx.JobID, - } -} - -func (p *DefaultProjectCommandRunner) ApprovePolicies(ctx command.ProjectContext) command.ProjectResult { - return command.ProjectResult{ - Command: command.PolicyCheck, - PolicyCheckSuccess: &models.PolicyCheckSuccess{ - PolicyCheckOutput: "Policies approved", - }, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - StatusID: ctx.StatusID, - } -} - -func (p *DefaultProjectCommandRunner) Version(ctx command.ProjectContext) command.ProjectResult { - versionOut, failure, err := p.doVersion(ctx) - return command.ProjectResult{ - Command: command.Version, - Failure: failure, - Error: err, - VersionSuccess: versionOut, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - } -} - -func (p *DefaultProjectCommandRunner) doPolicyCheck(ctx command.ProjectContext) (*models.PolicyCheckSuccess, string, error) { - // Acquire internal lock for the directory we're going to operate in. - // We should refactor this to keep the lock for the duration of plan and policy check since as of now - // there is a small gap where we don't have the lock and if we can't get this here, we should just unlock the PR. - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.ProjectCloneDir()) - if err != nil { - return nil, "", err - } - defer unlockFn() - - // we shouldn't attempt to clone this again. If changes occur to the pull request while the plan is happening - // that shouldn't affect this particular operation. - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, ctx.Workspace) - if err != nil { - if os.IsNotExist(err) { - return nil, "", errors.New("project has not been cloned–did you run plan?") - } - return nil, "", err - } - absPath := filepath.Join(repoDir, ctx.RepoRelDir) - if _, err = os.Stat(absPath); os.IsNotExist(err) { - return nil, "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir} - } - - outputs, err := p.StepsRunner.Run(ctx.RequestCtx, ctx, absPath) - if err != nil { - // Note: we are explicitly not unlocking the pr here since a failing - // policy check will require approval. This is a bit tricky and hacky - // solution because we will be missing legitimate failures and assume - // any failure is a policy check failure. - return nil, fmt.Sprintf("%s\n%s", err, outputs), nil - } - - return &models.PolicyCheckSuccess{ - PolicyCheckOutput: outputs, - - // set this to false right now because we don't have this information - // TODO: refactor the templates in a sane way so we don't need this - HasDiverged: false, - }, "", nil -} - -func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*models.PlanSuccess, error) { - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.ProjectCloneDir()) - if err != nil { - return nil, err - } - defer unlockFn() - - // Clone is idempotent so okay to run even if the repo was already cloned. 
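Editor's note: doPolicyCheck above and doPlan just below both acquire the per-directory lock via TryLock, which returns an unlock closure that is deferred immediately so every return path releases it. The sketch below keeps only that shape; Locker is a hypothetical, simplified stand-in that blocks instead of failing fast like the real WorkingDirLocker.

package main

import (
	"fmt"
	"sync"
)

// Locker serializes work per key and hands back an unlock closure, echoing
// the TryLock / defer unlockFn() discipline in doPlan and doPolicyCheck.
// Unlike the real locker it simply blocks and never returns an error.
type Locker struct {
	mu    sync.Mutex
	locks map[string]*sync.Mutex
}

func NewLocker() *Locker {
	return &Locker{locks: map[string]*sync.Mutex{}}
}

func (l *Locker) Lock(key string) func() {
	l.mu.Lock()
	m, ok := l.locks[key]
	if !ok {
		m = &sync.Mutex{}
		l.locks[key] = m
	}
	l.mu.Unlock()
	m.Lock()
	return m.Unlock
}

func plan(l *Locker, repo, dir string) error {
	unlock := l.Lock(repo + "/" + dir)
	defer unlock() // released on every return path, including early errors
	fmt.Println("planning", dir)
	return nil
}

func main() {
	l := NewLocker()
	_ = plan(l, "owner/repo", "project1")
}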
- repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.ProjectCloneDir()) - if cloneErr != nil { - return nil, cloneErr - } - projAbsPath := filepath.Join(repoDir, ctx.RepoRelDir) - if _, err = os.Stat(projAbsPath); os.IsNotExist(err) { - return nil, DirNotExistErr{RepoRelDir: ctx.RepoRelDir} - } - - outputs, err := p.StepsRunner.Run(ctx.RequestCtx, ctx, projAbsPath) - - if err != nil { - return nil, fmt.Errorf("%s\n%s", err, outputs) - } - - return &models.PlanSuccess{ - TerraformOutput: outputs, - RePlanCmd: ctx.RePlanCmd, - ApplyCmd: ctx.ApplyCmd, - HasDiverged: hasDiverged, - }, nil -} - -func (p *DefaultProjectCommandRunner) doApply(ctx command.ProjectContext) (applyOut string, failure string, err error) { - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, ctx.Workspace) - if err != nil { - if os.IsNotExist(err) { - return "", "", errors.New("project has not been cloned–did you run plan?") - } - return "", "", err - } - absPath := filepath.Join(repoDir, ctx.RepoRelDir) - if _, err = os.Stat(absPath); os.IsNotExist(err) { - return "", "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir} - } - - if !ctx.ForceApply && ctx.WorkflowModeType != valid.PlatformWorkflowMode { - failure, err = p.AggregateApplyRequirements.ValidateProject(repoDir, ctx) - if failure != "" || err != nil { - return "", failure, err - } - } - // Acquire internal lock for the directory we're going to operate in. - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace) - if err != nil { - return "", "", err - } - defer unlockFn() - - outputs, err := p.StepsRunner.Run(ctx.RequestCtx, ctx, absPath) - - p.Webhooks.Send(ctx.Log, webhooks.ApplyResult{ // nolint: errcheck - Workspace: ctx.Workspace, - User: ctx.User, - Repo: ctx.Pull.BaseRepo, - Pull: ctx.Pull, - Success: err == nil, - Directory: ctx.RepoRelDir, - }) - - if err != nil { - return "", "", fmt.Errorf("%s\n%s", err, outputs) - } - - return outputs, "", nil -} - -func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (versionOut string, failure string, err error) { - repoDir, err := p.WorkingDir.GetWorkingDir(ctx.Pull.BaseRepo, ctx.Pull, ctx.Workspace) - if err != nil { - if os.IsNotExist(err) { - return "", "", errors.New("project has not been cloned–did you run plan?") - } - return "", "", err - } - absPath := filepath.Join(repoDir, ctx.RepoRelDir) - if _, err = os.Stat(absPath); os.IsNotExist(err) { - return "", "", DirNotExistErr{RepoRelDir: ctx.RepoRelDir} - } - - // Acquire internal lock for the directory we're going to operate in. - unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace) - if err != nil { - return "", "", err - } - defer unlockFn() - - outputs, err := p.StepsRunner.Run(ctx.RequestCtx, ctx, absPath) - if err != nil { - return "", "", fmt.Errorf("%s\n%s", err, outputs) - } - - return outputs, "", nil -} diff --git a/server/legacy/events/project_command_runner_test.go b/server/legacy/events/project_command_runner_test.go deleted file mode 100644 index d4dc8b8c8..000000000 --- a/server/legacy/events/project_command_runner_test.go +++ /dev/null @@ -1,756 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "context" - "errors" - "fmt" - "os" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - smocks "github.com/runatlantis/atlantis/server/legacy/core/runtime/mocks" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - commandMocks "github.com/runatlantis/atlantis/server/legacy/events/command/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/legacy/wrappers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" - "github.com/stretchr/testify/assert" -) - -// Test that it runs the expected plan steps. -func TestDefaultProjectCommandRunner_Plan(t *testing.T) { - RegisterMockTestingT(t) - mockWorkingDir := mocks.NewMockWorkingDir() - mockLocker := mocks.NewMockProjectLocker() - mockStepsRunner := smocks.NewMockStepsRunner() - applyRequirementHandler := &events.AggregateApplyRequirements{ - WorkingDir: workingDir, - } - - runner := events.NewProjectCommandRunner( - mockStepsRunner, - mockWorkingDir, - nil, - events.NewDefaultWorkingDirLocker(), - applyRequirementHandler, - ) - - wrappedRunner := wrappers. - WrapProjectRunner(runner). - WithSync(mockLocker, mockURLGenerator{}) - - When(mockLocker.TryLock( - matchers.AnyContextContext(), - matchers.AnyLoggingLogger(), - matchers.AnyModelsPullRequest(), - matchers.AnyModelsUser(), - AnyString(), - matchers.AnyModelsProject(), - )).ThenReturn(&events.TryLockResponse{ - LockAcquired: true, - LockKey: "lock-key", - }, nil) - repoDir, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.Clone( - matchers.AnyLoggingLogger(), - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString(), - )).ThenReturn(repoDir, false, nil) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Steps: []valid.Step{ - { - StepName: "env", - EnvVarName: "name", - EnvVarValue: "value", - }, - { - StepName: "run", - }, - { - StepName: "apply", - }, - { - StepName: "plan", - }, - { - StepName: "init", - }, - }, - Workspace: "default", - RepoRelDir: ".", - RequestCtx: ctx, - } - - When(mockStepsRunner.Run(ctx, prjCtx, repoDir)).ThenReturn("run\napply\nplan\ninit", nil) - firstRes := wrappedRunner.Plan(prjCtx) - - Assert(t, firstRes.PlanSuccess != nil, "exp plan success") - Equals(t, "https://lock-key", firstRes.PlanSuccess.LockURL) - Equals(t, "run\napply\nplan\ninit", firstRes.PlanSuccess.TerraformOutput) - mockStepsRunner.VerifyWasCalledOnce().Run(ctx, prjCtx, repoDir) -} - -// Test that it runs the expected plan steps. 
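Editor's note: the runner tests that follow stub their collaborators two ways, pegomock-generated mocks driven by When(...).ThenReturn(...) and hand-rolled fakes like the mockWorkingDir earlier in this diff. A minimal hand-rolled fake, with hypothetical Cloner and fakeCloner names, looks like this:

package example

import "testing"

// Cloner is an illustrative stand-in for the WorkingDir dependency; fakeCloner
// returns canned values, the same approach as the hand-rolled mockWorkingDir
// used by the deleted builder tests.
type Cloner interface {
	Clone(repo string) (dir string, err error)
}

type fakeCloner struct {
	dir string
	err error
}

func (f fakeCloner) Clone(string) (string, error) { return f.dir, f.err }

func planDir(c Cloner, repo string) (string, error) {
	return c.Clone(repo)
}

func TestPlanDir(t *testing.T) {
	got, err := planDir(fakeCloner{dir: "/tmp/repo"}, "owner/repo")
	if err != nil {
		t.Fatalf("unexpected error: %v", err)
	}
	if got != "/tmp/repo" {
		t.Fatalf("got %q, want %q", got, "/tmp/repo")
	}
}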
-func TestDefaultProjectCommandRunner_PlanWithSync(t *testing.T) { - RegisterMockTestingT(t) - prjCtx := command.ProjectContext{ - RequestCtx: context.TODO(), - Log: logging.NewNoopCtxLogger(t), - Pull: models.PullRequest{ - BaseRepo: models.Repo{ - FullName: "test", - }, - Num: 1, - }, - Workspace: "default", - RepoRelDir: ".", - } - - cases := []struct { - description string - usePrjLock bool - expFailure string - expPlanStatus models.ProjectPlanStatus - }{ - { - description: "plan with locking", - usePrjLock: true, - expFailure: "This project is currently locked by an unapplied plan from pull . To continue, delete the lock from or apply that plan and merge the pull request.\n\nOnce the lock is released, comment `atlantis plan` here to re-plan.", - expPlanStatus: models.ErroredPlanStatus, - }, - { - description: "plan without locking", - usePrjLock: false, - expFailure: "", - expPlanStatus: models.PlannedPlanStatus, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - mockWorkingDir := mocks.NewMockWorkingDir() - mockVcsClient := vcsmocks.NewMockClient() - - dataDir, cleanup := TempDir(t) - defer cleanup() - - boltdb, err := db.New(dataDir) - Ok(t, err) - - lockingClient := locking.NewClient(boltdb) - projectLocker := &events.DefaultProjectLocker{ - Locker: lockingClient, - VCSClient: mockVcsClient, - } - - applyRequirementHandler := &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - } - - runner := events.NewProjectCommandRunner( - smocks.NewMockStepsRunner(), - mockWorkingDir, - nil, - events.NewDefaultWorkingDirLocker(), - applyRequirementHandler, - ) - - targetCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Pull: models.PullRequest{ - BaseRepo: models.Repo{ - FullName: "test", - }, - Num: 2, - }, - Workspace: "default", - RepoRelDir: ".", - } - - wrappedRunner := wrappers.WrapProjectRunner(runner) - if c.usePrjLock { - wrappedRunner = wrappedRunner.WithSync(projectLocker, &mockURLGenerator{}) - } - - firstRes := wrappedRunner.Plan(prjCtx) - targetRes := wrappedRunner.Plan(targetCtx) - - Assert(t, firstRes.IsSuccessful(), "exp first prjCtx to succeed") - Equals(t, targetRes.PlanStatus(), c.expPlanStatus) - Equals(t, targetRes.Failure, c.expFailure) - }) - } -} - -type strictTestCommitStatusUpdater struct { - statusUpdaters []*testCommitStatusUpdater - count int -} - -// UpdateProject(ctx context.Context, projectCtx ProjectContext, cmdName fmt.Stringer, status models.CommitStatus, url string, statusID string) (string, error) -func (t *strictTestCommitStatusUpdater) UpdateProject(ctx context.Context, projectCtx command.ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) { - if t.count > (len(t.statusUpdaters) - 1) { - return "", errors.New("more calls than expected") - } - - statusID, err := t.statusUpdaters[t.count].UpdateProject(ctx, projectCtx, cmdName, status, url, statusID) - t.count++ - return statusID, err -} - -type testCommitStatusUpdater struct { - t *testing.T - expCtx context.Context - expPrjCtx command.ProjectContext - expCmdName fmt.Stringer - expStatus models.VCSStatus - expURL string - expStatusID string - - statusID string - err error -} - -func (t *testCommitStatusUpdater) UpdateProject(ctx context.Context, projectCtx command.ProjectContext, cmdName fmt.Stringer, status models.VCSStatus, url string, statusID string) (string, error) { - assert.Equal(t.t, t.expCtx, ctx) - assert.Equal(t.t, t.expPrjCtx, projectCtx) - assert.Equal(t.t, t.expCmdName, 
cmdName) - assert.Equal(t.t, t.expStatus, status) - assert.Equal(t.t, t.expURL, url) - assert.Equal(t.t, t.expStatusID, statusID) - - return t.statusID, t.err -} - -type testProjectCommandRunner struct { - t *testing.T - expPrjCtx command.ProjectContext - result command.ProjectResult -} - -func (t *testProjectCommandRunner) Apply(ctx command.ProjectContext) command.ProjectResult { - assert.Equal(t.t, t.expPrjCtx, ctx) - - return t.result -} - -func (t *testProjectCommandRunner) Plan(ctx command.ProjectContext) command.ProjectResult { - assert.Equal(t.t, t.expPrjCtx, ctx) - - return t.result -} - -func (t *testProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - assert.Equal(t.t, t.expPrjCtx, ctx) - - return t.result -} - -func (t *testProjectCommandRunner) Version(ctx command.ProjectContext) command.ProjectResult { - assert.Equal(t.t, t.expPrjCtx, ctx) - - return t.result -} - -func TestProjectOutputWrapper(t *testing.T) { - RegisterMockTestingT(t) - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Steps: []valid.Step{ - { - StepName: "plan", - }, - }, - Workspace: "default", - RepoRelDir: ".", - RequestCtx: context.TODO(), - } - - cases := []struct { - Description string - Failure bool - Error bool - Success bool - CommandName command.Name - }{ - { - Description: "plan success", - Success: true, - CommandName: command.Plan, - }, - { - Description: "plan failure", - Failure: true, - CommandName: command.Plan, - }, - { - Description: "plan error", - Error: true, - CommandName: command.Plan, - }, - { - Description: "apply success", - Success: true, - CommandName: command.Apply, - }, - { - Description: "apply failure", - Failure: true, - CommandName: command.Apply, - }, - { - Description: "apply error", - Error: true, - CommandName: command.Apply, - }, - } - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - var prjResult command.ProjectResult - var expCommitStatus models.VCSStatus - - mockJobURLGenerator := commandMocks.NewMockJobURLGenerator() - mockJobCloser := commandMocks.NewMockJobCloser() - - if c.Success { - prjResult = command.ProjectResult{ - PlanSuccess: &models.PlanSuccess{}, - ApplySuccess: "exists", - } - expCommitStatus = models.SuccessVCSStatus - } else if c.Failure { - prjResult = command.ProjectResult{ - Failure: "failure", - } - expCommitStatus = models.FailedVCSStatus - } else if c.Error { - prjResult = command.ProjectResult{ - Error: errors.New("error"), - } - expCommitStatus = models.FailedVCSStatus - } - - prjCtx.CommandName = c.CommandName - - mockProjectCommandRunner := testProjectCommandRunner{ - t: t, - expPrjCtx: prjCtx, - result: prjResult, - } - - mockCommitStatusUpdater := strictTestCommitStatusUpdater{ - statusUpdaters: []*testCommitStatusUpdater{ - { - t: t, - expCtx: context.TODO(), - expPrjCtx: prjCtx, - expCmdName: c.CommandName, - expStatus: models.PendingVCSStatus, - expStatusID: "", - expURL: "", - statusID: "", - err: nil, - }, - { - t: t, - expCtx: context.TODO(), - expPrjCtx: prjCtx, - expCmdName: c.CommandName, - expStatus: expCommitStatus, - expStatusID: "", - expURL: "", - statusID: "", - err: nil, - }, - }, - } - - projectUpdater := command.ProjectStatusUpdater{ - JobCloser: mockJobCloser, - ProjectJobURLGenerator: mockJobURLGenerator, - ProjectVCSStatusUpdater: &mockCommitStatusUpdater, - } - - runner := &events.ProjectOutputWrapper{ - ProjectStatusUpdater: projectUpdater, - ProjectCommandRunner: &mockProjectCommandRunner, - } - - switch c.CommandName { - case 
command.Plan: - runner.Plan(prjCtx) - case command.Apply: - runner.Apply(prjCtx) - } - }) - } -} - -// Test what happens if there's no working dir. This signals that the project -// was never planned. -func TestDefaultProjectCommandRunner_ApplyNotCloned(t *testing.T) { - mockWorkingDir := mocks.NewMockWorkingDir() - runner := &events.DefaultProjectCommandRunner{ - WorkingDir: mockWorkingDir, - } - prjCtx := command.ProjectContext{} - When(mockWorkingDir.GetWorkingDir(prjCtx.BaseRepo, prjCtx.Pull, prjCtx.Workspace)).ThenReturn("", os.ErrNotExist) - - firstRes := runner.Apply(prjCtx) - ErrEquals(t, "project has not been cloned–did you run plan?", firstRes.Error) -} - -// Test that if approval is required and the PR isn't approved we give an error. -func TestDefaultProjectCommandRunner_ApplyNotApproved(t *testing.T) { - RegisterMockTestingT(t) - mockWorkingDir := mocks.NewMockWorkingDir() - mockSender := mocks.NewMockWebhooksSender() - runner := &events.DefaultProjectCommandRunner{ - WorkingDir: mockWorkingDir, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - }, - Webhooks: mockSender, - } - prjCtx := command.ProjectContext{ - ApplyRequirements: []string{"approved"}, - PullReqStatus: models.PullReqStatus{ - ApprovalStatus: models.ApprovalStatus{ - IsApproved: false, - }, - }, - WorkflowModeType: valid.DefaultWorkflowMode, - } - tmp, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.GetWorkingDir(prjCtx.BaseRepo, prjCtx.Pull, prjCtx.Workspace)).ThenReturn(tmp, nil) - - firstRes := runner.Apply(prjCtx) - Equals(t, "Pull request must be approved by at least one person other than the author before running apply.", firstRes.Failure) -} - -func TestDefaultProjectCommandRunner_ForceOverridesApplyReqs_IfPlatformMode(t *testing.T) { - RegisterMockTestingT(t) - mockWorkingDir := mocks.NewMockWorkingDir() - mockSender := mocks.NewMockWebhooksSender() - runner := &events.DefaultProjectCommandRunner{ - WorkingDir: mockWorkingDir, - StepsRunner: smocks.NewMockStepsRunner(), - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - }, - Webhooks: mockSender, - } - prjCtx := command.ProjectContext{ - PullReqStatus: models.PullReqStatus{ - ApprovalStatus: models.ApprovalStatus{ - IsApproved: false, - }, - }, - ApplyRequirements: []string{"approved"}, - WorkflowModeType: valid.PlatformWorkflowMode, - } - tmp, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.GetWorkingDir(prjCtx.BaseRepo, prjCtx.Pull, prjCtx.Workspace)).ThenReturn(tmp, nil) - - firstRes := runner.Apply(prjCtx) - Equals(t, "", firstRes.Failure) -} - -func TestDefaultProjectCommandRunner_ForceOverridesApplyReqs(t *testing.T) { - RegisterMockTestingT(t) - mockWorkingDir := mocks.NewMockWorkingDir() - mockSender := mocks.NewMockWebhooksSender() - runner := &events.DefaultProjectCommandRunner{ - WorkingDir: mockWorkingDir, - StepsRunner: smocks.NewMockStepsRunner(), - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - }, - Webhooks: mockSender, - } - prjCtx := command.ProjectContext{ - PullReqStatus: models.PullReqStatus{ - ApprovalStatus: models.ApprovalStatus{ - IsApproved: false, - }, - }, - ApplyRequirements: []string{"approved"}, - ForceApply: true, - } - tmp, cleanup := TempDir(t) - defer cleanup() - 
When(mockWorkingDir.GetWorkingDir(prjCtx.BaseRepo, prjCtx.Pull, prjCtx.Workspace)).ThenReturn(tmp, nil) - - firstRes := runner.Apply(prjCtx) - Equals(t, "", firstRes.Failure) -} - -// Test that if mergeable is required and the PR isn't mergeable we give an error. -func TestDefaultProjectCommandRunner_ApplyNotMergeable(t *testing.T) { - RegisterMockTestingT(t) - mockWorkingDir := mocks.NewMockWorkingDir() - runner := &events.DefaultProjectCommandRunner{ - WorkingDir: mockWorkingDir, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - StepsRunner: smocks.NewMockStepsRunner(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - }, - } - prjCtx := command.ProjectContext{ - PullReqStatus: models.PullReqStatus{ - Mergeable: false, - }, - ApplyRequirements: []string{"mergeable"}, - WorkflowModeType: valid.DefaultWorkflowMode, - } - tmp, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.GetWorkingDir(prjCtx.BaseRepo, prjCtx.Pull, prjCtx.Workspace)).ThenReturn(tmp, nil) - - firstRes := runner.Apply(prjCtx) - Equals(t, "Pull request must be mergeable before running apply.", firstRes.Failure) -} - -// Test that if undiverged is required and the PR is diverged we give an error. -func TestDefaultProjectCommandRunner_ApplyDiverged(t *testing.T) { - RegisterMockTestingT(t) - mockWorkingDir := mocks.NewMockWorkingDir() - runner := &events.DefaultProjectCommandRunner{ - WorkingDir: mockWorkingDir, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - StepsRunner: smocks.NewMockStepsRunner(), - AggregateApplyRequirements: &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - }, - } - prjCtx := command.ProjectContext{ - ApplyRequirements: []string{"undiverged"}, - WorkflowModeType: valid.DefaultWorkflowMode, - } - tmp, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.GetWorkingDir(prjCtx.BaseRepo, prjCtx.Pull, prjCtx.Workspace)).ThenReturn(tmp, nil) - When(mockWorkingDir.HasDiverged(matchers.AnyLoggingLogger(), AnyString(), matchers.AnyModelsRepo())).ThenReturn(true) - - firstRes := runner.Apply(prjCtx) - Equals(t, "Default branch must be rebased onto pull request before running apply.", firstRes.Failure) -} - -// Test that it runs the expected apply steps. 
-func TestDefaultProjectCommandRunner_Apply(t *testing.T) { - cases := []struct { - description string - steps []valid.Step - applyReqs []string - - expSteps []string - expOut string - expFailure string - pullMergeable bool - }{ - { - description: "normal workflow", - steps: valid.DefaultApplyStage.Steps, - expSteps: []string{"apply"}, - expOut: "apply", - }, - { - description: "approval required", - steps: valid.DefaultApplyStage.Steps, - applyReqs: []string{"approved"}, - expSteps: []string{"approve", "apply"}, - expOut: "apply", - }, - { - description: "mergeable required", - steps: valid.DefaultApplyStage.Steps, - pullMergeable: true, - applyReqs: []string{"mergeable"}, - expSteps: []string{"apply"}, - expOut: "apply", - }, - { - description: "mergeable required, pull not mergeable", - steps: valid.DefaultApplyStage.Steps, - pullMergeable: false, - applyReqs: []string{"mergeable"}, - expSteps: []string{""}, - expOut: "", - expFailure: "Pull request must be mergeable before running apply.", - }, - { - description: "mergeable and approved required", - steps: valid.DefaultApplyStage.Steps, - pullMergeable: true, - applyReqs: []string{"mergeable", "approved"}, - expSteps: []string{"approved", "apply"}, - expOut: "apply", - }, - { - description: "workflow with custom apply stage", - steps: []valid.Step{ - { - StepName: "env", - EnvVarName: "key", - EnvVarValue: "value", - }, - { - StepName: "run", - }, - { - StepName: "apply", - }, - { - StepName: "plan", - }, - { - StepName: "init", - }, - }, - expSteps: []string{"env", "run", "apply", "plan", "init"}, - expOut: "run\napply\nplan\ninit", - }, - } - - for _, c := range cases { - if c.description != "workflow with custom apply stage" { - continue - } - t.Run(c.description, func(t *testing.T) { - RegisterMockTestingT(t) - mockStepsRunner := smocks.NewMockStepsRunner() - mockWorkingDir := mocks.NewMockWorkingDir() - mockSender := mocks.NewMockWebhooksSender() - applyReqHandler := &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - } - - runner := events.DefaultProjectCommandRunner{ - StepsRunner: mockStepsRunner, - WorkingDir: mockWorkingDir, - Webhooks: mockSender, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: applyReqHandler, - } - repoDir, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.GetWorkingDir( - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString(), - )).ThenReturn(repoDir, nil) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Steps: c.steps, - Workspace: "default", - ApplyRequirements: c.applyReqs, - RepoRelDir: ".", - PullReqStatus: models.PullReqStatus{ - ApprovalStatus: models.ApprovalStatus{ - IsApproved: true, - }, - Mergeable: true, - }, - RequestCtx: ctx, - } - - When(mockStepsRunner.Run(ctx, prjCtx, repoDir)).ThenReturn("run\napply\nplan\ninit", nil) - - firstRes := runner.Apply(prjCtx) - Equals(t, c.expOut, firstRes.ApplySuccess) - Equals(t, c.expFailure, firstRes.Failure) - - mockStepsRunner.VerifyWasCalledOnce().Run(ctx, prjCtx, repoDir) - }) - } -} - -// Test that it runs the expected apply steps. 
-func TestDefaultProjectCommandRunner_ApplyRunStepFailure(t *testing.T) { - RegisterMockTestingT(t) - mockStepsRunner := smocks.NewMockStepsRunner() - mockWorkingDir := mocks.NewMockWorkingDir() - mockSender := mocks.NewMockWebhooksSender() - applyReqHandler := &events.AggregateApplyRequirements{ - WorkingDir: mockWorkingDir, - } - - runner := events.DefaultProjectCommandRunner{ - StepsRunner: mockStepsRunner, - WorkingDir: mockWorkingDir, - WorkingDirLocker: events.NewDefaultWorkingDirLocker(), - AggregateApplyRequirements: applyReqHandler, - Webhooks: mockSender, - } - repoDir, cleanup := TempDir(t) - defer cleanup() - When(mockWorkingDir.GetWorkingDir( - matchers.AnyModelsRepo(), - matchers.AnyModelsPullRequest(), - AnyString(), - )).ThenReturn(repoDir, nil) - - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logging.NewNoopCtxLogger(t), - Steps: []valid.Step{ - { - StepName: "apply", - }, - }, - Workspace: "default", - ApplyRequirements: []string{}, - RepoRelDir: ".", - PullReqStatus: models.PullReqStatus{ - Mergeable: true, - }, - RequestCtx: ctx, - } - When(mockStepsRunner.Run(ctx, prjCtx, ".")).ThenReturn("apply", fmt.Errorf("something went wrong")) - - firstRes := runner.Apply(prjCtx) - Assert(t, firstRes.ApplySuccess == "", "exp apply failure") - - mockStepsRunner.VerifyWasCalledOnce().Run(ctx, prjCtx, repoDir) -} - -type mockURLGenerator struct{} - -func (m mockURLGenerator) GenerateLockURL(lockID string) string { - return "https://" + lockID -} diff --git a/server/legacy/events/project_finder.go b/server/legacy/events/project_finder.go deleted file mode 100644 index 0579a9936..000000000 --- a/server/legacy/events/project_finder.go +++ /dev/null @@ -1,265 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "context" - "fmt" - "os" - "path" - "path/filepath" - "strings" - - "github.com/runatlantis/atlantis/server/config/valid" - - "github.com/docker/docker/pkg/fileutils" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -// ProjectFinder determines which projects were modified in a given pull -// request. -type ProjectFinder interface { - // DetermineProjects returns the list of projects that were modified based on - // the modifiedFiles. The list will be de-duplicated. - // absRepoDir is the path to the cloned repo on disk. - DetermineProjects(requestCtx context.Context, log logging.Logger, modifiedFiles []string, repoFullName string, absRepoDir string, autoplanFileList string) []models.Project - // DetermineProjectsViaConfig returns the list of projects that were modified - // based on modifiedFiles and the repo's config. - // absRepoDir is the path to the cloned repo on disk. 
- DetermineProjectsViaConfig(log logging.Logger, modifiedFiles []string, config valid.RepoCfg, absRepoDir string) ([]valid.Project, error) -} - -// ignoredFilenameFragments contains filename fragments to ignore while looking at changes -var ignoredFilenameFragments = []string{"terraform.tfstate", "terraform.tfstate.backup", "tflint.hcl"} - -// DefaultProjectFinder implements ProjectFinder. -type DefaultProjectFinder struct{} - -// See ProjectFinder.DetermineProjects. -func (p *DefaultProjectFinder) DetermineProjects(requestCtx context.Context, log logging.Logger, modifiedFiles []string, repoFullName string, absRepoDir string, autoplanFileList string) []models.Project { - var projects []models.Project - - modifiedTerraformFiles := p.filterToFileList(modifiedFiles, autoplanFileList) - if len(modifiedTerraformFiles) == 0 { - return projects - } - log.InfoContext(requestCtx, fmt.Sprintf("filtered modified files to %d .tf or terragrunt.hcl files: %v", - len(modifiedTerraformFiles), modifiedTerraformFiles)) - - var dirs []string - for _, modifiedFile := range modifiedTerraformFiles { - projectDir := p.getProjectDir(modifiedFile, absRepoDir) - if projectDir != "" { - dirs = append(dirs, projectDir) - } - } - uniqueDirs := p.unique(dirs) - - // The list of modified files will include files that were deleted. We still - // want to run plan if a file was deleted since that often results in a - // change however we want to remove directories that have been completely - // deleted. - exists := p.removeNonExistingDirs(uniqueDirs, absRepoDir) - - for _, p := range exists { - projects = append(projects, models.NewProject(repoFullName, p)) - } - log.InfoContext(requestCtx, fmt.Sprintf("there are %d modified project(s) at path(s): %v", - len(projects), strings.Join(exists, ", "))) - return projects -} - -// See ProjectFinder.DetermineProjectsViaConfig. -func (p *DefaultProjectFinder) DetermineProjectsViaConfig(log logging.Logger, modifiedFiles []string, config valid.RepoCfg, absRepoDir string) ([]valid.Project, error) { - var projects []valid.Project - for _, project := range config.Projects { - var whenModifiedRelToRepoRoot []string - for _, wm := range project.Autoplan.WhenModified { - wm = strings.TrimSpace(wm) - // An exclusion uses a '!' at the beginning. If it's there, we need - // to remove it, then add in the project path, then add it back. - exclusion := false - if wm != "" && wm[0] == '!' { - wm = wm[1:] - exclusion = true - } - - // Prepend project dir to when modified patterns because the patterns - // are relative to the project dirs but our list of modified files is - // relative to the repo root. - wmRelPath := filepath.Join(project.Dir, wm) - if exclusion { - wmRelPath = "!" + wmRelPath - } - whenModifiedRelToRepoRoot = append(whenModifiedRelToRepoRoot, wmRelPath) - } - pm, err := fileutils.NewPatternMatcher(whenModifiedRelToRepoRoot) - if err != nil { - return nil, errors.Wrapf(err, "matching modified files with patterns: %v", project.Autoplan.WhenModified) - } - - // If any of the modified files matches the pattern then this project is - // considered modified. - for _, file := range modifiedFiles { - match, err := pm.Matches(file) - if err != nil { - continue - } - if match { - // If we're checking using an atlantis.yaml file we downloaded - // directly from the repo (when doing a no-clone check) then - // absRepoDir will be empty. Since we didn't clone the repo - // yet we can't do this check. 
If there was a file modified - // in a deleted directory then when we finally do clone the repo - // we'll call this function again and then we'll detect the - // directory was deleted. - if absRepoDir != "" { - _, err := os.Stat(filepath.Join(absRepoDir, project.Dir)) - if err == nil { - projects = append(projects, project) - } - } else { - projects = append(projects, project) - } - break - } - } - } - return projects, nil -} - -// filterToFileList filters out files not included in the file list -func (p *DefaultProjectFinder) filterToFileList(files []string, fileList string) []string { - var filtered []string - patterns := strings.Split(fileList, ",") - // Ignore pattern matcher error here as it was checked for errors in server validation - patternMatcher, _ := fileutils.NewPatternMatcher(patterns) - - for _, fileName := range files { - if p.shouldIgnore(fileName) { - continue - } - match, err := patternMatcher.Matches(fileName) - if err != nil { - continue - } - if match { - filtered = append(filtered, fileName) - } - } - - return filtered -} - -// shouldIgnore returns true if we shouldn't trigger a plan on changes to this file. -func (p *DefaultProjectFinder) shouldIgnore(fileName string) bool { - for _, s := range ignoredFilenameFragments { - if strings.Contains(fileName, s) { - return true - } - } - return false -} - -// getProjectDir attempts to determine based on the location of a modified -// file, where the root of the Terraform project is. It also attempts to verify -// if the root is valid by looking for a main.tf file. It returns a relative -// path to the repo. If the project is at the root returns ".". If modified file -// doesn't lead to a valid project path, returns an empty string. -func (p *DefaultProjectFinder) getProjectDir(modifiedFilePath string, repoDir string) string { - dir := path.Dir(modifiedFilePath) - if path.Base(dir) == "env" { - // If the modified file was inside an env/ directory, we treat this - // specially and run plan one level up. This supports directory structures - // like: - // root/ - // main.tf - // env/ - // dev.tfvars - // staging.tfvars - return path.Dir(dir) - } - - // Surrounding dir with /'s so we can match on /modules/ even if dir is - // "modules" or "project1/modules" - if strings.Contains("/"+dir+"/", "/modules/") { - // We treat changes inside modules/ folders specially. There are two cases: - // 1. modules folder inside project: - // root/ - // main.tf - // modules/ - // ... - // In this case, if we detect a change in modules/, we will determine - // the project root to be at root/. - // - // 2. shared top-level modules folder - // root/ - // project1/ - // main.tf # uses modules via ../modules - // project2/ - // main.tf # uses modules via ../modules - // modules/ - // ... - // In this case, if we detect a change in modules/ we don't know which - // project was using this module so we can't suggest a project root, but we - // also detect that there's no main.tf in the parent folder of modules/ - // so we won't suggest that as a project. So in this case we return nothing. - // The code below makes this happen. - - // Need to add a trailing slash before splitting on modules/ because if - // the input was modules/file.tf then path.Dir will be "modules" and so our - // split on "modules/" will fail. - dirWithTrailingSlash := dir + "/" - modulesSplit := strings.SplitN(dirWithTrailingSlash, "modules/", 2) - modulesParent := modulesSplit[0] - - // Now we check whether there is a main.tf in the parent. 
- if _, err := os.Stat(filepath.Join(repoDir, modulesParent, "main.tf")); os.IsNotExist(err) { - return "" - } - return path.Clean(modulesParent) - } - - // If it wasn't a modules directory, we assume we're in a project and return - // this directory. - return dir -} - -// unique de-duplicates strs. -func (p *DefaultProjectFinder) unique(strs []string) []string { - hash := make(map[string]bool) - var unique []string - for _, s := range strs { - if !hash[s] { - unique = append(unique, s) - hash[s] = true - } - } - return unique -} - -// removeNonExistingDirs removes paths from relativePaths that don't exist. -// relativePaths is a list of paths relative to absRepoDir. -func (p *DefaultProjectFinder) removeNonExistingDirs(relativePaths []string, absRepoDir string) []string { - var filtered []string - for _, pth := range relativePaths { - absPath := filepath.Join(absRepoDir, pth) - if _, err := os.Stat(absPath); !os.IsNotExist(err) { - filtered = append(filtered, pth) - } - } - return filtered -} diff --git a/server/legacy/events/project_finder_test.go b/server/legacy/events/project_finder_test.go deleted file mode 100644 index baaa43f9f..000000000 --- a/server/legacy/events/project_finder_test.go +++ /dev/null @@ -1,510 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "context" - "os" - "path/filepath" - "testing" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -var modifiedRepo = "owner/repo" -var m = events.DefaultProjectFinder{} -var nestedModules1 string -var nestedModules2 string -var topLevelModules string -var envDir string - -func setupTmpRepos(t *testing.T) { - // Create different repo structures for testing. - - // 1. Nested modules directory inside a project - // non-tf - // terraform.tfstate - // terraform.tfstate.backup - // project1/ - // main.tf - // terraform.tfstate - // terraform.tfstate.backup - // modules/ - // main.tf - var err error - nestedModules1 = t.TempDir() - Ok(t, err) - err = os.MkdirAll(filepath.Join(nestedModules1, "project1/modules"), 0700) - Ok(t, err) - files := []string{ - "non-tf", - ".tflint.hcl", - "terraform.tfstate.backup", - "project1/main.tf", - "project1/terraform.tfstate", - "project1/terraform.tfstate.backup", - "project1/modules/main.tf", - } - for _, f := range files { - _, err = os.Create(filepath.Join(nestedModules1, f)) - Ok(t, err) - } - - // 2. Nested modules dir inside top-level project - // main.tf - // modules/ - // main.tf - // We can just re-use part of the previous dir structure. - nestedModules2 = filepath.Join(nestedModules1, "project1") - - // 3. 
Top-level modules - // modules/ - // main.tf - // project1/ - // main.tf - // project2/ - // main.tf - topLevelModules = t.TempDir() - Ok(t, err) - for _, path := range []string{"modules", "project1", "project2"} { - err = os.MkdirAll(filepath.Join(topLevelModules, path), 0700) - Ok(t, err) - _, err = os.Create(filepath.Join(topLevelModules, path, "main.tf")) - Ok(t, err) - } - - // 4. Env/ dir - // main.tf - // env/ - // staging.tfvars - // production.tfvars - // global-env-config.auto.tfvars.json - envDir = t.TempDir() - Ok(t, err) - err = os.MkdirAll(filepath.Join(envDir, "env"), 0700) - Ok(t, err) - _, err = os.Create(filepath.Join(envDir, "env/staging.tfvars")) - Ok(t, err) - _, err = os.Create(filepath.Join(envDir, "env/production.tfvars")) - Ok(t, err) -} - -func TestDetermineProjects(t *testing.T) { - noopLogger := logging.NewNoopCtxLogger(t) - setupTmpRepos(t) - - defaultAutoplanFileList := "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl" - - cases := []struct { - description string - files []string - expProjectPaths []string - repoDir string - autoplanFileList string - }{ - { - "If no files were modified then should return an empty list", - nil, - nil, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should ignore non .tf files and return an empty list", - []string{"non-tf", "non.tf.suffix"}, - nil, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should ignore .tflint.hcl files and return an empty list", - []string{".tflint.hcl", "project1/.tflint.hcl"}, - nil, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should plan in the parent directory from modules if that dir has a main.tf", - []string{"project1/modules/main.tf"}, - []string{"project1"}, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should plan in the parent directory from modules if that dir has a main.tf", - []string{"modules/main.tf"}, - []string{"."}, - nestedModules2, - defaultAutoplanFileList, - }, - { - "Should plan in the parent directory from modules when module is in a subdir if that dir has a main.tf", - []string{"modules/subdir/main.tf"}, - []string{"."}, - nestedModules2, - defaultAutoplanFileList, - }, - { - "Should not plan in the parent directory from modules if that dir does not have a main.tf", - []string{"modules/main.tf"}, - []string{}, - topLevelModules, - defaultAutoplanFileList, - }, - { - "Should not plan in the parent directory from modules if that dir does not have a main.tf", - []string{"modules/main.tf", "project1/main.tf"}, - []string{"project1"}, - topLevelModules, - defaultAutoplanFileList, - }, - { - "Should ignore tfstate files and return an empty list", - []string{"terraform.tfstate", "terraform.tfstate.backup", "parent/terraform.tfstate", "parent/terraform.tfstate.backup"}, - nil, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should return '.' 
when changed file is at root", - []string{"a.tf"}, - []string{"."}, - nestedModules2, - defaultAutoplanFileList, - }, - { - "Should return directory when changed file is in a dir", - []string{"project1/a.tf"}, - []string{"project1"}, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should return parent dir when changed file is in an env/ dir", - []string{"env/staging.tfvars"}, - []string{"."}, - envDir, - defaultAutoplanFileList, - }, - { - "Should de-duplicate when multiple files changed in the same dir", - []string{"env/staging.tfvars", "main.tf", "other.tf"}, - []string{"."}, - "", - defaultAutoplanFileList, - }, - { - "Should ignore changes in a dir that was deleted", - []string{"wasdeleted/main.tf"}, - []string{}, - "", - defaultAutoplanFileList, - }, - { - "Should not ignore terragrunt.hcl files", - []string{"terragrunt.hcl"}, - []string{"."}, - nestedModules2, - defaultAutoplanFileList, - }, - { - "Should find terragrunt.hcl file inside a nested directory", - []string{"project1/terragrunt.hcl"}, - []string{"project1"}, - nestedModules1, - defaultAutoplanFileList, - }, - { - "Should find packer files and ignore default tf files", - []string{"project1/image.pkr.hcl", "project2/main.tf"}, - []string{"project1"}, - topLevelModules, - "**/*.pkr.hcl", - }, - { - "Should find yaml files in addition to defaults", - []string{"project1/ansible.yml", "project2/main.tf"}, - []string{"project1", "project2"}, - topLevelModules, - "**/*.tf,**/*.yml", - }, - { - "Should find yaml files unless excluded", - []string{"project1/ansible.yml", "project2/config.yml"}, - []string{"project1"}, - topLevelModules, - "**/*.yml,!project2/*.yml", - }, - } - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - projects := m.DetermineProjects(context.TODO(), noopLogger, c.files, modifiedRepo, c.repoDir, c.autoplanFileList) - - // Extract the paths from the projects. We use a slice here instead of a - // map so we can test whether there are duplicates returned. - var paths []string - for _, project := range projects { - paths = append(paths, project.Path) - // Check that the project object has the repo set properly. - Equals(t, modifiedRepo, project.RepoFullName) - } - Assert(t, len(c.expProjectPaths) == len(paths), - "exp %q but found %q", c.expProjectPaths, paths) - - for _, expPath := range c.expProjectPaths { - found := false - for _, actPath := range paths { - if expPath == actPath { - found = true - break - } - } - if !found { - t.Fatalf("exp %q but was not in paths %v", expPath, paths) - } - } - }) - } -} - -func TestDefaultProjectFinder_DetermineProjectsViaConfig(t *testing.T) { - // Create dir structure: - // main.tf - // project1/ - // main.tf - // terraform.tfvars.json - // project2/ - // main.tf - // terraform.tfvars - // modules/ - // module/ - // main.tf - tmpDir, cleanup := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - "project1": map[string]interface{}{ - "main.tf": nil, - "terraform.tfvars.json": nil, - }, - "project2": map[string]interface{}{ - "main.tf": nil, - "terraform.tfvars": nil, - }, - "modules": map[string]interface{}{ - "module": map[string]interface{}{ - "main.tf": nil, - }, - }, - }) - defer cleanup() - - cases := []struct { - description string - config valid.RepoCfg - modified []string - expProjPaths []string - }{ - { - // When autoplan is disabled, we still return the modified project. - // If our caller is interested in autoplan enabled projects, they'll - // need to filter the results. 
- description: "autoplan disabled", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: ".", - Autoplan: valid.Autoplan{ - Enabled: false, - WhenModified: []string{"**/*.tf"}, - }, - }, - }, - }, - modified: []string{"main.tf"}, - expProjPaths: []string{"."}, - }, - { - description: "autoplan default", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: ".", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"**/*.tf"}, - }, - }, - }, - }, - modified: []string{"main.tf"}, - expProjPaths: []string{"."}, - }, - { - description: "parent dir modified", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"**/*.tf"}, - }, - }, - }, - }, - modified: []string{"main.tf"}, - expProjPaths: nil, - }, - { - description: "parent dir modified matches", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project1", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"../**/*.tf"}, - }, - }, - }, - }, - modified: []string{"main.tf"}, - expProjPaths: []string{"project1"}, - }, - { - description: "dir deleted", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project3", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"*.tf"}, - }, - }, - }, - }, - modified: []string{"project3/main.tf"}, - expProjPaths: nil, - }, - { - description: "multiple projects", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: ".", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"*.tf"}, - }, - }, - { - Dir: "project1", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"../modules/module/*.tf", "**/*.tf"}, - }, - }, - { - Dir: "project2", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"**/*.tf"}, - }, - }, - }, - }, - modified: []string{"main.tf", "modules/module/another.tf", "project2/nontf.txt"}, - expProjPaths: []string{".", "project1"}, - }, - { - description: ".tfvars file modified", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project2", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"*.tf*"}, - }, - }, - }, - }, - modified: []string{"project2/terraform.tfvars"}, - expProjPaths: []string{"project2"}, - }, - { - description: "file excluded", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project1", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"*.tf", "!exclude-me.tf"}, - }, - }, - }, - }, - modified: []string{"project1/exclude-me.tf"}, - expProjPaths: nil, - }, - { - description: "some files excluded and others included", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project1", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"*.tf", "!exclude-me.tf"}, - }, - }, - }, - }, - modified: []string{"project1/exclude-me.tf", "project1/include-me.tf"}, - expProjPaths: []string{"project1"}, - }, - { - description: "multiple dirs excluded", - config: valid.RepoCfg{ - Projects: []valid.Project{ - { - Dir: "project1", - Autoplan: valid.Autoplan{ - Enabled: true, - WhenModified: []string{"**/*.tf", "!subdir1/*", "!subdir2/*"}, - }, - }, - }, - }, - modified: []string{"project1/subdir1/main.tf", "project1/subdir2/main.tf"}, - expProjPaths: nil, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - pf := events.DefaultProjectFinder{} - projects, err := 
pf.DetermineProjectsViaConfig(logging.NewNoopCtxLogger(t), c.modified, c.config, tmpDir) - Ok(t, err) - Equals(t, len(c.expProjPaths), len(projects)) - for i, proj := range projects { - Equals(t, c.expProjPaths[i], proj.Dir) - } - }) - } -} diff --git a/server/legacy/events/project_locker.go b/server/legacy/events/project_locker.go deleted file mode 100644 index 50ba350e3..000000000 --- a/server/legacy/events/project_locker.go +++ /dev/null @@ -1,90 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "context" - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_lock.go ProjectLocker - -// ProjectLocker locks this project against other plans being run until this -// project is unlocked. -type ProjectLocker interface { - // TryLock attempts to acquire the lock for this project. It returns true if the lock - // was acquired. If it returns false, the lock was not acquired and the second - // return value will be a string describing why the lock was not acquired. - // The third return value is a function that can be called to unlock the - // lock. It will only be set if the lock was acquired. Any errors will set - // error. - TryLock(requestCtx context.Context, log logging.Logger, pull models.PullRequest, user models.User, workspace string, project models.Project) (*TryLockResponse, error) -} - -// DefaultProjectLocker implements ProjectLocker. -type DefaultProjectLocker struct { - Locker locking.Locker - VCSClient vcs.Client -} - -// TryLockResponse is the result of trying to lock a project. -type TryLockResponse struct { - // LockAcquired is true if the lock was acquired. - LockAcquired bool - // LockFailureReason is the reason why the lock was not acquired. It will - // only be set if LockAcquired is false. - LockFailureReason string - // UnlockFn will unlock the lock created by the caller. This might be called - // if there is an error later and the caller doesn't want to continue to - // hold the lock. - UnlockFn func() error - // LockKey is the key for the lock if the lock was acquired. - LockKey string -} - -// TryLock implements ProjectLocker.TryLock. 
-func (p *DefaultProjectLocker) TryLock(requestCtx context.Context, log logging.Logger, pull models.PullRequest, user models.User, workspace string, project models.Project) (*TryLockResponse, error) { - lockAttempt, err := p.Locker.TryLock(project, workspace, pull, user) - if err != nil { - return nil, err - } - if !lockAttempt.LockAcquired && lockAttempt.CurrLock.Pull.Num != pull.Num { - link, err := p.VCSClient.MarkdownPullLink(lockAttempt.CurrLock.Pull) - if err != nil { - return nil, err - } - failureMsg := fmt.Sprintf( - "This project is currently locked by an unapplied plan from pull %s. To continue, delete the lock from %s or apply that plan and merge the pull request.\n\nOnce the lock is released, comment `atlantis plan` here to re-plan.", - link, - link) - return &TryLockResponse{ - LockAcquired: false, - LockFailureReason: failureMsg, - }, nil - } - log.InfoContext(requestCtx, fmt.Sprintf("acquired lock with id %q", lockAttempt.LockKey)) - return &TryLockResponse{ - LockAcquired: true, - UnlockFn: func() error { - _, err := p.Locker.Unlock(lockAttempt.LockKey) - return err - }, - LockKey: lockAttempt.LockKey, - }, nil -} diff --git a/server/legacy/events/project_locker_test.go b/server/legacy/events/project_locker_test.go deleted file mode 100644 index e230528aa..000000000 --- a/server/legacy/events/project_locker_test.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "context" - "fmt" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) { - var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient) - mockLocker := mocks.NewMockLocker() - locker := events.DefaultProjectLocker{ - Locker: mockLocker, - VCSClient: mockClient, - } - expProject := models.Project{} - expWorkspace := "default" - expPull := models.PullRequest{} - expUser := models.User{} - - lockingPull := models.PullRequest{ - Num: 2, - } - When(mockLocker.TryLock(expProject, expWorkspace, expPull, expUser)).ThenReturn( - locking.TryLockResponse{ - LockAcquired: false, - CurrLock: models.ProjectLock{ - Pull: lockingPull, - }, - LockKey: "", - }, - nil, - ) - res, err := locker.TryLock(context.Background(), logging.NewNoopCtxLogger(t), expPull, expUser, expWorkspace, expProject) - link, _ := mockClient.MarkdownPullLink(lockingPull) - Ok(t, err) - Equals(t, &events.TryLockResponse{ - LockAcquired: false, - LockFailureReason: fmt.Sprintf("This project is currently locked by an unapplied plan from pull %s. To continue, delete the lock from %s or apply that plan and merge the pull request.\n\nOnce the lock is released, comment `atlantis plan` here to re-plan.", link, link), - }, res) -} - -func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) { - RegisterMockTestingT(t) - var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient) - mockLocker := mocks.NewMockLocker() - locker := events.DefaultProjectLocker{ - Locker: mockLocker, - VCSClient: mockClient, - } - expProject := models.Project{} - expWorkspace := "default" - expPull := models.PullRequest{Num: 2} - expUser := models.User{} - - lockingPull := models.PullRequest{ - Num: 2, - } - lockKey := "key" - When(mockLocker.TryLock(expProject, expWorkspace, expPull, expUser)).ThenReturn( - locking.TryLockResponse{ - LockAcquired: false, - CurrLock: models.ProjectLock{ - Pull: lockingPull, - }, - LockKey: lockKey, - }, - nil, - ) - res, err := locker.TryLock(context.Background(), logging.NewNoopCtxLogger(t), expPull, expUser, expWorkspace, expProject) - Ok(t, err) - Equals(t, true, res.LockAcquired) - - // UnlockFn should work. - mockLocker.VerifyWasCalled(Never()).Unlock(lockKey) - err = res.UnlockFn() - Ok(t, err) - mockLocker.VerifyWasCalledOnce().Unlock(lockKey) -} - -func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) { - RegisterMockTestingT(t) - var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient) - mockLocker := mocks.NewMockLocker() - locker := events.DefaultProjectLocker{ - Locker: mockLocker, - VCSClient: mockClient, - } - expProject := models.Project{} - expWorkspace := "default" - expPull := models.PullRequest{Num: 2} - expUser := models.User{} - - lockingPull := models.PullRequest{ - Num: 2, - } - lockKey := "key" - When(mockLocker.TryLock(expProject, expWorkspace, expPull, expUser)).ThenReturn( - locking.TryLockResponse{ - LockAcquired: true, - CurrLock: models.ProjectLock{ - Pull: lockingPull, - }, - LockKey: lockKey, - }, - nil, - ) - res, err := locker.TryLock(context.Background(), logging.NewNoopCtxLogger(t), expPull, expUser, expWorkspace, expProject) - Ok(t, err) - Equals(t, true, res.LockAcquired) - - // UnlockFn should work. 
- mockLocker.VerifyWasCalled(Never()).Unlock(lockKey) - err = res.UnlockFn() - Ok(t, err) - mockLocker.VerifyWasCalledOnce().Unlock(lockKey) -} diff --git a/server/legacy/events/pull_closed_executor.go b/server/legacy/events/pull_closed_executor.go deleted file mode 100644 index 66d43951d..000000000 --- a/server/legacy/events/pull_closed_executor.go +++ /dev/null @@ -1,171 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "bytes" - "fmt" - "io" - "sort" - "strings" - "text/template" - - "github.com/runatlantis/atlantis/server/legacy/core/db" - - "github.com/runatlantis/atlantis/server/logging" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/legacy/jobs" - "github.com/runatlantis/atlantis/server/models" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_resource_cleaner.go ResourceCleaner - -type ResourceCleaner interface { - CleanUp(pullInfo jobs.PullInfo) -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pull_cleaner.go PullCleaner - -// PullCleaner cleans up pull requests after they're closed/merged. -type PullCleaner interface { - // CleanUpPull deletes the workspaces used by the pull request on disk - // and deletes any locks associated with this pull request for all workspaces. - CleanUpPull(repo models.Repo, pull models.PullRequest) error -} - -// PullClosedExecutor executes the tasks required to clean up a closed pull -// request. -type PullClosedExecutor struct { - Locker locking.Locker - Logger logging.Logger - DB *db.BoltDB - PullClosedTemplate PullCleanupTemplate - LogStreamResourceCleaner ResourceCleaner - VCSClient vcs.Client - WorkingDir WorkingDir -} - -type templatedProject struct { - RepoRelDir string - Workspaces string -} - -type PullCleanupTemplate interface { - Execute(wr io.Writer, data interface{}) error -} - -type PullClosedEventTemplate struct{} - -func (t *PullClosedEventTemplate) Execute(wr io.Writer, data interface{}) error { - return pullClosedTemplate.Execute(wr, data) -} - -var pullClosedTemplate = template.Must(template.New("").Parse( - "Locks and plans deleted for the projects and workspaces modified in this pull request:\n" + - "{{ range . }}\n" + - "- dir: `{{ .RepoRelDir }}` {{ .Workspaces }}{{ end }}")) - -// CleanUpPull cleans up after a closed pull request. -func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullRequest) error { - logFields := map[string]interface{}{ - "repository": repo.FullName, - "pull-num": pull.Num, - } - pullStatus, err := p.DB.GetPullStatus(pull) - if err != nil { - // Log and continue to clean up other resources. 
- p.Logger.Error(fmt.Sprintf("retrieving pull status: %s", err), logFields) - } - - if pullStatus != nil { - for _, project := range pullStatus.Projects { - jobContext := jobs.PullInfo{ - PullNum: pull.Num, - Repo: pull.BaseRepo.Name, - Workspace: project.Workspace, - ProjectName: project.ProjectName, - } - p.LogStreamResourceCleaner.CleanUp(jobContext) - } - } - - if err := p.WorkingDir.Delete(repo, pull); err != nil { - return errors.Wrap(err, "cleaning workspace") - } - - // Finally, delete locks. We do this last because when someone - // unlocks a project, right now we don't actually delete the plan - // so we might have plans laying around but no locks. - locks, err := p.Locker.UnlockByPull(repo.FullName, pull.Num) - if err != nil { - return errors.Wrap(err, "cleaning up locks") - } - - // Delete pull from DB. - if err := p.DB.DeletePullStatus(pull); err != nil { - p.Logger.Error(fmt.Sprintf("deleting pull from db: %s", err), logFields) - } - - // If there are no locks then there's no need to comment. - if len(locks) == 0 { - return nil - } - - templateData := p.buildTemplateData(locks) - var buf bytes.Buffer - if err = p.PullClosedTemplate.Execute(&buf, templateData); err != nil { - return errors.Wrap(err, "rendering template for comment") - } - return p.VCSClient.CreateComment(repo, pull.Num, buf.String(), "") -} - -// buildTemplateData formats the lock data into a slice that can easily be -// templated for the VCS comment. We organize all the workspaces by their -// respective project paths so the comment can look like: -// dir: {dir}, workspaces: {all-workspaces} -func (p *PullClosedExecutor) buildTemplateData(locks []models.ProjectLock) []templatedProject { - workspacesByPath := make(map[string][]string) - for _, l := range locks { - path := l.Project.Path - workspacesByPath[path] = append(workspacesByPath[path], l.Workspace) - } - - // sort keys so we can write deterministic tests - var sortedPaths []string - for p := range workspacesByPath { - sortedPaths = append(sortedPaths, p) - } - sort.Strings(sortedPaths) - - var projects []templatedProject - for _, p := range sortedPaths { - workspace := workspacesByPath[p] - workspacesStr := fmt.Sprintf("`%s`", strings.Join(workspace, "`, `")) - if len(workspace) == 1 { - projects = append(projects, templatedProject{ - RepoRelDir: p, - Workspaces: "workspace: " + workspacesStr, - }) - } else { - projects = append(projects, templatedProject{ - RepoRelDir: p, - Workspaces: "workspaces: " + workspacesStr, - }) - } - } - return projects -} diff --git a/server/legacy/events/pull_closed_executor_test.go b/server/legacy/events/pull_closed_executor_test.go deleted file mode 100644 index d73577e34..000000000 --- a/server/legacy/events/pull_closed_executor_test.go +++ /dev/null @@ -1,297 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. 
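buildTemplateData above reduces the lock list to one entry per directory, with that directory's workspaces joined into a single backtick-quoted string, and pullClosedTemplate renders those entries into the comment body. A minimal standalone sketch of that rendering, with the grouped data supplied by hand, is below; it assumes only the template text shown above.

package main

import (
    "os"
    "text/template"
)

// templatedProject mirrors the shape the template iterates over:
// one entry per directory, workspaces pre-joined into a single string.
type templatedProject struct {
    RepoRelDir string
    Workspaces string
}

var pullClosedTemplate = template.Must(template.New("").Parse(
    "Locks and plans deleted for the projects and workspaces modified in this pull request:\n" +
        "{{ range . }}\n" +
        "- dir: `{{ .RepoRelDir }}` {{ .Workspaces }}{{ end }}"))

func main() {
    data := []templatedProject{
        {RepoRelDir: ".", Workspaces: "workspace: `default`"},
        {RepoRelDir: "path", Workspaces: "workspaces: `workspace1`, `workspace2`"},
    }
    // Prints a comment body in the same shape the pull_closed_executor tests assert.
    if err := pullClosedTemplate.Execute(os.Stdout, data); err != nil {
        panic(err)
    }
}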
- -package events_test - -import ( - "context" - "os" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/terraform/filter" - "github.com/uber-go/tally/v4" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/logging" - bolt "go.etcd.io/bbolt" - - "github.com/runatlantis/atlantis/server/legacy/jobs" - "github.com/stretchr/testify/assert" - - . "github.com/petergtz/pegomock" - lockmocks "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - jobmocks "github.com/runatlantis/atlantis/server/legacy/jobs/mocks" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/models/fixtures" - . "github.com/runatlantis/atlantis/testing" -) - -func TestCleanUpPullWorkspaceErrorf(t *testing.T) { - t.Log("when workspace.Delete returns an error, we return it") - RegisterMockTestingT(t) - w := mocks.NewMockWorkingDir() - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - pce := events.PullClosedExecutor{ - WorkingDir: w, - PullClosedTemplate: &events.PullClosedEventTemplate{}, - DB: db, - } - err = errors.New("err") - When(w.Delete(fixtures.GithubRepo, fixtures.Pull)).ThenReturn(err) - actualErr := pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) - Equals(t, "cleaning workspace: err", actualErr.Error()) -} - -func TestCleanUpPullUnlockErrorf(t *testing.T) { - t.Log("when locker.UnlockByPull returns an error, we return it") - RegisterMockTestingT(t) - w := mocks.NewMockWorkingDir() - l := lockmocks.NewMockLocker() - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - pce := events.PullClosedExecutor{ - Locker: l, - WorkingDir: w, - DB: db, - PullClosedTemplate: &events.PullClosedEventTemplate{}, - } - err = errors.New("err") - When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, err) - actualErr := pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) - Equals(t, "cleaning up locks: err", actualErr.Error()) -} - -func TestCleanUpPullNoLocks(t *testing.T) { - t.Log("when there are no locks to clean up, we don't comment") - RegisterMockTestingT(t) - w := mocks.NewMockWorkingDir() - l := lockmocks.NewMockLocker() - cp := vcsmocks.NewMockClient() - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - pce := events.PullClosedExecutor{ - Locker: l, - WorkingDir: w, - DB: db, - PullClosedTemplate: &events.PullClosedEventTemplate{}, - } - When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, nil) - err = pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) - Ok(t, err) - cp.VerifyWasCalled(Never()).CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()) -} - -func TestCleanUpPullComments(t *testing.T) { - t.Log("should comment correctly") - RegisterMockTestingT(t) - cases := []struct { - Description string - Locks []models.ProjectLock - Exp string - }{ - { - "single lock, empty path", - []models.ProjectLock{ - { - Project: models.NewProject("owner/repo", ""), - Workspace: "default", - }, - }, - "- dir: `.` workspace: `default`", - }, - { - "single lock, non-empty path", 
- []models.ProjectLock{ - { - Project: models.NewProject("owner/repo", "path"), - Workspace: "default", - }, - }, - "- dir: `path` workspace: `default`", - }, - { - "single path, multiple workspaces", - []models.ProjectLock{ - { - Project: models.NewProject("owner/repo", "path"), - Workspace: "workspace1", - }, - { - Project: models.NewProject("owner/repo", "path"), - Workspace: "workspace2", - }, - }, - "- dir: `path` workspaces: `workspace1`, `workspace2`", - }, - { - "multiple paths, multiple workspaces", - []models.ProjectLock{ - { - Project: models.NewProject("owner/repo", "path"), - Workspace: "workspace1", - }, - { - Project: models.NewProject("owner/repo", "path"), - Workspace: "workspace2", - }, - { - Project: models.NewProject("owner/repo", "path2"), - Workspace: "workspace1", - }, - { - Project: models.NewProject("owner/repo", "path2"), - Workspace: "workspace2", - }, - }, - "- dir: `path` workspaces: `workspace1`, `workspace2`\n- dir: `path2` workspaces: `workspace1`, `workspace2`", - }, - } - for _, c := range cases { - func() { - cp := vcsmocks.NewMockClient() - l := lockmocks.NewMockLocker() - w := mocks.NewMockWorkingDir() - tmp, cleanup := TempDir(t) - defer cleanup() - db, err := db.New(tmp) - Ok(t, err) - pce := events.PullClosedExecutor{ - Locker: l, - DB: db, - VCSClient: cp, - WorkingDir: w, - PullClosedTemplate: &events.PullClosedEventTemplate{}, - } - t.Log("testing: " + c.Description) - When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(c.Locks, nil) - err = pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull) - Ok(t, err) - _, _, comment, _ := cp.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()).GetCapturedArguments() - - expected := "Locks and plans deleted for the projects and workspaces modified in this pull request:\n\n" + c.Exp - Equals(t, expected, comment) - }() - } -} - -func TestCleanUpLogStreaming(t *testing.T) { - RegisterMockTestingT(t) - - t.Run("Should Clean Up Log Streaming Resources When PR is closed", func(t *testing.T) { - // Create Log streaming resources - prjCmdOutput := make(chan *jobs.ProjectCmdOutputLine) - storageBackend := jobmocks.NewMockStorageBackend() - prjCmdOutHandler := jobs.NewAsyncProjectCommandOutputHandler(prjCmdOutput, logger, jobs.NewJobStore(storageBackend, tally.NewTestScope("test", map[string]string{})), filter.LogFilter{}) - ctx := command.ProjectContext{ - BaseRepo: fixtures.GithubRepo, - Pull: fixtures.Pull, - ProjectName: *fixtures.Project.Name, - Workspace: "default", - } - - go prjCmdOutHandler.Handle() - prjCmdOutHandler.Send(ctx, "Test Message") - - // Create boltdb and add pull request. - var lockBucket = "bucket" - var configBucket = "configBucket" - var pullsBucketName = "pulls" - - f, err := os.CreateTemp("", "") - if err != nil { - panic(errors.Wrap(err, "failed to create temp file")) - } - path := f.Name() - f.Close() // nolint: errcheck - - // Open the database. 
-    boltDB, err := bolt.Open(path, 0600, nil)
-    if err != nil {
-        panic(errors.Wrap(err, "could not start bolt DB"))
-    }
-    if err := boltDB.Update(func(tx *bolt.Tx) error {
-        if _, err := tx.CreateBucketIfNotExists([]byte(pullsBucketName)); err != nil {
-            return errors.Wrap(err, "failed to create bucket")
-        }
-        return nil
-    }); err != nil {
-        panic(errors.Wrap(err, "could not create bucket"))
-    }
-    db, _ := db.NewWithDB(boltDB, lockBucket, configBucket)
-    result := []command.ProjectResult{
-        {
-            RepoRelDir:  fixtures.GithubRepo.FullName,
-            Workspace:   "default",
-            ProjectName: *fixtures.Project.Name,
-        },
-    }
-
-    // Create a new record for pull
-    _, err = db.UpdatePullWithResults(fixtures.Pull, result)
-    Ok(t, err)
-
-    workingDir := mocks.NewMockWorkingDir()
-    locker := lockmocks.NewMockLocker()
-    client := vcsmocks.NewMockClient()
-    logger := logging.NewNoopCtxLogger(t)
-
-    pullClosedExecutor := events.PullClosedExecutor{
-        Locker:                   locker,
-        WorkingDir:               workingDir,
-        DB:                       db,
-        VCSClient:                client,
-        PullClosedTemplate:       &events.PullClosedEventTemplate{},
-        LogStreamResourceCleaner: prjCmdOutHandler,
-        Logger:                   logger,
-    }
-
-    locks := []models.ProjectLock{
-        {
-            Project:   models.NewProject(fixtures.GithubRepo.FullName, ""),
-            Workspace: "default",
-        },
-    }
-    When(locker.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(locks, nil)
-
-    // Clean up.
-    err = pullClosedExecutor.CleanUpPull(fixtures.GithubRepo, fixtures.Pull)
-    Ok(t, err)
-
-    close(prjCmdOutput)
-    _, _, comment, _ := client.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString(), AnyString()).GetCapturedArguments()
-    expectedComment := "Locks and plans deleted for the projects and workspaces modified in this pull request:\n\n" + "- dir: `.` workspace: `default`"
-    Equals(t, expectedComment, comment)
-
-    // Assert log streaming resources are cleaned up.
-    dfPrjCmdOutputHandler := prjCmdOutHandler.(*jobs.AsyncProjectCommandOutputHandler)
-
-    job, err := dfPrjCmdOutputHandler.JobStore.Get(context.Background(), ctx.PullInfof())
-    Ok(t, err)
-
-    assert.Empty(t, job.Output)
-    assert.Empty(t, dfPrjCmdOutputHandler.GetReceiverBufferForPull(ctx.PullInfof()))
-    })
-}
diff --git a/server/legacy/events/pull_status_fetcher.go b/server/legacy/events/pull_status_fetcher.go
deleted file mode 100644
index b021cb39b..000000000
--- a/server/legacy/events/pull_status_fetcher.go
+++ /dev/null
@@ -1,10 +0,0 @@
-package events
-
-import "github.com/runatlantis/atlantis/server/models"
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pull_status_fetcher.go PullStatusFetcher
-
-// PullStatusFetcher fetches our internal model of a pull request's status
-type PullStatusFetcher interface {
-    GetPullStatus(pull models.PullRequest) (*models.PullStatus, error)
-}
diff --git a/server/legacy/events/repo_allowlist_checker.go b/server/legacy/events/repo_allowlist_checker.go
deleted file mode 100644
index 5acc4e705..000000000
--- a/server/legacy/events/repo_allowlist_checker.go
+++ /dev/null
@@ -1,95 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "fmt" - "strings" -) - -// Wildcard matches 0-n of all characters except commas. -const Wildcard = "*" - -// RepoAllowlistChecker implements checking if repos are allowlisted to be used with -// this Atlantis. -type RepoAllowlistChecker struct { - rules []string -} - -// NewRepoAllowlistChecker constructs a new checker and validates that the -// allowlist isn't malformed. -func NewRepoAllowlistChecker(allowlist string) (*RepoAllowlistChecker, error) { - rules := strings.Split(allowlist, ",") - for _, rule := range rules { - if strings.Contains(rule, "://") { - return nil, fmt.Errorf("allowlist %q contained ://", rule) - } - } - return &RepoAllowlistChecker{ - rules: rules, - }, nil -} - -// IsAllowlisted returns true if this repo is in our allowlist and false -// otherwise. -func (r *RepoAllowlistChecker) IsAllowlisted(repoFullName string, vcsHostname string) bool { - candidate := fmt.Sprintf("%s/%s", vcsHostname, repoFullName) - for _, rule := range r.rules { - if r.matchesRule(rule, candidate) { - return true - } - } - return false -} - -func (r *RepoAllowlistChecker) matchesRule(rule string, candidate string) bool { - // Case insensitive compare. - rule = strings.ToLower(rule) - candidate = strings.ToLower(candidate) - - wildcardIdx := strings.Index(rule, Wildcard) - if wildcardIdx == -1 { - // No wildcard so can do a straight up match. - return candidate == rule - } - - // If the candidate length is less than where we found the wildcard - // then it can't be equal. For example: - // rule: abc* - // candidate: ab - if len(candidate) < wildcardIdx { - return false - } - - // If wildcard is not the last character, substring both to compare what is after the wildcard. Example: - // candidate: repo-abc - // rule: *-abc - // substr(candidate): -abc - // substr(rule): -abc - if wildcardIdx != len(rule)-1 { - // If the rule substring after wildcard does not exist in the candidate, then it is not a match. - idx := strings.LastIndex(candidate, rule[wildcardIdx+1:]) - if idx == -1 { - return false - } - return candidate[idx:] == rule[wildcardIdx+1:] - } - - // If wildcard is last character, substring both so they're comparing before the wildcard. Example: - // candidate: abcd - // rule: abc* - // substr(candidate): abc - // substr(rule): abc - return candidate[:wildcardIdx] == rule[:wildcardIdx] -} diff --git a/server/legacy/events/repo_allowlist_checker_test.go b/server/legacy/events/repo_allowlist_checker_test.go deleted file mode 100644 index 3cd5ad297..000000000 --- a/server/legacy/events/repo_allowlist_checker_test.go +++ /dev/null @@ -1,200 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events" - . "github.com/runatlantis/atlantis/testing" -) - -func TestRepoAllowlistChecker_IsAllowlisted(t *testing.T) { - cases := []struct { - Description string - Allowlist string - RepoFullName string - Hostname string - Exp bool - }{ - { - "exact match", - "github.com/owner/repo", - "owner/repo", - "github.com", - true, - }, - { - "exact match shouldn't match anything else", - "github.com/owner/repo", - "owner/rep", - "github.com", - false, - }, - { - "* should match anything", - "*", - "owner/repo", - "github.com", - true, - }, - { - "github.com* should match anything github", - "github.com*", - "owner/repo", - "github.com", - true, - }, - { - "github.com/o* should match", - "github.com/o*", - "owner/repo", - "github.com", - true, - }, - { - "github.com/owner/rep* should not match", - "github.com/owner/rep*", - "owner/re", - "github.com", - false, - }, - { - "github.com/owner/rep* should match", - "github.com/owner/rep*", - "owner/rep", - "github.com", - true, - }, - { - "github.com/o* should not match", - "github.com/o*", - "somethingelse/repo", - "github.com", - false, - }, - { - "github.com/owner/repo* should match exactly", - "github.com/owner/repo*", - "owner/repo", - "github.com", - true, - }, - { - "github.com/owner/* should match anything in org", - "github.com/owner/*", - "owner/repo", - "github.com", - true, - }, - { - "github.com/owner/* should not match anything not in org", - "github.com/owner/*", - "otherorg/repo", - "github.com", - false, - }, - { - "if there's any * it should match", - "github.com/owner/repo,*", - "otherorg/repo", - "github.com", - true, - }, - { - "any exact match should match", - "github.com/owner/repo,github.com/otherorg/repo", - "otherorg/repo", - "github.com", - true, - }, - { - "longer shouldn't match on exact", - "github.com/owner/repo", - "owner/repo-longer", - "github.com", - false, - }, - { - "should be case insensitive", - "github.com/owner/repo", - "OwNeR/rEpO", - "github.com", - true, - }, - { - "should be case insensitive for wildcards", - "github.com/owner/*", - "OwNeR/rEpO", - "github.com", - true, - }, - { - "should match if wildcard is not last character", - "github.com/owner/*-repo", - "owner/prefix-repo", - "github.com", - true, - }, - { - "should match if wildcard is first character within owner name", - "github.com/*-owner/repo", - "prefix-owner/repo", - "github.com", - true, - }, - { - "should match if wildcard is at beginning", - "*-owner/repo", - "prefix-owner/repo", - "github.com", - true, - }, - { - "should match with duplicate", - "*runatlantis", - "runatlantis/runatlantis", - "github.com", - true, - }, - } - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - w, err := events.NewRepoAllowlistChecker(c.Allowlist) - Ok(t, err) - Equals(t, c.Exp, w.IsAllowlisted(c.RepoFullName, c.Hostname)) - }) - } -} - -// If the allowlist contains a schema then we should get an error. 
-func TestRepoAllowlistChecker_ContainsSchema(t *testing.T) {
-    cases := []struct {
-        allowlist string
-        expErr    string
-    }{
-        {
-            "://",
-            `allowlist "://" contained ://`,
-        },
-    }
-
-    for _, c := range cases {
-        t.Run(c.allowlist, func(t *testing.T) {
-            _, err := events.NewRepoAllowlistChecker(c.allowlist)
-            ErrEquals(t, c.expErr, err)
-        })
-    }
-}
diff --git a/server/legacy/events/runtime/common/common.go b/server/legacy/events/runtime/common/common.go
deleted file mode 100644
index b2c8c602a..000000000
--- a/server/legacy/events/runtime/common/common.go
+++ /dev/null
@@ -1,89 +0,0 @@
-package common
-
-import (
-    "os"
-    "os/exec"
-    "strings"
-)
-
-// Looks for any argument in commandArgs that has been overridden by an entry in extra args and replaces them;
-// any extraArgs that are not used as overrides are added to the end of the final string slice
-func DeDuplicateExtraArgs(commandArgs []string, extraArgs []string) []string {
-    // work out if any of the core args have been overridden
-    finalArgs := []string{}
-    usedExtraArgs := []string{}
-    for _, arg := range commandArgs {
-        override := ""
-        prefix := arg
-        argSplit := strings.Split(arg, "=")
-        if len(argSplit) == 2 {
-            prefix = argSplit[0]
-        }
-        for _, extraArgOrig := range extraArgs {
-            extraArg := extraArgOrig
-            if strings.HasPrefix(extraArg, prefix) {
-                override = extraArgOrig
-                break
-            }
-            if strings.HasPrefix(extraArg, "--") {
-                extraArg = extraArgOrig[1:]
-                if strings.HasPrefix(extraArg, prefix) {
-                    override = extraArgOrig
-                    break
-                }
-            }
-            if strings.HasPrefix(prefix, "--") {
-                prefixWithoutDash := prefix[1:]
-                if strings.HasPrefix(extraArg, prefixWithoutDash) {
-                    override = extraArgOrig
-                    break
-                }
-            }
-        }
-        if override != "" {
-            finalArgs = append(finalArgs, override)
-            usedExtraArgs = append(usedExtraArgs, override)
-        } else {
-            finalArgs = append(finalArgs, arg)
-        }
-    }
-    // add any extra args that are not overrides
-    for _, extraArg := range extraArgs {
-        if !stringInSlice(usedExtraArgs, extraArg) {
-            finalArgs = append(finalArgs, extraArg)
-        }
-    }
-    return finalArgs
-}
-
-// returns true if a file at the passed path exists
-func FileExists(path string) bool {
-    if _, err := os.Stat(path); err != nil {
-        if os.IsNotExist(err) {
-            return false
-        }
-    }
-    return true
-}
-
-// returns true if the given file is tracked by git
-func IsFileTracked(cloneDir string, filename string) (bool, error) {
-    cmd := exec.Command("git", "ls-files", filename)
-    cmd.Dir = cloneDir
-
-    output, err := cmd.CombinedOutput()
-
-    if err != nil {
-        return false, err
-    }
-    return len(output) > 0, nil
-}
-
-func stringInSlice(stringSlice []string, target string) bool {
-    for _, value := range stringSlice {
-        if value == target {
-            return true
-        }
-    }
-    return false
-}
diff --git a/server/legacy/events/runtime/common/common_test.go b/server/legacy/events/runtime/common/common_test.go
deleted file mode 100644
index d426eaf0e..000000000
--- a/server/legacy/events/runtime/common/common_test.go
+++ /dev/null
@@ -1,86 +0,0 @@
-package common
-
-import (
-    "reflect"
-    "testing"
-)
-
-func Test_DeDuplicateExtraArgs(t *testing.T) {
-    cases := []struct {
-        description  string
-        inputArgs    []string
-        extraArgs    []string
-        expectedArgs []string
-    }{
-        {
-            "No extra args",
-            []string{"init", "-input=false", "-no-color", "-upgrade"},
-            []string{},
-            []string{"init", "-input=false", "-no-color", "-upgrade"},
-        },
-        {
-            "Override -upgrade",
-            []string{"init", "-input=false", "-no-color", "-upgrade"},
-            []string{"-upgrade=false"},
-            []string{"init", "-input=false",
"-no-color", "-upgrade=false"}, - }, - { - "Override -input", - []string{"init", "-input=false", "-no-color", "-upgrade"}, - []string{"-input=true"}, - []string{"init", "-input=true", "-no-color", "-upgrade"}, - }, - { - "Override -input and -upgrade", - []string{"init", "-input=false", "-no-color", "-upgrade"}, - []string{"-input=true", "-upgrade=false"}, - []string{"init", "-input=true", "-no-color", "-upgrade=false"}, - }, - { - "Non duplicate extra args", - []string{"init", "-input=false", "-no-color", "-upgrade"}, - []string{"extra", "args"}, - []string{"init", "-input=false", "-no-color", "-upgrade", "extra", "args"}, - }, - { - "Override upgrade with extra args", - []string{"init", "-input=false", "-no-color", "-upgrade"}, - []string{"extra", "args", "-upgrade=false"}, - []string{"init", "-input=false", "-no-color", "-upgrade=false", "extra", "args"}, - }, - { - "Override -input (using --input)", - []string{"init", "-input=false", "-no-color", "-upgrade"}, - []string{"--input=true"}, - []string{"init", "--input=true", "-no-color", "-upgrade"}, - }, - { - "Override -input (using --input) and -upgrade (using --upgrade)", - []string{"init", "-input=false", "-no-color", "-upgrade"}, - []string{"--input=true", "--upgrade=false"}, - []string{"init", "--input=true", "-no-color", "--upgrade=false"}, - }, - { - "Override long form flag ", - []string{"init", "--input=false", "-no-color", "-upgrade"}, - []string{"--input=true"}, - []string{"init", "--input=true", "-no-color", "-upgrade"}, - }, - { - "Override --input using (-input) ", - []string{"init", "--input=false", "-no-color", "-upgrade"}, - []string{"-input=true"}, - []string{"init", "-input=true", "-no-color", "-upgrade"}, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - finalArgs := DeDuplicateExtraArgs(c.inputArgs, c.extraArgs) - - if !reflect.DeepEqual(c.expectedArgs, finalArgs) { - t.Fatalf("finalArgs (%v) does not match expectedArgs (%v)", finalArgs, c.expectedArgs) - } - }) - } -} diff --git a/server/legacy/events/runtime/mocks/matchers/models_approvalstatus.go b/server/legacy/events/runtime/mocks/matchers/models_approvalstatus.go deleted file mode 100644 index 7be3789b3..000000000 --- a/server/legacy/events/runtime/mocks/matchers/models_approvalstatus.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsApprovalStatus() models.ApprovalStatus { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ApprovalStatus))(nil)).Elem())) - var nullValue models.ApprovalStatus - return nullValue -} - -func EqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.ApprovalStatus - return nullValue -} - -func NotEqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.ApprovalStatus - return nullValue -} - -func ModelsApprovalStatusThat(matcher pegomock.ArgumentMatcher) models.ApprovalStatus { - pegomock.RegisterMatcher(matcher) - var nullValue models.ApprovalStatus - return nullValue -} diff --git a/server/legacy/events/size_limited_project_command_builder.go b/server/legacy/events/size_limited_project_command_builder.go deleted file mode 100644 index 136a2f34f..000000000 --- a/server/legacy/events/size_limited_project_command_builder.go +++ /dev/null @@ -1,53 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -type SizeLimitedProjectCommandBuilder struct { - Limit int - ProjectCommandBuilder -} - -func (b *SizeLimitedProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) { - projects, err := b.ProjectCommandBuilder.BuildAutoplanCommands(ctx) - - if err != nil { - return projects, err - } - - return projects, b.CheckAgainstLimit(projects) -} - -func (b *SizeLimitedProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - projects, err := b.ProjectCommandBuilder.BuildPlanCommands(ctx, comment) - - if err != nil { - return projects, err - } - - return projects, b.CheckAgainstLimit(projects) -} - -func (b *SizeLimitedProjectCommandBuilder) CheckAgainstLimit(projects []command.ProjectContext) error { - var planCommands []command.ProjectContext - - for _, project := range projects { - if project.CommandName == command.Plan { - planCommands = append(planCommands, project) - } - } - - if b.Limit != InfiniteProjectsPerPR && len(planCommands) > b.Limit { - return fmt.Errorf( - "Number of projects cannot exceed %d. This can either be caused by:\n"+ - "1) GH failure in recognizing the diff\n"+ - "2) Pull Request batch is too large for the given Atlantis instance\n\n"+ - "Please break this pull request into smaller batches and try again.", - b.Limit, - ) - } - return nil -} diff --git a/server/legacy/events/size_limited_project_command_builder_test.go b/server/legacy/events/size_limited_project_command_builder_test.go deleted file mode 100644 index f939e7c51..000000000 --- a/server/legacy/events/size_limited_project_command_builder_test.go +++ /dev/null @@ -1,179 +0,0 @@ -package events_test - -import ( - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestSizeLimitedProjectCommandBuilder_autoplan(t *testing.T) { - RegisterMockTestingT(t) - - ctx := &command.Context{} - - project1 := command.ProjectContext{ - ProjectName: "test1", - CommandName: command.Plan, - } - - project2 := command.ProjectContext{ - ProjectName: "test2", - CommandName: command.Plan, - } - - project3 := command.ProjectContext{ - ProjectName: "test1", - CommandName: command.PolicyCheck, - } - - expectedResult := []command.ProjectContext{project1, project2} - delegate := mockProjectCommandBuilder{ - commands: expectedResult, - } - - t.Run("Limit Defined and Breached", func(t *testing.T) { - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: 1, - ProjectCommandBuilder: delegate, - } - _, err := subject.BuildAutoplanCommands(ctx) - - ErrEquals(t, `Number of projects cannot exceed 1. This can either be caused by: -1) GH failure in recognizing the diff -2) Pull Request batch is too large for the given Atlantis instance - -Please break this pull request into smaller batches and try again.`, err) - }) - - t.Run("Limit defined and not breached", func(t *testing.T) { - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: 2, - ProjectCommandBuilder: delegate, - } - result, err := subject.BuildAutoplanCommands(ctx) - - Ok(t, err) - - Assert(t, len(result) == len(expectedResult), "size is expected") - }) - - t.Run("Limit not defined", func(t *testing.T) { - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: events.InfiniteProjectsPerPR, - ProjectCommandBuilder: delegate, - } - result, err := subject.BuildAutoplanCommands(ctx) - - Ok(t, err) - - Assert(t, len(result) == len(expectedResult), "size is expected") - }) - - t.Run("Only plan commands counted in limit", func(t *testing.T) { - resultWithPolicyCheckCommand := []command.ProjectContext{project1, project2, project3} - delegate = mockProjectCommandBuilder{ - commands: resultWithPolicyCheckCommand, - } - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: 2, - ProjectCommandBuilder: delegate, - } - result, err := subject.BuildAutoplanCommands(ctx) - - Ok(t, err) - - Assert(t, len(result) == len(resultWithPolicyCheckCommand), "size is expected") - }) -} - -func TestSizeLimitedProjectCommandBuilder_planComment(t *testing.T) { - RegisterMockTestingT(t) - ctx := &command.Context{} - - comment := &command.Comment{} - - project1 := command.ProjectContext{ - ProjectName: "test1", - CommandName: command.Plan, - } - - project2 := command.ProjectContext{ - ProjectName: "test2", - CommandName: command.Plan, - } - - expectedResult := []command.ProjectContext{project1, project2} - delegate := mockProjectCommandBuilder{ - commands: expectedResult, - } - - t.Run("Limit Defined and Breached", func(t *testing.T) { - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: 1, - ProjectCommandBuilder: delegate, - } - _, err := subject.BuildPlanCommands(ctx, comment) - - ErrEquals(t, `Number of projects cannot exceed 1. 
This can either be caused by: -1) GH failure in recognizing the diff -2) Pull Request batch is too large for the given Atlantis instance - -Please break this pull request into smaller batches and try again.`, err) - }) - - t.Run("Limit defined and not breached", func(t *testing.T) { - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: 2, - ProjectCommandBuilder: delegate, - } - result, err := subject.BuildPlanCommands(ctx, comment) - - Ok(t, err) - - Assert(t, len(result) == len(expectedResult), "size is expected") - }) - - t.Run("Limit not defined", func(t *testing.T) { - subject := &events.SizeLimitedProjectCommandBuilder{ - Limit: events.InfiniteProjectsPerPR, - ProjectCommandBuilder: delegate, - } - result, err := subject.BuildPlanCommands(ctx, comment) - - Ok(t, err) - - Assert(t, len(result) == len(expectedResult), "size is expected") - }) -} - -type mockProjectCommandBuilder struct { - commands []command.ProjectContext - error error -} - -func (m mockProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Context) ([]command.ProjectContext, error) { - return m.commands, m.error -} - -func (m mockProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - return m.commands, m.error -} - -func (m mockProjectCommandBuilder) BuildPolicyCheckCommands(ctx *command.Context) ([]command.ProjectContext, error) { - return m.commands, m.error -} - -func (m mockProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - return m.commands, m.error -} - -func (m mockProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - return m.commands, m.error -} - -func (m mockProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - return m.commands, m.error -} diff --git a/server/legacy/events/stale_command_handler.go b/server/legacy/events/stale_command_handler.go deleted file mode 100644 index 7cc539be2..000000000 --- a/server/legacy/events/stale_command_handler.go +++ /dev/null @@ -1,19 +0,0 @@ -package events - -import ( - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/uber-go/tally/v4" -) - -type StaleCommandHandler struct { - StaleStatsScope tally.Scope -} - -func (s *StaleCommandHandler) CommandIsStale(ctx *command.Context) bool { - status := ctx.PullStatus - if status != nil && status.UpdatedAt > ctx.TriggerTimestamp.Unix() { - s.StaleStatsScope.Counter("dropped_commands").Inc(1) - return true - } - return false -} diff --git a/server/legacy/events/stale_command_handler_test.go b/server/legacy/events/stale_command_handler_test.go deleted file mode 100644 index b2c3e2c72..000000000 --- a/server/legacy/events/stale_command_handler_test.go +++ /dev/null @@ -1,69 +0,0 @@ -package events_test - -import ( - "testing" - "time" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" - "github.com/uber-go/tally/v4" -) - -func TestStaleCommandHandler_CommandIsStale(t *testing.T) { - olderTimestamp := time.Unix(123, 456) - newerTimestamp := time.Unix(124, 457) - testScope := tally.NewTestScope("test", nil) - cases := []struct { - Description string - PullStatus models.PullStatus - CommandTimestamp time.Time - Expected bool - }{ - { - Description: "simple stale command", - PullStatus: models.PullStatus{ - UpdatedAt: newerTimestamp.Unix(), - }, - CommandTimestamp: olderTimestamp, - Expected: true, - }, - { - Description: "simple not stale command", - PullStatus: models.PullStatus{ - UpdatedAt: olderTimestamp.Unix(), - }, - CommandTimestamp: newerTimestamp, - Expected: false, - }, - } - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - RegisterMockTestingT(t) - pull := c.PullStatus - ctx := &command.Context{ - TriggerTimestamp: c.CommandTimestamp, - PullStatus: &pull, - } - staleCommandHandler := &events.StaleCommandHandler{ - StaleStatsScope: testScope, - } - Assert(t, c.Expected == staleCommandHandler.CommandIsStale(ctx), - "CommandIsStale returned value should be %v", c.Expected) - }) - } - Assert(t, testScope.Snapshot().Counters()["test.dropped_commands+"].Value() == 1, "counted commands doesn't equal 1") -} - -func TestStaleCommandHandler_CommandIsStale_NilPullModel(t *testing.T) { - RegisterMockTestingT(t) - testScope := tally.NewTestScope("test", nil) - staleCommandHandler := &events.StaleCommandHandler{ - StaleStatsScope: testScope, - } - Assert(t, staleCommandHandler.CommandIsStale(&command.Context{}) == false, - "CommandIsStale returned value should be false") - Assert(t, len(testScope.Snapshot().Counters()) == 0, "no counters should have started") -} diff --git a/server/legacy/events/terraform/ansi/strip.go b/server/legacy/events/terraform/ansi/strip.go deleted file mode 100644 index fa8265de2..000000000 --- a/server/legacy/events/terraform/ansi/strip.go +++ /dev/null @@ -1,13 +0,0 @@ -package ansi - -import ( - "regexp" -) - -const ansi = "[\u001B\u009B][[\\]()#;?]*(?:(?:(?:[a-zA-Z\\d]*(?:;[a-zA-Z\\d]*)*)?\u0007)|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PRZcf-ntqry=><~]))" - -var re = regexp.MustCompile(ansi) - -func Strip(str string) string { - return re.ReplaceAllString(str, "") -} diff --git a/server/legacy/events/terraform/filter/filter.go b/server/legacy/events/terraform/filter/filter.go deleted file mode 100644 index 214020cbb..000000000 --- a/server/legacy/events/terraform/filter/filter.go +++ /dev/null @@ -1,18 +0,0 @@ -package filter - -import ( - "regexp" -) - -type LogFilter struct { - Regexes []*regexp.Regexp -} - -func (l *LogFilter) ShouldFilterLine(message string) bool { - for _, regex := range l.Regexes { - if regex.MatchString(message) { - return true - } - } - return false -} diff --git a/server/legacy/events/terraform/filter/filter_test.go b/server/legacy/events/terraform/filter/filter_test.go deleted file mode 100644 index 09959d356..000000000 --- a/server/legacy/events/terraform/filter/filter_test.go +++ /dev/null @@ -1,25 +0,0 @@ -package filter_test - -import ( - "regexp" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/terraform/filter" - "github.com/stretchr/testify/assert" -) - -func TestLogFilter_ShouldFilter(t *testing.T) { - regex := regexp.MustCompile("abc*") - filter := filter.LogFilter{ - Regexes: []*regexp.Regexp{regex}, - } - assert.True(t, filter.ShouldFilterLine("abcd")) -} - -func TestLogFilter_ShouldNotFilter(t *testing.T) { - regex := 
regexp.MustCompile("abc*") - filter := filter.LogFilter{ - Regexes: []*regexp.Regexp{regex}, - } - assert.False(t, filter.ShouldFilterLine("efg")) -} diff --git a/server/legacy/events/unlock_command_runner.go b/server/legacy/events/unlock_command_runner.go deleted file mode 100644 index 72a88d0b0..000000000 --- a/server/legacy/events/unlock_command_runner.go +++ /dev/null @@ -1,42 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" -) - -func NewUnlockCommandRunner( - deleteLockCommand DeleteLockCommand, - vcsClient vcs.Client, -) *UnlockCommandRunner { - return &UnlockCommandRunner{ - deleteLockCommand: deleteLockCommand, - vcsClient: vcsClient, - } -} - -type UnlockCommandRunner struct { - vcsClient vcs.Client - deleteLockCommand DeleteLockCommand -} - -func (u *UnlockCommandRunner) Run( - ctx *command.Context, - cmd *command.Comment, -) { - baseRepo := ctx.Pull.BaseRepo - pullNum := ctx.Pull.Num - - vcsMessage := "All Atlantis locks for this PR have been unlocked and plans discarded" - _, err := u.deleteLockCommand.DeleteLocksByPull(baseRepo.FullName, pullNum) - if err != nil { - vcsMessage = "Failed to delete PR locks" - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("failed to delete locks by pull %s", err.Error())) - } - - if commentErr := u.vcsClient.CreateComment(baseRepo, pullNum, vcsMessage, command.Unlock.String()); commentErr != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to comment: %s", commentErr)) - } -} diff --git a/server/legacy/events/vcs/client.go b/server/legacy/events/vcs/client.go deleted file mode 100644 index 046a162ac..000000000 --- a/server/legacy/events/vcs/client.go +++ /dev/null @@ -1,50 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package vcs - -import ( - "context" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" -) - -// Client is used to make API calls to a VCS host like GitHub. -// -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_client.go Client -//nolint:interfacebloat -type Client interface { - // GetModifiedFiles returns the names of files that were modified in the merge request - // relative to the repo root, e.g. parent/child/file.txt. - GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) - CreateComment(repo models.Repo, pullNum int, comment string, command string) error - HidePrevCommandComments(repo models.Repo, pullNum int, command string) error - PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) - PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) - // UpdateStatus updates the commit status to state for pull. src is the - // source of this status. 
This should be relatively static across runs,
-    // ex. atlantis/plan or atlantis/apply.
-    // description is a description of this particular status update and can
-    // change across runs.
-    // url is an optional link that users should click on for more information
-    // about this status.
-    UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error)
-    MarkdownPullLink(pull models.PullRequest) (string, error)
-
-    // DownloadRepoConfigFile returns the `atlantis.yaml` content from the VCS host (for hosts that support fetching a single file from a repository).
-    // The first return value indicates whether the repo contains an atlantis.yaml file;
-    // if the base repo has a repo config file, its content is returned as the second value.
-    DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error)
-    SupportsSingleFileDownload(repo models.Repo) bool
-}
diff --git a/server/legacy/events/vcs/client_test.go b/server/legacy/events/vcs/client_test.go
deleted file mode 100644
index 53a710530..000000000
--- a/server/legacy/events/vcs/client_test.go
+++ /dev/null
@@ -1,16 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-
-package vcs
-
-// purposefully empty to trigger coverage report
diff --git a/server/legacy/events/vcs/common/common.go b/server/legacy/events/vcs/common/common.go
deleted file mode 100644
index 010a4d3e0..000000000
--- a/server/legacy/events/vcs/common/common.go
+++ /dev/null
@@ -1,39 +0,0 @@
-// Package common is used to share common code between all VCS clients without
-// running into circular dependency issues.
-package common
-
-import (
-    "math"
-)
-
-// SplitComment splits comment into a slice of comments that are under maxSize.
-// It appends sepEnd to all comments that have a following comment.
-// It prepends sepStart to all comments that have a preceding comment.
-func SplitComment(comment string, maxSize int, sepEnd string, sepStart string) []string {
-    if len(comment) <= maxSize {
-        return []string{comment}
-    }
-
-    maxWithSep := maxSize - len(sepEnd) - len(sepStart)
-    var comments []string
-    numComments := int(math.Ceil(float64(len(comment)) / float64(maxWithSep)))
-    for i := 0; i < numComments; i++ {
-        upTo := min(len(comment), (i+1)*maxWithSep)
-        portion := comment[i*maxWithSep : upTo]
-        if i < numComments-1 {
-            portion += sepEnd
-        }
-        if i > 0 {
-            portion = sepStart + portion
-        }
-        comments = append(comments, portion)
-    }
-    return comments
-}
-
-func min(a, b int) int {
-    if a < b {
-        return a
-    }
-    return b
-}
diff --git a/server/legacy/events/vcs/common/common_test.go b/server/legacy/events/vcs/common/common_test.go
deleted file mode 100644
index e4189d1cd..000000000
--- a/server/legacy/events/vcs/common/common_test.go
+++ /dev/null
@@ -1,63 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package common_test - -import ( - "strings" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs/common" - - . "github.com/runatlantis/atlantis/testing" -) - -// If under the maximum number of chars, we shouldn't split the comments. -func TestSplitComment_UnderMax(t *testing.T) { - comment := "comment under max size" - split := common.SplitComment(comment, len(comment)+1, "sepEnd", "sepStart") - Equals(t, []string{comment}, split) -} - -// If the comment needs to be split into 2 we should do the split and add the -// separators properly. -func TestSplitComment_TwoComments(t *testing.T) { - comment := strings.Repeat("a", 1000) - sepEnd := "-sepEnd" - sepStart := "-sepStart" - split := common.SplitComment(comment, len(comment)-1, sepEnd, sepStart) - - expCommentLen := len(comment) - len(sepEnd) - len(sepStart) - 1 - expFirstComment := comment[:expCommentLen] - expSecondComment := comment[expCommentLen:] - Equals(t, 2, len(split)) - Equals(t, expFirstComment+sepEnd, split[0]) - Equals(t, sepStart+expSecondComment, split[1]) -} - -// If the comment needs to be split into 4 we should do the split and add the -// separators properly. -func TestSplitComment_FourComments(t *testing.T) { - comment := strings.Repeat("a", 1000) - sepEnd := "-sepEnd" - sepStart := "-sepStart" - max := (len(comment) / 4) + len(sepEnd) + len(sepStart) - split := common.SplitComment(comment, max, sepEnd, sepStart) - - expMax := len(comment) / 4 - Equals(t, []string{ - comment[:expMax] + sepEnd, - sepStart + comment[expMax:expMax*2] + sepEnd, - sepStart + comment[expMax*2:expMax*3] + sepEnd, - sepStart + comment[expMax*3:]}, split) -} diff --git a/server/legacy/events/vcs/fixtures/azuredevops-policyevaluations.json b/server/legacy/events/vcs/fixtures/azuredevops-policyevaluations.json deleted file mode 100644 index 57fbb6263..000000000 --- a/server/legacy/events/vcs/fixtures/azuredevops-policyevaluations.json +++ /dev/null @@ -1,17 +0,0 @@ -{ - "value": [ - { - "configuration": { - "isDeleted": false, - "isEnabled": true, - "isBlocking": true, - "settings": { - "statusGenre": "Atlantis Bot/atlantis", - "statusName": "plan" - } - }, - "status": "approved" - } - ], - "count": 1 -} \ No newline at end of file diff --git a/server/legacy/events/vcs/fixtures/azuredevops-pr.json b/server/legacy/events/vcs/fixtures/azuredevops-pr.json deleted file mode 100644 index ca5f44f44..000000000 --- a/server/legacy/events/vcs/fixtures/azuredevops-pr.json +++ /dev/null @@ -1,46 +0,0 @@ -{ - "repository": { - "id": "22222222-2222-2222-222222222222", - "name": "MyRepository", - "project": { - "id": "33333333-3333-3333-333333333333", - "name": "MyProject", - "description": "The place for MyProject" - } - }, - "status": "active", - "createdBy": { - "displayName": "Atlantis Author", - "id": "11111111-1111-1111-111111111111", - "uniqueName": "atlantis.author@example.com" - }, - "mergeStatus": "notSet", - "isDraft": false, - "autoCompleteSetBy": { - "id": "11111111-1111-1111-111111111111", - "displayName": "Atlantis Author", - "uniqueName": 
"atlantis.author@example.com" - }, - "pullRequestId": 22, - "completionOptions": { - "bypassPolicy": false, - "bypassReason": "", - "deleteSourceBranch": false, - "mergeCommitMessage": "TEST MERGE COMMIT MESSAGE", - "mergeStrategy": "noFastForward", - "squashMerge": false, - "transitionWorkItems": true, - "triggeredByAutoComplete": false - }, - "reviewers": [ - { - "reviewerUrl": "https://example:8080/tfs/_apis/git/repositories/8010495e-1002-438d-acbf-aaf245dac7c2/pullRequests/22/reviewers/8010495e-1002-438d-acbf-aaf245dac7c2", - "vote": 0, - "id": "8010495e-1002-438d-acbf-aaf245dac7c2", - "displayName": "Atlantis Reviewer", - "uniqueName": "atlantis.reviewer@example.com", - "url": "https://owner:8080/tfs/_apis/Identities/8010495e-1002-438d-acbf-aaf245dac7c2", - "imageUrl": "https://owner:8080/tfs/_api/_common/identityImage?id=8010495e-1002-438d-acbf-aaf245dac7c2" - } - ] -} \ No newline at end of file diff --git a/server/legacy/events/vcs/fixtures/fixtures.go b/server/legacy/events/vcs/fixtures/fixtures.go deleted file mode 100644 index 4ce4e184b..000000000 --- a/server/legacy/events/vcs/fixtures/fixtures.go +++ /dev/null @@ -1,408 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. 
- -package fixtures - -import ( - "fmt" - "net/http" - "net/http/httptest" - "net/url" - "strings" - "testing" - - "github.com/dgrijalva/jwt-go" - "github.com/google/go-github/v45/github" -) - -var PullEvent = github.PullRequestEvent{ - Sender: &github.User{ - Login: github.String("user"), - }, - Repo: &Repo, - PullRequest: &Pull, - Action: github.String("opened"), -} - -var Pull = github.PullRequest{ - Head: &github.PullRequestBranch{ - SHA: github.String("sha256"), - Ref: github.String("ref"), - Repo: &Repo, - }, - Base: &github.PullRequestBranch{ - SHA: github.String("sha256"), - Repo: &Repo, - Ref: github.String("basebranch"), - }, - HTMLURL: github.String("html-url"), - User: &github.User{ - Login: github.String("user"), - }, - Number: github.Int(1), - State: github.String("open"), -} - -var Repo = github.Repository{ - FullName: github.String("owner/repo"), - Owner: &github.User{Login: github.String("owner")}, - Name: github.String("repo"), - CloneURL: github.String("https://github.com/owner/repo.git"), -} - -// nolint: gosec -const GithubPrivateKey = `-----BEGIN RSA PRIVATE KEY----- -MIIEowIBAAKCAQEAuEPzOUE+kiEH1WLiMeBytTEF856j0hOVcSUSUkZxKvqczkWM -9vo1gDyC7ZXhdH9fKh32aapba3RSsp4ke+giSmYTk2mGR538ShSDxh0OgpJmjiKP -X0Bj4j5sFqfXuCtl9SkH4iueivv4R53ktqM+n6hk98l6hRwC39GVIblAh2lEM4L/ -6WvYwuQXPMM5OG2Ryh2tDZ1WS5RKfgq+9ksNJ5Q9UtqtqHkO+E63N5OK9sbzpUUm -oNaOl3udTlZD3A8iqwMPVxH4SxgATBPAc+bmjk6BMJ0qIzDcVGTrqrzUiywCTLma -szdk8GjzXtPDmuBgNn+o6s02qVGpyydgEuqmTQIDAQABAoIBACL6AvkjQVVLn8kJ -dBYznJJ4M8ECo+YEgaFwgAHODT0zRQCCgzd+Vxl4YwHmKV2Lr+y2s0drZt8GvYva -KOK8NYYZyi15IlwFyRXmvvykF1UBpSXluYFDH7KaVroWMgRreHcIys5LqVSIb6Bo -gDmK0yBLPp8qR29s2b7ScZRtLaqGJiX+j55rNzrZwxHkxFHyG9OG+u9IsBElcKCP -kYCVE8ZdYexfnKOZbgn2kZB9qu0T/Mdvki8yk3I2bI6xYO24oQmhnT36qnqWoCBX -NuCNsBQgpYZeZET8mEAUmo9d+ABmIHIvSs005agK8xRaP4+6jYgy6WwoejJRF5yd -NBuF7aECgYEA50nZ4FiZYV0vcJDxFYeY3kYOvVuKn8OyW+2rg7JIQTremIjv8FkE -ZnwuF9ZRxgqLxUIfKKfzp/5l5LrycNoj2YKfHKnRejxRWXqG+ZETfxxlmlRns0QG -J4+BYL0CoanDSeA4fuyn4Bv7cy/03TDhfg/Uq0Aeg+hhcPE/vx3ebPsCgYEAy/Pv -eDLssOSdeyIxf0Brtocg6aPXIVaLdus+bXmLg77rJIFytAZmTTW8SkkSczWtucI3 -FI1I6sei/8FdPzAl62/JDdlf7Wd9K7JIotY4TzT7Tm7QU7xpfLLYIP1bOFjN81rk -77oOD4LsXcosB/U6s1blPJMZ6AlO2EKs10UuR1cCgYBipzuJ2ADEaOz9RLWwi0AH -Pza2Sj+c2epQD9ZivD7Zo/Sid3ZwvGeGF13JyR7kLEdmAkgsHUdu1rI7mAolXMaB -1pdrsHureeLxGbRM6za3tzMXWv1Il7FQWoPC8ZwXvMOR1VQDv4nzq7vbbA8z8c+c -57+8tALQHOTDOgQIzwK61QKBgERGVc0EJy4Uag+VY8J4m1ZQKBluqo7TfP6DQ7O8 -M5MX73maB/7yAX8pVO39RjrhJlYACRZNMbK+v/ckEQYdJSSKmGCVe0JrGYDuPtic -I9+IGfSorf7KHPoMmMN6bPYQ7Gjh7a++tgRFTMEc8956Hnt4xGahy9NcglNtBpVN -6G8jAoGBAMCh028pdzJa/xeBHLLaVB2sc0Fe7993WlsPmnVE779dAz7qMscOtXJK -fgtriltLSSD6rTA9hUAsL/X62rY0wdXuNdijjBb/qvrx7CAV6i37NK1CjABNjsfG -ZM372Ac6zc1EqSrid2IjET1YqyIW2KGLI1R2xbQc98UGlt48OdWu ------END RSA PRIVATE KEY----- -` - -// https://developer.github.com/v3/apps/#response-9 -var githubConversionJSON = `{ - "id": 1, - "node_id": "MDM6QXBwNTk=", - "owner": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - 
"organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "name": "Atlantis", - "description": null, - "external_url": "https://atlantis.example.com", - "html_url": "https://github.com/apps/atlantis", - "created_at": "2018-09-13T12:28:37Z", - "updated_at": "2018-09-13T12:28:37Z", - "client_id": "Iv1.8a61f9b3a7aba766", - "pem": "%s" -}` - -var githubAppInstallationJSON = `[ - { - "id": 1, - "account": { - "login": "github", - "id": 1, - "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", - "url": "https://api.github.com/orgs/github", - "repos_url": "https://api.github.com/orgs/github/repos", - "events_url": "https://api.github.com/orgs/github/events", - "hooks_url": "https://api.github.com/orgs/github/hooks", - "issues_url": "https://api.github.com/orgs/github/issues", - "members_url": "https://api.github.com/orgs/github/members{/member}", - "public_members_url": "https://api.github.com/orgs/github/public_members{/member}", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "description": "A great organization" - }, - "access_tokens_url": "https://api.github.com/installations/1/access_tokens", - "repositories_url": "https://api.github.com/installation/repositories", - "html_url": "https://github.com/organizations/github/settings/installations/1", - "app_id": 1, - "target_id": 1, - "target_type": "Organization", - "permissions": { - "metadata": "read", - "contents": "read", - "issues": "write", - "single_file": "write" - }, - "events": [ - "push", - "pull_request" - ], - "single_file_name": "config.yml", - "repository_selection": "selected" - } -]` - -// nolint: gosec -var githubAppTokenJSON = `{ - "token": "some-token", - "expires_at": "2050-01-01T00:00:00Z", - "permissions": { - "issues": "write", - "contents": "read" - }, - "repositories": [ - { - "id": 1296269, - "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5", - "name": "Hello-World", - "full_name": "octocat/Hello-World", - "owner": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/octocat/Hello-World", - "description": "This your first repo!", - "fork": false, - "url": "https://api.github.com/repos/octocat/Hello-World", - "archive_url": "http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}", - "assignees_url": "http://api.github.com/repos/octocat/Hello-World/assignees{/user}", - "blobs_url": 
"http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}", - "branches_url": "http://api.github.com/repos/octocat/Hello-World/branches{/branch}", - "collaborators_url": "http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}", - "comments_url": "http://api.github.com/repos/octocat/Hello-World/comments{/number}", - "commits_url": "http://api.github.com/repos/octocat/Hello-World/commits{/sha}", - "compare_url": "http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}", - "contents_url": "http://api.github.com/repos/octocat/Hello-World/contents/{+path}", - "contributors_url": "http://api.github.com/repos/octocat/Hello-World/contributors", - "deployments_url": "http://api.github.com/repos/octocat/Hello-World/deployments", - "downloads_url": "http://api.github.com/repos/octocat/Hello-World/downloads", - "events_url": "http://api.github.com/repos/octocat/Hello-World/events", - "forks_url": "http://api.github.com/repos/octocat/Hello-World/forks", - "git_commits_url": "http://api.github.com/repos/octocat/Hello-World/git/commits{/sha}", - "git_refs_url": "http://api.github.com/repos/octocat/Hello-World/git/refs{/sha}", - "git_tags_url": "http://api.github.com/repos/octocat/Hello-World/git/tags{/sha}", - "git_url": "git:github.com/octocat/Hello-World.git", - "issue_comment_url": "http://api.github.com/repos/octocat/Hello-World/issues/comments{/number}", - "issue_events_url": "http://api.github.com/repos/octocat/Hello-World/issues/events{/number}", - "issues_url": "http://api.github.com/repos/octocat/Hello-World/issues{/number}", - "keys_url": "http://api.github.com/repos/octocat/Hello-World/keys{/key_id}", - "labels_url": "http://api.github.com/repos/octocat/Hello-World/labels{/name}", - "languages_url": "http://api.github.com/repos/octocat/Hello-World/languages", - "merges_url": "http://api.github.com/repos/octocat/Hello-World/merges", - "milestones_url": "http://api.github.com/repos/octocat/Hello-World/milestones{/number}", - "notifications_url": "http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}", - "pulls_url": "http://api.github.com/repos/octocat/Hello-World/pulls{/number}", - "releases_url": "http://api.github.com/repos/octocat/Hello-World/releases{/id}", - "ssh_url": "git@github.com:octocat/Hello-World.git", - "stargazers_url": "http://api.github.com/repos/octocat/Hello-World/stargazers", - "statuses_url": "http://api.github.com/repos/octocat/Hello-World/statuses/{sha}", - "subscribers_url": "http://api.github.com/repos/octocat/Hello-World/subscribers", - "subscription_url": "http://api.github.com/repos/octocat/Hello-World/subscription", - "tags_url": "http://api.github.com/repos/octocat/Hello-World/tags", - "teams_url": "http://api.github.com/repos/octocat/Hello-World/teams", - "trees_url": "http://api.github.com/repos/octocat/Hello-World/git/trees{/sha}", - "clone_url": "https://github.com/octocat/Hello-World.git", - "mirror_url": "git:git.example.com/octocat/Hello-World", - "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks", - "svn_url": "https://svn.github.com/octocat/Hello-World", - "homepage": "https://github.com", - "language": null, - "forks_count": 9, - "stargazers_count": 80, - "watchers_count": 80, - "size": 108, - "default_branch": "master", - "open_issues_count": 0, - "is_template": true, - "topics": [ - "octocat", - "atom", - "electron", - "api" - ], - "has_issues": true, - "has_projects": true, - "has_wiki": true, - "has_pages": false, - "has_downloads": true, - "archived": 
false, - "disabled": false, - "visibility": "public", - "pushed_at": "2011-01-26T19:06:43Z", - "created_at": "2011-01-26T19:01:12Z", - "updated_at": "2011-01-26T19:14:43Z", - "permissions": { - "admin": false, - "push": false, - "pull": true - }, - "allow_rebase_merge": true, - "template_repository": null, - "temp_clone_token": "ABTLWHOULUVAXGTRYU7OC2876QJ2O", - "allow_squash_merge": true, - "allow_merge_commit": true, - "subscribers_count": 42, - "network_count": 0 - } - ] -}` - -var githubAppJSON = `{ - "id": 1, - "slug": "octoapp", - "node_id": "MDExOkludGVncmF0aW9uMQ==", - "owner": { - "login": "github", - "id": 1, - "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", - "url": "https://api.github.com/orgs/github", - "repos_url": "https://api.github.com/orgs/github/repos", - "events_url": "https://api.github.com/orgs/github/events", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": true - }, - "name": "Octocat App", - "description": "", - "external_url": "https://example.com", - "html_url": "https://github.com/apps/octoapp", - "created_at": "2017-07-08T16:18:44-04:00", - "updated_at": "2017-07-08T16:18:44-04:00", - "permissions": { - "metadata": "read", - "contents": "read", - "issues": "write", - "single_file": "write" - }, - "events": [ - "push", - "pull_request" - ] - }` - -func validateGithubToken(tokenString string) error { - key, err := jwt.ParseRSAPrivateKeyFromPEM([]byte(GithubPrivateKey)) - if err != nil { - return fmt.Errorf("could not parse private key: %s", err) - } - - token, err := jwt.Parse(tokenString, func(token *jwt.Token) (interface{}, error) { - // Don't forget to validate the alg is what you expect: - if _, ok := token.Method.(*jwt.SigningMethodRSA); !ok { - err := fmt.Errorf("Unexpected signing method: %v", token.Header["alg"]) - - return nil, err - } - - return key.Public(), nil - }) - if err != nil { - return err - } - - if claims, ok := token.Claims.(jwt.MapClaims); !ok || !token.Valid || claims["iss"] != "1" { - return fmt.Errorf("Invalid token") - } - return nil -} - -func GithubAppTestServer(t *testing.T) (string, error) { - counter := 0 - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - case "/api/v3/app-manifests/good-code/conversions": - encodedKey := strings.Join(strings.Split(GithubPrivateKey, "\n"), "\\n") - appInfo := fmt.Sprintf(githubConversionJSON, encodedKey) - w.Write([]byte(appInfo)) // nolint: errcheck - // https://developer.github.com/v3/apps/#list-installations - case "/api/v3/app/installations": - token := strings.Replace(r.Header.Get("Authorization"), "Bearer ", "", 1) - if err := validateGithubToken(token); err != nil { - w.WriteHeader(http.StatusForbidden) - w.Write([]byte("Invalid token")) // nolint: errcheck - return - } - - w.Write([]byte(githubAppInstallationJSON)) // nolint: errcheck - return - case "/api/v3/apps/some-app": - token := 
strings.Replace(r.Header.Get("Authorization"), "token ", "", 1) - - // token is taken from githubAppTokenJSON - if token != "some-token" { - w.WriteHeader(http.StatusForbidden) - w.Write([]byte("Invalid installation token")) // nolint: errcheck - return - } - w.Write([]byte(githubAppJSON)) // nolint: errcheck - return - case "/api/v3/app/installations/1/access_tokens": - token := strings.Replace(r.Header.Get("Authorization"), "Bearer ", "", 1) - if err := validateGithubToken(token); err != nil { - w.WriteHeader(http.StatusForbidden) - w.Write([]byte("Invalid token")) // nolint: errcheck - return - } - - appToken := fmt.Sprintf(githubAppTokenJSON, counter) - counter++ - w.Write([]byte(appToken)) // nolint: errcheck - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - testServerURL, err := url.Parse(testServer.URL) - - return testServerURL.Host, err -} diff --git a/server/legacy/events/vcs/fixtures/github-pull-request.json b/server/legacy/events/vcs/fixtures/github-pull-request.json deleted file mode 100644 index a0846cc49..000000000 --- a/server/legacy/events/vcs/fixtures/github-pull-request.json +++ /dev/null @@ -1,510 +0,0 @@ -{ - "url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347", - "id": 1, - "node_id": "MDExOlB1bGxSZXF1ZXN0MQ==", - "html_url": "https://github.com/octocat/Hello-World/pull/1347", - "diff_url": "https://github.com/octocat/Hello-World/pull/1347.diff", - "patch_url": "https://github.com/octocat/Hello-World/pull/1347.patch", - "issue_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347", - "commits_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits", - "review_comments_url": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments", - "review_comment_url": "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}", - "comments_url": "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments", - "statuses_url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "number": 1347, - "state": "open", - "locked": true, - "title": "new-feature", - "user": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "body": "Please pull these awesome changes", - "labels": [ - { - "id": 208045946, - "node_id": "MDU6TGFiZWwyMDgwNDU5NDY=", - "url": "https://api.github.com/repos/octocat/Hello-World/labels/bug", - "name": "bug", - "description": "Something isn't working", - "color": "f29513", - "default": true - } - ], - "milestone": { - "url": 
"https://api.github.com/repos/octocat/Hello-World/milestones/1", - "html_url": "https://github.com/octocat/Hello-World/milestones/v1.0", - "labels_url": "https://api.github.com/repos/octocat/Hello-World/milestones/1/labels", - "id": 1002604, - "node_id": "MDk6TWlsZXN0b25lMTAwMjYwNA==", - "number": 1, - "state": "open", - "title": "v1.0", - "description": "Tracking milestone for version 1.0", - "creator": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "open_issues": 4, - "closed_issues": 8, - "created_at": "2011-04-10T20:09:31Z", - "updated_at": "2014-03-03T18:58:10Z", - "closed_at": "2013-02-12T13:22:01Z", - "due_on": "2012-10-09T23:39:01Z" - }, - "active_lock_reason": "too heated", - "created_at": "2011-01-26T19:01:12Z", - "updated_at": "2011-01-26T19:01:12Z", - "closed_at": "2011-01-26T19:01:12Z", - "merged_at": "2011-01-26T19:01:12Z", - "merge_commit_sha": "e5bd3914e2e596debea16f433f57875b5b90bcd6", - "assignee": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "assignees": [ - { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": 
"https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - { - "login": "hubot", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/hubot_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/hubot", - "html_url": "https://github.com/hubot", - "followers_url": "https://api.github.com/users/hubot/followers", - "following_url": "https://api.github.com/users/hubot/following{/other_user}", - "gists_url": "https://api.github.com/users/hubot/gists{/gist_id}", - "starred_url": "https://api.github.com/users/hubot/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/hubot/subscriptions", - "organizations_url": "https://api.github.com/users/hubot/orgs", - "repos_url": "https://api.github.com/users/hubot/repos", - "events_url": "https://api.github.com/users/hubot/events{/privacy}", - "received_events_url": "https://api.github.com/users/hubot/received_events", - "type": "User", - "site_admin": true - } - ], - "requested_reviewers": [ - { - "login": "other_user", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/other_user_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/other_user", - "html_url": "https://github.com/other_user", - "followers_url": "https://api.github.com/users/other_user/followers", - "following_url": "https://api.github.com/users/other_user/following{/other_user}", - "gists_url": "https://api.github.com/users/other_user/gists{/gist_id}", - "starred_url": "https://api.github.com/users/other_user/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/other_user/subscriptions", - "organizations_url": "https://api.github.com/users/other_user/orgs", - "repos_url": "https://api.github.com/users/other_user/repos", - "events_url": "https://api.github.com/users/other_user/events{/privacy}", - "received_events_url": "https://api.github.com/users/other_user/received_events", - "type": "User", - "site_admin": false - } - ], - "requested_teams": [ - { - "id": 1, - "node_id": "MDQ6VGVhbTE=", - "url": "https://api.github.com/teams/1", - "name": "Justice League", - "slug": "justice-league", - "description": "A great team.", - "privacy": "closed", - "permission": "admin", - "members_url": "https://api.github.com/teams/1/members{/member}", - "repositories_url": "https://api.github.com/teams/1/repos", - "parent": null - } - ], - "head": { - "label": "new-topic", - "ref": "new-topic", - "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e", - "user": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": 
"https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "repo": { - "id": 1296269, - "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5", - "name": "Hello-World", - "full_name": "octocat/Hello-World", - "owner": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/octocat/Hello-World", - "description": "This your first repo!", - "fork": true, - "url": "https://api.github.com/repos/octocat/Hello-World", - "archive_url": "http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}", - "assignees_url": "http://api.github.com/repos/octocat/Hello-World/assignees{/user}", - "blobs_url": "http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}", - "branches_url": "http://api.github.com/repos/octocat/Hello-World/branches{/branch}", - "collaborators_url": "http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}", - "comments_url": "http://api.github.com/repos/octocat/Hello-World/comments{/number}", - "commits_url": "http://api.github.com/repos/octocat/Hello-World/commits{/sha}", - "compare_url": "http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}", - "contents_url": "http://api.github.com/repos/octocat/Hello-World/contents/{+path}", - "contributors_url": "http://api.github.com/repos/octocat/Hello-World/contributors", - "deployments_url": "http://api.github.com/repos/octocat/Hello-World/deployments", - "downloads_url": "http://api.github.com/repos/octocat/Hello-World/downloads", - "events_url": "http://api.github.com/repos/octocat/Hello-World/events", - "forks_url": "http://api.github.com/repos/octocat/Hello-World/forks", - "git_commits_url": "http://api.github.com/repos/octocat/Hello-World/git/commits{/sha}", - "git_refs_url": "http://api.github.com/repos/octocat/Hello-World/git/refs{/sha}", - "git_tags_url": "http://api.github.com/repos/octocat/Hello-World/git/tags{/sha}", - "git_url": "git:github.com/octocat/Hello-World.git", - "issue_comment_url": "http://api.github.com/repos/octocat/Hello-World/issues/comments{/number}", - "issue_events_url": "http://api.github.com/repos/octocat/Hello-World/issues/events{/number}", - "issues_url": "http://api.github.com/repos/octocat/Hello-World/issues{/number}", - "keys_url": "http://api.github.com/repos/octocat/Hello-World/keys{/key_id}", - "labels_url": "http://api.github.com/repos/octocat/Hello-World/labels{/name}", - "languages_url": "http://api.github.com/repos/octocat/Hello-World/languages", - "merges_url": 
"http://api.github.com/repos/octocat/Hello-World/merges", - "milestones_url": "http://api.github.com/repos/octocat/Hello-World/milestones{/number}", - "notifications_url": "http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}", - "pulls_url": "http://api.github.com/repos/octocat/Hello-World/pulls{/number}", - "releases_url": "http://api.github.com/repos/octocat/Hello-World/releases{/id}", - "ssh_url": "git@github.com:octocat/Hello-World.git", - "stargazers_url": "http://api.github.com/repos/octocat/Hello-World/stargazers", - "statuses_url": "http://api.github.com/repos/octocat/Hello-World/statuses/{sha}", - "subscribers_url": "http://api.github.com/repos/octocat/Hello-World/subscribers", - "subscription_url": "http://api.github.com/repos/octocat/Hello-World/subscription", - "tags_url": "http://api.github.com/repos/octocat/Hello-World/tags", - "teams_url": "http://api.github.com/repos/octocat/Hello-World/teams", - "trees_url": "http://api.github.com/repos/octocat/Hello-World/git/trees{/sha}", - "clone_url": "https://github.com/octocat/Hello-World.git", - "mirror_url": "git:git.example.com/octocat/Hello-World", - "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks", - "svn_url": "https://svn.github.com/octocat/Hello-World", - "homepage": "https://github.com", - "language": null, - "forks_count": 9, - "stargazers_count": 80, - "watchers_count": 80, - "size": 108, - "default_branch": "master", - "open_issues_count": 0, - "topics": [ - "octocat", - "atom", - "electron", - "API" - ], - "has_issues": true, - "has_projects": true, - "has_wiki": true, - "has_pages": false, - "has_downloads": true, - "archived": false, - "pushed_at": "2011-01-26T19:06:43Z", - "created_at": "2011-01-26T19:01:12Z", - "updated_at": "2011-01-26T19:14:43Z", - "permissions": { - "admin": false, - "push": false, - "pull": true - }, - "allow_rebase_merge": true, - "allow_squash_merge": true, - "allow_merge_commit": true, - "subscribers_count": 42, - "network_count": 0 - } - }, - "base": { - "label": "master", - "ref": "master", - "sha": "6dcb09b5b57875f334f61aebed695e2e4193db5e", - "user": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "repo": { - "id": 1296269, - "node_id": "MDEwOlJlcG9zaXRvcnkxMjk2MjY5", - "name": "Hello-World", - "full_name": "octocat/Hello-World", - "owner": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": 
"https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "private": false, - "html_url": "https://github.com/octocat/Hello-World", - "description": "This your first repo!", - "fork": true, - "url": "https://api.github.com/repos/octocat/Hello-World", - "archive_url": "http://api.github.com/repos/octocat/Hello-World/{archive_format}{/ref}", - "assignees_url": "http://api.github.com/repos/octocat/Hello-World/assignees{/user}", - "blobs_url": "http://api.github.com/repos/octocat/Hello-World/git/blobs{/sha}", - "branches_url": "http://api.github.com/repos/octocat/Hello-World/branches{/branch}", - "collaborators_url": "http://api.github.com/repos/octocat/Hello-World/collaborators{/collaborator}", - "comments_url": "http://api.github.com/repos/octocat/Hello-World/comments{/number}", - "commits_url": "http://api.github.com/repos/octocat/Hello-World/commits{/sha}", - "compare_url": "http://api.github.com/repos/octocat/Hello-World/compare/{base}...{head}", - "contents_url": "http://api.github.com/repos/octocat/Hello-World/contents/{+path}", - "contributors_url": "http://api.github.com/repos/octocat/Hello-World/contributors", - "deployments_url": "http://api.github.com/repos/octocat/Hello-World/deployments", - "downloads_url": "http://api.github.com/repos/octocat/Hello-World/downloads", - "events_url": "http://api.github.com/repos/octocat/Hello-World/events", - "forks_url": "http://api.github.com/repos/octocat/Hello-World/forks", - "git_commits_url": "http://api.github.com/repos/octocat/Hello-World/git/commits{/sha}", - "git_refs_url": "http://api.github.com/repos/octocat/Hello-World/git/refs{/sha}", - "git_tags_url": "http://api.github.com/repos/octocat/Hello-World/git/tags{/sha}", - "git_url": "git:github.com/octocat/Hello-World.git", - "issue_comment_url": "http://api.github.com/repos/octocat/Hello-World/issues/comments{/number}", - "issue_events_url": "http://api.github.com/repos/octocat/Hello-World/issues/events{/number}", - "issues_url": "http://api.github.com/repos/octocat/Hello-World/issues{/number}", - "keys_url": "http://api.github.com/repos/octocat/Hello-World/keys{/key_id}", - "labels_url": "http://api.github.com/repos/octocat/Hello-World/labels{/name}", - "languages_url": "http://api.github.com/repos/octocat/Hello-World/languages", - "merges_url": "http://api.github.com/repos/octocat/Hello-World/merges", - "milestones_url": "http://api.github.com/repos/octocat/Hello-World/milestones{/number}", - "notifications_url": "http://api.github.com/repos/octocat/Hello-World/notifications{?since,all,participating}", - "pulls_url": "http://api.github.com/repos/octocat/Hello-World/pulls{/number}", - "releases_url": "http://api.github.com/repos/octocat/Hello-World/releases{/id}", - "ssh_url": "git@github.com:octocat/Hello-World.git", - "stargazers_url": "http://api.github.com/repos/octocat/Hello-World/stargazers", - "statuses_url": 
"http://api.github.com/repos/octocat/Hello-World/statuses/{sha}", - "subscribers_url": "http://api.github.com/repos/octocat/Hello-World/subscribers", - "subscription_url": "http://api.github.com/repos/octocat/Hello-World/subscription", - "tags_url": "http://api.github.com/repos/octocat/Hello-World/tags", - "teams_url": "http://api.github.com/repos/octocat/Hello-World/teams", - "trees_url": "http://api.github.com/repos/octocat/Hello-World/git/trees{/sha}", - "clone_url": "https://github.com/octocat/Hello-World.git", - "mirror_url": "git:git.example.com/octocat/Hello-World", - "hooks_url": "http://api.github.com/repos/octocat/Hello-World/hooks", - "svn_url": "https://svn.github.com/octocat/Hello-World", - "homepage": "https://github.com", - "language": null, - "forks_count": 9, - "stargazers_count": 80, - "watchers_count": 80, - "size": 108, - "default_branch": "master", - "open_issues_count": 0, - "topics": [ - "octocat", - "atom", - "electron", - "API" - ], - "has_issues": true, - "has_projects": true, - "has_wiki": true, - "has_pages": false, - "has_downloads": true, - "archived": false, - "pushed_at": "2011-01-26T19:06:43Z", - "created_at": "2011-01-26T19:01:12Z", - "updated_at": "2011-01-26T19:14:43Z", - "permissions": { - "admin": false, - "push": false, - "pull": true - }, - "allow_rebase_merge": true, - "allow_squash_merge": true, - "allow_merge_commit": true, - "subscribers_count": 42, - "network_count": 0 - } - }, - "_links": { - "self": { - "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347" - }, - "html": { - "href": "https://github.com/octocat/Hello-World/pull/1347" - }, - "issue": { - "href": "https://api.github.com/repos/octocat/Hello-World/issues/1347" - }, - "comments": { - "href": "https://api.github.com/repos/octocat/Hello-World/issues/1347/comments" - }, - "review_comments": { - "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/comments" - }, - "review_comment": { - "href": "https://api.github.com/repos/octocat/Hello-World/pulls/comments{/number}" - }, - "commits": { - "href": "https://api.github.com/repos/octocat/Hello-World/pulls/1347/commits" - }, - "statuses": { - "href": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e" - } - }, - "author_association": "OWNER", - "merged": false, - "mergeable": true, - "rebaseable": true, - "mergeable_state": "clean", - "merged_by": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "comments": 10, - "review_comments": 0, - "maintainer_can_modify": true, - "commits": 3, - "additions": 100, - "deletions": 3, - "changed_files": 5 -} diff --git 
a/server/legacy/events/vcs/fixtures/github-repo.json b/server/legacy/events/vcs/fixtures/github-repo.json deleted file mode 100644 index 3012d2477..000000000 --- a/server/legacy/events/vcs/fixtures/github-repo.json +++ /dev/null @@ -1,103 +0,0 @@ -{ - "id": 167228802, - "node_id": "MDEwOlJlcG9zaXRvcnkxNjcyMjg4MDI=", - "name": "atlantis", - "full_name": "runatlantis/atlantis", - "private": false, - "owner": { - "login": "runatlantis", - "id": 1034429, - "node_id": "MDQ6VXNlcjEwMzQ0Mjk=", - "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4", - "gravatar_id": "", - "url": "https://api.github.com/users/runatlantis", - "html_url": "https://github.com/runatlantis", - "followers_url": "https://api.github.com/users/runatlantis/followers", - "following_url": "https://api.github.com/users/runatlantis/following{/other_user}", - "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}", - "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions", - "organizations_url": "https://api.github.com/users/runatlantis/orgs", - "repos_url": "https://api.github.com/users/runatlantis/repos", - "events_url": "https://api.github.com/users/runatlantis/events{/privacy}", - "received_events_url": "https://api.github.com/users/runatlantis/received_events", - "type": "User", - "site_admin": false - }, - "html_url": "https://github.com/runatlantis/atlantis", - "description": null, - "fork": false, - "url": "https://api.github.com/repos/runatlantis/atlantis", - "forks_url": "https://api.github.com/repos/runatlantis/atlantis/forks", - "keys_url": "https://api.github.com/repos/runatlantis/atlantis/keys{/key_id}", - "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis/collaborators{/collaborator}", - "teams_url": "https://api.github.com/repos/runatlantis/atlantis/teams", - "hooks_url": "https://api.github.com/repos/runatlantis/atlantis/hooks", - "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis/issues/events{/number}", - "events_url": "https://api.github.com/repos/runatlantis/atlantis/events", - "assignees_url": "https://api.github.com/repos/runatlantis/atlantis/assignees{/user}", - "branches_url": "https://api.github.com/repos/runatlantis/atlantis/branches{/branch}", - "tags_url": "https://api.github.com/repos/runatlantis/atlantis/tags", - "blobs_url": "https://api.github.com/repos/runatlantis/atlantis/git/blobs{/sha}", - "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis/git/tags{/sha}", - "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis/git/refs{/sha}", - "trees_url": "https://api.github.com/repos/runatlantis/atlantis/git/trees{/sha}", - "statuses_url": "https://api.github.com/repos/runatlantis/atlantis/statuses/{sha}", - "languages_url": "https://api.github.com/repos/runatlantis/atlantis/languages", - "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis/stargazers", - "contributors_url": "https://api.github.com/repos/runatlantis/atlantis/contributors", - "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis/subscribers", - "subscription_url": "https://api.github.com/repos/runatlantis/atlantis/subscription", - "commits_url": "https://api.github.com/repos/runatlantis/atlantis/commits{/sha}", - "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis/git/commits{/sha}", - "comments_url": "https://api.github.com/repos/runatlantis/atlantis/comments{/number}", - 
"issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis/issues/comments{/number}", - "contents_url": "https://api.github.com/repos/runatlantis/atlantis/contents/{+path}", - "compare_url": "https://api.github.com/repos/runatlantis/atlantis/compare/{base}...{head}", - "merges_url": "https://api.github.com/repos/runatlantis/atlantis/merges", - "archive_url": "https://api.github.com/repos/runatlantis/atlantis/{archive_format}{/ref}", - "downloads_url": "https://api.github.com/repos/runatlantis/atlantis/downloads", - "issues_url": "https://api.github.com/repos/runatlantis/atlantis/issues{/number}", - "pulls_url": "https://api.github.com/repos/runatlantis/atlantis/pulls{/number}", - "milestones_url": "https://api.github.com/repos/runatlantis/atlantis/milestones{/number}", - "notifications_url": "https://api.github.com/repos/runatlantis/atlantis/notifications{?since,all,participating}", - "labels_url": "https://api.github.com/repos/runatlantis/atlantis/labels{/name}", - "releases_url": "https://api.github.com/repos/runatlantis/atlantis/releases{/id}", - "deployments_url": "https://api.github.com/repos/runatlantis/atlantis/deployments", - "created_at": "2019-01-23T17:58:45Z", - "updated_at": "2019-02-08T21:46:28Z", - "pushed_at": "2019-02-10T01:49:25Z", - "git_url": "git://github.com/runatlantis/atlantis.git", - "ssh_url": "git@github.com:runatlantis/atlantis.git", - "clone_url": "https://github.com/runatlantis/atlantis.git", - "svn_url": "https://github.com/runatlantis/atlantis", - "homepage": null, - "size": 32, - "stargazers_count": 0, - "watchers_count": 0, - "language": "HCL", - "has_issues": true, - "has_projects": true, - "has_downloads": true, - "has_wiki": true, - "has_pages": false, - "forks_count": 0, - "mirror_url": null, - "archived": false, - "open_issues_count": 1, - "license": null, - "forks": 0, - "open_issues": 1, - "watchers": 0, - "default_branch": "master", - "permissions": { - "admin": true, - "push": true, - "pull": true - }, - "allow_squash_merge": true, - "allow_merge_commit": true, - "allow_rebase_merge": true, - "network_count": 0, - "subscribers_count": 0 -} diff --git a/server/legacy/events/vcs/github_client.go b/server/legacy/events/vcs/github_client.go deleted file mode 100644 index 13d3213d3..000000000 --- a/server/legacy/events/vcs/github_client.go +++ /dev/null @@ -1,730 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. 
- -package vcs - -import ( - "context" - "encoding/base64" - "fmt" - "net/http" - "strconv" - "strings" - "time" - - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - - "github.com/Laisky/graphql" - "github.com/google/go-github/v45/github" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/common" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/shurcooL/githubv4" -) - -var projectCommandTemplateWithLogs = ` -| **Command Name** | **Project** | **Workspace** | **Status** | **Logs** | -| - | - | - | - | - | -| %s | {%s} | {%s} | {%s} | %s | -` - -var projectCommandTemplate = ` -| **Command Name** | **Project** | **Workspace** | **Status** | -| - | - | - | - | -| %s | {%s} | {%s} | {%s} | -` - -var commandTemplate = ` -| **Command Name** | **Status** | -| - | - | -| %s | {%s} | - -:information_source: Visit the checkrun for the root in the navigation panel on your left to view logs and details on the operation. - -` - -var commandTemplateWithCount = ` -| **Command Name** | **Num Total** | **Num Success** | **Status** | -| - | - | - | - | -| %s | {%s} | {%s} | {%s} | - -:information_source: Visit the checkrun for the root in the navigation panel on your left to view logs and details on the operation. - -` - -// github checks conclusion -type ChecksConclusion int //nolint:golint // avoiding refactor while adding linter action - -const ( - Neutral ChecksConclusion = iota - TimedOut - ActionRequired - Cancelled - Failure - Success -) - -func (e ChecksConclusion) String() string { - switch e { - case Neutral: - return "neutral" - case TimedOut: - return "timed_out" - case ActionRequired: - return "action_required" - case Cancelled: - return "cancelled" - case Failure: - return "failure" - case Success: - return "success" - } - return "" -} - -// github checks status -type CheckStatus int - -const ( - Queued CheckStatus = iota - InProgress - Completed -) - -func (e CheckStatus) String() string { - switch e { - case Queued: - return "queued" - case InProgress: - return "in_progress" - case Completed: - return "completed" - } - return "" -} - -// maxCommentLength is the maximum number of chars allowed in a single comment -// by GitHub. -const ( - maxCommentLength = 65536 - // Reference: https://github.com/github/docs/issues/3765 - maxChecksOutputLength = 65535 -) - -// allows for custom handling of github 404s -type PullRequestNotFound struct { - Err error -} - -func (p *PullRequestNotFound) Error() string { - return "Pull request not found: " + p.Err.Error() -} - -// GithubClient is used to perform GitHub actions. -type GithubClient struct { - user string - client *github.Client - v4MutateClient *graphql.Client - ctx context.Context - logger logging.Logger - mergeabilityChecker MergeabilityChecker - allocator feature.Allocator -} - -// GithubAppTemporarySecrets holds app credentials obtained from github after creation. -type GithubAppTemporarySecrets struct { - // ID is the app id. - ID int64 - // Key is the app's PEM-encoded key. - Key string - // Name is the app name. - Name string - // WebhookSecret is the generated webhook secret for this app. - WebhookSecret string - // URL is a link to the app, like https://github.com/apps/octoapp. - URL string -} - -// NewGithubClient returns a valid GitHub client. 
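The constructor that follows wires together the REST client, the GraphQL mutation client, the logger, the feature allocator, and the mergeability checker. As a hedged construction sketch (not part of the deleted file), mirroring how the package tests further down in this diff build the client — the hostname and credentials are placeholders, and the logger/allocator parameters stand in for whatever the caller already has:

// newClientSketch is a hypothetical helper showing the intended wiring of
// NewGithubClient, in the same package. "github.example.com" is a placeholder
// GitHub Enterprise hostname ("github.com" would select api.github.com), and
// the positional GithubUserCredentials literal mirrors the tests below.
func newClientSketch(logger logging.Logger, allocator feature.Allocator) (*GithubClient, error) {
	mergeabilityChecker := NewPullMergeabilityChecker("atlantis")
	return NewGithubClient(
		"github.example.com",
		&GithubUserCredentials{"user", "pass"},
		logger,
		allocator,
		mergeabilityChecker,
	)
}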
-func NewGithubClient(hostname string, credentials GithubCredentials, logger logging.Logger, allocator feature.Allocator, mergeabilityChecker MergeabilityChecker) (*GithubClient, error) { - transport, err := credentials.Client() - if err != nil { - return nil, errors.Wrap(err, "error initializing github authentication transport") - } - - var graphqlURL string - var client *github.Client - if hostname == "github.com" { - client = github.NewClient(transport) - graphqlURL = "https://api.github.com/graphql" - } else { - apiURL := resolveGithubAPIURL(hostname) - client, err = github.NewEnterpriseClient(apiURL.String(), apiURL.String(), transport) - if err != nil { - return nil, err - } - graphqlURL = fmt.Sprintf("https://%s/api/graphql", apiURL.Host) - } - - // shurcooL's githubv4 library has a client ctor, but it doesn't support schema - // previews, which need custom Accept headers (https://developer.github.com/v4/previews) - // So for now use the graphql client, since the githubv4 library was basically - // a simple wrapper around it. And instead of using shurcooL's graphql lib, use - // Laisky's, since shurcooL's doesn't support custom headers. - // Once the Minimize Comment schema is official, this can revert back to using - // shurcooL's libraries completely. - v4MutateClient := graphql.NewClient( - graphqlURL, - transport, - graphql.WithHeader("Accept", "application/vnd.github.queen-beryl-preview+json"), - ) - - user, err := credentials.GetUser() - - if err != nil { - return nil, errors.Wrap(err, "getting user") - } - return &GithubClient{ - user: user, - client: client, - v4MutateClient: v4MutateClient, - ctx: context.Background(), - logger: logger, - mergeabilityChecker: mergeabilityChecker, - allocator: allocator, - }, nil -} - -func (g *GithubClient) GetRateLimits() (*github.RateLimits, error) { - rateLimits, resp, err := g.client.RateLimits(g.ctx) - - if err != nil { - g.logger.Error("error retrieving rate limits", map[string]interface{}{"err": err}) - return nil, errors.Wrap(err, "retrieving rate limits") - } - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("error retrieving rate limits: %s", resp.Status) - } - return rateLimits, nil -} - -// GetModifiedFiles returns the names of files that were modified in the pull request -// relative to the repo root, e.g. parent/child/file.txt. -func (g *GithubClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { - var files []string - nextPage := 0 - for { - opts := github.ListOptions{ - PerPage: 300, - } - if nextPage != 0 { - opts.Page = nextPage - } - pageFiles, resp, err := g.client.PullRequests.ListFiles(g.ctx, repo.Owner, repo.Name, pull.Num, &opts) - if err != nil { - return files, err - } - for _, f := range pageFiles { - files = append(files, f.GetFilename()) - - // If the file was renamed, we'll want to run plan in the directory - // it was moved from as well. - if f.GetStatus() == "renamed" { - files = append(files, f.GetPreviousFilename()) - } - } - if resp.NextPage == 0 { - break - } - nextPage = resp.NextPage - } - return files, nil -} - -// CreateComment creates a comment on the pull request. -// If comment length is greater than the max comment length we split into -// multiple comments. -func (g *GithubClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - var sepStart string - - sepEnd := "\n```\n" + - "\n
\n\n**Warning**: Output length greater than max comment size. Continued in next comment." - - if command != "" { - sepStart = fmt.Sprintf("Continued %s output from previous comment.\n
Show Output\n\n", command) + - "```diff\n" - } else { - sepStart = "Continued from previous comment.\n
Show Output\n\n" + - "```diff\n" - } - - comments := common.SplitComment(comment, maxCommentLength, sepEnd, sepStart) - for i := range comments { - _, _, err := g.client.Issues.CreateComment(g.ctx, repo.Owner, repo.Name, pullNum, &github.IssueComment{Body: &comments[i]}) - if err != nil { - return err - } - } - return nil -} - -func (g *GithubClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { - var allComments []*github.IssueComment - nextPage := 0 - for { - comments, resp, err := g.client.Issues.ListComments(g.ctx, repo.Owner, repo.Name, pullNum, &github.IssueListCommentsOptions{ - Sort: github.String("created"), - Direction: github.String("asc"), - ListOptions: github.ListOptions{Page: nextPage}, - }) - if err != nil { - return errors.Wrap(err, "listing comments") - } - allComments = append(allComments, comments...) - if resp.NextPage == 0 { - break - } - nextPage = resp.NextPage - } - - for _, comment := range allComments { - // Using a case insensitive compare here because usernames aren't case - // sensitive and users may enter their atlantis users with different - // cases. - if comment.User != nil && !strings.EqualFold(comment.User.GetLogin(), g.user) { - continue - } - // Crude filtering: The comment templates typically include the command name - // somewhere in the first line. It's a bit of an assumption, but seems like - // a reasonable one, given we've already filtered the comments by the - // configured Atlantis user. - body := strings.Split(comment.GetBody(), "\n") - if len(body) == 0 { - continue - } - firstLine := strings.ToLower(body[0]) - if !strings.Contains(firstLine, strings.ToLower(command)) { - continue - } - var m struct { - MinimizeComment struct { - MinimizedComment struct { - IsMinimized githubv4.Boolean - MinimizedReason githubv4.String - ViewerCanMinimize githubv4.Boolean - } - } `graphql:"minimizeComment(input:$input)"` - } - input := map[string]interface{}{ - "input": githubv4.MinimizeCommentInput{ - Classifier: githubv4.ReportedContentClassifiersOutdated, - SubjectID: comment.GetNodeID(), - }, - } - if err := g.v4MutateClient.Mutate(g.ctx, &m, input); err != nil { - return errors.Wrapf(err, "minimize comment %s", comment.GetNodeID()) - } - } - - return nil -} - -// PullIsApproved returns true if the pull request was approved. -func (g *GithubClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (approvalStatus models.ApprovalStatus, err error) { - nextPage := 0 - for { - opts := github.ListOptions{ - PerPage: 300, - } - if nextPage != 0 { - opts.Page = nextPage - } - pageReviews, resp, err := g.client.PullRequests.ListReviews(g.ctx, repo.Owner, repo.Name, pull.Num, &opts) - if err != nil { - return approvalStatus, errors.Wrap(err, "getting reviews") - } - for _, review := range pageReviews { - if review != nil && review.GetState() == "APPROVED" { - return models.ApprovalStatus{ - IsApproved: true, - ApprovedBy: *review.User.Login, - Date: *review.SubmittedAt, - }, nil - } - } - if resp.NextPage == 0 { - break - } - nextPage = resp.NextPage - } - return approvalStatus, nil -} - -// PullIsMergeable returns true if the pull request is mergeable. 
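Before the mergeability logic below, one note on the comment handling above: CreateComment delegates to common.SplitComment so that every posted comment stays under the 65536-character maxCommentLength, re-attaching the "Continued … output" separators around each chunk. That helper is outside this diff; the following is only a standalone, byte-based sketch of the chunking step, not the real splitter:

// splitForGitHub is a simplified stand-in for common.SplitComment: it only
// enforces the size cap and ignores the sepStart/sepEnd wrappers that the
// real helper threads between chunks. Splitting is byte-based, so a boundary
// could in principle fall inside a multi-byte rune.
func splitForGitHub(comment string, max int) []string {
	var parts []string
	for len(comment) > max {
		parts = append(parts, comment[:max])
		comment = comment[max:]
	}
	return append(parts, comment)
}

CreateComment would then post each element of the returned slice as its own issue comment, as in the loop above.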
-func (g *GithubClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { - githubPR, err := g.GetPullRequest(repo, pull.Num) - if err != nil { - return false, errors.Wrap(err, "getting pull request") - } - - statuses, err := g.GetRepoStatuses(repo, pull) - - if err != nil { - return false, errors.Wrap(err, "getting commit statuses") - } - - checks, err := g.GetRepoChecks(repo, pull.HeadCommit) - - if err != nil { - return false, errors.Wrapf(err, "getting check runs") - } - - return g.mergeabilityChecker.Check(githubPR, statuses, checks), nil -} - -func (g *GithubClient) GetPullRequestFromName(repoName string, repoOwner string, num int) (*github.PullRequest, error) { - var err error - var pull *github.PullRequest - - // GitHub has started to return 404's here (#1019) even after they send the webhook. - // They've got some eventual consistency issues going on so we're just going - // to retry up to 3 times with a 1s sleep. - numRetries := 3 - retryDelay := 1 * time.Second - for i := 0; i < numRetries; i++ { - pull, _, err = g.client.PullRequests.Get(g.ctx, repoOwner, repoName, num) - if err == nil { - return pull, nil - } - ghErr, ok := err.(*github.ErrorResponse) - if !ok || ghErr.Response.StatusCode != http.StatusNotFound { - return pull, err - } - time.Sleep(retryDelay) - } - - ghErr, ok := err.(*github.ErrorResponse) - if ok && ghErr.Response.StatusCode == http.StatusNotFound { - return pull, &PullRequestNotFound{Err: err} - } - return pull, err -} - -// GetPullRequest returns the pull request. -func (g *GithubClient) GetPullRequest(repo models.Repo, num int) (*github.PullRequest, error) { - return g.GetPullRequestFromName(repo.Name, repo.Owner, num) -} - -func (g *GithubClient) GetRepoChecks(repo models.Repo, commitSHA string) ([]*github.CheckRun, error) { - nextPage := 0 - - var results []*github.CheckRun - - for { - opts := &github.ListCheckRunsOptions{ - ListOptions: github.ListOptions{ - PerPage: 100, - }, - } - - if nextPage != 0 { - opts.Page = nextPage - } - - result, response, err := g.client.Checks.ListCheckRunsForRef(g.ctx, repo.Owner, repo.Name, commitSHA, opts) - - if err != nil { - return results, errors.Wrapf(err, "getting check runs for page %d", nextPage) - } - - results = append(results, result.CheckRuns...) - - if response.NextPage == 0 { - break - } - nextPage = response.NextPage - } - - return results, nil -} - -func (g *GithubClient) GetRepoStatuses(repo models.Repo, pull models.PullRequest) ([]*github.RepoStatus, error) { - // Get Combined statuses - - nextPage := 0 - - var result []*github.RepoStatus - - for { - opts := github.ListOptions{ - // explicit default - // https://developer.github.com/v3/repos/statuses/#list-commit-statuses-for-a-reference - PerPage: 100, - } - if nextPage != 0 { - opts.Page = nextPage - } - - combinedStatus, response, err := g.client.Repositories.GetCombinedStatus(g.ctx, repo.Owner, repo.Name, pull.HeadCommit, &opts) - result = append(result, combinedStatus.Statuses...) - - if err != nil { - return nil, err - } - if response.NextPage == 0 { - break - } - nextPage = response.NextPage - } - - return result, nil -} - -// UpdateStatus updates the status badge on the pull request. -// See https://github.com/blog/1227-commit-status-api. 
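Before UpdateStatus below, it is worth noting the idiom that GetModifiedFiles, HidePrevCommandComments, GetRepoChecks, and GetRepoStatuses above all share: fetch a page, append the results, and follow resp.NextPage until it reports zero. A condensed sketch of that loop against the same go-github v45 client (repo coordinates are placeholders); unlike GetRepoStatuses above, it checks the error before touching the response:

package vcsexample

import (
	"context"

	"github.com/google/go-github/v45/github"
)

// listAllStatuses collects every commit status for a ref by walking
// go-github's pagination links until NextPage is zero.
func listAllStatuses(ctx context.Context, gh *github.Client, owner, repo, ref string) ([]*github.RepoStatus, error) {
	var all []*github.RepoStatus
	opts := &github.ListOptions{PerPage: 100} // the explicit API default, as in GetRepoStatuses
	for {
		combined, resp, err := gh.Repositories.GetCombinedStatus(ctx, owner, repo, ref, opts)
		if err != nil {
			return nil, err
		}
		all = append(all, combined.Statuses...)
		if resp.NextPage == 0 {
			return all, nil
		}
		opts.Page = resp.NextPage
	}
}

The same shape applies to ListFiles, ListComments, and ListCheckRunsForRef above; only the options type and the per-page limit differ.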
-func (g *GithubClient) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - shouldAllocate, err := g.allocator.ShouldAllocate(feature.LegacyDeprecation, feature.FeatureContext{ - RepoName: request.Repo.FullName, - }) - if err != nil { - return "", errors.Wrap(err, "unable to allocate legacy deprecation feature flag") - } - // if legacy deprecation is enabled, don't mutate check runs in legacy workflow - if shouldAllocate { - g.logger.InfoContext(ctx, "legacy deprecation feature flag enabled, not updating check runs") - return "", nil - } - - // Empty status ID means we create a new check run - if request.StatusID == "" { - return g.createCheckRun(ctx, request) - } - return request.StatusID, g.updateCheckRun(ctx, request, request.StatusID) -} - -func (g *GithubClient) createCheckRun(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - status, conclusion := g.resolveChecksStatus(request.State) - createCheckRunOpts := github.CreateCheckRunOptions{ - Name: request.StatusName, - HeadSHA: request.Ref, - Status: &status, - Output: g.createCheckRunOutput(request), - } - - if request.DetailsURL != "" { - createCheckRunOpts.DetailsURL = &request.DetailsURL - } - - // Conclusion is required if status is Completed - if status == Completed.String() { - createCheckRunOpts.Conclusion = &conclusion - } - - checkRun, _, err := g.client.Checks.CreateCheckRun(ctx, request.Repo.Owner, request.Repo.Name, createCheckRunOpts) - if err != nil { - return "", err - } - - return strconv.FormatInt(*checkRun.ID, 10), nil -} - -func (g *GithubClient) updateCheckRun(ctx context.Context, request types.UpdateStatusRequest, checkRunID string) error { - status, conclusion := g.resolveChecksStatus(request.State) - updateCheckRunOpts := github.UpdateCheckRunOptions{ - Name: request.StatusName, - Status: &status, - Output: g.createCheckRunOutput(request), - } - - if request.DetailsURL != "" { - updateCheckRunOpts.DetailsURL = &request.DetailsURL - } - - // Conclusion is required if status is Completed - if status == Completed.String() { - updateCheckRunOpts.Conclusion = &conclusion - } - - checkRunIDInt, err := strconv.ParseInt(checkRunID, 10, 64) - if err != nil { - return err - } - - _, _, err = g.client.Checks.UpdateCheckRun(ctx, request.Repo.Owner, request.Repo.Name, checkRunIDInt, updateCheckRunOpts) - return err -} - -func (g *GithubClient) resolveState(state models.VCSStatus) string { - switch state { - case models.QueuedVCSStatus: - return "Queued" - case models.PendingVCSStatus: - return "In Progress" - case models.SuccessVCSStatus: - return "Success" - case models.FailedVCSStatus: - return "Failed" - } - return "Failed" -} - -func (g *GithubClient) createCheckRunOutput(request types.UpdateStatusRequest) *github.CheckRunOutput { - var summary string - - // Project command - if strings.Contains(request.StatusName, ":") { - // plan/apply command - if request.DetailsURL != "" { - summary = fmt.Sprintf(projectCommandTemplateWithLogs, - request.CommandName, - request.Project, - request.Workspace, - g.resolveState(request.State), - fmt.Sprintf("[Logs](%s)", request.DetailsURL), - ) - } else { - summary = fmt.Sprintf(projectCommandTemplate, - request.CommandName, - request.Project, - request.Workspace, - g.resolveState(request.State), - ) - } - } else { - if request.NumSuccess != "" && request.NumTotal != "" { - summary = fmt.Sprintf(commandTemplateWithCount, - request.CommandName, - request.NumTotal, - request.NumSuccess, - g.resolveState(request.State)) - } else { 
- summary = fmt.Sprintf(commandTemplate, - request.CommandName, - g.resolveState(request.State)) - } - } - - // Add formatting to summary - summary = strings.ReplaceAll(strings.ReplaceAll(summary, "{", "`"), "}", "`") - - checkRunOutput := github.CheckRunOutput{ - Title: &request.StatusName, - Summary: &summary, - } - - if request.Output == "" { - return &checkRunOutput - } - if len(request.Output) > maxChecksOutputLength { - terraformOutputTooLong := "Terraform output is too long for Github UI, please review the above link to view detailed logs." - checkRunOutput.Text = &terraformOutputTooLong - } else { - checkRunOutput.Text = &request.Output - } - return &checkRunOutput -} - -// Github Checks uses Status and Conclusion to report status of the check run. Need to map models.VcsStatus to Status and Conclusion -// Status -> queued, in_progress, completed -// Conclusion -> failure, neutral, cancelled, timed_out, or action_required. (Optional. Required if you provide a status of "completed".) -func (g *GithubClient) resolveChecksStatus(state models.VCSStatus) (string, string) { - status := Queued - conclusion := Neutral - - switch state { - case models.SuccessVCSStatus: - status = Completed - conclusion = Success - - case models.PendingVCSStatus: - status = InProgress - - case models.FailedVCSStatus: - status = Completed - conclusion = Failure - - case models.QueuedVCSStatus: - status = Queued - } - - return status.String(), conclusion.String() -} - -// MarkdownPullLink specifies the string used in a pull request comment to reference another pull request. -func (g *GithubClient) MarkdownPullLink(pull models.PullRequest) (string, error) { - return fmt.Sprintf("#%d", pull.Num), nil -} - -// ExchangeCode returns a newly created app's info -func (g *GithubClient) ExchangeCode(code string) (*GithubAppTemporarySecrets, error) { - ctx := context.Background() - cfg, _, err := g.client.Apps.CompleteAppManifest(ctx, code) - data := &GithubAppTemporarySecrets{ - ID: cfg.GetID(), - Key: cfg.GetPEM(), - WebhookSecret: cfg.GetWebhookSecret(), - Name: cfg.GetName(), - URL: cfg.GetHTMLURL(), - } - - return data, err -} - -// DownloadRepoConfigFile return `atlantis.yaml` content from VCS (which support fetch a single file from repository) -// The first return value indicate that repo contain atlantis.yaml or not -// if BaseRepo had one repo config file, its content will placed on the second return value -func (g *GithubClient) DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error) { - opt := github.RepositoryContentGetOptions{Ref: pull.HeadBranch} - fileContent, _, resp, err := g.client.Repositories.GetContents(g.ctx, pull.BaseRepo.Owner, pull.BaseRepo.Name, config.AtlantisYAMLFilename, &opt) - - if resp.StatusCode == http.StatusNotFound { - return false, []byte{}, nil - } - if err != nil { - return true, []byte{}, err - } - - decodedData, err := base64.StdEncoding.DecodeString(*fileContent.Content) - if err != nil { - return true, []byte{}, err - } - - return true, decodedData, nil -} - -func (g *GithubClient) GetContents(owner, repo, branch, path string) ([]byte, error) { - opt := github.RepositoryContentGetOptions{Ref: branch} - fileContent, _, resp, err := g.client.Repositories.GetContents(g.ctx, owner, repo, path, &opt) - if err != nil { - return []byte{}, errors.Wrap(err, "fetching file contents") - } - - if resp.StatusCode == http.StatusNotFound { - return []byte{}, fmt.Errorf("%s not found in %s/%s", path, owner, repo) - } - - decodedData, err := 
base64.StdEncoding.DecodeString(*fileContent.Content) - if err != nil { - return []byte{}, errors.Wrapf(err, "decoding file content") - } - - return decodedData, nil -} - -func (g *GithubClient) SupportsSingleFileDownload(repo models.Repo) bool { - return true -} diff --git a/server/legacy/events/vcs/github_client_internal_test.go b/server/legacy/events/vcs/github_client_internal_test.go deleted file mode 100644 index c68691d5d..000000000 --- a/server/legacy/events/vcs/github_client_internal_test.go +++ /dev/null @@ -1,48 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package vcs - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -// If the hostname is github.com, should use normal BaseURL. -func TestNewGithubClient_GithubCom(t *testing.T) { - mergeabilityChecker := NewPullMergeabilityChecker("atlantis") - client, err := NewGithubClient("github.com", &GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - Equals(t, "https://api.github.com/", client.client.BaseURL.String()) -} - -// If the hostname is a non-github hostname should use the right BaseURL. -func TestNewGithubClient_NonGithub(t *testing.T) { - mergeabilityChecker := NewPullMergeabilityChecker("atlantis") - client, err := NewGithubClient("example.com", &GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - Equals(t, "https://example.com/api/v3/", client.client.BaseURL.String()) - // If possible in the future, test the GraphQL client's URL as well. But at the - // moment the shurcooL library doesn't expose it. 
-} - -type testAllocator struct { -} - -func (t testAllocator) ShouldAllocate(featureID feature.Name, featureCtx feature.FeatureContext) (bool, error) { - return false, nil -} diff --git a/server/legacy/events/vcs/github_client_lyft_test.go b/server/legacy/events/vcs/github_client_lyft_test.go deleted file mode 100644 index 8fafe82d0..000000000 --- a/server/legacy/events/vcs/github_client_lyft_test.go +++ /dev/null @@ -1,219 +0,0 @@ -package vcs_test - -import ( - "fmt" - "net/http" - "net/http/httptest" - "net/url" - "os" - "strings" - "testing" - - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/stretchr/testify/assert" -) - -// TODO: move this test to the mergeability checker itself -func TestLyftGithubClient_PullisMergeable_BlockedStatus(t *testing.T) { - checkJSON := `{ - "id": 4, - "status": "%s", - "conclusion": "%s", - "name": "%s", - "check_suite": { - "id": 5 - } - }` - combinedStatusJSON := `{ - "state": "success", - "statuses": [%s] - }` - combinedChecksJSON := `{ - "check_runs": [%s] - }` - statusJSON := `{ - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://github.com/images/error/other_user_happy.gif", - "id": 2, - "node_id": "MDY6U3RhdHVzMg==", - "state": "%s", - "description": "Testing has completed successfully", - "target_url": "https://ci.example.com/2000/output", - "context": "%s", - "created_at": "2012-08-20T01:19:13Z", - "updated_at": "2012-08-20T01:19:13Z" - }` - - // Use a real GitHub json response and edit the mergeable_state field. - jsBytes, err := os.ReadFile("fixtures/github-pull-request.json") - assert.NoError(t, err) - json := string(jsBytes) - - pullResponse := strings.Replace(json, - `"mergeable_state": "clean"`, - fmt.Sprintf(`"mergeable_state": "%s"`, "blocked"), - 1, - ) - - cases := []struct { - description string - statuses []string - checks []string - expMergeable bool - }{ - { - "sq-pending+owners-success+check-success", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "success", "_owners-check"), - }, - []string{ - fmt.Sprintf(checkJSON, "completed", "success", "check-name"), - }, - true, - }, - { - "sq-pending+owners-missing", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - }, - []string{ - fmt.Sprintf(checkJSON, "completed", "success", "check-name"), - }, - false, - }, - { - "sq-pending+owners-failure", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "failure", "_owners-check"), - }, - []string{ - fmt.Sprintf(checkJSON, "completed", "success", "check-name"), - }, - false, - }, - { - "sq-pending+apply-failure", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "success", "_owners-check"), - }, - []string{ - fmt.Sprintf(checkJSON, "completed", "success", "check-name"), - fmt.Sprintf(checkJSON, "completed", "failure", "atlantis/apply"), - }, - true, - }, - { - "sq-pending+apply-failure+check-failure", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "success", "_owners-check"), - fmt.Sprintf(statusJSON, "failure", "atlantis/apply"), - }, - []string{ - fmt.Sprintf(checkJSON, "in_progress", "", "check-name"), - }, - false, - }, - { - "sq-pending+check_pending", - 
[]string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "success", "_owners-check"), - }, - []string{ - fmt.Sprintf(checkJSON, "in_progress", "", "check-name"), - }, - false, - }, - { - "sq-pending+check_failure", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "success", "_owners-check"), - }, - []string{ - fmt.Sprintf(checkJSON, "complete", "failure", "check-name"), - }, - false, - }, - { - "sq-pending-check+owners-success+check-success", - []string{ - fmt.Sprintf(statusJSON, "pending", "sq-ready-to-merge"), - fmt.Sprintf(statusJSON, "success", "_owners-check"), - }, - []string{ - fmt.Sprintf(checkJSON, "queued", "", "sq-ready-to-merge"), - fmt.Sprintf(checkJSON, "completed", "success", "check-name"), - }, - true, - }, - } - - for _, c := range cases { - t.Run("blocked/"+c.description, func(t *testing.T) { - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - case "/api/v3/repos/owner/repo/pulls/1": - w.Write([]byte(pullResponse)) // nolint: errcheck - return - case "/api/v3/repos/owner/repo/commits/2/status?per_page=100": - _, _ = w.Write([]byte( - fmt.Sprintf(combinedStatusJSON, strings.Join(c.statuses, ",")), - )) // nolint: errcheck - return - case "/api/v3/repos/owner/repo/commits/2/check-runs?per_page=100": - _, _ = w.Write([]byte( - fmt.Sprintf(combinedChecksJSON, strings.Join(c.checks, ",")), - )) - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - defer testServer.Close() - - testServerURL, err := url.Parse(testServer.URL) - assert.NoError(t, err) - mergeabilityChecker := vcs.NewLyftPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - assert.NoError(t, err) - defer disableSSLVerification()() - - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - HeadCommit: "2", - }) - assert.NoError(t, err) - assert.Equal(t, c.expMergeable, actMergeable) - }) - } -} - -type testAllocator struct { -} - -func (t testAllocator) ShouldAllocate(featureID feature.Name, featureCtx feature.FeatureContext) (bool, error) { - return false, nil -} diff --git a/server/legacy/events/vcs/github_client_test.go b/server/legacy/events/vcs/github_client_test.go deleted file mode 100644 index ecd67c1ad..000000000 --- a/server/legacy/events/vcs/github_client_test.go +++ /dev/null @@ -1,845 +0,0 @@ -package vcs_test - -import ( - "crypto/tls" - "encoding/json" - "fmt" - "io" - "net/http" - "net/http/httptest" - "net/url" - "os" - "strings" - "testing" - "time" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" - - "github.com/shurcooL/githubv4" -) - -// GetModifiedFiles should make multiple requests if more than one page -// and concat results. 
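Each test in this file, including the one that follows, uses the same stubbing pattern: start an httptest TLS server that answers only the /api/v3/ endpoints the client is expected to hit, fail the test on anything else, then hand the server's host to NewGithubClient with SSL verification disabled. A condensed, hypothetical stub assuming the same net/http, net/http/httptest, and testing imports as the surrounding file (the route and response body are placeholders):

// newGithubStub returns a TLS test server that serves canned JSON for a
// single pull request endpoint and flags any other request as a test error.
func newGithubStub(t *testing.T, pullJSON string) *httptest.Server {
	return httptest.NewTLSServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		switch r.RequestURI {
		case "/api/v3/repos/owner/repo/pulls/1":
			w.Write([]byte(pullJSON)) // nolint: errcheck
		default:
			t.Errorf("got unexpected request at %q", r.RequestURI)
			http.Error(w, "not found", http.StatusNotFound)
		}
	}))
}

The caller then parses testServer.URL with url.Parse and passes testServerURL.Host to NewGithubClient, exactly as the deleted tests below do.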
-func TestGithubClient_GetModifiedFiles(t *testing.T) { - logger := logging.NewNoopCtxLogger(t) - respTemplate := `[ - { - "sha": "bbcd538c8e72b8c175046e27cc8f907076331401", - "filename": "%s", - "status": "added", - "additions": 103, - "deletions": 21, - "changes": 124, - "blob_url": "https://github.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt", - "raw_url": "https://github.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt", - "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e", - "patch": "@@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test" - } -]` - firstResp := fmt.Sprintf(respTemplate, "file1.txt") - secondResp := fmt.Sprintf(respTemplate, "file2.txt") - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - // The first request should hit this URL. - case "/api/v3/repos/owner/repo/pulls/1/files?per_page=300": - // We write a header that means there's an additional page. - w.Header().Add("Link", `; rel="next", - ; rel="last"`) - w.Write([]byte(firstResp)) // nolint: errcheck - return - // The second should hit this URL. - case "/api/v3/repos/owner/repo/pulls/1/files?page=2&per_page=300": - w.Write([]byte(secondResp)) // nolint: errcheck - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logger, &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }) - Ok(t, err) - Equals(t, []string{"file1.txt", "file2.txt"}, files) -} - -// GetModifiedFiles should include the source and destination of a moved -// file. -func TestGithubClient_GetModifiedFilesMovedFile(t *testing.T) { - resp := `[ - { - "sha": "bbcd538c8e72b8c175046e27cc8f907076331401", - "filename": "new/filename.txt", - "previous_filename": "previous/filename.txt", - "status": "renamed", - "additions": 103, - "deletions": 21, - "changes": 124, - "blob_url": "https://github.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt", - "raw_url": "https://github.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt", - "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e", - "patch": "@@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test" - } -]` - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - // The first request should hit this URL. 
- case "/api/v3/repos/owner/repo/pulls/1/files?per_page=300": - w.Write([]byte(resp)) // nolint: errcheck - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - files, err := client.GetModifiedFiles(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }) - Ok(t, err) - Equals(t, []string{"new/filename.txt", "previous/filename.txt"}, files) -} - -func TestGithubClient_PaginatesComments(t *testing.T) { - calls := 0 - issueResps := []string{ - `[ - {"node_id": "1", "body": "asd\nplan\nasd", "user": {"login": "someone-else"}}, - {"node_id": "2", "body": "asd plan\nasd", "user": {"login": "user"}} -]`, - `[ - {"node_id": "3", "body": "asd", "user": {"login": "someone-else"}}, - {"node_id": "4", "body": "asdasd", "user": {"login": "someone-else"}} -]`, - `[ - {"node_id": "5", "body": "asd plan", "user": {"login": "someone-else"}}, - {"node_id": "6", "body": "asd\nplan", "user": {"login": "user"}} -]`, - `[ - {"node_id": "7", "body": "asd", "user": {"login": "user"}}, - {"node_id": "8", "body": "asd plan \n asd", "user": {"login": "user"}} -]`, - } - minimizeResp := "{}" - type graphQLCall struct { - Variables struct { - Input githubv4.MinimizeCommentInput `json:"input"` - } `json:"variables"` - } - gotMinimizeCalls := make([]graphQLCall, 0, 2) - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.Method + " " + r.RequestURI { - case "POST /api/graphql": - defer r.Body.Close() // nolint: errcheck - body, err := io.ReadAll(r.Body) - if err != nil { - t.Errorf("read body error: %v", err) - http.Error(w, "server error", http.StatusInternalServerError) - return - } - call := graphQLCall{} - err = json.Unmarshal(body, &call) - if err != nil { - t.Errorf("parse body error: %v", err) - http.Error(w, "server error", http.StatusInternalServerError) - return - } - gotMinimizeCalls = append(gotMinimizeCalls, call) - w.Write([]byte(minimizeResp)) // nolint: errcheck - return - default: - if r.Method != http.MethodGet || !strings.HasPrefix(r.RequestURI, "/api/v3/repos/owner/repo/issues/123/comments") { - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - if (calls + 1) < len(issueResps) { - w.Header().Add( - "Link", - fmt.Sprintf( - `; rel="next"`, - r.Host, - calls+1, - ), - ) - } - w.Write([]byte(issueResps[calls])) // nolint: errcheck - calls++ - } - }), - ) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - err = client.HidePrevCommandComments( - models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - 
VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - 123, - "Plan", - ) - Ok(t, err) - Equals(t, 2, len(gotMinimizeCalls)) - Equals(t, "2", gotMinimizeCalls[0].Variables.Input.SubjectID) - Equals(t, "8", gotMinimizeCalls[1].Variables.Input.SubjectID) - Equals(t, githubv4.ReportedContentClassifiersOutdated, gotMinimizeCalls[0].Variables.Input.Classifier) - Equals(t, githubv4.ReportedContentClassifiersOutdated, gotMinimizeCalls[1].Variables.Input.Classifier) -} - -func TestGithubClient_HideOldComments(t *testing.T) { - // Only comment 6 should be minimized, because it's by the same Atlantis bot user - // and it has "plan" in the first line of the comment body. - issueResp := `[ - {"node_id": "1", "body": "asd\nplan\nasd", "user": {"login": "someone-else"}}, - {"node_id": "2", "body": "asd plan\nasd", "user": {"login": "someone-else"}}, - {"node_id": "3", "body": "asdasdasd\nasdasdasd", "user": {"login": "someone-else"}}, - {"node_id": "4", "body": "asdasdasd\nasdasdasd", "user": {"login": "user"}}, - {"node_id": "5", "body": "asd\nplan\nasd", "user": {"login": "user"}}, - {"node_id": "6", "body": "asd plan\nasd", "user": {"login": "user"}}, - {"node_id": "7", "body": "asdasdasd", "user": {"login": "user"}}, - {"node_id": "8", "body": "asd plan\nasd", "user": {"login": "user"}}, - {"node_id": "9", "body": "Continued Plan from previous comment\nasd", "user": {"login": "user"}} -]` - minimizeResp := "{}" - type graphQLCall struct { - Variables struct { - Input githubv4.MinimizeCommentInput `json:"input"` - } `json:"variables"` - } - gotMinimizeCalls := make([]graphQLCall, 0, 1) - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.Method + " " + r.RequestURI { - // This gets the pull request's comments. 
- case "GET /api/v3/repos/owner/repo/issues/123/comments?direction=asc&sort=created": - w.Write([]byte(issueResp)) // nolint: errcheck - return - case "POST /api/graphql": - if accept, has := r.Header["Accept"]; !has || accept[0] != "application/vnd.github.queen-beryl-preview+json" { - t.Error("missing preview header") - http.Error(w, "bad request", http.StatusBadRequest) - return - } - defer r.Body.Close() // nolint: errcheck - body, err := io.ReadAll(r.Body) - if err != nil { - t.Errorf("read body error: %v", err) - http.Error(w, "server error", http.StatusInternalServerError) - return - } - call := graphQLCall{} - err = json.Unmarshal(body, &call) - if err != nil { - t.Errorf("parse body error: %v", err) - http.Error(w, "server error", http.StatusInternalServerError) - return - } - gotMinimizeCalls = append(gotMinimizeCalls, call) - w.Write([]byte(minimizeResp)) // nolint: errcheck - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - }), - ) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - err = client.HidePrevCommandComments( - models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Hostname: "github.com", - Type: models.Github, - }, - }, - 123, - "Plan", - ) - Ok(t, err) - Equals(t, 3, len(gotMinimizeCalls)) - Equals(t, "6", gotMinimizeCalls[0].Variables.Input.SubjectID) - Equals(t, "9", gotMinimizeCalls[2].Variables.Input.SubjectID) - Equals(t, githubv4.ReportedContentClassifiersOutdated, gotMinimizeCalls[0].Variables.Input.Classifier) -} - -func TestGithubClient_PullIsApproved(t *testing.T) { - respTemplate := `[ - { - "id": %d, - "node_id": "MDE3OlB1bGxSZXF1ZXN0UmV2aWV3ODA=", - "user": { - "login": "octocat", - "id": 1, - "node_id": "MDQ6VXNlcjE=", - "avatar_url": "https://github.com/images/error/octocat_happy.gif", - "gravatar_id": "", - "url": "https://api.github.com/users/octocat", - "html_url": "https://github.com/octocat", - "followers_url": "https://api.github.com/users/octocat/followers", - "following_url": "https://api.github.com/users/octocat/following{/other_user}", - "gists_url": "https://api.github.com/users/octocat/gists{/gist_id}", - "starred_url": "https://api.github.com/users/octocat/starred{/owner}{/repo}", - "subscriptions_url": "https://api.github.com/users/octocat/subscriptions", - "organizations_url": "https://api.github.com/users/octocat/orgs", - "repos_url": "https://api.github.com/users/octocat/repos", - "events_url": "https://api.github.com/users/octocat/events{/privacy}", - "received_events_url": "https://api.github.com/users/octocat/received_events", - "type": "User", - "site_admin": false - }, - "body": "Here is the body for the review.", - "commit_id": "ecdd80bb57125d7ba9641ffaa4d7d2c19d3f3091", - "state": "APPROVED", - "html_url": "https://github.com/octocat/Hello-World/pull/12#pullrequestreview-%d", - "pull_request_url": "https://api.github.com/repos/octocat/Hello-World/pulls/12", - "_links": { - "html": { - "href": "https://github.com/octocat/Hello-World/pull/12#pullrequestreview-%d" - }, - "pull_request": { - "href": "https://api.github.com/repos/octocat/Hello-World/pulls/12" 
- } - }, - "submitted_at": "2019-11-17T17:43:43Z" - } -]` - firstResp := fmt.Sprintf(respTemplate, 80, 80, 80) - secondResp := fmt.Sprintf(respTemplate, 81, 81, 81) - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - // The first request should hit this URL. - case "/api/v3/repos/owner/repo/pulls/1/reviews?per_page=300": - // We write a header that means there's an additional page. - w.Header().Add("Link", `; rel="next", - ; rel="last"`) - w.Write([]byte(firstResp)) // nolint: errcheck - return - // The second should hit this URL. - case "/api/v3/repos/owner/repo/pulls/1/reviews?page=2&per_page=300": - w.Write([]byte(secondResp)) // nolint: errcheck - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - approvalStatus, err := client.PullIsApproved(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - }) - Ok(t, err) - - timeOfApproval, err := time.Parse("2006-01-02T15:04:05Z", "2019-11-17T17:43:43Z") - Ok(t, err) - - expApprovalStatus := models.ApprovalStatus{ - IsApproved: true, - ApprovedBy: "octocat", - Date: timeOfApproval, - } - Equals(t, expApprovalStatus, approvalStatus) -} - -func TestGithubClient_PullIsMergeable(t *testing.T) { - combinedStatusJSON := `{ - "state": "success", - "statuses": [%s] - }` - statusJSON := `{ - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://github.com/images/error/other_user_happy.gif", - "id": 2, - "node_id": "MDY6U3RhdHVzMg==", - "state": "%s", - "description": "Testing has completed successfully", - "target_url": "https://ci.example.com/2000/output", - "context": "%s", - "created_at": "2012-08-20T01:19:13Z", - "updated_at": "2012-08-20T01:19:13Z" - }` - checksJSON := `{ - "check_runs": [ - { - "id": 4, - "status": "%s", - "conclusion": "%s", - "name": "mighty_readme", - "check_suite": { - "id": 5 - } - } - ] - }` - cases := []struct { - state string - expMergeable bool - }{ - { - "dirty", - false, - }, - { - "unknown", - false, - }, - { - "behind", - false, - }, - { - "random", - false, - }, - { - "unstable", - true, - }, - { - "has_hooks", - true, - }, - { - "clean", - true, - }, - { - "", - false, - }, - } - - // Use a real GitHub json response and edit the mergeable_state field. 
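The case table above encodes how GitHub's mergeable_state maps onto Atlantis mergeability: only clean, unstable and has_hooks count as mergeable, while blocked is handled separately by the supplemental checker deleted later in this diff. A minimal self-contained sketch of that mapping (mergeableFromState is a hypothetical name, not part of the deleted code):

package main

import "fmt"

// mergeableFromState mirrors the table above: "clean", "unstable" and
// "has_hooks" are mergeable; every other state (dirty, unknown, behind,
// random, empty) is not. "blocked" is the special case the deleted
// PullMergeabilityChecker hands off to its supplemental checker.
func mergeableFromState(state string) bool {
	switch state {
	case "clean", "unstable", "has_hooks":
		return true
	default:
		return false
	}
}

func main() {
	for _, s := range []string{"dirty", "unknown", "behind", "random", "unstable", "has_hooks", "clean", ""} {
		fmt.Printf("%q -> %v\n", s, mergeableFromState(s))
	}
}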
- jsBytes, err := os.ReadFile("fixtures/github-pull-request.json") - Ok(t, err) - json := string(jsBytes) - - for _, c := range cases { - t.Run(c.state, func(t *testing.T) { - response := strings.Replace(json, - `"mergeable_state": "clean"`, - fmt.Sprintf(`"mergeable_state": "%s"`, c.state), - 1, - ) - - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - case "/api/v3/repos/owner/repo/pulls/1": - w.Write([]byte(response)) // nolint: errcheck - return - case "/api/v3/repos/owner/repo/commits/2/status?per_page=100": - _, _ = w.Write([]byte( - fmt.Sprintf(combinedStatusJSON, fmt.Sprintf(statusJSON, "success", "some_status")), - )) // nolint: errcheck - return - case "/api/v3/repos/owner/repo/commits/2/check-runs?per_page=100": - _, _ = w.Write([]byte(fmt.Sprintf(checksJSON, "completed", "success"))) - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - HeadCommit: "2", - }) - Ok(t, err) - Equals(t, c.expMergeable, actMergeable) - }) - } -} - -// TODO: move this test to the mergeability checker itself -func TestGithubClient_PullisMergeable_BlockedStatus(t *testing.T) { - // Use a real GitHub json response and edit the mergeable_state field. 
- jsBytes, err := os.ReadFile("fixtures/github-pull-request.json") - Ok(t, err) - json := string(jsBytes) - - pullResponse := strings.Replace(json, - `"mergeable_state": "clean"`, - fmt.Sprintf(`"mergeable_state": "%s"`, "blocked"), - 1, - ) - - combinedStatusJSON := `{ - "state": "success", - "statuses": [%s] - }` - statusJSON := `{ - "url": "https://api.github.com/repos/octocat/Hello-World/statuses/6dcb09b5b57875f334f61aebed695e2e4193db5e", - "avatar_url": "https://github.com/images/error/other_user_happy.gif", - "id": 2, - "node_id": "MDY6U3RhdHVzMg==", - "state": "%s", - "description": "Testing has completed successfully", - "target_url": "https://ci.example.com/2000/output", - "context": "%s", - "created_at": "2012-08-20T01:19:13Z", - "updated_at": "2012-08-20T01:19:13Z" - }` - checksJSON := `{ - "id": 4, - "status": "%s", - "conclusion": "%s", - "name": "%s", - "check_suite": { - "id": 5 - } - }` - combinedChecksJSON := `{ - "total_count": %d, - "check_runs": [%s] - }` - - completedCheckResponse := fmt.Sprintf(checksJSON, "completed", "success", "mighty-readme") - - cases := []struct { - description string - statuses []string - checks []string - expMergeable bool - }{ - { - "apply-failure", - []string{}, - []string{ - completedCheckResponse, - fmt.Sprintf(checksJSON, "complete", "failure", "atlantis/apply"), - }, - true, - }, - { - "apply-project-failure", - []string{}, - []string{ - fmt.Sprintf(checksJSON, "complete", "failure", "atlantis/apply: terraform_cloud_workspace"), - completedCheckResponse, - }, - true, - }, - { - "plan+apply-failure", - []string{ - fmt.Sprintf(statusJSON, "failure", "atlantis/plan"), - }, - []string{ - completedCheckResponse, - fmt.Sprintf(checksJSON, "complete", "failure", "atlantis/apply"), - }, - false, - }, - { - "apply-failure-checks-failed", - []string{}, - []string{ - fmt.Sprintf(checksJSON, "complete", "failure", "mighty-readme"), - fmt.Sprintf(checksJSON, "complete", "failure", "atlantis/apply"), - }, - false, - }, - { - "plan-success-checks-in-progress", - []string{ - fmt.Sprintf(statusJSON, "success", "atlantis/plan"), - }, - []string{ - fmt.Sprintf(checksJSON, "in_progress", "", "mighty-readme"), - }, - false, - }, - } - - for _, c := range cases { - t.Run("blocked/"+c.description, func(t *testing.T) { - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.RequestURI { - case "/api/v3/repos/owner/repo/commits/2/status?per_page=100": - _, _ = w.Write([]byte( - fmt.Sprintf(combinedStatusJSON, strings.Join(c.statuses, ",")), - )) // nolint: errcheck - return - case "/api/v3/repos/owner/repo/commits/2/check-runs?per_page=100": - _, _ = w.Write([]byte( - fmt.Sprintf(combinedChecksJSON, len(c.checks), strings.Join(c.checks, ",")), - )) - return - case "/api/v3/repos/owner/repo/pulls/1": - w.Write([]byte(pullResponse)) // nolint: errcheck - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - defer testServer.Close() - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - - actMergeable, err := client.PullIsMergeable(models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - CloneURL: "", - 
SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - }, models.PullRequest{ - Num: 1, - HeadCommit: "2", - }) - Ok(t, err) - Equals(t, c.expMergeable, actMergeable) - }) - } -} - -func TestGithubClient_MarkdownPullLink(t *testing.T) { - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient("hostname", &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - pull := models.PullRequest{Num: 1} - s, _ := client.MarkdownPullLink(pull) - exp := "#1" - Equals(t, exp, s) -} - -// disableSSLVerification disables ssl verification for the global http client -// and returns a function to be called in a defer that will re-enable it. -func disableSSLVerification() func() { - orig := http.DefaultTransport.(*http.Transport).TLSClientConfig - // nolint: gosec - http.DefaultTransport.(*http.Transport).TLSClientConfig = &tls.Config{InsecureSkipVerify: true} - return func() { - http.DefaultTransport.(*http.Transport).TLSClientConfig = orig - } -} - -func TestGithubClient_SplitComments(t *testing.T) { - type githubComment struct { - Body string `json:"body"` - } - githubComments := make([]githubComment, 0, 1) - - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.Method + " " + r.RequestURI { - case "POST /api/v3/repos/runatlantis/atlantis/issues/1/comments": - defer r.Body.Close() // nolint: errcheck - body, err := io.ReadAll(r.Body) - if err != nil { - t.Errorf("read body error: %v", err) - http.Error(w, "server error", http.StatusInternalServerError) - return - } - requestBody := githubComment{} - err = json.Unmarshal(body, &requestBody) - if err != nil { - t.Errorf("parse body error: %v", err) - http.Error(w, "server error", http.StatusInternalServerError) - return - } - githubComments = append(githubComments, requestBody) - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - pull := models.PullRequest{Num: 1} - repo := models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - } - // create an extra long string - comment := strings.Repeat("a", 65537) - err = client.CreateComment(repo, pull.Num, comment, "plan") - Ok(t, err) - err = client.CreateComment(repo, pull.Num, comment, "") - Ok(t, err) - - body := strings.Split(githubComments[1].Body, "\n") - firstSplit := strings.ToLower(body[0]) - body = strings.Split(githubComments[3].Body, "\n") - secondSplit := strings.ToLower(body[0]) - - Equals(t, 4, len(githubComments)) - Assert(t, strings.Contains(firstSplit, "plan"), fmt.Sprintf("comment should contain the command name but was %q", firstSplit)) - Assert(t, strings.Contains(secondSplit, "continued from previous comment"), fmt.Sprintf("comment should contain no reference to the command name but was %q", secondSplit)) -} - -// Test that we retry the get pull request call if it 404s. 
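The deleted test below (TestGithubClient_Retry404) asserts that fetching a pull request is retried when GitHub returns 404 and succeeds on the third attempt. The sketch below illustrates one way such a retry loop can look; the getPullWithRetry helper, the attempt count and the delay are assumptions for illustration, not the deleted client code.

package main

import (
	"context"
	"fmt"
	"net/http"
	"time"

	"github.com/google/go-github/v45/github"
)

// getPullWithRetry retries only on 404 responses, which can occur briefly
// right after a pull request is created; other errors fail immediately.
func getPullWithRetry(ctx context.Context, client *github.Client, owner, repo string, num int) (*github.PullRequest, error) {
	var lastErr error
	for attempt := 0; attempt < 3; attempt++ {
		pull, resp, err := client.PullRequests.Get(ctx, owner, repo, num)
		if err == nil {
			return pull, nil
		}
		lastErr = err
		if resp == nil || resp.StatusCode != http.StatusNotFound {
			return nil, err
		}
		time.Sleep(time.Second)
	}
	return nil, lastErr
}

func main() {
	pull, err := getPullWithRetry(context.Background(), github.NewClient(nil), "owner", "repo", 1)
	fmt.Println(pull.GetNumber(), err)
}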
-func TestGithubClient_Retry404(t *testing.T) { - var numCalls = 0 - - testServer := httptest.NewTLSServer( - http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { - switch r.Method + " " + r.RequestURI { - case "GET /api/v3/repos/runatlantis/atlantis/pulls/1": - defer r.Body.Close() // nolint: errcheck - numCalls++ - if numCalls < 3 { - w.WriteHeader(http.StatusNotFound) - } else { - w.WriteHeader(http.StatusOK) - } - return - default: - t.Errorf("got unexpected request at %q", r.RequestURI) - http.Error(w, "not found", http.StatusNotFound) - return - } - })) - - testServerURL, err := url.Parse(testServer.URL) - Ok(t, err) - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - defer disableSSLVerification()() - repo := models.Repo{ - FullName: "runatlantis/atlantis", - Owner: "runatlantis", - Name: "atlantis", - CloneURL: "", - SanitizedCloneURL: "", - VCSHost: models.VCSHost{ - Type: models.Github, - Hostname: "github.com", - }, - } - _, err = client.GetPullRequest(repo, 1) - Ok(t, err) - Equals(t, 3, numCalls) -} diff --git a/server/legacy/events/vcs/github_credentials.go b/server/legacy/events/vcs/github_credentials.go deleted file mode 100644 index bdcbda926..000000000 --- a/server/legacy/events/vcs/github_credentials.go +++ /dev/null @@ -1,200 +0,0 @@ -package vcs - -import ( - "context" - "fmt" - "net/http" - "net/url" - "strings" - - "github.com/bradleyfalzon/ghinstallation" - "github.com/google/go-github/v45/github" - "github.com/pkg/errors" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_github_credentials.go GithubCredentials - -// GithubCredentials handles creating http.Clients that authenticate. -type GithubCredentials interface { - Client() (*http.Client, error) - GetToken() (string, error) - GetUser() (string, error) -} - -// GithubAnonymousCredentials expose no credentials. -type GithubAnonymousCredentials struct{} - -// Client returns a client with no credentials. -func (c *GithubAnonymousCredentials) Client() (*http.Client, error) { - tr := http.DefaultTransport - return &http.Client{Transport: tr}, nil -} - -// GetUser returns the username for these credentials. -func (c *GithubAnonymousCredentials) GetUser() (string, error) { - return "anonymous", nil -} - -// GetToken returns an empty token. -func (c *GithubAnonymousCredentials) GetToken() (string, error) { - return "", nil -} - -// GithubUserCredentials implements GithubCredentials for the personal auth token flow. -type GithubUserCredentials struct { - User string - Token string -} - -// Client returns a client for basic auth user credentials. -func (c *GithubUserCredentials) Client() (*http.Client, error) { - tr := &github.BasicAuthTransport{ - Username: strings.TrimSpace(c.User), - Password: strings.TrimSpace(c.Token), - } - return tr.Client(), nil -} - -// GetUser returns the username for these credentials. -func (c *GithubUserCredentials) GetUser() (string, error) { - return c.User, nil -} - -// GetToken returns the user token. -func (c *GithubUserCredentials) GetToken() (string, error) { - return c.Token, nil -} - -// GithubAppCredentials implements GithubCredentials for github app installation token flow. 
-type GithubAppCredentials struct { - AppID int64 - Key []byte - Hostname string - apiURL *url.URL - installationID int64 - tr *ghinstallation.Transport - AppSlug string -} - -// Client returns a github app installation client. -func (c *GithubAppCredentials) Client() (*http.Client, error) { - itr, err := c.transport() - if err != nil { - return nil, err - } - return &http.Client{Transport: itr}, nil -} - -// GetUser returns the username for these credentials. -func (c *GithubAppCredentials) GetUser() (string, error) { - // Keeping backwards compatibility since this flag is optional - if c.AppSlug == "" { - return "", nil - } - client, err := c.Client() - - if err != nil { - return "", errors.Wrap(err, "initializing client") - } - - ghClient := github.NewClient(client) - ghClient.BaseURL = c.getAPIURL() - ctx := context.Background() - - app, _, err := ghClient.Apps.Get(ctx, c.AppSlug) - - if err != nil { - return "", errors.Wrap(err, "getting app details") - } - // Currently there is no way to get the bot's login info, so this is a - // hack until Github exposes that. - return fmt.Sprintf("%s[bot]", app.GetName()), nil -} - -// GetToken returns a fresh installation token. -func (c *GithubAppCredentials) GetToken() (string, error) { - tr, err := c.transport() - if err != nil { - return "", errors.Wrap(err, "transport failed") - } - - return tr.Token(context.Background()) -} - -func (c *GithubAppCredentials) getInstallationID() (int64, error) { - if c.installationID != 0 { - return c.installationID, nil - } - - tr := http.DefaultTransport - // A non-installation transport - t, err := ghinstallation.NewAppsTransport(tr, c.AppID, c.Key) - if err != nil { - return 0, err - } - t.BaseURL = c.getAPIURL().String() - - // Query github with the app's JWT - client := github.NewClient(&http.Client{Transport: t}) - client.BaseURL = c.getAPIURL() - ctx := context.Background() - - installations, _, err := client.Apps.ListInstallations(ctx, nil) - if err != nil { - return 0, err - } - - if len(installations) != 1 { - return 0, fmt.Errorf("wrong number of installations, expected 1, found %d", len(installations)) - } - - c.installationID = installations[0].GetID() - return c.installationID, nil -} - -func (c *GithubAppCredentials) transport() (*ghinstallation.Transport, error) { - if c.tr != nil { - return c.tr, nil - } - - installationID, err := c.getInstallationID() - if err != nil { - return nil, err - } - - tr := http.DefaultTransport - itr, err := ghinstallation.New(tr, c.AppID, installationID, c.Key) - if err == nil { - apiURL := c.getAPIURL() - itr.BaseURL = strings.TrimSuffix(apiURL.String(), "/") - c.tr = itr - } - return itr, err -} - -func (c *GithubAppCredentials) getAPIURL() *url.URL { - if c.apiURL != nil { - return c.apiURL - } - - c.apiURL = resolveGithubAPIURL(c.Hostname) - return c.apiURL -} - -func resolveGithubAPIURL(hostname string) *url.URL { - // If we're using github.com then we don't need to do any additional configuration - // for the client. It we're using Github Enterprise, then we need to manually - // set the base url for the API. 
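Concretely, the resolution described in the comment above yields https://api.github.com/ for github.com and https://<hostname>/api/v3/ for a GitHub Enterprise host. A standalone sketch of that rule follows (the enterprise hostname is a placeholder):

package main

import (
	"fmt"
	"net/url"
)

// resolveAPIURL mirrors the rule above: github.com uses the public API host,
// any other hostname is treated as GitHub Enterprise and gets the /api/v3/ prefix.
func resolveAPIURL(hostname string) *url.URL {
	u := &url.URL{Scheme: "https", Host: "api.github.com", Path: "/"}
	if hostname != "github.com" {
		u.Host = hostname
		u.Path = "/api/v3/"
	}
	return u
}

func main() {
	fmt.Println(resolveAPIURL("github.com"))      // https://api.github.com/
	fmt.Println(resolveAPIURL("ghe.example.com")) // https://ghe.example.com/api/v3/
}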
- baseURL := &url.URL{ - Scheme: "https", - Host: "api.github.com", - Path: "/", - } - - if hostname != "github.com" { - baseURL.Host = hostname - baseURL.Path = "/api/v3/" - } - - return baseURL -} diff --git a/server/legacy/events/vcs/github_credentials_test.go b/server/legacy/events/vcs/github_credentials_test.go deleted file mode 100644 index 338c1a65b..000000000 --- a/server/legacy/events/vcs/github_credentials_test.go +++ /dev/null @@ -1,71 +0,0 @@ -package vcs_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/fixtures" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -func TestGithubClient_GetUser_AppSlug(t *testing.T) { - defer disableSSLVerification()() - testServer, err := fixtures.GithubAppTestServer(t) - Ok(t, err) - - anonCreds := &vcs.GithubAnonymousCredentials{} - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - tempSecrets, err := anonClient.ExchangeCode("good-code") - Ok(t, err) - - appCreds := &vcs.GithubAppCredentials{ - AppID: tempSecrets.ID, - Key: []byte(fixtures.GithubPrivateKey), - Hostname: testServer, - AppSlug: "some-app", - } - - user, err := appCreds.GetUser() - Ok(t, err) - - Assert(t, user == "Octocat App[bot]", "user should not empty") -} - -func TestGithubClient_AppAuthentication(t *testing.T) { - defer disableSSLVerification()() - testServer, err := fixtures.GithubAppTestServer(t) - Ok(t, err) - - anonCreds := &vcs.GithubAnonymousCredentials{} - mergeabilityChecker := vcs.NewPullMergeabilityChecker("atlantis") - anonClient, err := vcs.NewGithubClient(testServer, anonCreds, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - tempSecrets, err := anonClient.ExchangeCode("good-code") - Ok(t, err) - - appCreds := &vcs.GithubAppCredentials{ - AppID: tempSecrets.ID, - Key: []byte(fixtures.GithubPrivateKey), - Hostname: testServer, - } - _, err = vcs.NewGithubClient(testServer, appCreds, logging.NewNoopCtxLogger(t), &testAllocator{}, mergeabilityChecker) - Ok(t, err) - - token, err := appCreds.GetToken() - Ok(t, err) - - newToken, err := appCreds.GetToken() - Ok(t, err) - - user, err := appCreds.GetUser() - Ok(t, err) - - Assert(t, user == "", "user should be empty") - - if token != newToken { - t.Errorf("app token was not cached: %q != %q", token, newToken) - } -} diff --git a/server/legacy/events/vcs/inject.go b/server/legacy/events/vcs/inject.go deleted file mode 100644 index 1ac7ec636..000000000 --- a/server/legacy/events/vcs/inject.go +++ /dev/null @@ -1,41 +0,0 @@ -package vcs - -// Declare all package dependencies here - -func NewPullMergeabilityChecker(vcsStatusPrefix string) MergeabilityChecker { - statusFilters := newValidStatusFilters(vcsStatusPrefix) - checksFilters := newValidChecksFilters(vcsStatusPrefix) - - return &PullMergeabilityChecker{ - supplementalChecker: newSupplementalMergeabilityChecker(statusFilters, checksFilters), - } -} - -func newValidStatusFilters(vcsStatusPrefix string) []ValidStatusFilter { - return []ValidStatusFilter{ - SuccessStateFilter, - } -} - -func newValidChecksFilters(vcsStatusPrefix string) []ValidChecksFilter { - titleMatcher := StatusTitleMatcher{TitlePrefix: vcsStatusPrefix} - applyChecksFilter := &ApplyChecksFilter{ - statusTitleMatcher: titleMatcher, - } - 
deployChecksFilter := &DeployChecksFilter{ - statusTitleMatcher: titleMatcher, - } - return []ValidChecksFilter{ - SuccessConclusionFilter, SkippedConclusionFilter, applyChecksFilter, deployChecksFilter, - } -} - -func newSupplementalMergeabilityChecker( - statusFilters []ValidStatusFilter, - checksFilters []ValidChecksFilter, -) MergeabilityChecker { - return &SupplementalMergabilityChecker{ - statusFilter: statusFilters, - checksFilters: checksFilters, - } -} diff --git a/server/legacy/events/vcs/inject_lyft.go b/server/legacy/events/vcs/inject_lyft.go deleted file mode 100644 index 116160954..000000000 --- a/server/legacy/events/vcs/inject_lyft.go +++ /dev/null @@ -1,20 +0,0 @@ -package vcs - -import "github.com/runatlantis/atlantis/server/legacy/events/vcs/lyft" - -// Declare all lyft package dependencies here - -func NewLyftPullMergeabilityChecker(vcsStatusPrefix string) MergeabilityChecker { - statusFilters := newValidStatusFilters(vcsStatusPrefix) - statusFilters = append(statusFilters, lyft.NewSQFilter()) - - checksFilters := newValidChecksFilters(vcsStatusPrefix) - checksFilters = append(checksFilters, lyft.NewSQCheckFilter()) - - supplementalChecker := newSupplementalMergeabilityChecker(statusFilters, checksFilters) - supplementalChecker = lyft.NewOwnersStatusChecker(supplementalChecker) - - return &PullMergeabilityChecker{ - supplementalChecker: supplementalChecker, - } -} diff --git a/server/legacy/events/vcs/instrumented_client.go b/server/legacy/events/vcs/instrumented_client.go deleted file mode 100644 index 4d2aff112..000000000 --- a/server/legacy/events/vcs/instrumented_client.go +++ /dev/null @@ -1,319 +0,0 @@ -package vcs - -import ( - "context" - "fmt" - "strconv" - - "github.com/google/go-github/v45/github" - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/logging/fields" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - keys "github.com/runatlantis/atlantis/server/neptune/context" - "github.com/uber-go/tally/v4" -) - -// NewInstrumentedGithubClient creates a client proxy responsible for gathering stats and logging -func NewInstrumentedGithubClient(client *GithubClient, statsScope tally.Scope, logger logging.Logger) IGithubClient { - scope := statsScope.SubScope("github") - - instrumentedGHClient := &InstrumentedClient{ - Client: client, - StatsScope: scope, - Logger: logger, - } - - return &InstrumentedGithubClient{ - InstrumentedClient: instrumentedGHClient, - GhClient: client, - StatsScope: scope, - Logger: logger, - } -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_github_pull_request_getter.go GithubPullRequestGetter - -type GithubPullRequestGetter interface { - GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) - GetPullRequestFromName(repoName string, repoOwner string, pullNum int) (*github.PullRequest, error) -} - -// IGithubClient exists to bridge the gap between GithubPullRequestGetter and Client interface to allow -// for a single instrumented client - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_IGithub_client.go IGithubClient -type IGithubClient interface { - Client - GithubPullRequestGetter - - GetContents(owner, repo, branch, path string) ([]byte, error) - GetRepoStatuses(repo models.Repo, pull models.PullRequest) ([]*github.RepoStatus, error) - GetRepoChecks(repo 
models.Repo, commitSHA string) ([]*github.CheckRun, error) -} - -// InstrumentedGithubClient should delegate to the underlying InstrumentedClient for vcs provider-agnostic -// methods and implement soley any github specific interfaces. -type InstrumentedGithubClient struct { - *InstrumentedClient - GhClient *GithubClient - StatsScope tally.Scope - Logger logging.Logger -} - -func (c *InstrumentedGithubClient) GetContents(owner, repo, branch, path string) ([]byte, error) { - scope := c.StatsScope.SubScope("get_contents") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - contents, err := c.GhClient.GetContents(owner, repo, branch, path) - - if err != nil { - executionError.Inc(1) - return contents, err - } - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. - c.Logger.Info("fetched contents", map[string]interface{}{ - keys.RepositoryKey.String(): repo, - }) - - return contents, err -} - -func (c *InstrumentedGithubClient) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { - return c.GetPullRequestFromName(repo.Name, repo.Owner, pullNum) -} - -func (c *InstrumentedGithubClient) GetPullRequestFromName(repoName string, repoOwner string, pullNum int) (*github.PullRequest, error) { - scope := c.StatsScope.SubScope("get_pull_request") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - pull, err := c.GhClient.GetPullRequestFromName(repoName, repoOwner, pullNum) - - if err != nil { - executionError.Inc(1) - return pull, err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. - c.Logger.Info("fetched pull request", map[string]interface{}{ - keys.RepositoryKey.String(): fmt.Sprintf("%s/%s", repoOwner, repoName), - keys.PullNumKey.String(): strconv.Itoa(pullNum), - }) - - return pull, err -} - -func (c *InstrumentedGithubClient) GetRepoChecks(repo models.Repo, commitSHA string) ([]*github.CheckRun, error) { - scope := c.StatsScope.SubScope("get_repo_checks") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - statuses, err := c.GhClient.GetRepoChecks(repo, commitSHA) - - if err != nil { - executionError.Inc(1) - return statuses, err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. 
- c.Logger.Info("fetched vcs repo checks", map[string]interface{}{"commitSHA": commitSHA}) - - return statuses, err -} - -func (c *InstrumentedGithubClient) GetRepoStatuses(repo models.Repo, pull models.PullRequest) ([]*github.RepoStatus, error) { - scope := c.StatsScope.SubScope("get_repo_status") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - statuses, err := c.GhClient.GetRepoStatuses(repo, pull) - - if err != nil { - executionError.Inc(1) - return statuses, err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. - c.Logger.Info("fetched vcs repo statuses", fields.PullRequest(pull)) - - return statuses, err -} - -type InstrumentedClient struct { - Client - StatsScope tally.Scope - Logger logging.Logger -} - -func (c *InstrumentedClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { - scope := c.StatsScope.SubScope("get_modified_files") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - files, err := c.Client.GetModifiedFiles(repo, pull) - - if err != nil { - executionError.Inc(1) - return files, err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. - c.Logger.Info("fetched pull request modified files", fields.PullRequest(pull)) - - return files, err -} -func (c *InstrumentedClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - scope := c.StatsScope.SubScope("create_comment") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - if err := c.Client.CreateComment(repo, pullNum, comment, command); err != nil { - executionError.Inc(1) - return err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. - c.Logger.Info("created pull request comment", map[string]interface{}{ - keys.RepositoryKey.String(): repo.FullName, - keys.PullNumKey.String(): strconv.Itoa(pullNum), - }) - return nil -} -func (c *InstrumentedClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { - scope := c.StatsScope.SubScope("hide_prev_plan_comments") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - if err := c.Client.HidePrevCommandComments(repo, pullNum, command); err != nil { - executionError.Inc(1) - return err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. 
- c.Logger.Info("hid previous comments", map[string]interface{}{ - keys.RepositoryKey.String(): repo.FullName, - keys.PullNumKey.String(): strconv.Itoa(pullNum), - }) - return nil -} -func (c *InstrumentedClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { - scope := c.StatsScope.SubScope("pull_is_approved") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - approvalStatus, err := c.Client.PullIsApproved(repo, pull) - - if err != nil { - executionError.Inc(1) - return approvalStatus, err - } - - executionSuccess.Inc(1) - - //TODO: thread context and use related logging methods. - c.Logger.Info("fetched pull request approval status", fields.PullRequest(pull)) - - return approvalStatus, err -} -func (c *InstrumentedClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { - scope := c.StatsScope.SubScope("pull_is_mergeable") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - mergeable, err := c.Client.PullIsMergeable(repo, pull) - - if err != nil { - executionError.Inc(1) - return mergeable, err - } - - executionSuccess.Inc(1) - //TODO: thread context and use related logging methods. - c.Logger.Info("fetched pull request mergeability", fields.PullRequest(pull)) - - return mergeable, err -} - -func (c *InstrumentedClient) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - scope := c.StatsScope.SubScope("update_status") - - executionTime := scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - statusID, err := c.Client.UpdateStatus(ctx, request) - if err != nil { - executionError.Inc(1) - return "", err - } - - //TODO: thread context and use related logging methods. - // for now keeping this at info to debug weirdness we've been - // seeing with status api calls. - c.Logger.Info("updated vcs status", map[string]interface{}{ - keys.RepositoryKey.String(): request.Repo.FullName, - keys.PullNumKey.String(): strconv.Itoa(request.PullNum), - keys.SHAKey.String(): request.Ref, - "status-name": request.StatusName, - "status-id": request.StatusID, - "state": request.State.String(), - }) - - executionSuccess.Inc(1) - return statusID, nil -} diff --git a/server/legacy/events/vcs/lyft/mergeability.go b/server/legacy/events/vcs/lyft/mergeability.go deleted file mode 100644 index c950da71d..000000000 --- a/server/legacy/events/vcs/lyft/mergeability.go +++ /dev/null @@ -1,93 +0,0 @@ -package lyft - -import ( - "github.com/google/go-github/v45/github" -) - -const ( - SubmitQueueReadinessContext = "sq-ready-to-merge" - OwnersStatusContext = "_owners-check" -) - -// NameStateFilter filters statuses that correspond to atlantis apply -type NameStateFilter struct { - statusName string - state string -} - -// MewSQFilter filters statuses matching submit queue if they are pending. 
-func NewSQFilter() NameStateFilter { - return NameStateFilter{statusName: SubmitQueueReadinessContext, state: "pending"} -} - -func (f NameStateFilter) Filter(statuses []*github.RepoStatus) []*github.RepoStatus { - var filtered []*github.RepoStatus - for _, status := range statuses { - if status.GetState() == f.state && status.GetContext() == f.statusName { - continue - } - - filtered = append(filtered, status) - } - - return filtered -} - -type NameCheckFilter struct { - checkName string - status string -} - -func NewSQCheckFilter() NameCheckFilter { - return NameCheckFilter{checkName: SubmitQueueReadinessContext, status: "queued"} -} - -func (f NameCheckFilter) Filter(checks []*github.CheckRun) []*github.CheckRun { - var filtered []*github.CheckRun - for _, check := range checks { - if check.GetStatus() == f.status && check.GetName() == f.checkName { - continue - } - - filtered = append(filtered, check) - } - - return filtered -} - -// This interface is brought into the package to prevent a cyclic dependency. -type MergeabilityChecker interface { - Check(pull *github.PullRequest, statuses []*github.RepoStatus, checks []*github.CheckRun) bool -} - -// OwnersStatusChecker delegates to an underlying mergeability checker iff the owners check already exists. -// since this check can be created at random we want to make sure it's present before doing anything. -// This doesn't check the state of the owners check and assumees that the state of all status checks are checked, -// further downstream. -type OwnersStatusChecker struct { - delegate MergeabilityChecker -} - -func NewOwnersStatusChecker(delegate MergeabilityChecker) *OwnersStatusChecker { - return &OwnersStatusChecker{ - delegate: delegate, - } -} - -func (c *OwnersStatusChecker) Check(pull *github.PullRequest, statuses []*github.RepoStatus, checks []*github.CheckRun) bool { - if status := findOwnersCheckStatus(statuses); status == nil { - return false - } - - return c.delegate.Check(pull, statuses, checks) -} - -func findOwnersCheckStatus(statuses []*github.RepoStatus) *github.RepoStatus { - for _, status := range statuses { - if status.GetContext() == OwnersStatusContext { - return status - } - } - - return nil -} diff --git a/server/legacy/events/vcs/lyft/pull_status_fetcher.go b/server/legacy/events/vcs/lyft/pull_status_fetcher.go deleted file mode 100644 index 4fec8ef30..000000000 --- a/server/legacy/events/vcs/lyft/pull_status_fetcher.go +++ /dev/null @@ -1,126 +0,0 @@ -package lyft - -import ( - "encoding/json" - "fmt" - - "github.com/google/go-github/v45/github" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/models" -) - -const LockValue = "lock" - -type PullClient interface { - GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) - GetRepoStatuses(repo models.Repo, pull models.PullRequest) ([]*github.RepoStatus, error) - GetRepoChecks(repo models.Repo, commitSHA string) ([]*github.CheckRun, error) - PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) -} - -type SQBasedPullStatusFetcher struct { - client PullClient - checker MergeabilityChecker -} - -func NewSQBasedPullStatusFetcher(client PullClient, checker MergeabilityChecker) *SQBasedPullStatusFetcher { - return &SQBasedPullStatusFetcher{ - client: client, - checker: checker, - } -} - -func (s *SQBasedPullStatusFetcher) FetchPullStatus(repo models.Repo, pull models.PullRequest) (models.PullReqStatus, error) { - pullStatus := models.PullReqStatus{} - - approvalStatus, err := 
s.client.PullIsApproved(repo, pull) - if err != nil { - return models.PullReqStatus{}, errors.Wrap(err, "fetching pull approval status") - } - - githubPR, err := s.client.GetPullRequest(repo, pull.Num) - if err != nil { - return pullStatus, errors.Wrap(err, "fetching pull request") - } - - statuses, err := s.client.GetRepoStatuses(repo, pull) - if err != nil { - return pullStatus, errors.Wrap(err, "fetching repo statuses") - } - - checks, err := s.client.GetRepoChecks(repo, pull.HeadCommit) - if err != nil { - return pullStatus, errors.Wrap(err, "fetching repo checks") - } - - mergeable := s.checker.Check(githubPR, statuses, checks) - if err != nil { - return pullStatus, errors.Wrapf(err, "checking mergeability") - } - - sqLocked, err := s.isPRLocked(statuses, checks) - if err != nil { - return pullStatus, errors.Wrapf(err, "checking sq lock status") - } - - return models.PullReqStatus{ - ApprovalStatus: approvalStatus, - Mergeable: mergeable, - SQLocked: sqLocked, - }, nil -} - -func (s SQBasedPullStatusFetcher) isPRLocked(statuses []*github.RepoStatus, checks []*github.CheckRun) (bool, error) { - rawMetadata := "" - - // First check statuses - for _, status := range statuses { - if status.GetContext() == SubmitQueueReadinessContext { - rawMetadata = status.GetDescription() - break - } - } - // Next try check runs if no statuses - if len(rawMetadata) == 0 { - for _, check := range checks { - if check.GetName() == SubmitQueueReadinessContext { - output := check.GetOutput() - if output != nil { - rawMetadata = output.GetTitle() - } - break - } - } - } - - // No metadata found, assume not locked - if len(rawMetadata) == 0 { - return false, nil - } - - // Not using struct tags because there's no predefined schema for description. - description := make(map[string]interface{}) - err := json.Unmarshal([]byte(rawMetadata), &description) - if err != nil { - return false, errors.Wrapf(err, "parsing status description") - } - - waitingList, ok := description["waiting"] - if !ok { - // No waiting key means no lock. - return false, nil - } - - typedWaitingList, ok := waitingList.([]interface{}) - if !ok { - return false, fmt.Errorf("cast failed for %v", waitingList) - } - for _, item := range typedWaitingList { - if item == LockValue { - return true, nil - } - } - - // No Lock found. 
- return false, nil -} diff --git a/server/legacy/events/vcs/lyft/pull_status_fetcher_test.go b/server/legacy/events/vcs/lyft/pull_status_fetcher_test.go deleted file mode 100644 index b3ad208f7..000000000 --- a/server/legacy/events/vcs/lyft/pull_status_fetcher_test.go +++ /dev/null @@ -1,129 +0,0 @@ -package lyft - -import ( - "testing" - - "github.com/google/go-github/v45/github" - "github.com/stretchr/testify/assert" -) - -func pointer(str string) *string { - temp := str - return &temp -} - -func TestIsPRLocked(t *testing.T) { - // shouldn't need any fields for this - subject := SQBasedPullStatusFetcher{} - - cases := []struct { - description string - statuses []*github.RepoStatus - checks []*github.CheckRun - isLocked bool - }{ - { - "pull is locked", - []*github.RepoStatus{ - { - Context: pointer(SubmitQueueReadinessContext), - Description: pointer("{\"pr_number\": 176, \"waiting\": [\"approval\", \"lock\"]}"), - }, - }, - []*github.CheckRun{}, - true, - }, - { - "pull is unlocked", - []*github.RepoStatus{ - { - Context: pointer(SubmitQueueReadinessContext), - Description: pointer("{\"pr_number\": 176, \"waiting\": [\"approval\"]}"), - }, - }, - []*github.CheckRun{}, - false, - }, - { - "sq not found", - []*github.RepoStatus{ - { - Context: pointer("random"), - Description: pointer("{\"pr_number\": 176, \"waiting\": [\"approval\"]}"), - }, - }, - []*github.CheckRun{}, - false, - }, - { - "waiting key not found", - []*github.RepoStatus{ - { - Context: pointer(SubmitQueueReadinessContext), - Description: pointer("{\"pr_number\": 176}"), - }, - }, - []*github.CheckRun{}, - false, - }, - { - "empty sq status", - []*github.RepoStatus{ - { - Context: pointer(SubmitQueueReadinessContext), - Description: pointer(""), - }, - }, - []*github.CheckRun{}, - false, - }, - { - "pull is locked check", - []*github.RepoStatus{}, - []*github.CheckRun{ - { - Name: pointer(SubmitQueueReadinessContext), - Output: &github.CheckRunOutput{ - Title: pointer("{\"pr_number\": 176, \"waiting\": [\"approval\", \"lock\"]}"), - }, - }, - }, - true, - }, - { - "pull is unlocked check", - []*github.RepoStatus{}, - []*github.CheckRun{ - { - Name: pointer(SubmitQueueReadinessContext), - Output: &github.CheckRunOutput{ - Title: pointer("{\"pr_number\": 176, \"waiting\": [\"approval\"]}"), - }, - }, - }, - false, - }, - { - "empty sq status check", - []*github.RepoStatus{}, - []*github.CheckRun{ - { - Name: pointer(SubmitQueueReadinessContext), - Output: &github.CheckRunOutput{ - Title: pointer(""), - }, - }, - }, - false, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - isLocked, err := subject.isPRLocked(c.statuses, c.checks) - - assert.NoError(t, err) - assert.Equal(t, c.isLocked, isLocked) - }) - } -} diff --git a/server/legacy/events/vcs/mergeability.go b/server/legacy/events/vcs/mergeability.go deleted file mode 100644 index fe4d1f548..000000000 --- a/server/legacy/events/vcs/mergeability.go +++ /dev/null @@ -1,165 +0,0 @@ -package vcs - -import ( - "github.com/google/go-github/v45/github" -) - -// ValidStatusFilter implementations filter any valid statuses, -// the definition of valid is up to the implementation. 
-type ValidStatusFilter interface { - Filter(status []*github.RepoStatus) []*github.RepoStatus -} - -// ApplyStatusFilter filters statuses that correspond to atlantis apply -type ApplyStatusFilter struct { - statusTitleMatcher StatusTitleMatcher -} - -func (d ApplyStatusFilter) Filter(statuses []*github.RepoStatus) []*github.RepoStatus { - var filtered []*github.RepoStatus - for _, status := range statuses { - if d.statusTitleMatcher.MatchesCommand(status.GetContext(), "apply") { - continue - } - - filtered = append(filtered, status) - } - - return filtered -} - -// StateFilter filters statuses that match a given state. -type StateFilter string - -func (d StateFilter) Filter(statuses []*github.RepoStatus) []*github.RepoStatus { - var filtered []*github.RepoStatus - for _, status := range statuses { - if status.GetState() == string(d) { - continue - } - - filtered = append(filtered, status) - } - - return filtered -} - -var SuccessStateFilter StateFilter = "success" - -type ValidChecksFilter interface { - Filter(status []*github.CheckRun) []*github.CheckRun -} - -// DeployChecksFilter filters out checks that correspond to atlantis deploy -type DeployChecksFilter struct { - statusTitleMatcher StatusTitleMatcher -} - -func (d DeployChecksFilter) Filter(checks []*github.CheckRun) []*github.CheckRun { - var filtered []*github.CheckRun - for _, check := range checks { - if d.statusTitleMatcher.MatchesCommand(*check.Name, "deploy") { - continue - } - - filtered = append(filtered, check) - } - - return filtered -} - -// ApplyChecksFilter filters out checks that correspond to atlantis apply -type ApplyChecksFilter struct { - statusTitleMatcher StatusTitleMatcher -} - -func (d ApplyChecksFilter) Filter(checks []*github.CheckRun) []*github.CheckRun { - var filtered []*github.CheckRun - for _, check := range checks { - if d.statusTitleMatcher.MatchesCommand(*check.Name, "apply") { - continue - } - - filtered = append(filtered, check) - } - - return filtered -} - -// ConclusionFilter filters checks that match a given conclusion -type ConclusionFilter string - -func (c ConclusionFilter) Filter(checks []*github.CheckRun) []*github.CheckRun { - var filtered []*github.CheckRun - for _, check := range checks { - if check.GetStatus() == "completed" && check.GetConclusion() == string(c) { - continue - } - - filtered = append(filtered, check) - } - - return filtered -} - -var SuccessConclusionFilter ConclusionFilter = "success" -var SkippedConclusionFilter ConclusionFilter = "skipped" - -type MergeabilityChecker interface { - Check(pull *github.PullRequest, statuses []*github.RepoStatus, checks []*github.CheckRun) bool -} - -// SupplementalMergeabilityChecker is used to determine a more fine-grained mergeability -// definition, as GitHub's is purely based on green or not.
-// This checker runs each status through a set of ValidStateFilters and ValidCheckFilters -// any leftover statuses or checks are considered invalid and mergeability fails -type SupplementalMergabilityChecker struct { - statusFilter []ValidStatusFilter - checksFilters []ValidChecksFilter -} - -func (c *SupplementalMergabilityChecker) Check(_ *github.PullRequest, statuses []*github.RepoStatus, checks []*github.CheckRun) bool { - invalidStatuses := statuses - for _, f := range c.statusFilter { - invalidStatuses = f.Filter(invalidStatuses) - } - - if len(invalidStatuses) > 0 { - return false - } - - invalidChecks := checks - for _, f := range c.checksFilters { - invalidChecks = f.Filter(invalidChecks) - } - - return len(invalidChecks) <= 0 -} - -// PullMergeabilityChecker primarily uses the mergeable state from github PR and falls back to a supplement checker -// if that fails. -type PullMergeabilityChecker struct { - supplementalChecker MergeabilityChecker -} - -func (c *PullMergeabilityChecker) Check(pull *github.PullRequest, statuses []*github.RepoStatus, checks []*github.CheckRun) bool { - state := pull.GetMergeableState() - // We map our mergeable check to when the GitHub merge button is clickable. - // This corresponds to the following states: - // clean: No conflicts, all requirements satisfied. - // Merging is allowed (green box). - // unstable: Failing/pending commit status that is not part of the required - // status checks. Merging is allowed (yellow box). - // has_hooks: GitHub Enterprise only, if a repo has custom pre-receive - // hooks. Merging is allowed (green box). - // See: https://github.com/octokit/octokit.net/issues/1763 - if state != "clean" && state != "unstable" && state != "has_hooks" { - //blocked: Blocked by a failing/missing required status check. - if state != "blocked" { - return false - } - - return c.supplementalChecker.Check(pull, statuses, checks) - } - return true -} diff --git a/server/legacy/events/vcs/mocks/matchers/context_context.go b/server/legacy/events/vcs/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/events/vcs/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/models_approvalstatus.go b/server/legacy/events/vcs/mocks/matchers/models_approvalstatus.go deleted file mode 100644 index 10f4f2264..000000000 --- a/server/legacy/events/vcs/mocks/matchers/models_approvalstatus.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
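The mergeability.go pieces deleted above compose into a two-level check: PullMergeabilityChecker trusts GitHub's mergeable_state for clean, unstable and has_hooks, rejects every other state except blocked, and for blocked defers to SupplementalMergabilityChecker, which applies each filter in turn and treats anything left over as a blocker. Below is a rough sketch of that wiring, written as if inside package vcs and using only the identifiers shown in the diff; the apply/deploy filters are omitted because they need a StatusTitleMatcher defined elsewhere, and the deleted package presumably exposed constructors rather than struct literals.

// Illustrative wiring only, assuming the types deleted above.
package vcs

import "github.com/google/go-github/v45/github"

func exampleIsMergeable(pull *github.PullRequest, statuses []*github.RepoStatus, checks []*github.CheckRun) bool {
	supplemental := &SupplementalMergabilityChecker{
		// Every remaining status must be filtered away, i.e. be "success".
		statusFilter: []ValidStatusFilter{SuccessStateFilter},
		// Every remaining check must be completed with conclusion "success" or "skipped";
		// a still-running check therefore blocks the supplemental path.
		checksFilters: []ValidChecksFilter{SuccessConclusionFilter, SkippedConclusionFilter},
	}
	checker := &PullMergeabilityChecker{supplementalChecker: supplemental}

	// "clean", "unstable" and "has_hooks" pass immediately; "blocked" falls through to
	// the supplemental checker; any other mergeable state fails outright.
	return checker.Check(pull, statuses, checks)
}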
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsApprovalStatus() models.ApprovalStatus { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ApprovalStatus))(nil)).Elem())) - var nullValue models.ApprovalStatus - return nullValue -} - -func EqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.ApprovalStatus - return nullValue -} - -func NotEqModelsApprovalStatus(value models.ApprovalStatus) models.ApprovalStatus { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.ApprovalStatus - return nullValue -} - -func ModelsApprovalStatusThat(matcher pegomock.ArgumentMatcher) models.ApprovalStatus { - pegomock.RegisterMatcher(matcher) - var nullValue models.ApprovalStatus - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/models_pullrequest.go b/server/legacy/events/vcs/mocks/matchers/models_pullrequest.go deleted file mode 100644 index 94e36a1ab..000000000 --- a/server/legacy/events/vcs/mocks/matchers/models_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsPullRequest() models.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequest))(nil)).Elem())) - var nullValue models.PullRequest - return nullValue -} - -func EqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func NotEqModelsPullRequest(value models.PullRequest) models.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.PullRequest - return nullValue -} - -func ModelsPullRequestThat(matcher pegomock.ArgumentMatcher) models.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue models.PullRequest - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/models_repo.go b/server/legacy/events/vcs/mocks/matchers/models_repo.go deleted file mode 100644 index b36c3ee7c..000000000 --- a/server/legacy/events/vcs/mocks/matchers/models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func NotEqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func ModelsRepoThat(matcher pegomock.ArgumentMatcher) models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/ptr_to_github_pullrequest.go b/server/legacy/events/vcs/mocks/matchers/ptr_to_github_pullrequest.go deleted file mode 100644 index 4f93e0cee..000000000 --- a/server/legacy/events/vcs/mocks/matchers/ptr_to_github_pullrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - github "github.com/google/go-github/v45/github" -) - -func AnyPtrToGithubPullRequest() *github.PullRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*github.PullRequest))(nil)).Elem())) - var nullValue *github.PullRequest - return nullValue -} - -func EqPtrToGithubPullRequest(value *github.PullRequest) *github.PullRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *github.PullRequest - return nullValue -} - -func NotEqPtrToGithubPullRequest(value *github.PullRequest) *github.PullRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *github.PullRequest - return nullValue -} - -func PtrToGithubPullRequestThat(matcher pegomock.ArgumentMatcher) *github.PullRequest { - pegomock.RegisterMatcher(matcher) - var nullValue *github.PullRequest - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/ptr_to_http_client.go b/server/legacy/events/vcs/mocks/matchers/ptr_to_http_client.go deleted file mode 100644 index 4893e9348..000000000 --- a/server/legacy/events/vcs/mocks/matchers/ptr_to_http_client.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - http "net/http" -) - -func AnyPtrToHTTPClient() *http.Client { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*http.Client))(nil)).Elem())) - var nullValue *http.Client - return nullValue -} - -func EqPtrToHTTPClient(value *http.Client) *http.Client { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *http.Client - return nullValue -} - -func NotEqPtrToHTTPClient(value *http.Client) *http.Client { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *http.Client - return nullValue -} - -func PtrToHTTPClientThat(matcher pegomock.ArgumentMatcher) *http.Client { - pegomock.RegisterMatcher(matcher) - var nullValue *http.Client - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/slice_of_byte.go b/server/legacy/events/vcs/mocks/matchers/slice_of_byte.go deleted file mode 100644 index 951531345..000000000 --- a/server/legacy/events/vcs/mocks/matchers/slice_of_byte.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfByte() []byte { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]byte))(nil)).Elem())) - var nullValue []byte - return nullValue -} - -func EqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func NotEqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func SliceOfByteThat(matcher pegomock.ArgumentMatcher) []byte { - pegomock.RegisterMatcher(matcher) - var nullValue []byte - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/slice_of_ptr_to_github_checkrun.go b/server/legacy/events/vcs/mocks/matchers/slice_of_ptr_to_github_checkrun.go deleted file mode 100644 index 43dc1fc51..000000000 --- a/server/legacy/events/vcs/mocks/matchers/slice_of_ptr_to_github_checkrun.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - github "github.com/google/go-github/v45/github" -) - -func AnySliceOfPtrToGithubCheckRun() []*github.CheckRun { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]*github.CheckRun))(nil)).Elem())) - var nullValue []*github.CheckRun - return nullValue -} - -func EqSliceOfPtrToGithubCheckRun(value []*github.CheckRun) []*github.CheckRun { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []*github.CheckRun - return nullValue -} - -func NotEqSliceOfPtrToGithubCheckRun(value []*github.CheckRun) []*github.CheckRun { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []*github.CheckRun - return nullValue -} - -func SliceOfPtrToGithubCheckRunThat(matcher pegomock.ArgumentMatcher) []*github.CheckRun { - pegomock.RegisterMatcher(matcher) - var nullValue []*github.CheckRun - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/slice_of_ptr_to_github_repostatus.go b/server/legacy/events/vcs/mocks/matchers/slice_of_ptr_to_github_repostatus.go deleted file mode 100644 index bfda831ae..000000000 --- a/server/legacy/events/vcs/mocks/matchers/slice_of_ptr_to_github_repostatus.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - github "github.com/google/go-github/v45/github" -) - -func AnySliceOfPtrToGithubRepoStatus() []*github.RepoStatus { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]*github.RepoStatus))(nil)).Elem())) - var nullValue []*github.RepoStatus - return nullValue -} - -func EqSliceOfPtrToGithubRepoStatus(value []*github.RepoStatus) []*github.RepoStatus { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []*github.RepoStatus - return nullValue -} - -func NotEqSliceOfPtrToGithubRepoStatus(value []*github.RepoStatus) []*github.RepoStatus { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []*github.RepoStatus - return nullValue -} - -func SliceOfPtrToGithubRepoStatusThat(matcher pegomock.ArgumentMatcher) []*github.RepoStatus { - pegomock.RegisterMatcher(matcher) - var nullValue []*github.RepoStatus - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/slice_of_string.go b/server/legacy/events/vcs/mocks/matchers/slice_of_string.go deleted file mode 100644 index f9281819d..000000000 --- a/server/legacy/events/vcs/mocks/matchers/slice_of_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfString() []string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) - var nullValue []string - return nullValue -} - -func EqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func NotEqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { - pegomock.RegisterMatcher(matcher) - var nullValue []string - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/matchers/types_updatestatusrequest.go b/server/legacy/events/vcs/mocks/matchers/types_updatestatusrequest.go deleted file mode 100644 index e230c7363..000000000 --- a/server/legacy/events/vcs/mocks/matchers/types_updatestatusrequest.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
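The matcher files in this stretch of the diff, and the generated mocks that follow, are standard pegomock output: each Any*/Eq* helper registers an argument matcher immediately before a stubbed or verified call on the mock. The hypothetical test below shows how they were typically consumed together; the repo and pull fixtures, and the exact models field names, are assumptions for illustration rather than code from this repository.

// Hypothetical usage of the generated mock and matchers deleted in this diff.
package vcs_test

import (
	"testing"

	"github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks"
	"github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks/matchers"
	"github.com/runatlantis/atlantis/server/models"
)

func TestMockUsageSketch(t *testing.T) {
	pegomock.RegisterMockTestingT(t)

	client := mocks.NewMockIGithubClient()

	// Stub: any repo and any pull request return a single modified file.
	pegomock.When(client.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).
		ThenReturn([]string{"main.tf"}, nil)

	repo := models.Repo{FullName: "owner/repo"} // field name assumed
	pull := models.PullRequest{Num: 1}          // field name assumed

	files, err := client.GetModifiedFiles(repo, pull)
	if err != nil || len(files) != 1 {
		t.Fatalf("unexpected stub result: %v, %v", files, err)
	}

	// Verify: the call happened exactly once, with that pull request.
	client.VerifyWasCalledOnce().GetModifiedFiles(matchers.AnyModelsRepo(), matchers.EqModelsPullRequest(pull))
}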
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - types "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" -) - -func AnyTypesUpdateStatusRequest() types.UpdateStatusRequest { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(types.UpdateStatusRequest))(nil)).Elem())) - var nullValue types.UpdateStatusRequest - return nullValue -} - -func EqTypesUpdateStatusRequest(value types.UpdateStatusRequest) types.UpdateStatusRequest { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue types.UpdateStatusRequest - return nullValue -} - -func NotEqTypesUpdateStatusRequest(value types.UpdateStatusRequest) types.UpdateStatusRequest { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue types.UpdateStatusRequest - return nullValue -} - -func TypesUpdateStatusRequestThat(matcher pegomock.ArgumentMatcher) types.UpdateStatusRequest { - pegomock.RegisterMatcher(matcher) - var nullValue types.UpdateStatusRequest - return nullValue -} diff --git a/server/legacy/events/vcs/mocks/mock_IGithub_client.go b/server/legacy/events/vcs/mocks/mock_IGithub_client.go deleted file mode 100644 index 19a9c3212..000000000 --- a/server/legacy/events/vcs/mocks/mock_IGithub_client.go +++ /dev/null @@ -1,766 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/vcs (interfaces: IGithubClient) - -package mocks - -import ( - context "context" - github "github.com/google/go-github/v45/github" - pegomock "github.com/petergtz/pegomock" - types "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockIGithubClient struct { - fail func(message string, callerSkip ...int) -} - -func NewMockIGithubClient(options ...pegomock.Option) *MockIGithubClient { - mock := &MockIGithubClient{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockIGithubClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockIGithubClient) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockIGithubClient) CreateComment(_param0 models.Repo, _param1 int, _param2 string, _param3 string) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3} - result := pegomock.GetGenericMockFrom(mock).Invoke("CreateComment", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockIGithubClient) DownloadRepoConfigFile(_param0 models.PullRequest) (bool, []byte, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("DownloadRepoConfigFile", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 []byte - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].([]byte) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockIGithubClient) GetContents(_param0 string, _param1 string, _param2 string, _param3 string) ([]byte, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetContents", params, []reflect.Type{reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []byte - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]byte) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) GetModifiedFiles(_param0 models.Repo, _param1 models.PullRequest) ([]string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetModifiedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) GetPullRequest(_param0 models.Repo, _param1 int) (*github.PullRequest, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *github.PullRequest - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*github.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) GetPullRequestFromName(_param0 string, _param1 string, _param2 int) (*github.PullRequest, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1, _param2} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequestFromName", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *github.PullRequest - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*github.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) GetRepoChecks(_param0 models.Repo, _param1 string) ([]*github.CheckRun, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetRepoChecks", params, []reflect.Type{reflect.TypeOf((*[]*github.CheckRun)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []*github.CheckRun - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]*github.CheckRun) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) GetRepoStatuses(_param0 models.Repo, _param1 models.PullRequest) ([]*github.RepoStatus, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetRepoStatuses", params, []reflect.Type{reflect.TypeOf((*[]*github.RepoStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []*github.RepoStatus - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]*github.RepoStatus) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) HidePrevCommandComments(_param0 models.Repo, _param1 int, _param2 string) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1, _param2} - result := pegomock.GetGenericMockFrom(mock).Invoke("HidePrevCommandComments", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockIGithubClient) MarkdownPullLink(_param0 models.PullRequest) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("MarkdownPullLink", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) (models.ApprovalStatus, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.ApprovalStatus - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.ApprovalStatus) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) PullIsMergeable(_param0 models.Repo, _param1 models.PullRequest) (bool, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsMergeable", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockIGithubClient) SupportsSingleFileDownload(_param0 models.Repo) bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("SupportsSingleFileDownload", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockIGithubClient) UpdateStatus(_param0 context.Context, _param1 types.UpdateStatusRequest) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockIGithubClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockIGithubClient) VerifyWasCalledOnce() *VerifierMockIGithubClient { - return &VerifierMockIGithubClient{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockIGithubClient) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockIGithubClient { - return &VerifierMockIGithubClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockIGithubClient) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockIGithubClient { - return &VerifierMockIGithubClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockIGithubClient) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockIGithubClient { - return &VerifierMockIGithubClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockIGithubClient struct { - mock *MockIGithubClient - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockIGithubClient) CreateComment(_param0 models.Repo, _param1 int, _param2 string, _param3 string) *MockIGithubClient_CreateComment_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CreateComment", params, verifier.timeout) - return &MockIGithubClient_CreateComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_CreateComment_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_CreateComment_OngoingVerification) GetCapturedArguments() (models.Repo, int, string, string) { - _param0, _param1, _param2, _param3 := 
c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1] -} - -func (c *MockIGithubClient_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string, _param3 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) DownloadRepoConfigFile(_param0 models.PullRequest) *MockIGithubClient_DownloadRepoConfigFile_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DownloadRepoConfigFile", params, verifier.timeout) - return &MockIGithubClient_DownloadRepoConfigFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_DownloadRepoConfigFile_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_DownloadRepoConfigFile_OngoingVerification) GetCapturedArguments() models.PullRequest { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockIGithubClient_DownloadRepoConfigFile_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) GetContents(_param0 string, _param1 string, _param2 string, _param3 string) *MockIGithubClient_GetContents_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetContents", params, verifier.timeout) - return &MockIGithubClient_GetContents_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_GetContents_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_GetContents_OngoingVerification) GetCapturedArguments() (string, string, string, string) { - _param0, _param1, _param2, _param3 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1] -} - -func (c *MockIGithubClient_GetContents_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []string, _param3 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - 
_param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) GetModifiedFiles(_param0 models.Repo, _param1 models.PullRequest) *MockIGithubClient_GetModifiedFiles_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetModifiedFiles", params, verifier.timeout) - return &MockIGithubClient_GetModifiedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_GetModifiedFiles_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_GetModifiedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) GetPullRequest(_param0 models.Repo, _param1 int) *MockIGithubClient_GetPullRequest_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) - return &MockIGithubClient_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_GetPullRequest_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) GetPullRequestFromName(_param0 string, _param1 string, _param2 int) *MockIGithubClient_GetPullRequestFromName_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := 
pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequestFromName", params, verifier.timeout) - return &MockIGithubClient_GetPullRequestFromName_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_GetPullRequestFromName_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_GetPullRequestFromName_OngoingVerification) GetCapturedArguments() (string, string, int) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockIGithubClient_GetPullRequestFromName_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]int, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(int) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) GetRepoChecks(_param0 models.Repo, _param1 string) *MockIGithubClient_GetRepoChecks_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetRepoChecks", params, verifier.timeout) - return &MockIGithubClient_GetRepoChecks_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_GetRepoChecks_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_GetRepoChecks_OngoingVerification) GetCapturedArguments() (models.Repo, string) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_GetRepoChecks_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) GetRepoStatuses(_param0 models.Repo, _param1 models.PullRequest) *MockIGithubClient_GetRepoStatuses_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetRepoStatuses", params, verifier.timeout) - return &MockIGithubClient_GetRepoStatuses_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_GetRepoStatuses_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_GetRepoStatuses_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 
:= c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_GetRepoStatuses_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) HidePrevCommandComments(_param0 models.Repo, _param1 int, _param2 string) *MockIGithubClient_HidePrevCommandComments_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HidePrevCommandComments", params, verifier.timeout) - return &MockIGithubClient_HidePrevCommandComments_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_HidePrevCommandComments_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_HidePrevCommandComments_OngoingVerification) GetCapturedArguments() (models.Repo, int, string) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockIGithubClient_HidePrevCommandComments_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) MarkdownPullLink(_param0 models.PullRequest) *MockIGithubClient_MarkdownPullLink_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MarkdownPullLink", params, verifier.timeout) - return &MockIGithubClient_MarkdownPullLink_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_MarkdownPullLink_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_MarkdownPullLink_OngoingVerification) GetCapturedArguments() models.PullRequest { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockIGithubClient_MarkdownPullLink_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier 
*VerifierMockIGithubClient) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) *MockIGithubClient_PullIsApproved_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) - return &MockIGithubClient_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_PullIsApproved_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) PullIsMergeable(_param0 models.Repo, _param1 models.PullRequest) *MockIGithubClient_PullIsMergeable_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsMergeable", params, verifier.timeout) - return &MockIGithubClient_PullIsMergeable_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_PullIsMergeable_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_PullIsMergeable_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_PullIsMergeable_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) SupportsSingleFileDownload(_param0 models.Repo) *MockIGithubClient_SupportsSingleFileDownload_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SupportsSingleFileDownload", params, verifier.timeout) - return &MockIGithubClient_SupportsSingleFileDownload_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_SupportsSingleFileDownload_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations 
[]pegomock.MethodInvocation -} - -func (c *MockIGithubClient_SupportsSingleFileDownload_OngoingVerification) GetCapturedArguments() models.Repo { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockIGithubClient_SupportsSingleFileDownload_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - } - return -} - -func (verifier *VerifierMockIGithubClient) UpdateStatus(_param0 context.Context, _param1 types.UpdateStatusRequest) *MockIGithubClient_UpdateStatus_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateStatus", params, verifier.timeout) - return &MockIGithubClient_UpdateStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockIGithubClient_UpdateStatus_OngoingVerification struct { - mock *MockIGithubClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockIGithubClient_UpdateStatus_OngoingVerification) GetCapturedArguments() (context.Context, types.UpdateStatusRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockIGithubClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []types.UpdateStatusRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]types.UpdateStatusRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(types.UpdateStatusRequest) - } - } - return -} diff --git a/server/legacy/events/vcs/mocks/mock_client.go b/server/legacy/events/vcs/mocks/mock_client.go deleted file mode 100644 index 1578e8ba8..000000000 --- a/server/legacy/events/vcs/mocks/mock_client.go +++ /dev/null @@ -1,507 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/vcs (interfaces: Client) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - types "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockClient struct { - fail func(message string, callerSkip ...int) -} - -func NewMockClient(options ...pegomock.Option) *MockClient { - mock := &MockClient{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockClient) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockClient) CreateComment(_param0 models.Repo, _param1 int, _param2 string, _param3 string) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3} - result := pegomock.GetGenericMockFrom(mock).Invoke("CreateComment", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockClient) DownloadRepoConfigFile(_param0 models.PullRequest) (bool, []byte, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("DownloadRepoConfigFile", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*[]byte)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 []byte - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].([]byte) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockClient) GetModifiedFiles(_param0 models.Repo, _param1 models.PullRequest) ([]string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetModifiedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) HidePrevCommandComments(_param0 models.Repo, _param1 int, _param2 string) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0, _param1, _param2} - result := pegomock.GetGenericMockFrom(mock).Invoke("HidePrevCommandComments", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockClient) MarkdownPullLink(_param0 models.PullRequest) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("MarkdownPullLink", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) (models.ApprovalStatus, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*models.ApprovalStatus)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 models.ApprovalStatus - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(models.ApprovalStatus) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) PullIsMergeable(_param0 models.Repo, _param1 models.PullRequest) (bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsMergeable", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) SupportsSingleFileDownload(_param0 models.Repo) bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("SupportsSingleFileDownload", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockClient) UpdateStatus(_param0 context.Context, _param1 types.UpdateStatusRequest) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateStatus", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockClient) VerifyWasCalledOnce() *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockClient) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockClient) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockClient) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockClient { - return &VerifierMockClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockClient struct { - mock *MockClient - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockClient) CreateComment(_param0 models.Repo, _param1 int, _param2 string, _param3 string) *MockClient_CreateComment_OngoingVerification { - params := []pegomock.Param{_param0, _param1, 
_param2, _param3} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CreateComment", params, verifier.timeout) - return &MockClient_CreateComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_CreateComment_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_CreateComment_OngoingVerification) GetCapturedArguments() (models.Repo, int, string, string) { - _param0, _param1, _param2, _param3 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1] -} - -func (c *MockClient_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string, _param3 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - _param3 = make([]string, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockClient) DownloadRepoConfigFile(_param0 models.PullRequest) *MockClient_DownloadRepoConfigFile_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DownloadRepoConfigFile", params, verifier.timeout) - return &MockClient_DownloadRepoConfigFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_DownloadRepoConfigFile_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_DownloadRepoConfigFile_OngoingVerification) GetCapturedArguments() models.PullRequest { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockClient_DownloadRepoConfigFile_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockClient) GetModifiedFiles(_param0 models.Repo, _param1 models.PullRequest) *MockClient_GetModifiedFiles_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetModifiedFiles", params, verifier.timeout) - return &MockClient_GetModifiedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_GetModifiedFiles_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_GetModifiedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - 
return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockClient) HidePrevCommandComments(_param0 models.Repo, _param1 int, _param2 string) *MockClient_HidePrevCommandComments_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HidePrevCommandComments", params, verifier.timeout) - return &MockClient_HidePrevCommandComments_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_HidePrevCommandComments_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetCapturedArguments() (models.Repo, int, string) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockClient_HidePrevCommandComments_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - _param2 = make([]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockClient) MarkdownPullLink(_param0 models.PullRequest) *MockClient_MarkdownPullLink_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "MarkdownPullLink", params, verifier.timeout) - return &MockClient_MarkdownPullLink_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_MarkdownPullLink_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_MarkdownPullLink_OngoingVerification) GetCapturedArguments() models.PullRequest { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockClient_MarkdownPullLink_OngoingVerification) GetAllCapturedArguments() (_param0 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockClient) PullIsApproved(_param0 models.Repo, _param1 models.PullRequest) *MockClient_PullIsApproved_OngoingVerification { - params := 
[]pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params, verifier.timeout) - return &MockClient_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_PullIsApproved_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockClient_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockClient) PullIsMergeable(_param0 models.Repo, _param1 models.PullRequest) *MockClient_PullIsMergeable_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsMergeable", params, verifier.timeout) - return &MockClient_PullIsMergeable_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_PullIsMergeable_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_PullIsMergeable_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockClient_PullIsMergeable_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) - } - } - return -} - -func (verifier *VerifierMockClient) SupportsSingleFileDownload(_param0 models.Repo) *MockClient_SupportsSingleFileDownload_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SupportsSingleFileDownload", params, verifier.timeout) - return &MockClient_SupportsSingleFileDownload_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_SupportsSingleFileDownload_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_SupportsSingleFileDownload_OngoingVerification) GetCapturedArguments() models.Repo { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockClient_SupportsSingleFileDownload_OngoingVerification) 
GetAllCapturedArguments() (_param0 []models.Repo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - } - return -} - -func (verifier *VerifierMockClient) UpdateStatus(_param0 context.Context, _param1 types.UpdateStatusRequest) *MockClient_UpdateStatus_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateStatus", params, verifier.timeout) - return &MockClient_UpdateStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_UpdateStatus_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_UpdateStatus_OngoingVerification) GetCapturedArguments() (context.Context, types.UpdateStatusRequest) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockClient_UpdateStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []types.UpdateStatusRequest) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]types.UpdateStatusRequest, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(types.UpdateStatusRequest) - } - } - return -} diff --git a/server/legacy/events/vcs/mocks/mock_github_credentials.go b/server/legacy/events/vcs/mocks/mock_github_credentials.go deleted file mode 100644 index 0fb185fa8..000000000 --- a/server/legacy/events/vcs/mocks/mock_github_credentials.go +++ /dev/null @@ -1,172 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/vcs (interfaces: GithubCredentials) - -package mocks - -import ( - http "net/http" - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" -) - -type MockGithubCredentials struct { - fail func(message string, callerSkip ...int) -} - -func NewMockGithubCredentials(options ...pegomock.Option) *MockGithubCredentials { - mock := &MockGithubCredentials{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockGithubCredentials) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockGithubCredentials) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockGithubCredentials) Client() (*http.Client, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockGithubCredentials().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("Client", params, []reflect.Type{reflect.TypeOf((**http.Client)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *http.Client - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*http.Client) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubCredentials) GetToken() (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockGithubCredentials().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetToken", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubCredentials) GetUser() (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockGithubCredentials().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetUser", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubCredentials) VerifyWasCalledOnce() *VerifierMockGithubCredentials { - return &VerifierMockGithubCredentials{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockGithubCredentials) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockGithubCredentials { - return &VerifierMockGithubCredentials{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockGithubCredentials) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockGithubCredentials { - return &VerifierMockGithubCredentials{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockGithubCredentials) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockGithubCredentials { - return &VerifierMockGithubCredentials{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockGithubCredentials struct { - mock *MockGithubCredentials - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockGithubCredentials) Client() *MockGithubCredentials_Client_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Client", params, verifier.timeout) - return &MockGithubCredentials_Client_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockGithubCredentials_Client_OngoingVerification struct { - mock *MockGithubCredentials - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubCredentials_Client_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockGithubCredentials_Client_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockGithubCredentials) GetToken() *MockGithubCredentials_GetToken_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetToken", params, verifier.timeout) - return &MockGithubCredentials_GetToken_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type 
MockGithubCredentials_GetToken_OngoingVerification struct { - mock *MockGithubCredentials - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubCredentials_GetToken_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockGithubCredentials_GetToken_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockGithubCredentials) GetUser() *MockGithubCredentials_GetUser_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetUser", params, verifier.timeout) - return &MockGithubCredentials_GetUser_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockGithubCredentials_GetUser_OngoingVerification struct { - mock *MockGithubCredentials - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubCredentials_GetUser_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockGithubCredentials_GetUser_OngoingVerification) GetAllCapturedArguments() { -} diff --git a/server/legacy/events/vcs/mocks/mock_github_pull_request_getter.go b/server/legacy/events/vcs/mocks/mock_github_pull_request_getter.go deleted file mode 100644 index b8001593b..000000000 --- a/server/legacy/events/vcs/mocks/mock_github_pull_request_getter.go +++ /dev/null @@ -1,169 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/vcs (interfaces: GithubPullRequestGetter) - -package mocks - -import ( - "reflect" - "time" - - github "github.com/google/go-github/v45/github" - pegomock "github.com/petergtz/pegomock" - models "github.com/runatlantis/atlantis/server/models" -) - -type MockGithubPullRequestGetter struct { - fail func(message string, callerSkip ...int) -} - -func NewMockGithubPullRequestGetter(options ...pegomock.Option) *MockGithubPullRequestGetter { - mock := &MockGithubPullRequestGetter{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockGithubPullRequestGetter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockGithubPullRequestGetter) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockGithubPullRequestGetter().") - } - params := []pegomock.Param{repo, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequest", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *github.PullRequest - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*github.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubPullRequestGetter) GetPullRequestFromName(repoName string, repoOwner string, pullNum int) (*github.PullRequest, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockGithubPullRequestGetter().") - } - params := []pegomock.Param{repoName, repoOwner, pullNum} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullRequestFromName", params, []reflect.Type{reflect.TypeOf((**github.PullRequest)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *github.PullRequest - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*github.PullRequest) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockGithubPullRequestGetter) VerifyWasCalledOnce() *VerifierMockGithubPullRequestGetter { - return &VerifierMockGithubPullRequestGetter{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockGithubPullRequestGetter) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockGithubPullRequestGetter { - return &VerifierMockGithubPullRequestGetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockGithubPullRequestGetter) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockGithubPullRequestGetter { - return &VerifierMockGithubPullRequestGetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockGithubPullRequestGetter) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockGithubPullRequestGetter { - return &VerifierMockGithubPullRequestGetter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockGithubPullRequestGetter struct { - mock *MockGithubPullRequestGetter - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequest(repo models.Repo, pullNum int) *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification { - params := []pegomock.Param{repo, pullNum} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequest", params, verifier.timeout) - return &MockGithubPullRequestGetter_GetPullRequest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockGithubPullRequestGetter_GetPullRequest_OngoingVerification struct { - mock *MockGithubPullRequestGetter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetCapturedArguments() (models.Repo, int) { - repo, pullNum := c.GetAllCapturedArguments() - return repo[len(repo)-1], pullNum[len(pullNum)-1] -} - -func (c *MockGithubPullRequestGetter_GetPullRequest_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(models.Repo) - } - _param1 = make([]int, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(int) - } - } - return -} - -func (verifier *VerifierMockGithubPullRequestGetter) GetPullRequestFromName(repoName string, repoOwner string, pullNum int) *MockGithubPullRequestGetter_GetPullRequestFromName_OngoingVerification { - 
params := []pegomock.Param{repoName, repoOwner, pullNum} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullRequestFromName", params, verifier.timeout) - return &MockGithubPullRequestGetter_GetPullRequestFromName_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockGithubPullRequestGetter_GetPullRequestFromName_OngoingVerification struct { - mock *MockGithubPullRequestGetter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockGithubPullRequestGetter_GetPullRequestFromName_OngoingVerification) GetCapturedArguments() (string, string, int) { - repoName, repoOwner, pullNum := c.GetAllCapturedArguments() - return repoName[len(repoName)-1], repoOwner[len(repoOwner)-1], pullNum[len(pullNum)-1] -} - -func (c *MockGithubPullRequestGetter_GetPullRequestFromName_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []int) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]int, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(int) - } - } - return -} diff --git a/server/legacy/events/vcs/not_configured_vcs_client.go b/server/legacy/events/vcs/not_configured_vcs_client.go deleted file mode 100644 index e6341d074..000000000 --- a/server/legacy/events/vcs/not_configured_vcs_client.go +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package vcs - -import ( - "context" - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" -) - -// NotConfiguredVCSClient is used as a placeholder when Atlantis isn't configured -// on startup to support a certain VCS host. For example, if there is no GitHub -// config then this client will be used which will error if it's ever called. 
-type NotConfiguredVCSClient struct { - Host models.VCSHostType -} - -func (a *NotConfiguredVCSClient) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { - return nil, a.err() -} -func (a *NotConfiguredVCSClient) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - return a.err() -} -func (a *NotConfiguredVCSClient) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { - return nil -} -func (a *NotConfiguredVCSClient) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { - return models.ApprovalStatus{}, a.err() -} -func (a *NotConfiguredVCSClient) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { - return false, a.err() -} -func (a *NotConfiguredVCSClient) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - return "", a.err() -} -func (a *NotConfiguredVCSClient) MarkdownPullLink(pull models.PullRequest) (string, error) { - return "", a.err() -} -func (a *NotConfiguredVCSClient) err() error { - return fmt.Errorf("atlantis was not configured to support repos from %s", a.Host.String()) -} - -func (a *NotConfiguredVCSClient) SupportsSingleFileDownload(repo models.Repo) bool { - return false -} - -func (a *NotConfiguredVCSClient) DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error) { - return true, []byte{}, a.err() -} diff --git a/server/legacy/events/vcs/proxy.go b/server/legacy/events/vcs/proxy.go deleted file mode 100644 index a5997b139..000000000 --- a/server/legacy/events/vcs/proxy.go +++ /dev/null @@ -1,76 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package vcs - -import ( - "context" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs/types" - "github.com/runatlantis/atlantis/server/models" -) - -// ClientProxy proxies calls to the correct VCS client depending on which -// VCS host is required. -type ClientProxy struct { - // clients maps from the vcs host type to the client that implements the - // api for that host type, ex. github -> github client. 
- clients map[models.VCSHostType]Client -} - -func NewClientProxy(githubClient Client) *ClientProxy { - if githubClient == nil { - githubClient = &NotConfiguredVCSClient{} - } - return &ClientProxy{ - clients: map[models.VCSHostType]Client{ - models.Github: githubClient, - }, - } -} - -func (d *ClientProxy) GetModifiedFiles(repo models.Repo, pull models.PullRequest) ([]string, error) { - return d.clients[repo.VCSHost.Type].GetModifiedFiles(repo, pull) -} - -func (d *ClientProxy) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - return d.clients[repo.VCSHost.Type].CreateComment(repo, pullNum, comment, command) -} - -func (d *ClientProxy) HidePrevCommandComments(repo models.Repo, pullNum int, command string) error { - return d.clients[repo.VCSHost.Type].HidePrevCommandComments(repo, pullNum, command) -} - -func (d *ClientProxy) PullIsApproved(repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { - return d.clients[repo.VCSHost.Type].PullIsApproved(repo, pull) -} - -func (d *ClientProxy) PullIsMergeable(repo models.Repo, pull models.PullRequest) (bool, error) { - return d.clients[repo.VCSHost.Type].PullIsMergeable(repo, pull) -} - -func (d *ClientProxy) UpdateStatus(ctx context.Context, request types.UpdateStatusRequest) (string, error) { - return d.clients[request.Repo.VCSHost.Type].UpdateStatus(ctx, request) -} - -func (d *ClientProxy) MarkdownPullLink(pull models.PullRequest) (string, error) { - return d.clients[pull.BaseRepo.VCSHost.Type].MarkdownPullLink(pull) -} - -func (d *ClientProxy) DownloadRepoConfigFile(pull models.PullRequest) (bool, []byte, error) { - return d.clients[pull.BaseRepo.VCSHost.Type].DownloadRepoConfigFile(pull) -} - -func (d *ClientProxy) SupportsSingleFileDownload(repo models.Repo) bool { - return d.clients[repo.VCSHost.Type].SupportsSingleFileDownload(repo) -} diff --git a/server/legacy/events/vcs/pull_status_fetcher.go b/server/legacy/events/vcs/pull_status_fetcher.go deleted file mode 100644 index 0cc53cff4..000000000 --- a/server/legacy/events/vcs/pull_status_fetcher.go +++ /dev/null @@ -1,37 +0,0 @@ -package vcs - -import ( - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/models" -) - -type PullReqStatusFetcher interface { - FetchPullStatus(repo models.Repo, pull models.PullRequest) (models.PullReqStatus, error) -} - -type pullReqStatusFetcher struct { - client Client -} - -func NewPullReqStatusFetcher(client Client) PullReqStatusFetcher { - return &pullReqStatusFetcher{ - client: client, - } -} - -func (f *pullReqStatusFetcher) FetchPullStatus(repo models.Repo, pull models.PullRequest) (pullStatus models.PullReqStatus, err error) { - approvalStatus, err := f.client.PullIsApproved(repo, pull) - if err != nil { - return pullStatus, errors.Wrapf(err, "fetching pull approval status for repo: %s, and pull number: %d", repo.FullName, pull.Num) - } - - mergeable, err := f.client.PullIsMergeable(repo, pull) - if err != nil { - return pullStatus, errors.Wrapf(err, "fetching mergeability status for repo: %s, and pull number: %d", repo.FullName, pull.Num) - } - - return models.PullReqStatus{ - ApprovalStatus: approvalStatus, - Mergeable: mergeable, - }, err -} diff --git a/server/legacy/events/vcs/status.go b/server/legacy/events/vcs/status.go deleted file mode 100644 index 929d3930e..000000000 --- a/server/legacy/events/vcs/status.go +++ /dev/null @@ -1,39 +0,0 @@ -package vcs - -import ( - "fmt" - "strings" -) - -type StatusTitleMatcher struct { - TitlePrefix string -} - -func (m 
StatusTitleMatcher) MatchesCommand(title string, command string) bool { - return strings.HasPrefix(title, fmt.Sprintf("%s/%s", m.TitlePrefix, command)) -} - -type StatusTitleBuilder struct { - TitlePrefix string -} - -type StatusTitleOptions struct { - ProjectName string -} - -func (b StatusTitleBuilder) Build(command string, options ...StatusTitleOptions) string { - src := fmt.Sprintf("%s/%s", b.TitlePrefix, command) - - var projectName string - for _, opt := range options { - if opt.ProjectName != "" { - projectName = opt.ProjectName - } - } - - if projectName != "" { - src = fmt.Sprintf("%s: %s", src, projectName) - } - - return src -} diff --git a/server/legacy/events/vcs/status_test.go b/server/legacy/events/vcs/status_test.go deleted file mode 100644 index 49eef9537..000000000 --- a/server/legacy/events/vcs/status_test.go +++ /dev/null @@ -1,31 +0,0 @@ -package vcs_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/stretchr/testify/assert" -) - -func TestMatches(t *testing.T) { - t.Run("in sync with builder", func(t *testing.T) { - titlePrefix := "atlantis-test" - command := "apply" - builder := vcs.StatusTitleBuilder{TitlePrefix: titlePrefix} - matcher := vcs.StatusTitleMatcher{TitlePrefix: titlePrefix} - - title := builder.Build(command) - - assert.True(t, matcher.MatchesCommand(title, command)) - }) - - t.Run("incorrect command", func(t *testing.T) { - titlePrefix := "atlantis-test" - builder := vcs.StatusTitleBuilder{TitlePrefix: titlePrefix} - matcher := vcs.StatusTitleMatcher{TitlePrefix: titlePrefix} - - title := builder.Build("apply") - - assert.False(t, matcher.MatchesCommand(title, "plan")) - }) -} diff --git a/server/legacy/events/vcs/types/status.go b/server/legacy/events/vcs/types/status.go deleted file mode 100644 index 6b2e24b89..000000000 --- a/server/legacy/events/vcs/types/status.go +++ /dev/null @@ -1,31 +0,0 @@ -package types - -import ( - "time" - - "github.com/runatlantis/atlantis/server/models" -) - -type UpdateStatusRequest struct { - Repo models.Repo - Ref string - State models.VCSStatus - StatusName string - Description string - DetailsURL string - Output string - // if not present, should be -1 - PullNum int - PullCreationTime time.Time - StatusID string - - // Fields used to support templating project level command for github checks - CommandName string - Project string - Workspace string - Directory string - - // Fields used to support templating command level operations for github checks - NumSuccess string - NumTotal string -} diff --git a/server/legacy/events/vcs/vcs.go b/server/legacy/events/vcs/vcs.go deleted file mode 100644 index 547b1da48..000000000 --- a/server/legacy/events/vcs/vcs.go +++ /dev/null @@ -1 +0,0 @@ -package vcs diff --git a/server/legacy/events/version_command_runner.go b/server/legacy/events/version_command_runner.go deleted file mode 100644 index 23bc3aa83..000000000 --- a/server/legacy/events/version_command_runner.go +++ /dev/null @@ -1,57 +0,0 @@ -package events - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func NewVersionCommandRunner( - outputUpdater OutputUpdater, - prjCmdBuilder ProjectVersionCommandBuilder, - prjCmdRunner ProjectVersionCommandRunner, - parallelPoolSize int, -) *VersionCommandRunner { - return &VersionCommandRunner{ - outputUpdater: outputUpdater, - prjCmdBuilder: prjCmdBuilder, - prjCmdRunner: prjCmdRunner, - parallelPoolSize: parallelPoolSize, - } -} - -type VersionCommandRunner struct { - outputUpdater 
OutputUpdater - prjCmdBuilder ProjectVersionCommandBuilder - prjCmdRunner ProjectVersionCommandRunner - parallelPoolSize int -} - -func (v *VersionCommandRunner) Run(ctx *command.Context, cmd *command.Comment) { - var err error - var projectCmds []command.ProjectContext - projectCmds, err = v.prjCmdBuilder.BuildVersionCommands(ctx, cmd) - if err != nil { - ctx.Log.WarnContext(ctx.RequestCtx, fmt.Sprintf("Error %s", err)) - } - - if len(projectCmds) == 0 { - ctx.Log.InfoContext(ctx.RequestCtx, "no projects to run version in") - return - } - - // Only run commands in parallel if enabled - var result command.Result - if v.isParallelEnabled(projectCmds) { - ctx.Log.InfoContext(ctx.RequestCtx, "Running version in parallel") - result = runProjectCmdsParallel(projectCmds, v.prjCmdRunner.Version, v.parallelPoolSize) - } else { - result = runProjectCmds(projectCmds, v.prjCmdRunner.Version) - } - - v.outputUpdater.UpdateOutput(ctx, cmd, result) -} - -func (v *VersionCommandRunner) isParallelEnabled(cmds []command.ProjectContext) bool { - return len(cmds) > 0 && cmds[0].ParallelPolicyCheckEnabled -} diff --git a/server/legacy/events/webhooks/mocks/matchers/logging_logger.go b/server/legacy/events/webhooks/mocks/matchers/logging_logger.go deleted file mode 100644 index d43fd90e9..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/logging_logger.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingLogger() logging.Logger { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.Logger))(nil)).Elem())) - var nullValue logging.Logger - return nullValue -} - -func EqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func NotEqLoggingLogger(value logging.Logger) logging.Logger { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.Logger - return nullValue -} - -func LoggingLoggerThat(matcher pegomock.ArgumentMatcher) logging.Logger { - pegomock.RegisterMatcher(matcher) - var nullValue logging.Logger - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/logging_simplelogging.go b/server/legacy/events/webhooks/mocks/matchers/logging_simplelogging.go deleted file mode 100644 index c3b96f61f..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/logging_simplelogging.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyLoggingSimpleLogging() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func NotEqLoggingSimpleLogging(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func LoggingSimpleLoggingThat(matcher pegomock.ArgumentMatcher) logging.SimpleLogging { - pegomock.RegisterMatcher(matcher) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/ptr_to_logging_simplelogger.go b/server/legacy/events/webhooks/mocks/matchers/ptr_to_logging_simplelogger.go deleted file mode 100644 index 49af91b55..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/ptr_to_logging_simplelogger.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - logging "github.com/runatlantis/atlantis/server/logging" -) - -func AnyPtrToLoggingSimpleLogger() logging.SimpleLogging { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(logging.SimpleLogging))(nil)).Elem())) - var nullValue logging.SimpleLogging - return nullValue -} - -func EqPtrToLoggingSimpleLogger(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func NotEqPtrToLoggingSimpleLogger(value logging.SimpleLogging) logging.SimpleLogging { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue logging.SimpleLogging - return nullValue -} - -func PtrToLoggingSimpleLoggerThat(matcher pegomock.ArgumentMatcher) logging.SimpleLogging { - pegomock.RegisterMatcher(matcher) - var nullValue logging.SimpleLogging - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/ptr_to_slack_authtestresponse.go b/server/legacy/events/webhooks/mocks/matchers/ptr_to_slack_authtestresponse.go deleted file mode 100644 index 958bfd2eb..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/ptr_to_slack_authtestresponse.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - slack "github.com/nlopes/slack" -) - -func AnyPtrToSlackAuthTestResponse() *slack.AuthTestResponse { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*slack.AuthTestResponse))(nil)).Elem())) - var nullValue *slack.AuthTestResponse - return nullValue -} - -func EqPtrToSlackAuthTestResponse(value *slack.AuthTestResponse) *slack.AuthTestResponse { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *slack.AuthTestResponse - return nullValue -} - -func NotEqPtrToSlackAuthTestResponse(value *slack.AuthTestResponse) *slack.AuthTestResponse { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *slack.AuthTestResponse - return nullValue -} - -func PtrToSlackAuthTestResponseThat(matcher pegomock.ArgumentMatcher) *slack.AuthTestResponse { - pegomock.RegisterMatcher(matcher) - var nullValue *slack.AuthTestResponse - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/ptr_to_slack_getconversationsparameters.go b/server/legacy/events/webhooks/mocks/matchers/ptr_to_slack_getconversationsparameters.go deleted file mode 100644 index 54831bd3d..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/ptr_to_slack_getconversationsparameters.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - slack "github.com/nlopes/slack" -) - -func AnyPtrToSlackGetConversationsParameters() *slack.GetConversationsParameters { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*slack.GetConversationsParameters))(nil)).Elem())) - var nullValue *slack.GetConversationsParameters - return nullValue -} - -func EqPtrToSlackGetConversationsParameters(value *slack.GetConversationsParameters) *slack.GetConversationsParameters { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *slack.GetConversationsParameters - return nullValue -} - -func NotEqPtrToSlackGetConversationsParameters(value *slack.GetConversationsParameters) *slack.GetConversationsParameters { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *slack.GetConversationsParameters - return nullValue -} - -func PtrToSlackGetConversationsParametersThat(matcher pegomock.ArgumentMatcher) *slack.GetConversationsParameters { - pegomock.RegisterMatcher(matcher) - var nullValue *slack.GetConversationsParameters - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/slack_postmessageparameters.go b/server/legacy/events/webhooks/mocks/matchers/slack_postmessageparameters.go deleted file mode 100644 index e5ae68a61..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/slack_postmessageparameters.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - slack "github.com/nlopes/slack" -) - -func AnySlackPostMessageParameters() slack.PostMessageParameters { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(slack.PostMessageParameters))(nil)).Elem())) - var nullValue slack.PostMessageParameters - return nullValue -} - -func EqSlackPostMessageParameters(value slack.PostMessageParameters) slack.PostMessageParameters { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue slack.PostMessageParameters - return nullValue -} - -func NotEqSlackPostMessageParameters(value slack.PostMessageParameters) slack.PostMessageParameters { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue slack.PostMessageParameters - return nullValue -} - -func SlackPostMessageParametersThat(matcher pegomock.ArgumentMatcher) slack.PostMessageParameters { - pegomock.RegisterMatcher(matcher) - var nullValue slack.PostMessageParameters - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/slice_of_slack_channel.go b/server/legacy/events/webhooks/mocks/matchers/slice_of_slack_channel.go deleted file mode 100644 index 37ab8bf1e..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/slice_of_slack_channel.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - slack "github.com/nlopes/slack" -) - -func AnySliceOfSlackChannel() []slack.Channel { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]slack.Channel))(nil)).Elem())) - var nullValue []slack.Channel - return nullValue -} - -func EqSliceOfSlackChannel(value []slack.Channel) []slack.Channel { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []slack.Channel - return nullValue -} - -func NotEqSliceOfSlackChannel(value []slack.Channel) []slack.Channel { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []slack.Channel - return nullValue -} - -func SliceOfSlackChannelThat(matcher pegomock.ArgumentMatcher) []slack.Channel { - pegomock.RegisterMatcher(matcher) - var nullValue []slack.Channel - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/matchers/webhooks_applyresult.go b/server/legacy/events/webhooks/mocks/matchers/webhooks_applyresult.go deleted file mode 100644 index ad2b8776d..000000000 --- a/server/legacy/events/webhooks/mocks/matchers/webhooks_applyresult.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - webhooks "github.com/runatlantis/atlantis/server/legacy/events/webhooks" -) - -func AnyWebhooksApplyResult() webhooks.ApplyResult { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(webhooks.ApplyResult))(nil)).Elem())) - var nullValue webhooks.ApplyResult - return nullValue -} - -func EqWebhooksApplyResult(value webhooks.ApplyResult) webhooks.ApplyResult { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue webhooks.ApplyResult - return nullValue -} - -func NotEqWebhooksApplyResult(value webhooks.ApplyResult) webhooks.ApplyResult { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue webhooks.ApplyResult - return nullValue -} - -func WebhooksApplyResultThat(matcher pegomock.ArgumentMatcher) webhooks.ApplyResult { - pegomock.RegisterMatcher(matcher) - var nullValue webhooks.ApplyResult - return nullValue -} diff --git a/server/legacy/events/webhooks/mocks/mock_sender.go b/server/legacy/events/webhooks/mocks/mock_sender.go deleted file mode 100644 index aada73406..000000000 --- a/server/legacy/events/webhooks/mocks/mock_sender.go +++ /dev/null @@ -1,110 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/webhooks (interfaces: Sender) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - webhooks "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - logging "github.com/runatlantis/atlantis/server/logging" - "reflect" - "time" -) - -type MockSender struct { - fail func(message string, callerSkip ...int) -} - -func NewMockSender(options ...pegomock.Option) *MockSender { - mock := &MockSender{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockSender) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockSender) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockSender) Send(log logging.Logger, applyResult webhooks.ApplyResult) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockSender().") - } - params := []pegomock.Param{log, applyResult} - result := pegomock.GetGenericMockFrom(mock).Invoke("Send", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockSender) VerifyWasCalledOnce() *VerifierMockSender { - return &VerifierMockSender{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockSender) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockSender { - return &VerifierMockSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockSender) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockSender { - return &VerifierMockSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockSender) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockSender { - return &VerifierMockSender{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockSender struct { - mock *MockSender - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockSender) Send(log logging.Logger, applyResult webhooks.ApplyResult) *MockSender_Send_OngoingVerification { - params := []pegomock.Param{log, applyResult} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Send", params, verifier.timeout) - return &MockSender_Send_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockSender_Send_OngoingVerification struct { - mock *MockSender - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockSender_Send_OngoingVerification) GetCapturedArguments() (logging.Logger, webhooks.ApplyResult) { - log, applyResult := c.GetAllCapturedArguments() - return log[len(log)-1], applyResult[len(applyResult)-1] -} - -func (c *MockSender_Send_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.Logger, _param1 []webhooks.ApplyResult) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.Logger, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.Logger) - } - _param1 = make([]webhooks.ApplyResult, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(webhooks.ApplyResult) - } - } - return -} diff --git a/server/legacy/events/webhooks/mocks/mock_slack_client.go b/server/legacy/events/webhooks/mocks/mock_slack_client.go deleted file mode 100644 index 29736ffc3..000000000 --- a/server/legacy/events/webhooks/mocks/mock_slack_client.go +++ /dev/null @@ -1,220 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/events/webhooks (interfaces: SlackClient) - -package mocks - -import ( - "reflect" - "time" - - pegomock "github.com/petergtz/pegomock" - webhooks "github.com/runatlantis/atlantis/server/legacy/events/webhooks" -) - -type MockSlackClient struct { - fail func(message string, callerSkip ...int) -} - -func NewMockSlackClient(options ...pegomock.Option) *MockSlackClient { - mock := &MockSlackClient{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockSlackClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockSlackClient) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockSlackClient) AuthTest() error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockSlackClient().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("AuthTest", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockSlackClient) TokenIsSet() bool { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockSlackClient().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("TokenIsSet", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) - var ret0 bool - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - } - return ret0 -} - -func (mock *MockSlackClient) ChannelExists(channelName string) (bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockSlackClient().") - } - params := []pegomock.Param{channelName} - result := pegomock.GetGenericMockFrom(mock).Invoke("ChannelExists", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockSlackClient) PostMessage(channel string, applyResult webhooks.ApplyResult) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockSlackClient().") - } - params := []pegomock.Param{channel, applyResult} - result := pegomock.GetGenericMockFrom(mock).Invoke("PostMessage", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockSlackClient) VerifyWasCalledOnce() *VerifierMockSlackClient { - return &VerifierMockSlackClient{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockSlackClient) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockSlackClient { - return &VerifierMockSlackClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockSlackClient) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockSlackClient { - return &VerifierMockSlackClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockSlackClient) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockSlackClient { - return &VerifierMockSlackClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockSlackClient struct { - mock *MockSlackClient - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockSlackClient) AuthTest() *MockSlackClient_AuthTest_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "AuthTest", params, verifier.timeout) - return &MockSlackClient_AuthTest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockSlackClient_AuthTest_OngoingVerification struct { - mock *MockSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockSlackClient_AuthTest_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockSlackClient_AuthTest_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockSlackClient) TokenIsSet() *MockSlackClient_TokenIsSet_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TokenIsSet", params, verifier.timeout) - return &MockSlackClient_TokenIsSet_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockSlackClient_TokenIsSet_OngoingVerification struct { - mock *MockSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockSlackClient_TokenIsSet_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockSlackClient_TokenIsSet_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockSlackClient) ChannelExists(channelName string) *MockSlackClient_ChannelExists_OngoingVerification { - params := []pegomock.Param{channelName} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ChannelExists", params, verifier.timeout) - return &MockSlackClient_ChannelExists_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type 
MockSlackClient_ChannelExists_OngoingVerification struct { - mock *MockSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockSlackClient_ChannelExists_OngoingVerification) GetCapturedArguments() string { - channelName := c.GetAllCapturedArguments() - return channelName[len(channelName)-1] -} - -func (c *MockSlackClient_ChannelExists_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockSlackClient) PostMessage(channel string, applyResult webhooks.ApplyResult) *MockSlackClient_PostMessage_OngoingVerification { - params := []pegomock.Param{channel, applyResult} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PostMessage", params, verifier.timeout) - return &MockSlackClient_PostMessage_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockSlackClient_PostMessage_OngoingVerification struct { - mock *MockSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockSlackClient_PostMessage_OngoingVerification) GetCapturedArguments() (string, webhooks.ApplyResult) { - channel, applyResult := c.GetAllCapturedArguments() - return channel[len(channel)-1], applyResult[len(applyResult)-1] -} - -func (c *MockSlackClient_PostMessage_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []webhooks.ApplyResult) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]webhooks.ApplyResult, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(webhooks.ApplyResult) - } - } - return -} diff --git a/server/legacy/events/webhooks/mocks/mock_underlying_slack_client.go b/server/legacy/events/webhooks/mocks/mock_underlying_slack_client.go deleted file mode 100644 index 00ca6c656..000000000 --- a/server/legacy/events/webhooks/mocks/mock_underlying_slack_client.go +++ /dev/null @@ -1,208 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/events/webhooks (interfaces: UnderlyingSlackClient) - -package mocks - -import ( - "reflect" - "time" - - slack "github.com/nlopes/slack" - pegomock "github.com/petergtz/pegomock" -) - -type MockUnderlyingSlackClient struct { - fail func(message string, callerSkip ...int) -} - -func NewMockUnderlyingSlackClient(options ...pegomock.Option) *MockUnderlyingSlackClient { - mock := &MockUnderlyingSlackClient{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockUnderlyingSlackClient) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockUnderlyingSlackClient) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockUnderlyingSlackClient) AuthTest() (*slack.AuthTestResponse, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockUnderlyingSlackClient().") - } - params := []pegomock.Param{} - result := pegomock.GetGenericMockFrom(mock).Invoke("AuthTest", params, []reflect.Type{reflect.TypeOf((**slack.AuthTestResponse)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *slack.AuthTestResponse - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*slack.AuthTestResponse) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockUnderlyingSlackClient) GetConversations(conversationParams *slack.GetConversationsParameters) ([]slack.Channel, string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockUnderlyingSlackClient().") - } - params := []pegomock.Param{conversationParams} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetConversations", params, []reflect.Type{reflect.TypeOf((*[]slack.Channel)(nil)).Elem(), reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []slack.Channel - var ret1 string - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]slack.Channel) - } - if result[1] != nil { - ret1 = result[1].(string) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockUnderlyingSlackClient) PostMessage(channel string, text string, parameters slack.PostMessageParameters) (string, string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockUnderlyingSlackClient().") - } - params := []pegomock.Param{channel, text, parameters} - result := pegomock.GetGenericMockFrom(mock).Invoke("PostMessage", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 string - var ret2 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(string) - } - if result[2] != nil { - ret2 = result[2].(error) - } - } - return ret0, ret1, ret2 -} - -func (mock *MockUnderlyingSlackClient) VerifyWasCalledOnce() *VerifierMockUnderlyingSlackClient { - return &VerifierMockUnderlyingSlackClient{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockUnderlyingSlackClient) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockUnderlyingSlackClient { - return &VerifierMockUnderlyingSlackClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockUnderlyingSlackClient) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockUnderlyingSlackClient { - return &VerifierMockUnderlyingSlackClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockUnderlyingSlackClient) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockUnderlyingSlackClient { - return &VerifierMockUnderlyingSlackClient{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockUnderlyingSlackClient struct { - mock *MockUnderlyingSlackClient - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockUnderlyingSlackClient) AuthTest() 
*MockUnderlyingSlackClient_AuthTest_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "AuthTest", params, verifier.timeout) - return &MockUnderlyingSlackClient_AuthTest_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockUnderlyingSlackClient_AuthTest_OngoingVerification struct { - mock *MockUnderlyingSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockUnderlyingSlackClient_AuthTest_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockUnderlyingSlackClient_AuthTest_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockUnderlyingSlackClient) GetConversations(conversationParams *slack.GetConversationsParameters) *MockUnderlyingSlackClient_GetConversations_OngoingVerification { - params := []pegomock.Param{conversationParams} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetConversations", params, verifier.timeout) - return &MockUnderlyingSlackClient_GetConversations_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockUnderlyingSlackClient_GetConversations_OngoingVerification struct { - mock *MockUnderlyingSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockUnderlyingSlackClient_GetConversations_OngoingVerification) GetCapturedArguments() *slack.GetConversationsParameters { - conversationParams := c.GetAllCapturedArguments() - return conversationParams[len(conversationParams)-1] -} - -func (c *MockUnderlyingSlackClient_GetConversations_OngoingVerification) GetAllCapturedArguments() (_param0 []*slack.GetConversationsParameters) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]*slack.GetConversationsParameters, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(*slack.GetConversationsParameters) - } - } - return -} - -func (verifier *VerifierMockUnderlyingSlackClient) PostMessage(channel string, text string, parameters slack.PostMessageParameters) *MockUnderlyingSlackClient_PostMessage_OngoingVerification { - params := []pegomock.Param{channel, text, parameters} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PostMessage", params, verifier.timeout) - return &MockUnderlyingSlackClient_PostMessage_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockUnderlyingSlackClient_PostMessage_OngoingVerification struct { - mock *MockUnderlyingSlackClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockUnderlyingSlackClient_PostMessage_OngoingVerification) GetCapturedArguments() (string, string, slack.PostMessageParameters) { - channel, text, parameters := c.GetAllCapturedArguments() - return channel[len(channel)-1], text[len(text)-1], parameters[len(parameters)-1] -} - -func (c *MockUnderlyingSlackClient_PostMessage_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []slack.PostMessageParameters) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = 
param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]slack.PostMessageParameters, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(slack.PostMessageParameters) - } - } - return -} diff --git a/server/legacy/events/webhooks/slack.go b/server/legacy/events/webhooks/slack.go deleted file mode 100644 index 3ef2371c0..000000000 --- a/server/legacy/events/webhooks/slack.go +++ /dev/null @@ -1,58 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package webhooks - -import ( - "regexp" - - "fmt" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" -) - -// SlackWebhook sends webhooks to Slack. -type SlackWebhook struct { - Client SlackClient - WorkspaceRegex *regexp.Regexp - Channel string -} - -func NewSlack(r *regexp.Regexp, channel string, client SlackClient) (*SlackWebhook, error) { - if err := client.AuthTest(); err != nil { - return nil, fmt.Errorf("testing slack authentication: %s. Verify your slack-token is valid", err) - } - - channelExists, err := client.ChannelExists(channel) - if err != nil { - return nil, err - } - if !channelExists { - return nil, errors.Errorf("slack channel %q doesn't exist", channel) - } - - return &SlackWebhook{ - Client: client, - WorkspaceRegex: r, - Channel: channel, - }, nil -} - -// Send sends the webhook to Slack if the workspace matches the regex. -func (s *SlackWebhook) Send(log logging.Logger, applyResult ApplyResult) error { - if !s.WorkspaceRegex.MatchString(applyResult.Workspace) { - return nil - } - return s.Client.PostMessage(s.Channel, applyResult) -} diff --git a/server/legacy/events/webhooks/slack_client.go b/server/legacy/events/webhooks/slack_client.go deleted file mode 100644 index 24e386391..000000000 --- a/server/legacy/events/webhooks/slack_client.go +++ /dev/null @@ -1,142 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package webhooks - -import ( - "fmt" - - "github.com/nlopes/slack" -) - -const ( - slackSuccessColour = "good" - slackFailureColour = "danger" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_slack_client.go SlackClient - -// SlackClient handles making API calls to Slack. 
-type SlackClient interface { - AuthTest() error - TokenIsSet() bool - ChannelExists(channelName string) (bool, error) - PostMessage(channel string, applyResult ApplyResult) error -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_underlying_slack_client.go UnderlyingSlackClient - -// UnderlyingSlackClient wraps the nlopes/slack.Client implementation so -// we can mock it during tests. -type UnderlyingSlackClient interface { - AuthTest() (response *slack.AuthTestResponse, error error) - GetConversations(conversationParams *slack.GetConversationsParameters) (channels []slack.Channel, nextCursor string, err error) - PostMessage(channel, text string, parameters slack.PostMessageParameters) (string, string, error) -} - -type DefaultSlackClient struct { - Slack UnderlyingSlackClient - Token string -} - -func NewSlackClient(token string) SlackClient { - return &DefaultSlackClient{ - Slack: slack.New(token), - Token: token, - } -} - -func (d *DefaultSlackClient) AuthTest() error { - _, err := d.Slack.AuthTest() - return err -} - -func (d *DefaultSlackClient) TokenIsSet() bool { - return d.Token != "" -} - -func (d *DefaultSlackClient) ChannelExists(channelName string) (bool, error) { - var ( - cursor string - channels []slack.Channel - err error - ) - - for { - channels, cursor, err = d.Slack.GetConversations(&slack.GetConversationsParameters{Cursor: cursor}) - if err != nil { - return false, err - } - for _, channel := range channels { - if channel.Name == channelName { - return true, nil - } - } - if cursor == "" { - break - } - } - - return false, nil -} - -func (d *DefaultSlackClient) PostMessage(channel string, applyResult ApplyResult) error { - params := slack.NewPostMessageParameters() - params.Attachments = d.createAttachments(applyResult) - params.AsUser = true - params.EscapeText = false - _, _, err := d.Slack.PostMessage(channel, "", params) - return err -} - -func (d *DefaultSlackClient) createAttachments(applyResult ApplyResult) []slack.Attachment { - var colour string - var successWord string - if applyResult.Success { - colour = slackSuccessColour - successWord = "succeeded" - } else { - colour = slackFailureColour - successWord = "failed" - } - - text := fmt.Sprintf("Apply %s for <%s|%s>", successWord, applyResult.Pull.URL, applyResult.Repo.FullName) - directory := applyResult.Directory - // Since "." looks weird, replace it with "/" to make it clear this is the root. - if directory == "." { - directory = "/" - } - - attachment := slack.Attachment{ - Color: colour, - Text: text, - Fields: []slack.AttachmentField{ - { - Title: "Workspace", - Value: applyResult.Workspace, - Short: true, - }, - { - Title: "User", - Value: applyResult.User.Username, - Short: true, - }, - { - Title: "Directory", - Value: directory, - Short: true, - }, - }, - } - return []slack.Attachment{attachment} -} diff --git a/server/legacy/events/webhooks/slack_client_test.go b/server/legacy/events/webhooks/slack_client_test.go deleted file mode 100644 index 1d9f32d84..000000000 --- a/server/legacy/events/webhooks/slack_client_test.go +++ /dev/null @@ -1,197 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package webhooks_test - -import ( - "encoding/json" - "errors" - "testing" - - "github.com/nlopes/slack" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks/mocks" - "github.com/runatlantis/atlantis/server/models" - - . "github.com/petergtz/pegomock" - . "github.com/runatlantis/atlantis/testing" -) - -var underlying *mocks.MockUnderlyingSlackClient -var client webhooks.DefaultSlackClient -var result webhooks.ApplyResult - -func TestAuthTest_Success(t *testing.T) { - t.Log("When the underlying client succeeds, function should succeed") - setup(t) - err := client.AuthTest() - Ok(t, err) -} - -func TestAuthTest_Error(t *testing.T) { - t.Log("When the underlying slack client errors, an error should be returned") - setup(t) - When(underlying.AuthTest()).ThenReturn(nil, errors.New("")) - err := client.AuthTest() - Assert(t, err != nil, "expected error") -} - -func TestTokenIsSet(t *testing.T) { - t.Log("When the Token is an empty string, function should return false") - c := webhooks.DefaultSlackClient{ - Token: "", - } - Equals(t, false, c.TokenIsSet()) - - t.Log("When the Token is not an empty string, function should return true") - c.Token = "random" - Equals(t, true, c.TokenIsSet()) -} - -func TestChannelExists_False(t *testing.T) { - t.Log("When the slack channel doesn't exist, function should return false") - setup(t) - When(underlying.GetConversations(new(slack.GetConversationsParameters))).ThenReturn(nil, "xyz", nil) - When(underlying.GetConversations(&slack.GetConversationsParameters{Cursor: "xyz"})).ThenReturn(nil, "", nil) - exists, err := client.ChannelExists("somechannel") - Ok(t, err) - Equals(t, false, exists) -} - -func TestChannelExists_True(t *testing.T) { - t.Log("When the slack channel exists, function should return true") - setup(t) - channelJSON := `{"name":"existingchannel"}` - var channel slack.Channel - err := json.Unmarshal([]byte(channelJSON), &channel) - Ok(t, err) - When(underlying.GetConversations(new(slack.GetConversationsParameters))).ThenReturn(nil, "xyz", nil) - When(underlying.GetConversations(&slack.GetConversationsParameters{Cursor: "xyz"})).ThenReturn([]slack.Channel{channel}, "", nil) - - exists, err := client.ChannelExists("existingchannel") - Ok(t, err) - Equals(t, true, exists) -} - -func TestChannelExists_Error(t *testing.T) { - t.Log("When the underlying slack client errors, an error should be returned") - setup(t) - When(underlying.GetConversations(new(slack.GetConversationsParameters))).ThenReturn(nil, "xyz", nil) - When(underlying.GetConversations(&slack.GetConversationsParameters{Cursor: "xyz"})).ThenReturn(nil, "", errors.New("")) - - _, err := client.ChannelExists("anychannel") - Assert(t, err != nil, "expected error") -} - -func TestPostMessage_Success(t *testing.T) { - t.Log("When apply succeeds, function should succeed and indicate success") - setup(t) - - expParams := slack.NewPostMessageParameters() - expParams.Attachments = []slack.Attachment{{ - Color: "good", - Text: "Apply succeeded for 
", - Fields: []slack.AttachmentField{ - { - Title: "Workspace", - Value: result.Workspace, - Short: true, - }, - { - Title: "User", - Value: result.User.Username, - Short: true, - }, - { - Title: "Directory", - Value: result.Directory, - Short: true, - }, - }, - }} - expParams.AsUser = true - expParams.EscapeText = false - - channel := "somechannel" - err := client.PostMessage(channel, result) - Ok(t, err) - underlying.VerifyWasCalledOnce().PostMessage(channel, "", expParams) - - t.Log("When apply fails, function should succeed and indicate failure") - result.Success = false - expParams.Attachments[0].Color = "danger" - expParams.Attachments[0].Text = "Apply failed for " - - err = client.PostMessage(channel, result) - Ok(t, err) - underlying.VerifyWasCalledOnce().PostMessage(channel, "", expParams) -} - -func TestPostMessage_Error(t *testing.T) { - t.Log("When the underlying slack client errors, an error should be returned") - setup(t) - - expParams := slack.NewPostMessageParameters() - expParams.Attachments = []slack.Attachment{{ - Color: "good", - Text: "Apply succeeded for ", - Fields: []slack.AttachmentField{ - { - Title: "Workspace", - Value: result.Workspace, - Short: true, - }, - { - Title: "User", - Value: result.User.Username, - Short: true, - }, - { - Title: "Directory", - Value: result.Directory, - Short: true, - }, - }, - }} - expParams.AsUser = true - expParams.EscapeText = false - - channel := "somechannel" - When(underlying.PostMessage(channel, "", expParams)).ThenReturn("", "", errors.New("")) - - err := client.PostMessage(channel, result) - Assert(t, err != nil, "expected error") -} - -func setup(t *testing.T) { - RegisterMockTestingT(t) - underlying = mocks.NewMockUnderlyingSlackClient() - client = webhooks.DefaultSlackClient{ - Slack: underlying, - Token: "sometoken", - } - result = webhooks.ApplyResult{ - Workspace: "production", - Repo: models.Repo{ - FullName: "runatlantis/atlantis", - }, - Pull: models.PullRequest{ - Num: 1, - URL: "url", - }, - User: models.User{ - Username: "lkysow", - }, - Success: true, - } -} diff --git a/server/legacy/events/webhooks/slack_test.go b/server/legacy/events/webhooks/slack_test.go deleted file mode 100644 index 4110392bd..000000000 --- a/server/legacy/events/webhooks/slack_test.go +++ /dev/null @@ -1,68 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package webhooks_test - -import ( - "regexp" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks/mocks" - "github.com/runatlantis/atlantis/server/logging" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestSend_PostMessage(t *testing.T) { - t.Log("Sending a hook with a matching regex should call PostMessage") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - regex, err := regexp.Compile(".*") - Ok(t, err) - - channel := "somechannel" - hook := webhooks.SlackWebhook{ - Client: client, - WorkspaceRegex: regex, - Channel: channel, - } - result := webhooks.ApplyResult{ - Workspace: "production", - } - - t.Log("PostMessage should be called, doesn't matter if it errors or not") - _ = hook.Send(logging.NewNoopCtxLogger(t), result) - client.VerifyWasCalledOnce().PostMessage(channel, result) -} - -func TestSend_NoopSuccess(t *testing.T) { - t.Log("Sending a hook with a non-matching regex should succeed") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - regex, err := regexp.Compile("weirdemv") - Ok(t, err) - - channel := "somechannel" - hook := webhooks.SlackWebhook{ - Client: client, - WorkspaceRegex: regex, - Channel: channel, - } - result := webhooks.ApplyResult{ - Workspace: "production", - } - err = hook.Send(logging.NewNoopCtxLogger(t), result) - Ok(t, err) - client.VerifyWasCalled(Never()).PostMessage(channel, result) -} diff --git a/server/legacy/events/webhooks/webhooks.go b/server/legacy/events/webhooks/webhooks.go deleted file mode 100644 index d2e9461ed..000000000 --- a/server/legacy/events/webhooks/webhooks.go +++ /dev/null @@ -1,103 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package webhooks - -import ( - "fmt" - "regexp" - - "errors" - - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -const SlackKind = "slack" -const ApplyEvent = "apply" - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_sender.go Sender - -// Sender sends webhooks. -type Sender interface { - // Send sends the webhook (if the implementation thinks it should). - Send(log logging.Logger, applyResult ApplyResult) error -} - -// ApplyResult is the result of a terraform apply. -type ApplyResult struct { - Workspace string - Repo models.Repo - Pull models.PullRequest - User models.User - Success bool - Directory string -} - -// MultiWebhookSender sends multiple webhooks for each one it's configured for. -type MultiWebhookSender struct { - Webhooks []Sender -} - -type Config struct { - Event string - WorkspaceRegex string - Kind string - Channel string -} - -func NewMultiWebhookSender(configs []Config, client SlackClient) (*MultiWebhookSender, error) { - var webhooks []Sender - for _, c := range configs { - r, err := regexp.Compile(c.WorkspaceRegex) - if err != nil { - return nil, err - } - if c.Kind == "" || c.Event == "" { - return nil, errors.New("must specify \"kind\" and \"event\" keys for webhooks") - } - if c.Event != ApplyEvent { - return nil, fmt.Errorf("\"event: %s\" not supported. 
Only \"event: %s\" is supported right now", c.Event, ApplyEvent) - } - switch c.Kind { - case SlackKind: - if !client.TokenIsSet() { - return nil, errors.New("must specify top-level \"slack-token\" if using a webhook of \"kind: slack\"") - } - if c.Channel == "" { - return nil, errors.New("must specify \"channel\" if using a webhook of \"kind: slack\"") - } - slack, err := NewSlack(r, c.Channel, client) - if err != nil { - return nil, err - } - webhooks = append(webhooks, slack) - default: - return nil, fmt.Errorf("\"kind: %s\" not supported. Only \"kind: %s\" is supported right now", c.Kind, SlackKind) - } - } - - return &MultiWebhookSender{ - Webhooks: webhooks, - }, nil -} - -// Send sends the webhook using its Webhooks. -func (w *MultiWebhookSender) Send(log logging.Logger, result ApplyResult) error { - for _, w := range w.Webhooks { - if err := w.Send(log, result); err != nil { - log.Warn(fmt.Sprintf("error sending slack webhook: %s", err)) - } - } - return nil -} diff --git a/server/legacy/events/webhooks/webhooks_test.go b/server/legacy/events/webhooks/webhooks_test.go deleted file mode 100644 index cae6a0d93..000000000 --- a/server/legacy/events/webhooks/webhooks_test.go +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package webhooks_test - -import ( - "strings" - "testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks/mocks" - "github.com/runatlantis/atlantis/server/logging" - . 
"github.com/runatlantis/atlantis/testing" -) - -const ( - validEvent = webhooks.ApplyEvent - validRegex = ".*" - validKind = webhooks.SlackKind - validChannel = "validchannel" -) - -var validConfig = webhooks.Config{ - Event: validEvent, - WorkspaceRegex: validRegex, - Kind: validKind, - Channel: validChannel, -} - -func validConfigs() []webhooks.Config { - return []webhooks.Config{validConfig} -} - -func TestNewWebhooksManager_InvalidRegex(t *testing.T) { - t.Log("When given an invalid regex in a config, an error is returned") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - When(client.ChannelExists(validChannel)).ThenReturn(true, nil) - - invalidRegex := "(" - configs := validConfigs() - configs[0].WorkspaceRegex = invalidRegex - _, err := webhooks.NewMultiWebhookSender(configs, client) - Assert(t, err != nil, "expected error") - Assert(t, strings.Contains(err.Error(), "error parsing regexp"), "expected regex error") -} - -func TestNewWebhooksManager_NoEvent(t *testing.T) { - t.Log("When the event key is not specified in a config, an error is returned") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - configs := validConfigs() - configs[0].Event = "" - _, err := webhooks.NewMultiWebhookSender(configs, client) - Assert(t, err != nil, "expected error") - Equals(t, "must specify \"kind\" and \"event\" keys for webhooks", err.Error()) -} - -func TestNewWebhooksManager_UnsupportedEvent(t *testing.T) { - t.Log("When given an unsupported event in a config, an error is returned") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - When(client.ChannelExists(validChannel)).ThenReturn(true, nil) - - unsupportedEvent := "badevent" - configs := validConfigs() - configs[0].Event = unsupportedEvent - _, err := webhooks.NewMultiWebhookSender(configs, client) - Assert(t, err != nil, "expected error") - Equals(t, "\"event: badevent\" not supported. Only \"event: apply\" is supported right now", err.Error()) -} - -func TestNewWebhooksManager_NoKind(t *testing.T) { - t.Log("When the kind key is not specified in a config, an error is returned") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - configs := validConfigs() - configs[0].Kind = "" - _, err := webhooks.NewMultiWebhookSender(configs, client) - Assert(t, err != nil, "expected error") - Equals(t, "must specify \"kind\" and \"event\" keys for webhooks", err.Error()) -} - -func TestNewWebhooksManager_UnsupportedKind(t *testing.T) { - t.Log("When given an unsupported kind in a config, an error is returned") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - When(client.ChannelExists(validChannel)).ThenReturn(true, nil) - - unsupportedKind := "badkind" - configs := validConfigs() - configs[0].Kind = unsupportedKind - _, err := webhooks.NewMultiWebhookSender(configs, client) - Assert(t, err != nil, "expected error") - Equals(t, "\"kind: badkind\" not supported. 
Only \"kind: slack\" is supported right now", err.Error()) -} - -func TestNewWebhooksManager_NoConfigSuccess(t *testing.T) { - t.Log("When there are no configs, function should succeed") - t.Log("passing any client should succeed") - var emptyConfigs []webhooks.Config - emptyToken := "" - m, err := webhooks.NewMultiWebhookSender(emptyConfigs, webhooks.NewSlackClient(emptyToken)) - Ok(t, err) - Equals(t, 0, len(m.Webhooks)) // nolint: staticcheck - - t.Log("passing nil client should succeed") - m, err = webhooks.NewMultiWebhookSender(emptyConfigs, nil) - Ok(t, err) - Equals(t, 0, len(m.Webhooks)) // nolint: staticcheck -} -func TestNewWebhooksManager_SingleConfigSuccess(t *testing.T) { - t.Log("When there is one valid config, function should succeed") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - When(client.TokenIsSet()).ThenReturn(true) - When(client.ChannelExists(validChannel)).ThenReturn(true, nil) - - configs := validConfigs() - m, err := webhooks.NewMultiWebhookSender(configs, client) - Ok(t, err) - Equals(t, 1, len(m.Webhooks)) // nolint: staticcheck -} - -func TestNewWebhooksManager_MultipleConfigSuccess(t *testing.T) { - t.Log("When there are multiple valid configs, function should succeed") - RegisterMockTestingT(t) - client := mocks.NewMockSlackClient() - When(client.TokenIsSet()).ThenReturn(true) - When(client.ChannelExists(validChannel)).ThenReturn(true, nil) - - var configs []webhooks.Config - nConfigs := 5 - for i := 0; i < nConfigs; i++ { - configs = append(configs, validConfig) - } - m, err := webhooks.NewMultiWebhookSender(configs, client) - Ok(t, err) - Equals(t, nConfigs, len(m.Webhooks)) // nolint: staticcheck -} - -func TestSend_SingleSuccess(t *testing.T) { - t.Log("Sending one webhook should succeed") - RegisterMockTestingT(t) - sender := mocks.NewMockSender() - manager := webhooks.MultiWebhookSender{ - Webhooks: []webhooks.Sender{sender}, - } - logger := logging.NewNoopCtxLogger(t) - result := webhooks.ApplyResult{} - manager.Send(logger, result) // nolint: errcheck - sender.VerifyWasCalledOnce().Send(logger, result) -} - -func TestSend_MultipleSuccess(t *testing.T) { - t.Log("Sending multiple webhooks should succeed") - RegisterMockTestingT(t) - senders := []*mocks.MockSender{ - mocks.NewMockSender(), - mocks.NewMockSender(), - mocks.NewMockSender(), - } - manager := webhooks.MultiWebhookSender{ - Webhooks: []webhooks.Sender{senders[0], senders[1], senders[2]}, - } - logger := logging.NewNoopCtxLogger(t) - result := webhooks.ApplyResult{} - err := manager.Send(logger, result) - Ok(t, err) - for _, s := range senders { - s.VerifyWasCalledOnce().Send(logger, result) - } -} diff --git a/server/legacy/events/working_dir.go b/server/legacy/events/working_dir.go deleted file mode 100644 index 1e57b881e..000000000 --- a/server/legacy/events/working_dir.go +++ /dev/null @@ -1,336 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. 
- -package events - -import ( - "fmt" - "os" - "os/exec" - "path/filepath" - "strconv" - "strings" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -const workingDirPrefix = "repos" - -type WorkingDirIterator interface { - ListCurrentWorkingDirs() []WorkingDir -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_working_dir.go WorkingDir -//go:generate pegomock generate -m --use-experimental-model-gen --package events WorkingDir - -// WorkingDir handles the workspace on disk for running commands. -// -//nolint:interfacebloat -type WorkingDir interface { - // Clone git clones headRepo, checks out the branch and then returns the - // absolute path to the root of the cloned repo. It also returns - // a boolean indicating if we should warn users that the branch we're - // merging into has been updated since we cloned it. - Clone(log logging.Logger, headRepo models.Repo, p models.PullRequest, projectCloneDir string) (string, bool, error) - // GetWorkingDir returns the path to the workspace for this repo and pull. - // If workspace does not exist on disk, error will be of type os.IsNotExist. - GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) - HasDiverged(log logging.Logger, cloneDir string, baseRepo models.Repo) bool - GetPullDir(r models.Repo, p models.PullRequest) (string, error) - // Delete deletes the workspace for this repo and pull. - Delete(r models.Repo, p models.PullRequest) error - DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error -} - -// FileWorkspace implements WorkingDir with the file system. -type FileWorkspace struct { - DataDir string - // TestingOverrideHeadCloneURL can be used during testing to override the - // URL of the head repo to be cloned. If it's empty then we clone normally. - TestingOverrideHeadCloneURL string - // TestingOverrideBaseCloneURL can be used during testing to override the - // URL of the base repo to be cloned. If it's empty then we clone normally. - TestingOverrideBaseCloneURL string - - GlobalCfg valid.GlobalCfg -} - -// Clone git clones headRepo, checks out the branch and then returns the absolute -// path to the root of the cloned repo. It also returns -// a boolean indicating if we should warn users that the branch we're -// merging into has been updated since we cloned it. -// If the repo already exists and is at -// the right commit it does nothing. This is to support running commands in -// multiple dirs of the same repo without deleting existing plans. -func (w *FileWorkspace) Clone( - log logging.Logger, - headRepo models.Repo, - p models.PullRequest, - projectCloneDir string) (string, bool, error) { - cloneDir := w.cloneDir(p.BaseRepo, p, projectCloneDir) - - matchingRepo := w.GlobalCfg.MatchingRepo(p.BaseRepo.ID()) - checkoutMerge := matchingRepo.CheckoutStrategy == "merge" - - // If the directory already exists, check if it's at the right commit. - // If so, then we do nothing. - if _, err := os.Stat(cloneDir); err == nil { - // We use git rev-parse to see if our repo is at the right commit. - // If just checking out the pull request branch, we can use HEAD. - // If doing a merge, then HEAD won't be at the pull request's HEAD - // because we'll already have performed a merge. Instead, we'll check - // HEAD^2 since that will be the commit before our merge. 
- pullHead := "HEAD" - - if checkoutMerge { - pullHead = "HEAD^2" - } - revParseCmd := exec.Command("git", "rev-parse", pullHead) // #nosec - revParseCmd.Dir = cloneDir - outputRevParseCmd, err := revParseCmd.CombinedOutput() - logFields := map[string]interface{}{ - "repository": headRepo.FullName, - "pull-num": p.Num, - "workspace": projectCloneDir, - } - if err != nil { - log.Warn( - fmt.Sprintf("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)), - logFields) - return cloneDir, false, w.forceClone(log, cloneDir, headRepo, p, checkoutMerge) - } - currCommit := strings.Trim(string(outputRevParseCmd), "\n") - - // We're prefix matching here because BitBucket doesn't give us the full - // commit, only a 12 character prefix. - if strings.HasPrefix(currCommit, p.HeadCommit) { - return cloneDir, w.warnDiverged(log, p, headRepo, cloneDir, checkoutMerge), nil - } - // We'll fall through to re-clone. - } - - // Otherwise we clone the repo. - return cloneDir, false, w.forceClone(log, cloneDir, headRepo, p, checkoutMerge) -} - -// warnDiverged returns true if we should warn the user that the branch we're -// merging into has diverged from what we currently have checked out. -// This matters in the case of the merge checkout strategy because after -// cloning the repo and doing the merge, it's possible master was updated. -// Then users won't be getting the merge functionality they expected. -// If there are any errors we return false since we prefer things to succeed -// vs. stopping the plan/apply. -func (w *FileWorkspace) warnDiverged(log logging.Logger, p models.PullRequest, headRepo models.Repo, cloneDir string, checkoutMerge bool) bool { - if !checkoutMerge { - // It only makes sense to warn that master has diverged if we're using - // the checkout merge strategy. If we're just checking out the branch, - // then it doesn't matter what's going on with master because we've - // decided to always run off the branch. - return false - } - - // Bring our remote refs up to date. - // Reset the URL in case we are using github app credentials since these might have - // expired and refreshed and the URL would now be different. - // In this case, we should be using a proxy URL which substitutes the credentials in - // as a long term fix, but something like that requires more e2e testing/time - cmds := [][]string{ - { - "git", "remote", "set-url", "origin", p.BaseRepo.CloneURL, - }, - { - "git", "remote", "set-url", "head", headRepo.CloneURL, - }, - { - "git", "remote", "update", - }, - } - - logFields := map[string]interface{}{ - "repository": headRepo.FullName, - "pull-num": p.Num, - "workspace": cloneDir, - } - - for _, args := range cmds { - cmd := exec.Command(args[0], args[1:]...) 
// nolint: gosec - cmd.Dir = cloneDir - - output, err := cmd.CombinedOutput() - - if err != nil { - log.Warn(fmt.Sprintf("getting remote update failed: %s", string(output)), logFields) - return false - } - } - - hasDiverged := w.HasDiverged(log, cloneDir, p.BaseRepo) - if hasDiverged { - log.Info("remote master branch is ahead and thereby has new commits, it is recommended to pull new commits", logFields) - } - return hasDiverged -} - -func (w *FileWorkspace) HasDiverged(log logging.Logger, cloneDir string, baseRepo models.Repo) bool { - matchingRepo := w.GlobalCfg.MatchingRepo(baseRepo.ID()) - checkoutMerge := matchingRepo.CheckoutStrategy == "merge" - - if !checkoutMerge { - // Both the diverged warning and the UnDiverged apply requirement only apply to merge checkout strategy so - // we assume false here for 'branch' strategy. - return false - } - // Check if remote master branch has diverged. - statusUnoCmd := exec.Command("git", "status", "--untracked-files=no") - statusUnoCmd.Dir = cloneDir - outputStatusUno, err := statusUnoCmd.CombinedOutput() - if err != nil { - log.Warn(fmt.Sprintf("getting repo status has failed: %s", string(outputStatusUno))) - return false - } - hasDiverged := strings.Contains(string(outputStatusUno), "have diverged") - return hasDiverged -} - -func (w *FileWorkspace) forceClone(log logging.Logger, - cloneDir string, - headRepo models.Repo, - p models.PullRequest, checkoutMerge bool) error { - logFields := map[string]interface{}{ - "repository": headRepo.FullName, - "pull-num": p.Num, - "workspace": cloneDir, - } - - err := os.RemoveAll(cloneDir) - if err != nil { - return errors.Wrapf(err, "deleting dir %q before cloning", cloneDir) - } - - // Create the directory and parents if necessary. - log.Info(fmt.Sprintf("creating dir %q", cloneDir), logFields) - if err := os.MkdirAll(cloneDir, 0700); err != nil { - return errors.Wrap(err, "creating new workspace") - } - - // During testing, we mock some of this out. - headCloneURL := headRepo.CloneURL - if w.TestingOverrideHeadCloneURL != "" { - headCloneURL = w.TestingOverrideHeadCloneURL - } - baseCloneURL := p.BaseRepo.CloneURL - if w.TestingOverrideBaseCloneURL != "" { - baseCloneURL = w.TestingOverrideBaseCloneURL - } - - var cmds [][]string - if checkoutMerge { - // NOTE: We can't do a shallow clone when we're merging because we'll - // get merge conflicts if our clone doesn't have the commits that the - // branch we're merging branched off at. - // See https://groups.google.com/forum/#!topic/git-users/v3MkuuiDJ98. - cmds = [][]string{ - { - "git", "clone", "--branch", p.BaseBranch, "--single-branch", baseCloneURL, cloneDir, - }, - { - "git", "remote", "add", "head", headCloneURL, - }, - { - "git", "fetch", "head", fmt.Sprintf("+refs/heads/%s:", p.HeadBranch), - }, - // We use --no-ff because we always want there to be a merge commit. - // This way, our branch will look the same regardless if the merge - // could be fast forwarded. This is useful later when we run - // git rev-parse HEAD^2 to get the head commit because it will - // always succeed whereas without --no-ff, if the merge was fast - // forwarded then git rev-parse HEAD^2 would fail. - { - "git", "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD", - }, - } - } else { - cmds = [][]string{ - { - "git", "clone", "--branch", p.HeadBranch, "--depth=1", "--single-branch", headCloneURL, cloneDir, - }, - } - } - - for _, args := range cmds { - cmd := exec.Command(args[0], args[1:]...) 
// nolint: gosec - cmd.Dir = cloneDir - // The git merge command requires these env vars are set. - cmd.Env = append(os.Environ(), []string{ - "EMAIL=atlantis@runatlantis.io", - "GIT_AUTHOR_NAME=atlantis", - "GIT_COMMITTER_NAME=atlantis", - }...) - - cmdStr := w.sanitizeGitCredentials(strings.Join(cmd.Args, " "), p.BaseRepo, headRepo) - output, err := cmd.CombinedOutput() - sanitizedOutput := w.sanitizeGitCredentials(string(output), p.BaseRepo, headRepo) - if err != nil { - sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), p.BaseRepo, headRepo) - return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) - } - } - return nil -} - -// GetWorkingDir returns the path to the workspace for this repo and pull. -func (w *FileWorkspace) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) { - repoDir := w.cloneDir(r, p, workspace) - if _, err := os.Stat(repoDir); err != nil { - return "", errors.Wrap(err, "checking if workspace exists") - } - return repoDir, nil -} - -// GetPullDir returns the dir where the workspaces for this pull are cloned. -// If the dir doesn't exist it will return an error. -func (w *FileWorkspace) GetPullDir(r models.Repo, p models.PullRequest) (string, error) { - dir := w.repoPullDir(r, p) - if _, err := os.Stat(dir); err != nil { - return "", err - } - return dir, nil -} - -// Delete deletes the workspace for this repo and pull. -func (w *FileWorkspace) Delete(r models.Repo, p models.PullRequest) error { - return os.RemoveAll(w.repoPullDir(r, p)) -} - -// DeleteForWorkspace deletes the working dir for this workspace. -func (w *FileWorkspace) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { - return os.RemoveAll(w.cloneDir(r, p, workspace)) -} - -func (w *FileWorkspace) repoPullDir(r models.Repo, p models.PullRequest) string { - return filepath.Join(w.DataDir, workingDirPrefix, r.FullName, strconv.Itoa(p.Num)) -} - -func (w *FileWorkspace) cloneDir(r models.Repo, p models.PullRequest, workspace string) string { - return filepath.Join(w.repoPullDir(r, p), workspace) -} - -// sanitizeGitCredentials replaces any git clone urls that contain credentials -// in s with the sanitized versions. 
-func (w *FileWorkspace) sanitizeGitCredentials(s string, base models.Repo, head models.Repo) string { - baseReplaced := strings.Replace(s, base.CloneURL, base.SanitizedCloneURL, -1) - return strings.Replace(baseReplaced, head.CloneURL, head.SanitizedCloneURL, -1) -} diff --git a/server/legacy/events/working_dir_iterator.go b/server/legacy/events/working_dir_iterator.go deleted file mode 100644 index dee523875..000000000 --- a/server/legacy/events/working_dir_iterator.go +++ /dev/null @@ -1,110 +0,0 @@ -package events - -import ( - "fmt" - "io/fs" - "os" - "path/filepath" - "strconv" - "strings" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -type WorkDirIterator interface { - ListCurrentWorkingDirPulls() ([]models.PullRequest, error) -} - -type FileWorkDirIterator struct { - Log logging.Logger - DataDir string - GithubClient vcs.GithubPullRequestGetter - EventParser EventParsing -} - -func NewFileWorkDirIterator( - githubClient vcs.GithubPullRequestGetter, - eventParser EventParsing, - dataDir string, - log logging.Logger, -) *FileWorkDirIterator { - return &FileWorkDirIterator{ - Log: log, - DataDir: dataDir, - EventParser: eventParser, - GithubClient: githubClient, - } -} - -func (f *FileWorkDirIterator) ListCurrentWorkingDirPulls() ([]models.PullRequest, error) { - var results []models.PullRequest - - baseFilePath := filepath.Join(f.DataDir, workingDirPrefix) - - if _, err := os.Stat(baseFilePath); os.IsNotExist(err) { - f.Log.Warn(fmt.Sprintf("cannot list working dirs, %s doesn't exist", baseFilePath)) - return results, nil - } - - err := filepath.WalkDir(baseFilePath, func(path string, d fs.DirEntry, err error) error { - if err != nil { - return err - } - - relativePath, err := filepath.Rel(baseFilePath, path) - - if err != nil { - return errors.Wrap(err, "finding relative path") - } - - pathComponents := strings.Split(relativePath, string(os.PathSeparator)) - - if len(pathComponents) < 3 { - return nil - } - - ownerName := pathComponents[0] - repoName := pathComponents[1] - pullNum, err := strconv.Atoi(pathComponents[2]) - - if err != nil { - return errors.Wrapf(err, "parsing pull num %s", pathComponents[2]) - } - - pull, err := f.GithubClient.GetPullRequestFromName(repoName, ownerName, pullNum) - - if err != nil { - // let's just continue if we can't find the pull, this is rare and has happened in situations - // where the repository is renamed - notFoundErr, ok := err.(*vcs.PullRequestNotFound) - - if !ok { - return errors.Wrapf(err, "fetching pull for %s", filepath.Join(pathComponents...)) - } - - f.Log.Warn(fmt.Sprintf("%s/%s/#%d not found, %s", ownerName, repoName, pullNum, notFoundErr)) - - return fs.SkipDir - } - - internalPull, _, _, err := f.EventParser.ParseGithubPull(pull) - - if err != nil { - return errors.Wrap(err, "parsing pull request") - } - - results = append(results, internalPull) - - // if we've made it here we don't want to traverse further in the file tree - return fs.SkipDir - }) - - if err != nil { - return results, errors.Wrap(err, "listing current working dir prs") - } - - return results, nil -} diff --git a/server/legacy/events/working_dir_iterator_test.go b/server/legacy/events/working_dir_iterator_test.go deleted file mode 100644 index 84967152b..000000000 --- a/server/legacy/events/working_dir_iterator_test.go +++ /dev/null @@ -1,181 +0,0 @@ -package events_test - -import ( - "errors" - "os" - 
"path/filepath" - "testing" - - "github.com/google/go-github/v45/github" - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events" - eventmocks "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - vcsmocks "github.com/runatlantis/atlantis/server/legacy/events/vcs/mocks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/stretchr/testify/assert" -) - -func TestListCurrentWorkingDirPulls(t *testing.T) { - mockGHClient := vcsmocks.NewMockGithubPullRequestGetter() - mockEventParser := eventmocks.NewMockEventParsing() - log := logging.NewNoopCtxLogger(t) - - t.Run("repos subdir not exist", func(t *testing.T) { - baseDir := t.TempDir() - - subject := &events.FileWorkDirIterator{ - Log: log, - GithubClient: mockGHClient, - EventParser: mockEventParser, - DataDir: baseDir, - } - - pulls, err := subject.ListCurrentWorkingDirPulls() - - assert.Nil(t, err) - assert.Empty(t, pulls) - }) - - t.Run("pull not found", func(t *testing.T) { - baseDir := t.TempDir() - - _ = os.MkdirAll(filepath.Join(baseDir, "repos", "nish", "repo1", "1", "default"), os.ModePerm) - - pullNotFound := &vcs.PullRequestNotFound{Err: errors.New("error")} - - pegomock.When(mockGHClient.GetPullRequestFromName("repo1", "nish", 1)).ThenReturn(nil, pullNotFound) - - subject := &events.FileWorkDirIterator{ - Log: log, - GithubClient: mockGHClient, - EventParser: mockEventParser, - DataDir: baseDir, - } - - pulls, err := subject.ListCurrentWorkingDirPulls() - - assert.NoError(t, err) - assert.Empty(t, pulls) - }) - - t.Run("1 pull returned", func(t *testing.T) { - pullNum := 1 - - expectedGithubPull := &github.PullRequest{ - Number: &pullNum, - } - expectedInternalPull := models.PullRequest{ - Num: pullNum, - } - - baseDir := t.TempDir() - - _ = os.MkdirAll(filepath.Join(baseDir, "repos", "nish", "repo1", "1", "default"), os.ModePerm) - - pegomock.When(mockGHClient.GetPullRequestFromName("repo1", "nish", 1)).ThenReturn(expectedGithubPull, nil) - pegomock.When(mockEventParser.ParseGithubPull(expectedGithubPull)).ThenReturn(expectedInternalPull, models.Repo{}, models.Repo{}, nil) - - subject := &events.FileWorkDirIterator{ - Log: log, - GithubClient: mockGHClient, - EventParser: mockEventParser, - DataDir: baseDir, - } - - pulls, err := subject.ListCurrentWorkingDirPulls() - - assert.Nil(t, err) - assert.Len(t, pulls, 1) - assert.Contains(t, pulls, expectedInternalPull) - }) - - t.Run("2 pulls same repo", func(t *testing.T) { - pullNum1 := 1 - - expectedGithubPull1 := &github.PullRequest{ - Number: &pullNum1, - } - expectedInternalPull1 := models.PullRequest{ - Num: pullNum1, - } - - pullNum2 := 2 - - expectedGithubPull2 := &github.PullRequest{ - Number: &pullNum2, - } - expectedInternalPull2 := models.PullRequest{ - Num: pullNum2, - } - - baseDir := t.TempDir() - - _ = os.MkdirAll(filepath.Join(baseDir, "repos", "nish", "repo1", "1", "default"), os.ModePerm) - _ = os.MkdirAll(filepath.Join(baseDir, "repos", "nish", "repo1", "2", "default"), os.ModePerm) - - pegomock.When(mockGHClient.GetPullRequestFromName("repo1", "nish", pullNum1)).ThenReturn(expectedGithubPull1, nil) - pegomock.When(mockGHClient.GetPullRequestFromName("repo1", "nish", pullNum2)).ThenReturn(expectedGithubPull2, nil) - pegomock.When(mockEventParser.ParseGithubPull(expectedGithubPull1)).ThenReturn(expectedInternalPull1, models.Repo{}, models.Repo{}, nil) - 
pegomock.When(mockEventParser.ParseGithubPull(expectedGithubPull2)).ThenReturn(expectedInternalPull2, models.Repo{}, models.Repo{}, nil) - - subject := &events.FileWorkDirIterator{ - Log: log, - GithubClient: mockGHClient, - EventParser: mockEventParser, - DataDir: baseDir, - } - - pulls, err := subject.ListCurrentWorkingDirPulls() - - assert.Nil(t, err) - assert.Len(t, pulls, 2) - assert.Contains(t, pulls, expectedInternalPull1) - assert.Contains(t, pulls, expectedInternalPull2) - }) - - t.Run("2 pulls multiple repos", func(t *testing.T) { - pullNum1 := 1 - - expectedGithubPull1 := &github.PullRequest{ - Number: &pullNum1, - } - expectedInternalPull1 := models.PullRequest{ - Num: pullNum1, - } - - pullNum2 := 2 - - expectedGithubPull2 := &github.PullRequest{ - Number: &pullNum2, - } - expectedInternalPull2 := models.PullRequest{ - Num: pullNum2, - } - - baseDir := t.TempDir() - - _ = os.MkdirAll(filepath.Join(baseDir, "repos", "nish", "repo1", "1", "default"), os.ModePerm) - _ = os.MkdirAll(filepath.Join(baseDir, "repos", "nish", "repo2", "2", "default"), os.ModePerm) - - pegomock.When(mockGHClient.GetPullRequestFromName("repo1", "nish", pullNum1)).ThenReturn(expectedGithubPull1, nil) - pegomock.When(mockGHClient.GetPullRequestFromName("repo2", "nish", pullNum2)).ThenReturn(expectedGithubPull2, nil) - pegomock.When(mockEventParser.ParseGithubPull(expectedGithubPull1)).ThenReturn(expectedInternalPull1, models.Repo{}, models.Repo{}, nil) - pegomock.When(mockEventParser.ParseGithubPull(expectedGithubPull2)).ThenReturn(expectedInternalPull2, models.Repo{}, models.Repo{}, nil) - - subject := &events.FileWorkDirIterator{ - Log: log, - GithubClient: mockGHClient, - EventParser: mockEventParser, - DataDir: baseDir, - } - - pulls, err := subject.ListCurrentWorkingDirPulls() - - assert.Nil(t, err) - assert.Len(t, pulls, 2) - assert.Contains(t, pulls, expectedInternalPull1) - assert.Contains(t, pulls, expectedInternalPull2) - }) -} diff --git a/server/legacy/events/working_dir_locker.go b/server/legacy/events/working_dir_locker.go deleted file mode 100644 index 7642e0ca2..000000000 --- a/server/legacy/events/working_dir_locker.go +++ /dev/null @@ -1,130 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events - -import ( - "fmt" - "strings" - "sync" -) - -//go:generate pegomock generate --use-experimental-model-gen --package mocks -o mocks/mock_working_dir_locker.go WorkingDirLocker - -// WorkingDirLocker is used to prevent multiple commands from executing -// at the same time for a single repo, pull, and workspace. We need to prevent -// this from happening because a specific repo/pull/workspace has a single workspace -// on disk and we haven't written Atlantis (yet) to handle concurrent execution -// within this workspace. -type WorkingDirLocker interface { - // TryLock tries to acquire a lock for this repo, workspace and pull. 
- // It returns a function that should be used to unlock the workspace and - // an error if the workspace is already locked. The error is expected to - // be printed to the pull request. - TryLock(repoFullName string, pullNum int, workspace string) (func(), error) - // TryLockPull tries to acquire a lock for all the workspaces in this repo - // and pull. - // It returns a function that should be used to unlock the workspace and - // an error if the workspace is already locked. The error is expected to - // be printed to the pull request. - TryLockPull(repoFullName string, pullNum int) (func(), error) -} - -// DefaultWorkingDirLocker implements WorkingDirLocker. -type DefaultWorkingDirLocker struct { - // mutex prevents against multiple threads calling functions on this struct - // concurrently. It's only used for entry/exit to each function. - mutex sync.Mutex - // locks is a list of the keys that are locked. We then use prefix - // matching to determine if something is locked. It's naive but that's okay - // because there won't be many locks at one time. - locks []string -} - -// NewDefaultWorkingDirLocker is a constructor. -func NewDefaultWorkingDirLocker() *DefaultWorkingDirLocker { - return &DefaultWorkingDirLocker{} -} - -func (d *DefaultWorkingDirLocker) TryLockPull(repoFullName string, pullNum int) (func(), error) { - d.mutex.Lock() - defer d.mutex.Unlock() - - pullKey := d.pullKey(repoFullName, pullNum) - for _, l := range d.locks { - if l == pullKey || strings.HasPrefix(l, pullKey+"/") { - return func() {}, fmt.Errorf("The Atlantis working dir is currently locked by another" + - " command that is running for this pull request.\n" + - "Wait until the previous command is complete and try again.") - } - } - d.locks = append(d.locks, pullKey) - return func() { - d.UnlockPull(repoFullName, pullNum) - }, nil -} - -func (d *DefaultWorkingDirLocker) TryLock(repoFullName string, pullNum int, workspace string) (func(), error) { - d.mutex.Lock() - defer d.mutex.Unlock() - - pullKey := d.pullKey(repoFullName, pullNum) - workspaceKey := d.workspaceKey(repoFullName, pullNum, workspace) - for _, l := range d.locks { - if l == pullKey || l == workspaceKey { - return func() {}, fmt.Errorf("The %s workspace is currently locked by another"+ - " command that is running for this pull request.\n"+ - "Wait until the previous command is complete and try again.", workspace) - } - } - d.locks = append(d.locks, workspaceKey) - return func() { - d.unlock(repoFullName, pullNum, workspace) - }, nil -} - -// Unlock unlocks the workspace for this pull. -func (d *DefaultWorkingDirLocker) unlock(repoFullName string, pullNum int, workspace string) { - d.mutex.Lock() - defer d.mutex.Unlock() - - workspaceKey := d.workspaceKey(repoFullName, pullNum, workspace) - d.removeLock(workspaceKey) -} - -// Unlock unlocks all workspaces for this pull. 
-func (d *DefaultWorkingDirLocker) UnlockPull(repoFullName string, pullNum int) { - d.mutex.Lock() - defer d.mutex.Unlock() - - pullKey := d.pullKey(repoFullName, pullNum) - d.removeLock(pullKey) -} - -func (d *DefaultWorkingDirLocker) removeLock(key string) { - var newLocks []string - for _, l := range d.locks { - if l != key { - newLocks = append(newLocks, l) - } - } - d.locks = newLocks -} - -func (d *DefaultWorkingDirLocker) workspaceKey(repo string, pull int, workspace string) string { - return fmt.Sprintf("%s/%s", d.pullKey(repo, pull), workspace) -} - -func (d *DefaultWorkingDirLocker) pullKey(repo string, pull int) string { - return fmt.Sprintf("%s/%d", repo, pull) -} diff --git a/server/legacy/events/working_dir_locker_test.go b/server/legacy/events/working_dir_locker_test.go deleted file mode 100644 index 736b2f681..000000000 --- a/server/legacy/events/working_dir_locker_test.go +++ /dev/null @@ -1,198 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package events_test - -import ( - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events" - . "github.com/runatlantis/atlantis/testing" -) - -var repo = "repo/owner" -var workspace = "default" - -func TestTryLock(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - - // The first lock should succeed. - unlockFn, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - - // Now another lock for the same repo, workspace, and pull should fail - _, err = locker.TryLock(repo, 1, workspace) - ErrEquals(t, "The default workspace is currently locked by another"+ - " command that is running for this pull request.\n"+ - "Wait until the previous command is complete and try again.", err) - - // Unlock should work. 
- unlockFn() - _, err = locker.TryLock(repo, 1, workspace) - Ok(t, err) -} - -func TestTryLockDifferentWorkspaces(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - - t.Log("a lock for the same repo and pull but different workspace should succeed") - _, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - _, err = locker.TryLock(repo, 1, "new-workspace") - Ok(t, err) - - t.Log("and both should now be locked") - _, err = locker.TryLock(repo, 1, workspace) - Assert(t, err != nil, "exp err") - _, err = locker.TryLock(repo, 1, "new-workspace") - Assert(t, err != nil, "exp err") -} - -func TestTryLockDifferentRepo(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - - t.Log("a lock for a different repo but the same workspace and pull should succeed") - _, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - newRepo := "owner/newrepo" - _, err = locker.TryLock(newRepo, 1, workspace) - Ok(t, err) - - t.Log("and both should now be locked") - _, err = locker.TryLock(repo, 1, workspace) - ErrContains(t, "currently locked", err) - _, err = locker.TryLock(newRepo, 1, workspace) - ErrContains(t, "currently locked", err) -} - -func TestTryLockDifferentPulls(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - - t.Log("a lock for a different pull but the same repo and workspace should succeed") - _, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - newPull := 2 - _, err = locker.TryLock(repo, newPull, workspace) - Ok(t, err) - - t.Log("and both should now be locked") - _, err = locker.TryLock(repo, 1, workspace) - ErrContains(t, "currently locked", err) - _, err = locker.TryLock(repo, newPull, workspace) - ErrContains(t, "currently locked", err) -} - -func TestUnlock(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - - t.Log("unlocking should work") - unlockFn, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - unlockFn() - _, err = locker.TryLock(repo, 1, workspace) - Ok(t, err) -} - -func TestUnlockDifferentWorkspaces(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - t.Log("unlocking should work for different workspaces") - unlockFn1, err1 := locker.TryLock(repo, 1, workspace) - Ok(t, err1) - unlockFn2, err2 := locker.TryLock(repo, 1, "new-workspace") - Ok(t, err2) - unlockFn1() - unlockFn2() - - _, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - _, err = locker.TryLock(repo, 1, "new-workspace") - Ok(t, err) -} - -func TestUnlockDifferentRepos(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - t.Log("unlocking should work for different repos") - unlockFn1, err1 := locker.TryLock(repo, 1, workspace) - Ok(t, err1) - newRepo := "owner/newrepo" - unlockFn2, err2 := locker.TryLock(newRepo, 1, workspace) - Ok(t, err2) - unlockFn1() - unlockFn2() - - _, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - _, err = locker.TryLock(newRepo, 1, workspace) - Ok(t, err) -} - -func TestUnlockDifferentPulls(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - t.Log("unlocking should work for different pulls") - unlockFn1, err1 := locker.TryLock(repo, 1, workspace) - Ok(t, err1) - newPull := 2 - unlockFn2, err2 := locker.TryLock(repo, newPull, workspace) - Ok(t, err2) - unlockFn1() - unlockFn2() - - _, err := locker.TryLock(repo, 1, workspace) - Ok(t, err) - _, err = locker.TryLock(repo, newPull, workspace) - Ok(t, err) -} - -func TestLockPull(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - unlock, err := locker.TryLockPull("owner/repo", 1) - Ok(t, err) - - // Now a 
lock for the same pull or for a workspace should fail. - _, err = locker.TryLockPull("owner/repo", 1) - Assert(t, err != nil, "exp err") - _, err = locker.TryLock("owner/repo", 1, "workspace") - Assert(t, err != nil, "exp err") - - // Lock for a different pull and workspace should succeed. - _, err = locker.TryLockPull("owner/repo", 2) - Ok(t, err) - _, err = locker.TryLock("owner/repo", 3, "workspace") - Ok(t, err) - - // After unlocking, should be able to get a pull lock. - unlock() - unlock, err = locker.TryLockPull("owner/repo", 1) - Ok(t, err) - - // If we unlock that too, should be able to get the workspace lock. - unlock() - _, err = locker.TryLock("owner/repo", 1, "workspace") - Ok(t, err) - unlock() -} - -// If the workspace was locked first, we shouldn't be able to get the pull lock. -func TestLockPull_WorkspaceFirst(t *testing.T) { - locker := events.NewDefaultWorkingDirLocker() - unlock, err := locker.TryLock("owner/repo", 1, "workspace") - Ok(t, err) - - _, err = locker.TryLockPull("owner/repo", 1) - Assert(t, err != nil, "exp err") - - // After unlocking the workspace, should be able to get the lock. - unlock() - _, err = locker.TryLockPull("owner/repo", 1) - Ok(t, err) -} diff --git a/server/legacy/http/request.go b/server/legacy/http/request.go deleted file mode 100644 index 101882f88..000000000 --- a/server/legacy/http/request.go +++ /dev/null @@ -1,88 +0,0 @@ -package http - -import ( - "bytes" - "context" - "io" - "net/http" - - "github.com/pkg/errors" -) - -// BufferedRequest wraps an http request and contains a buffer of the request body, -// in addition to safe access to the request body and underlying request. -// BufferedRequest does not provide access to the original http request and instead -// vends copies of it. This is to ensure that the original request body can be read -// multiple times and removes the need to think about this from the consumer end. -// -// Note: the OG request body must not have been closed before construction of this object. -// -// Since this is a server request we do not need to close the original Body as per the documentation: -// -// " For server requests, the Request Body is always non-nil -// -// but will return EOF immediately when no body is present. -// The Server will close the request body. The ServeHTTP -// Handler does not need to. " -// -// Note: This should not be used for client requests at this time. -type BufferedRequest struct { - request *http.Request - body *bytes.Buffer -} - -func NewBufferedRequest(r *http.Request) (*BufferedRequest, error) { - body, err := getBody(r) - if err != nil { - return nil, errors.Wrap(err, "reading request body") - } - - wrapped := &BufferedRequest{ - // clone the request because we've already read and closed the body of the OG. - request: clone(r.Context(), r, body.Bytes()), - body: body, - } - - return wrapped, nil -} - -// GetHeader gets a specific header given a key -func (r *BufferedRequest) GetHeader(key string) string { - return r.request.Header.Get(key) -} - -// GetBody returns a copy of the request body -func (r *BufferedRequest) GetBody() (io.ReadCloser, error) { - copy := bytes.NewBuffer(r.body.Bytes()) - return io.NopCloser(copy), nil -} - -// GetRequest returns a clone of the underlying request in this struct -// Note: reading the request body directly from the returned object, will close it -// it's recommended to always be reading the body from GetBody instead. 
-func (r *BufferedRequest) GetRequest() *http.Request { - return r.GetRequestWithContext(r.request.Context()) -} - -func (r *BufferedRequest) GetRequestWithContext(ctx context.Context) *http.Request { - return clone(ctx, r.request, r.body.Bytes()) -} - -// Clone's a request and provides a new BufferedRequest -func clone(ctx context.Context, request *http.Request, body []byte) *http.Request { - clone := request.Clone(ctx) - - // create one copy for underlying request and one for the new wrapper - clone.Body = io.NopCloser(bytes.NewBuffer(body)) - return clone -} - -func getBody(request *http.Request) (*bytes.Buffer, error) { - var b bytes.Buffer - _, err := b.ReadFrom(request.Body) - if err != nil { - return nil, err - } - - return &b, nil -} diff --git a/server/legacy/http/request_test.go b/server/legacy/http/request_test.go deleted file mode 100644 index 0073dc185..000000000 --- a/server/legacy/http/request_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package http_test - -import ( - "bytes" - "io" - "net/http" - "testing" - - httputil "github.com/runatlantis/atlantis/server/legacy/http" - "github.com/stretchr/testify/assert" -) - -func TestGetBody(t *testing.T) { - requestBody := "body" - rawRequest, err := http.NewRequest(http.MethodPost, "", io.NopCloser(bytes.NewBuffer([]byte(requestBody)))) - assert.NoError(t, err) - - subject, err := httputil.NewBufferedRequest(rawRequest) - assert.NoError(t, err) - - // read first time - body, err := subject.GetBody() - assert.NoError(t, err) - - payload1, err := io.ReadAll(body) - assert.NoError(t, err) - - // read second time - body, err = subject.GetBody() - assert.NoError(t, err) - - payload2, err := io.ReadAll(body) - assert.NoError(t, err) - - assert.Equal(t, payload1, payload2) -} - -func TestGetRequest(t *testing.T) { - requestBody := "body" - rawRequest, err := http.NewRequest(http.MethodPost, "", io.NopCloser(bytes.NewBuffer([]byte(requestBody)))) - assert.NoError(t, err) - - subject1, err := httputil.NewBufferedRequest(rawRequest) - assert.NoError(t, err) - - // read from raw request first - body := subject1.GetRequest().Body - payload1, err := io.ReadAll(body) - assert.NoError(t, err) - - // read from wrapper next - body, err = subject1.GetBody() - assert.NoError(t, err) - - payload2, err := io.ReadAll(body) - assert.NoError(t, err) - - assert.Equal(t, payload1, payload2) -} diff --git a/server/legacy/instrumentation/pre_workflow_hook.go b/server/legacy/instrumentation/pre_workflow_hook.go deleted file mode 100644 index 523fc05e1..000000000 --- a/server/legacy/instrumentation/pre_workflow_hook.go +++ /dev/null @@ -1,34 +0,0 @@ -package instrumentation - -import ( - "context" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/logging/fields" - "github.com/runatlantis/atlantis/server/metrics" -) - -type PreWorkflowHookRunner struct { - events.PreWorkflowHooksCommandRunner - Logger logging.Logger -} - -func (r *PreWorkflowHookRunner) RunPreHooks(ctx context.Context, cmdCtx *command.Context) error { - scope := cmdCtx.Scope.SubScope("pre_workflow_hook") - - executionSuccess := scope.Counter(metrics.ExecutionSuccessMetric) - executionError := scope.Counter(metrics.ExecutionErrorMetric) - - err := r.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cmdCtx) - if err != nil { - executionError.Inc(1) - return err - } - - //TODO: thread context and use related logging methods. 
- r.Logger.InfoContext(ctx, "pre-workflow-hook success", fields.PullRequest(cmdCtx.Pull)) - executionSuccess.Inc(1) - return nil -} diff --git a/server/legacy/jobs/job_store.go b/server/legacy/jobs/job_store.go deleted file mode 100644 index ddcb6d1b5..000000000 --- a/server/legacy/jobs/job_store.go +++ /dev/null @@ -1,177 +0,0 @@ -package jobs - -import ( - "context" - "fmt" - "sync" - - "github.com/uber-go/tally/v4" - - "github.com/pkg/errors" -) - -type JobStatus int - -const ( - Processing JobStatus = iota - Complete -) - -type Job struct { - Output []string - Status JobStatus -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_job_store.go JobStore - -type JobStore interface { - // Gets the job from the in memory buffer, if available and if not, reaches to the storage backend - Get(ctx context.Context, jobID string) (*Job, error) - - // Appends a given string to a job's output if the job is not complete yet - AppendOutput(jobID string, output string) error - - // Sets a job status to complete and triggers any associated workflow, - // e.g: if the status is complete, the job is flushed to the associated storage backend - SetJobCompleteStatus(ctx context.Context, jobID string, status JobStatus) error - - // Removes a job from the store - RemoveJob(jobID string) -} - -func NewJobStore(storageBackend StorageBackend, scope tally.Scope) JobStore { - return &StorageBackendJobStore{ - JobStore: &InMemoryJobStore{ - jobs: map[string]*Job{}, - }, - storageBackend: storageBackend, - scope: scope, - } -} - -// Setup job store for testing -func NewTestJobStore(storageBackend StorageBackend, jobs map[string]*Job) JobStore { - return &StorageBackendJobStore{ - JobStore: &InMemoryJobStore{ - jobs: jobs, - }, - storageBackend: storageBackend, - scope: tally.NewTestScope("test_jobstore", map[string]string{}), - } -} - -// Memory Job store deals with handling jobs in memory -type InMemoryJobStore struct { - jobs map[string]*Job - lock sync.RWMutex -} - -func (m *InMemoryJobStore) Get(ctx context.Context, jobID string) (*Job, error) { - m.lock.RLock() - defer m.lock.RUnlock() - - if m.jobs[jobID] == nil { - return nil, nil - } - return m.jobs[jobID], nil -} - -func (m *InMemoryJobStore) AppendOutput(jobID string, output string) error { - m.lock.Lock() - defer m.lock.Unlock() - - // Create new job if job dne - if m.jobs[jobID] == nil { - m.jobs[jobID] = &Job{} - } - - if m.jobs[jobID].Status == Complete { - return fmt.Errorf("cannot append to a complete job") - } - - updatedOutput := append(m.jobs[jobID].Output, output) - m.jobs[jobID].Output = updatedOutput - return nil -} - -func (m *InMemoryJobStore) SetJobCompleteStatus(ctx context.Context, jobID string, status JobStatus) error { - m.lock.Lock() - defer m.lock.Unlock() - - // Error out when job dne - if m.jobs[jobID] == nil { - return fmt.Errorf("job: %s does not exist", jobID) - } - - // Error when job is already set to complete - if job := m.jobs[jobID]; job.Status == Complete { - return fmt.Errorf("job: %s is already complete", jobID) - } - - job := m.jobs[jobID] - job.Status = Complete - return nil -} - -func (m *InMemoryJobStore) RemoveJob(jobID string) { - m.lock.Lock() - defer m.lock.Unlock() - - delete(m.jobs, jobID) -} - -// Storage backend job store deals with handling jobs in backend storage -type StorageBackendJobStore struct { - JobStore - storageBackend StorageBackend - scope tally.Scope -} - -func (s *StorageBackendJobStore) Get(ctx context.Context, jobID string) (*Job, error) { - // Get 
job from memory - if jobInMem, _ := s.JobStore.Get(ctx, jobID); jobInMem != nil { - return jobInMem, nil - } - - // Get from storage backend if not in memory - logs, err := s.storageBackend.Read(ctx, jobID) - if err != nil { - return nil, errors.Wrap(err, "reading from backend storage") - } - - return &Job{ - Output: logs, - Status: Complete, - }, nil -} - -func (s StorageBackendJobStore) AppendOutput(jobID string, output string) error { - return s.JobStore.AppendOutput(jobID, output) -} - -func (s *StorageBackendJobStore) SetJobCompleteStatus(ctx context.Context, jobID string, status JobStatus) error { - if err := s.JobStore.SetJobCompleteStatus(ctx, jobID, status); err != nil { - return err - } - - job, err := s.JobStore.Get(ctx, jobID) - if err != nil || job == nil { - return errors.Wrapf(err, "retrieving job: %s from memory store", jobID) - } - subScope := s.scope.SubScope("set_job_complete_status") - subScope.Counter("write_attempt").Inc(1) - ok, err := s.storageBackend.Write(ctx, jobID, job.Output) - if err != nil { - return errors.Wrapf(err, "persisting job: %s", jobID) - } - - // Remove from memory if successfully persisted - if ok { - s.JobStore.RemoveJob(jobID) - } - return nil -} - -func (s *StorageBackendJobStore) RemoveJob(jobID string) { - s.JobStore.RemoveJob(jobID) -} diff --git a/server/legacy/jobs/job_store_test.go b/server/legacy/jobs/job_store_test.go deleted file mode 100644 index 7e003ad6e..000000000 --- a/server/legacy/jobs/job_store_test.go +++ /dev/null @@ -1,214 +0,0 @@ -package jobs_test - -import ( - "context" - "fmt" - "testing" - - "github.com/uber-go/tally/v4" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/jobs" - "github.com/runatlantis/atlantis/server/legacy/jobs/mocks" - "github.com/runatlantis/atlantis/server/legacy/jobs/mocks/matchers" - "github.com/stretchr/testify/assert" - - . "github.com/petergtz/pegomock" - . 
"github.com/runatlantis/atlantis/testing" -) - -func TestJobStore_Get(t *testing.T) { - t.Run("load from memory", func(t *testing.T) { - // Setup job store - storageBackend := mocks.NewMockStorageBackend() - expectedJob := &jobs.Job{ - Output: []string{"a"}, - Status: jobs.Complete, - } - jobsMap := make(map[string]*jobs.Job) - jobsMap["1234"] = expectedJob - jobStore := jobs.NewTestJobStore(storageBackend, jobsMap) - - // Assert job - gotJob, err := jobStore.Get(context.Background(), "1234") - assert.NoError(t, err) - assert.Equal(t, expectedJob.Output, gotJob.Output) - assert.Equal(t, expectedJob.Status, gotJob.Status) - }) - - t.Run("load from storage backend when not in memory", func(t *testing.T) { - // Setup job store - storageBackend := mocks.NewMockStorageBackend() - expectedLogs := []string{"a", "b"} - expectedJob := jobs.Job{ - Output: expectedLogs, - Status: jobs.Complete, - } - When(storageBackend.Read(matchers.AnyContextContext(), AnyString())).ThenReturn(expectedLogs, nil) - - // Assert job - jobStore := jobs.NewJobStore(storageBackend, tally.NewTestScope("test", map[string]string{})) - gotJob, err := jobStore.Get(context.Background(), "1234") - assert.NoError(t, err) - assert.Equal(t, expectedJob.Output, gotJob.Output) - assert.Equal(t, expectedJob.Status, gotJob.Status) - }) - - t.Run("error when reading from storage backend fails", func(t *testing.T) { - // Setup job store - storageBackend := mocks.NewMockStorageBackend() - expectedError := fmt.Errorf("reading from backend storage: error") - When(storageBackend.Read(matchers.AnyContextContext(), AnyString())).ThenReturn([]string{}, errors.New("error")) - - // Assert job - jobStore := jobs.NewJobStore(storageBackend, tally.NewTestScope("test", map[string]string{})) - gotJob, err := jobStore.Get(context.Background(), "1234") - assert.Empty(t, gotJob) - assert.EqualError(t, expectedError, err.Error()) - }) -} - -func TestJobStore_AppendOutput(t *testing.T) { - t.Run("append output when new job", func(t *testing.T) { - // Setup job store - storageBackend := mocks.NewMockStorageBackend() - jobStore := jobs.NewJobStore(storageBackend, tally.NewTestScope("test", map[string]string{})) - jobID := "1234" - output := "Test log message" - - err := jobStore.AppendOutput(jobID, output) - assert.NoError(t, err) - - // Assert job - job, err := jobStore.Get(context.Background(), jobID) - Ok(t, err) - assert.Equal(t, job.Output, []string{output}) - assert.Equal(t, job.Status, jobs.Processing) - }) - - t.Run("append output when existing job", func(t *testing.T) { - // Setup job store - storageBackend := mocks.NewMockStorageBackend() - jobStore := jobs.NewJobStore(storageBackend, tally.NewTestScope("test", map[string]string{})) - jobID := "1234" - output := []string{"Test log message", "Test log message 2"} - - err := jobStore.AppendOutput(jobID, output[0]) - assert.NoError(t, err) - - err = jobStore.AppendOutput(jobID, output[1]) - assert.NoError(t, err) - - // Assert job - job, err := jobStore.Get(context.Background(), jobID) - Ok(t, err) - assert.Equal(t, job.Output, output) - assert.Equal(t, job.Status, jobs.Processing) - }) - - t.Run("error when job status complete", func(t *testing.T) { - // Setup job store - storageBackend := mocks.NewMockStorageBackend() - jobID := "1234" - job := &jobs.Job{ - Output: []string{"a"}, - Status: jobs.Complete, - } - - // Add complete to job in store - jobsMap := make(map[string]*jobs.Job) - jobsMap[jobID] = job - jobStore := jobs.NewTestJobStore(storageBackend, jobsMap) - - // Assert error - err := 
jobStore.AppendOutput(jobID, "test message") - assert.Error(t, err) - }) -} - -func TestJobStore_UpdateJobStatus(t *testing.T) { - t.Run("retain job in memory when persist fails", func(t *testing.T) { - // Create new job and add it to store - jobID := "1234" - job := &jobs.Job{ - Output: []string{"a"}, - Status: jobs.Processing, - } - jobsMap := make(map[string]*jobs.Job) - jobsMap[jobID] = job - storageBackendErr := fmt.Errorf("random error") - expecterErr := errors.Wrapf(storageBackendErr, "persisting job: %s", jobID) - - // Setup storage backend - storageBackend := mocks.NewMockStorageBackend() - When(storageBackend.Write(matchers.AnyContextContext(), AnyString(), matchers.AnySliceOfString())).ThenReturn(false, storageBackendErr) - jobStore := jobs.NewTestJobStore(storageBackend, jobsMap) - err := jobStore.SetJobCompleteStatus(context.Background(), jobID, jobs.Complete) - - // Assert storage backend error - assert.EqualError(t, err, expecterErr.Error()) - - // Assert the job is in memory - jobInMem, err := jobStore.Get(context.Background(), jobID) - Ok(t, err) - assert.Equal(t, jobInMem.Output, job.Output) - assert.Equal(t, job.Status, jobs.Complete) - }) - - t.Run("retain job in memory when storage backend not configured", func(t *testing.T) { - // Create new job and add it to store - jobID := "1234" - job := &jobs.Job{ - Output: []string{"a"}, - Status: jobs.Processing, - } - jobsMap := make(map[string]*jobs.Job) - jobsMap[jobID] = job - - // Setup storage backend - storageBackend := &jobs.NoopStorageBackend{} - jobStore := jobs.NewTestJobStore(storageBackend, jobsMap) - err := jobStore.SetJobCompleteStatus(context.Background(), jobID, jobs.Complete) - - assert.Nil(t, err) - - // Assert the job is in memory - jobInMem, err := jobStore.Get(context.Background(), jobID) - Ok(t, err) - assert.Equal(t, jobInMem.Output, job.Output) - assert.Equal(t, job.Status, jobs.Complete) - }) - - t.Run("delete from memory when persist succeeds", func(t *testing.T) { - // Create new job and add it to store - jobID := "1234" - job := &jobs.Job{ - Output: []string{"a"}, - Status: jobs.Processing, - } - jobsMap := make(map[string]*jobs.Job) - jobsMap[jobID] = job - - // Setup storage backend - storageBackend := mocks.NewMockStorageBackend() - When(storageBackend.Write(matchers.AnyContextContext(), AnyString(), matchers.AnySliceOfString())).ThenReturn(true, nil) - jobStore := jobs.NewTestJobStore(storageBackend, jobsMap) - err := jobStore.SetJobCompleteStatus(context.Background(), jobID, jobs.Complete) - assert.Nil(t, err) - - When(storageBackend.Read(context.Background(), jobID)).ThenReturn([]string{}, nil) - gotJob, err := jobStore.Get(context.Background(), jobID) - assert.Nil(t, err) - assert.Empty(t, gotJob.Output) - }) - - t.Run("error when job does not exist", func(t *testing.T) { - storageBackend := mocks.NewMockStorageBackend() - jobStore := jobs.NewJobStore(storageBackend, tally.NewTestScope("test", map[string]string{})) - jobID := "1234" - expectedErrString := fmt.Sprintf("job: %s does not exist", jobID) - - err := jobStore.SetJobCompleteStatus(context.Background(), jobID, jobs.Complete) - assert.EqualError(t, err, expectedErrString) - }) -} diff --git a/server/legacy/jobs/mocks/matchers/chan_of_string.go b/server/legacy/jobs/mocks/matchers/chan_of_string.go deleted file mode 100644 index e1bfee572..000000000 --- a/server/legacy/jobs/mocks/matchers/chan_of_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnyChanOfString() chan string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(chan string))(nil)).Elem())) - var nullValue chan string - return nullValue -} - -func EqChanOfString(value chan string) chan string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue chan string - return nullValue -} - -func NotEqChanOfString(value chan string) chan string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue chan string - return nullValue -} - -func ChanOfStringThat(matcher pegomock.ArgumentMatcher) chan string { - pegomock.RegisterMatcher(matcher) - var nullValue chan string - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/command_name.go b/server/legacy/jobs/mocks/matchers/command_name.go deleted file mode 100644 index 35fdfcc41..000000000 --- a/server/legacy/jobs/mocks/matchers/command_name.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyCommandName() command.Name { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Name))(nil)).Elem())) - var nullValue command.Name - return nullValue -} - -func EqCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func NotEqCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func CommandNameThat(matcher pegomock.ArgumentMatcher) command.Name { - pegomock.RegisterMatcher(matcher) - var nullValue command.Name - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/command_projectcontext.go b/server/legacy/jobs/mocks/matchers/command_projectcontext.go deleted file mode 100644 index 8722b7ba3..000000000 --- a/server/legacy/jobs/mocks/matchers/command_projectcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - command "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyCommandProjectContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqCommandProjectContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func CommandProjectContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/context_context.go b/server/legacy/jobs/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/jobs/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/fmt_stringer.go b/server/legacy/jobs/mocks/matchers/fmt_stringer.go deleted file mode 100644 index f68c94720..000000000 --- a/server/legacy/jobs/mocks/matchers/fmt_stringer.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - fmt "fmt" -) - -func AnyFmtStringer() fmt.Stringer { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(fmt.Stringer))(nil)).Elem())) - var nullValue fmt.Stringer - return nullValue -} - -func EqFmtStringer(value fmt.Stringer) fmt.Stringer { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue fmt.Stringer - return nullValue -} - -func NotEqFmtStringer(value fmt.Stringer) fmt.Stringer { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue fmt.Stringer - return nullValue -} - -func FmtStringerThat(matcher pegomock.ArgumentMatcher) fmt.Stringer { - pegomock.RegisterMatcher(matcher) - var nullValue fmt.Stringer - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/io_readcloser.go b/server/legacy/jobs/mocks/matchers/io_readcloser.go deleted file mode 100644 index 3c4060ad3..000000000 --- a/server/legacy/jobs/mocks/matchers/io_readcloser.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - io "io" -) - -func AnyIoReadCloser() io.ReadCloser { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(io.ReadCloser))(nil)).Elem())) - var nullValue io.ReadCloser - return nullValue -} - -func EqIoReadCloser(value io.ReadCloser) io.ReadCloser { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue io.ReadCloser - return nullValue -} - -func NotEqIoReadCloser(value io.ReadCloser) io.ReadCloser { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue io.ReadCloser - return nullValue -} - -func IoReadCloserThat(matcher pegomock.ArgumentMatcher) io.ReadCloser { - pegomock.RegisterMatcher(matcher) - var nullValue io.ReadCloser - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/io_reader.go b/server/legacy/jobs/mocks/matchers/io_reader.go deleted file mode 100644 index 4686f91f7..000000000 --- a/server/legacy/jobs/mocks/matchers/io_reader.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - io "io" -) - -func AnyIoReader() io.Reader { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(io.Reader))(nil)).Elem())) - var nullValue io.Reader - return nullValue -} - -func EqIoReader(value io.Reader) io.Reader { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue io.Reader - return nullValue -} - -func NotEqIoReader(value io.Reader) io.Reader { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue io.Reader - return nullValue -} - -func IoReaderThat(matcher pegomock.ArgumentMatcher) io.Reader { - pegomock.RegisterMatcher(matcher) - var nullValue io.Reader - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/jobs_job.go b/server/legacy/jobs/mocks/matchers/jobs_job.go deleted file mode 100644 index e0aaab970..000000000 --- a/server/legacy/jobs/mocks/matchers/jobs_job.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -func AnyJobsJob() jobs.Job { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(jobs.Job))(nil)).Elem())) - var nullValue jobs.Job - return nullValue -} - -func EqJobsJob(value jobs.Job) jobs.Job { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue jobs.Job - return nullValue -} - -func NotEqJobsJob(value jobs.Job) jobs.Job { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue jobs.Job - return nullValue -} - -func JobsJobThat(matcher pegomock.ArgumentMatcher) jobs.Job { - pegomock.RegisterMatcher(matcher) - var nullValue jobs.Job - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/jobs_jobstatus.go b/server/legacy/jobs/mocks/matchers/jobs_jobstatus.go deleted file mode 100644 index 6f9381fcb..000000000 --- a/server/legacy/jobs/mocks/matchers/jobs_jobstatus.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -func AnyJobsJobStatus() jobs.JobStatus { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(jobs.JobStatus))(nil)).Elem())) - var nullValue jobs.JobStatus - return nullValue -} - -func EqJobsJobStatus(value jobs.JobStatus) jobs.JobStatus { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue jobs.JobStatus - return nullValue -} - -func NotEqJobsJobStatus(value jobs.JobStatus) jobs.JobStatus { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue jobs.JobStatus - return nullValue -} - -func JobsJobStatusThat(matcher pegomock.ArgumentMatcher) jobs.JobStatus { - pegomock.RegisterMatcher(matcher) - var nullValue jobs.JobStatus - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/jobs_pullinfo.go b/server/legacy/jobs/mocks/matchers/jobs_pullinfo.go deleted file mode 100644 index 25610e7bc..000000000 --- a/server/legacy/jobs/mocks/matchers/jobs_pullinfo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -func AnyJobsPullInfo() jobs.PullInfo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(jobs.PullInfo))(nil)).Elem())) - var nullValue jobs.PullInfo - return nullValue -} - -func EqJobsPullInfo(value jobs.PullInfo) jobs.PullInfo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue jobs.PullInfo - return nullValue -} - -func NotEqJobsPullInfo(value jobs.PullInfo) jobs.PullInfo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue jobs.PullInfo - return nullValue -} - -func JobsPullInfoThat(matcher pegomock.ArgumentMatcher) jobs.PullInfo { - pegomock.RegisterMatcher(matcher) - var nullValue jobs.PullInfo - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/models_commandname.go b/server/legacy/jobs/mocks/matchers/models_commandname.go deleted file mode 100644 index db61aecdc..000000000 --- a/server/legacy/jobs/mocks/matchers/models_commandname.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsCommandName() command.Name { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.Name))(nil)).Elem())) - var nullValue command.Name - return nullValue -} - -func EqModelsCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func NotEqModelsCommandName(value command.Name) command.Name { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.Name - return nullValue -} - -func ModelsCommandNameThat(matcher pegomock.ArgumentMatcher) command.Name { - pegomock.RegisterMatcher(matcher) - var nullValue command.Name - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/models_projectcommandcontext.go b/server/legacy/jobs/mocks/matchers/models_projectcommandcontext.go deleted file mode 100644 index dbde44f15..000000000 --- a/server/legacy/jobs/mocks/matchers/models_projectcommandcontext.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -func AnyModelsProjectCommandContext() command.ProjectContext { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(command.ProjectContext))(nil)).Elem())) - var nullValue command.ProjectContext - return nullValue -} - -func EqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func NotEqModelsProjectCommandContext(value command.ProjectContext) command.ProjectContext { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue command.ProjectContext - return nullValue -} - -func ModelsProjectCommandContextThat(matcher pegomock.ArgumentMatcher) command.ProjectContext { - pegomock.RegisterMatcher(matcher) - var nullValue command.ProjectContext - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/models_repo.go b/server/legacy/jobs/mocks/matchers/models_repo.go deleted file mode 100644 index b36c3ee7c..000000000 --- a/server/legacy/jobs/mocks/matchers/models_repo.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - models "github.com/runatlantis/atlantis/server/models" -) - -func AnyModelsRepo() models.Repo { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem())) - var nullValue models.Repo - return nullValue -} - -func EqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func NotEqModelsRepo(value models.Repo) models.Repo { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue models.Repo - return nullValue -} - -func ModelsRepoThat(matcher pegomock.ArgumentMatcher) models.Repo { - pegomock.RegisterMatcher(matcher) - var nullValue models.Repo - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/ptr_to_jobs_job.go b/server/legacy/jobs/mocks/matchers/ptr_to_jobs_job.go deleted file mode 100644 index 719bcdc94..000000000 --- a/server/legacy/jobs/mocks/matchers/ptr_to_jobs_job.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" -) - -func AnyPtrToJobsJob() *jobs.Job { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*jobs.Job))(nil)).Elem())) - var nullValue *jobs.Job - return nullValue -} - -func EqPtrToJobsJob(value *jobs.Job) *jobs.Job { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue *jobs.Job - return nullValue -} - -func NotEqPtrToJobsJob(value *jobs.Job) *jobs.Job { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue *jobs.Job - return nullValue -} - -func PtrToJobsJobThat(matcher pegomock.ArgumentMatcher) *jobs.Job { - pegomock.RegisterMatcher(matcher) - var nullValue *jobs.Job - return nullValue -} diff --git a/server/legacy/jobs/mocks/matchers/slice_of_string.go b/server/legacy/jobs/mocks/matchers/slice_of_string.go deleted file mode 100644 index f9281819d..000000000 --- a/server/legacy/jobs/mocks/matchers/slice_of_string.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfString() []string { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem())) - var nullValue []string - return nullValue -} - -func EqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func NotEqSliceOfString(value []string) []string { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []string - return nullValue -} - -func SliceOfStringThat(matcher pegomock.ArgumentMatcher) []string { - pegomock.RegisterMatcher(matcher) - var nullValue []string - return nullValue -} diff --git a/server/legacy/jobs/mocks/mock_job_store.go b/server/legacy/jobs/mocks/mock_job_store.go deleted file mode 100644 index d1e4e8a39..000000000 --- a/server/legacy/jobs/mocks/mock_job_store.go +++ /dev/null @@ -1,245 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/jobs (interfaces: JobStore) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" - "reflect" - "time" -) - -type MockJobStore struct { - fail func(message string, callerSkip ...int) -} - -func NewMockJobStore(options ...pegomock.Option) *MockJobStore { - mock := &MockJobStore{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockJobStore) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockJobStore) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockJobStore) AppendOutput(_param0 string, _param1 string) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobStore().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("AppendOutput", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockJobStore) Get(_param0 context.Context, _param1 string) (*jobs.Job, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobStore().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("Get", params, []reflect.Type{reflect.TypeOf((**jobs.Job)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 *jobs.Job - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(*jobs.Job) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockJobStore) RemoveJob(_param0 string) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockJobStore().") - } - params := []pegomock.Param{_param0} - pegomock.GetGenericMockFrom(mock).Invoke("RemoveJob", params, []reflect.Type{}) -} - -func (mock *MockJobStore) SetJobCompleteStatus(_param0 context.Context, _param1 string, _param2 jobs.JobStatus) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockJobStore().") - } - params := []pegomock.Param{_param0, _param1, _param2} - result := pegomock.GetGenericMockFrom(mock).Invoke("SetJobCompleteStatus", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockJobStore) VerifyWasCalledOnce() *VerifierMockJobStore { - return &VerifierMockJobStore{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockJobStore) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockJobStore { - return &VerifierMockJobStore{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockJobStore) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockJobStore { - return &VerifierMockJobStore{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockJobStore) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockJobStore { - return &VerifierMockJobStore{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockJobStore struct { - mock *MockJobStore - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockJobStore) AppendOutput(_param0 string, _param1 string) *MockJobStore_AppendOutput_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "AppendOutput", params, verifier.timeout) - return &MockJobStore_AppendOutput_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobStore_AppendOutput_OngoingVerification struct { - mock *MockJobStore - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobStore_AppendOutput_OngoingVerification) GetCapturedArguments() (string, string) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockJobStore_AppendOutput_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockJobStore) Get(_param0 context.Context, _param1 string) *MockJobStore_Get_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Get", params, verifier.timeout) - return &MockJobStore_Get_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobStore_Get_OngoingVerification struct { - mock *MockJobStore - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobStore_Get_OngoingVerification) GetCapturedArguments() (context.Context, string) { - _param0, _param1 := 
c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockJobStore_Get_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockJobStore) RemoveJob(_param0 string) *MockJobStore_RemoveJob_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RemoveJob", params, verifier.timeout) - return &MockJobStore_RemoveJob_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobStore_RemoveJob_OngoingVerification struct { - mock *MockJobStore - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobStore_RemoveJob_OngoingVerification) GetCapturedArguments() string { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockJobStore_RemoveJob_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockJobStore) SetJobCompleteStatus(_param0 context.Context, _param1 string, _param2 jobs.JobStatus) *MockJobStore_SetJobCompleteStatus_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetJobCompleteStatus", params, verifier.timeout) - return &MockJobStore_SetJobCompleteStatus_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockJobStore_SetJobCompleteStatus_OngoingVerification struct { - mock *MockJobStore - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockJobStore_SetJobCompleteStatus_OngoingVerification) GetCapturedArguments() (context.Context, string, jobs.JobStatus) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockJobStore_SetJobCompleteStatus_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string, _param2 []jobs.JobStatus) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]jobs.JobStatus, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(jobs.JobStatus) - } - } - return -} diff --git a/server/legacy/jobs/mocks/mock_project_command_output_handler.go b/server/legacy/jobs/mocks/mock_project_command_output_handler.go deleted file mode 100644 index 130b3047d..000000000 
--- a/server/legacy/jobs/mocks/mock_project_command_output_handler.go +++ /dev/null @@ -1,251 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/jobs (interfaces: ProjectCommandOutputHandler) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - jobs "github.com/runatlantis/atlantis/server/legacy/jobs" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockProjectCommandOutputHandler struct { - fail func(message string, callerSkip ...int) -} - -func NewMockProjectCommandOutputHandler(options ...pegomock.Option) *MockProjectCommandOutputHandler { - mock := &MockProjectCommandOutputHandler{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockProjectCommandOutputHandler) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockProjectCommandOutputHandler) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockProjectCommandOutputHandler) CleanUp(_param0 jobs.PullInfo) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") - } - params := []pegomock.Param{_param0} - pegomock.GetGenericMockFrom(mock).Invoke("CleanUp", params, []reflect.Type{}) -} - -func (mock *MockProjectCommandOutputHandler) CloseJob(_param0 context.Context, _param1 string, _param2 models.Repo) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") - } - params := []pegomock.Param{_param0, _param1, _param2} - pegomock.GetGenericMockFrom(mock).Invoke("CloseJob", params, []reflect.Type{}) -} - -func (mock *MockProjectCommandOutputHandler) Handle() { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") - } - params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("Handle", params, []reflect.Type{}) -} - -func (mock *MockProjectCommandOutputHandler) Register(_param0 context.Context, _param1 string, _param2 chan string) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectCommandOutputHandler().") - } - params := []pegomock.Param{_param0, _param1, _param2} - pegomock.GetGenericMockFrom(mock).Invoke("Register", params, []reflect.Type{}) -} - -func (mock *MockProjectCommandOutputHandler) Send(_param0 command.ProjectContext, _param1 string) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectCommandOutputHandler().") - } - params := []pegomock.Param{_param0, _param1} - pegomock.GetGenericMockFrom(mock).Invoke("Send", params, []reflect.Type{}) -} - -func (mock *MockProjectCommandOutputHandler) VerifyWasCalledOnce() *VerifierMockProjectCommandOutputHandler { - return &VerifierMockProjectCommandOutputHandler{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockProjectCommandOutputHandler) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectCommandOutputHandler { - return &VerifierMockProjectCommandOutputHandler{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockProjectCommandOutputHandler) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectCommandOutputHandler { - return &VerifierMockProjectCommandOutputHandler{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockProjectCommandOutputHandler) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectCommandOutputHandler { - return &VerifierMockProjectCommandOutputHandler{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockProjectCommandOutputHandler struct { - mock *MockProjectCommandOutputHandler - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockProjectCommandOutputHandler) CleanUp(_param0 jobs.PullInfo) *MockProjectCommandOutputHandler_CleanUp_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CleanUp", params, verifier.timeout) - return &MockProjectCommandOutputHandler_CleanUp_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandOutputHandler_CleanUp_OngoingVerification struct { - mock *MockProjectCommandOutputHandler - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandOutputHandler_CleanUp_OngoingVerification) GetCapturedArguments() jobs.PullInfo { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockProjectCommandOutputHandler_CleanUp_OngoingVerification) GetAllCapturedArguments() (_param0 []jobs.PullInfo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]jobs.PullInfo, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(jobs.PullInfo) - } - } - return -} - -func (verifier *VerifierMockProjectCommandOutputHandler) CloseJob(_param0 context.Context, _param1 string, _param2 models.Repo) *MockProjectCommandOutputHandler_CloseJob_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CloseJob", params, verifier.timeout) - return &MockProjectCommandOutputHandler_CloseJob_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandOutputHandler_CloseJob_OngoingVerification struct { - mock *MockProjectCommandOutputHandler - 
methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandOutputHandler_CloseJob_OngoingVerification) GetCapturedArguments() (context.Context, string, models.Repo) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockProjectCommandOutputHandler_CloseJob_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string, _param2 []models.Repo) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]models.Repo, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(models.Repo) - } - } - return -} - -func (verifier *VerifierMockProjectCommandOutputHandler) Handle() *MockProjectCommandOutputHandler_Handle_OngoingVerification { - params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Handle", params, verifier.timeout) - return &MockProjectCommandOutputHandler_Handle_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandOutputHandler_Handle_OngoingVerification struct { - mock *MockProjectCommandOutputHandler - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandOutputHandler_Handle_OngoingVerification) GetCapturedArguments() { -} - -func (c *MockProjectCommandOutputHandler_Handle_OngoingVerification) GetAllCapturedArguments() { -} - -func (verifier *VerifierMockProjectCommandOutputHandler) Register(_param0 context.Context, _param1 string, _param2 chan string) *MockProjectCommandOutputHandler_Register_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Register", params, verifier.timeout) - return &MockProjectCommandOutputHandler_Register_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandOutputHandler_Register_OngoingVerification struct { - mock *MockProjectCommandOutputHandler - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandOutputHandler_Register_OngoingVerification) GetCapturedArguments() (context.Context, string, chan string) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockProjectCommandOutputHandler_Register_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string, _param2 []chan string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([]chan string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(chan string) - } - } - 
return -} - -func (verifier *VerifierMockProjectCommandOutputHandler) Send(_param0 command.ProjectContext, _param1 string) *MockProjectCommandOutputHandler_Send_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Send", params, verifier.timeout) - return &MockProjectCommandOutputHandler_Send_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectCommandOutputHandler_Send_OngoingVerification struct { - mock *MockProjectCommandOutputHandler - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectCommandOutputHandler_Send_OngoingVerification) GetCapturedArguments() (command.ProjectContext, string) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockProjectCommandOutputHandler_Send_OngoingVerification) GetAllCapturedArguments() (_param0 []command.ProjectContext, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(command.ProjectContext) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} diff --git a/server/legacy/jobs/mocks/mock_project_job_url_generator.go b/server/legacy/jobs/mocks/mock_project_job_url_generator.go deleted file mode 100644 index 39ec28e6e..000000000 --- a/server/legacy/jobs/mocks/mock_project_job_url_generator.go +++ /dev/null @@ -1,108 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/jobs (interfaces: ProjectJobURLGenerator) - -package mocks - -import ( - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockProjectJobURLGenerator struct { - fail func(message string, callerSkip ...int) -} - -func NewMockProjectJobURLGenerator(options ...pegomock.Option) *MockProjectJobURLGenerator { - mock := &MockProjectJobURLGenerator{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockProjectJobURLGenerator) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockProjectJobURLGenerator) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockProjectJobURLGenerator) GenerateProjectJobURL(jobID string) (string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockProjectJobURLGenerator().") - } - params := []pegomock.Param{jobID} - result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateProjectJobURL", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectJobURLGenerator) VerifyWasCalledOnce() *VerifierMockProjectJobURLGenerator { - return &VerifierMockProjectJobURLGenerator{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockProjectJobURLGenerator) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectJobURLGenerator { - return &VerifierMockProjectJobURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockProjectJobURLGenerator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectJobURLGenerator { - return &VerifierMockProjectJobURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockProjectJobURLGenerator) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectJobURLGenerator { - return &VerifierMockProjectJobURLGenerator{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockProjectJobURLGenerator struct { - mock *MockProjectJobURLGenerator - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockProjectJobURLGenerator) GenerateProjectJobURL(jobID string) *MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification { - params := []pegomock.Param{jobID} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateProjectJobURL", params, verifier.timeout) - return &MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification struct { - mock *MockProjectJobURLGenerator - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification) GetCapturedArguments() string { - jobID := c.GetAllCapturedArguments() - return jobID[len(jobID)-1] -} - -func (c *MockProjectJobURLGenerator_GenerateProjectJobURL_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(string) - } - } - return -} diff --git a/server/legacy/jobs/mocks/mock_project_status_updater.go b/server/legacy/jobs/mocks/mock_project_status_updater.go deleted file mode 100644 index 7f924677f..000000000 --- a/server/legacy/jobs/mocks/mock_project_status_updater.go +++ /dev/null @@ -1,132 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
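For readers skimming these generated mocks, here is a minimal sketch (an assumed test, not part of this diff) of how a pegomock mock such as MockProjectJobURLGenerator is consumed. It mirrors the `When(...).ThenReturn(...)` stubbing and `VerifyWasCalledOnce()` verification patterns used by the test files deleted later in this change; the URL value and test name are placeholders.

```go
package jobs_test

import (
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/legacy/jobs/mocks"
)

func TestGenerateProjectJobURL_sketch(t *testing.T) {
	RegisterMockTestingT(t)

	urlGenerator := mocks.NewMockProjectJobURLGenerator()
	// Stub: any job ID returns a fixed, placeholder URL.
	When(urlGenerator.GenerateProjectJobURL(AnyString())).ThenReturn("https://atlantis.example.com/jobs/1234", nil)

	url, err := urlGenerator.GenerateProjectJobURL("1234")
	if err != nil || url == "" {
		t.Fatalf("unexpected result: %q, %v", url, err)
	}

	// Verify the mock recorded exactly one invocation.
	urlGenerator.VerifyWasCalledOnce().GenerateProjectJobURL(AnyString())
}
```

The same stub-then-verify shape applies to the other generated mocks removed in this diff.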
-// Source: github.com/runatlantis/atlantis/server/legacy/jobs (interfaces: ProjectStatusUpdater) - -package mocks - -import ( - context "context" - fmt "fmt" - pegomock "github.com/petergtz/pegomock" - command "github.com/runatlantis/atlantis/server/legacy/events/command" - models "github.com/runatlantis/atlantis/server/models" - "reflect" - "time" -) - -type MockProjectStatusUpdater struct { - fail func(message string, callerSkip ...int) -} - -func NewMockProjectStatusUpdater(options ...pegomock.Option) *MockProjectStatusUpdater { - mock := &MockProjectStatusUpdater{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockProjectStatusUpdater) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockProjectStatusUpdater) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockProjectStatusUpdater) UpdateProject(_param0 context.Context, _param1 command.ProjectContext, _param2 fmt.Stringer, _param3 models.VCSStatus, _param4 string, _param5 string) (string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockProjectStatusUpdater().") - } - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} - result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateProject", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockProjectStatusUpdater) VerifyWasCalledOnce() *VerifierMockProjectStatusUpdater { - return &VerifierMockProjectStatusUpdater{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockProjectStatusUpdater) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockProjectStatusUpdater { - return &VerifierMockProjectStatusUpdater{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockProjectStatusUpdater) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockProjectStatusUpdater { - return &VerifierMockProjectStatusUpdater{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockProjectStatusUpdater) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockProjectStatusUpdater { - return &VerifierMockProjectStatusUpdater{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockProjectStatusUpdater struct { - mock *MockProjectStatusUpdater - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockProjectStatusUpdater) UpdateProject(_param0 context.Context, _param1 command.ProjectContext, _param2 fmt.Stringer, _param3 models.VCSStatus, _param4 string, _param5 string) *MockProjectStatusUpdater_UpdateProject_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2, _param3, _param4, _param5} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateProject", params, verifier.timeout) - return &MockProjectStatusUpdater_UpdateProject_OngoingVerification{mock: 
verifier.mock, methodInvocations: methodInvocations} -} - -type MockProjectStatusUpdater_UpdateProject_OngoingVerification struct { - mock *MockProjectStatusUpdater - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockProjectStatusUpdater_UpdateProject_OngoingVerification) GetCapturedArguments() (context.Context, command.ProjectContext, fmt.Stringer, models.VCSStatus, string, string) { - _param0, _param1, _param2, _param3, _param4, _param5 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1], _param3[len(_param3)-1], _param4[len(_param4)-1], _param5[len(_param5)-1] -} - -func (c *MockProjectStatusUpdater_UpdateProject_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []command.ProjectContext, _param2 []fmt.Stringer, _param3 []models.VCSStatus, _param4 []string, _param5 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]command.ProjectContext, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(command.ProjectContext) - } - _param2 = make([]fmt.Stringer, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.(fmt.Stringer) - } - _param3 = make([]models.VCSStatus, len(c.methodInvocations)) - for u, param := range params[3] { - _param3[u] = param.(models.VCSStatus) - } - _param4 = make([]string, len(c.methodInvocations)) - for u, param := range params[4] { - _param4[u] = param.(string) - } - _param5 = make([]string, len(c.methodInvocations)) - for u, param := range params[5] { - _param5[u] = param.(string) - } - } - return -} diff --git a/server/legacy/jobs/mocks/mock_storage_backend.go b/server/legacy/jobs/mocks/mock_storage_backend.go deleted file mode 100644 index de03af7bd..000000000 --- a/server/legacy/jobs/mocks/mock_storage_backend.go +++ /dev/null @@ -1,167 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/jobs (interfaces: StorageBackend) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockStorageBackend struct { - fail func(message string, callerSkip ...int) -} - -func NewMockStorageBackend(options ...pegomock.Option) *MockStorageBackend { - mock := &MockStorageBackend{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockStorageBackend) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockStorageBackend) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockStorageBackend) Read(_param0 context.Context, _param1 string) ([]string, error) { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockStorageBackend().") - } - params := []pegomock.Param{_param0, _param1} - result := pegomock.GetGenericMockFrom(mock).Invoke("Read", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockStorageBackend) Write(_param0 context.Context, _param1 string, _param2 []string) (bool, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockStorageBackend().") - } - params := []pegomock.Param{_param0, _param1, _param2} - result := pegomock.GetGenericMockFrom(mock).Invoke("Write", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 bool - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(bool) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - -func (mock *MockStorageBackend) VerifyWasCalledOnce() *VerifierMockStorageBackend { - return &VerifierMockStorageBackend{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockStorageBackend) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockStorageBackend { - return &VerifierMockStorageBackend{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockStorageBackend) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockStorageBackend { - return &VerifierMockStorageBackend{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockStorageBackend) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockStorageBackend { - return &VerifierMockStorageBackend{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockStorageBackend struct { - mock *MockStorageBackend - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockStorageBackend) Read(_param0 context.Context, _param1 string) *MockStorageBackend_Read_OngoingVerification { - params := []pegomock.Param{_param0, _param1} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Read", params, verifier.timeout) - return &MockStorageBackend_Read_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockStorageBackend_Read_OngoingVerification struct { - mock *MockStorageBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockStorageBackend_Read_OngoingVerification) GetCapturedArguments() (context.Context, string) { - _param0, _param1 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1] -} - -func (c *MockStorageBackend_Read_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - 
_param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - } - return -} - -func (verifier *VerifierMockStorageBackend) Write(_param0 context.Context, _param1 string, _param2 []string) *MockStorageBackend_Write_OngoingVerification { - params := []pegomock.Param{_param0, _param1, _param2} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Write", params, verifier.timeout) - return &MockStorageBackend_Write_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockStorageBackend_Write_OngoingVerification struct { - mock *MockStorageBackend - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockStorageBackend_Write_OngoingVerification) GetCapturedArguments() (context.Context, string, []string) { - _param0, _param1, _param2 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1], _param1[len(_param1)-1], _param2[len(_param2)-1] -} - -func (c *MockStorageBackend_Write_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 []string, _param2 [][]string) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([]string, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.(string) - } - _param2 = make([][]string, len(c.methodInvocations)) - for u, param := range params[2] { - _param2[u] = param.([]string) - } - } - return -} diff --git a/server/legacy/jobs/project_command_output_handler.go b/server/legacy/jobs/project_command_output_handler.go deleted file mode 100644 index e09a9a730..000000000 --- a/server/legacy/jobs/project_command_output_handler.go +++ /dev/null @@ -1,216 +0,0 @@ -package jobs - -import ( - "context" - "fmt" - "sync" - - "github.com/runatlantis/atlantis/server/legacy/events/terraform/filter" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" -) - -type OutputBuffer struct { - OperationComplete bool - Buffer []string -} - -type PullInfo struct { - PullNum int - Repo string - ProjectName string - Workspace string -} - -type JobInfo struct { - PullInfo - HeadCommit string -} - -type ProjectCmdOutputLine struct { - JobID string - JobInfo JobInfo - Line string -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_output_handler.go ProjectCommandOutputHandler - -type ProjectCommandOutputHandler interface { - // Send will enqueue the msg and wait for Handle() to receive the message. - Send(ctx command.ProjectContext, msg string) - - // Listens for msg from channel - Handle() - - // Register registers a channel and blocks until it is caught up. Callers should call this asynchronously when attempting - // to read the channel in the same goroutine - Register(ctx context.Context, jobID string, receiver chan string) - - // Cleans up resources for a pull - CleanUp(pullInfo PullInfo) - - // Persists job to storage backend and marks operation complete - CloseJob(ctx context.Context, jobID string, repo models.Repo) -} - -// AsyncProjectCommandOutputHandler is a handler to transport terraform client -// outputs to the front end. 
-type AsyncProjectCommandOutputHandler struct { - // Main channel that receives output from the terraform client - projectCmdOutput chan *ProjectCmdOutputLine - - // Storage for jobs - JobStore JobStore - - // Registry to track active connections for a job - receiverRegistry receiverRegistry - - // Map to track jobs in a pull request - pullToJobMapping sync.Map - logger logging.Logger - logFilter filter.LogFilter -} - -func NewAsyncProjectCommandOutputHandler( - projectCmdOutput chan *ProjectCmdOutputLine, - logger logging.Logger, - jobStore JobStore, - logFilter filter.LogFilter, -) ProjectCommandOutputHandler { - return &AsyncProjectCommandOutputHandler{ - projectCmdOutput: projectCmdOutput, - logger: logger, - pullToJobMapping: sync.Map{}, - JobStore: jobStore, - receiverRegistry: NewReceiverRegistry(), - logFilter: logFilter, - } -} - -func (p *AsyncProjectCommandOutputHandler) Send(ctx command.ProjectContext, msg string) { - p.projectCmdOutput <- &ProjectCmdOutputLine{ - JobID: ctx.JobID, - JobInfo: JobInfo{ - HeadCommit: ctx.Pull.HeadCommit, - PullInfo: PullInfo{ - PullNum: ctx.Pull.Num, - Repo: ctx.BaseRepo.Name, - ProjectName: ctx.ProjectName, - Workspace: ctx.Workspace, - }, - }, - Line: msg, - } -} - -func (p *AsyncProjectCommandOutputHandler) Handle() { - for msg := range p.projectCmdOutput { - // Filter out log lines from job output - if p.logFilter.ShouldFilterLine(msg.Line) { - continue - } - - // Add job to pullToJob mapping - if _, ok := p.pullToJobMapping.Load(msg.JobInfo.PullInfo); !ok { - p.pullToJobMapping.Store(msg.JobInfo.PullInfo, map[string]bool{}) - } - value, _ := p.pullToJobMapping.Load(msg.JobInfo.PullInfo) - jobMapping := value.(map[string]bool) - jobMapping[msg.JobID] = true - - // Write logs to all active connections - for ch := range p.receiverRegistry.GetReceivers(msg.JobID) { - select { - case ch <- msg.Line: - default: - p.receiverRegistry.RemoveReceiver(msg.JobID, ch) - } - } - - // Append new log to the output buffer for the job - err := p.JobStore.AppendOutput(msg.JobID, msg.Line) - if err != nil { - p.logger.Warn(fmt.Sprintf("appending log: %s for job: %s: %v", msg.Line, msg.JobID, err)) - } - } -} - -func (p *AsyncProjectCommandOutputHandler) Register(ctx context.Context, jobID string, connection chan string) { - job, err := p.JobStore.Get(ctx, jobID) - if err != nil || job == nil { - p.logger.Error(fmt.Sprintf("getting job: %s, err: %v", jobID, err)) - return - } - - // Back fill contents from the output buffer - for _, line := range job.Output { - connection <- line - } - - // Close connection if job is complete - if job.Status == Complete { - close(connection) - return - } - - // add receiver to registry after backfilling contents from the buffer - p.receiverRegistry.AddReceiver(jobID, connection) -} - -func (p *AsyncProjectCommandOutputHandler) CloseJob(ctx context.Context, jobID string, repo models.Repo) { - // Close active connections and remove receivers from registry - p.receiverRegistry.CloseAndRemoveReceiversForJob(jobID) - - // Update job status and persist to storage if configured - if err := p.JobStore.SetJobCompleteStatus(ctx, jobID, Complete); err != nil { - p.logger.Error(fmt.Sprintf("updating jobs status to complete, %v", err)) - } -} - -func (p *AsyncProjectCommandOutputHandler) CleanUp(pullInfo PullInfo) { - if value, ok := p.pullToJobMapping.Load(pullInfo); ok { - jobMapping := value.(map[string]bool) - for jobID := range jobMapping { - // Clear output buffer for the job - p.JobStore.RemoveJob(jobID) - - // Close connections and 
clear registry for the job - p.receiverRegistry.CloseAndRemoveReceiversForJob(jobID) - } - - // Remove pull to job mapping for the job - p.pullToJobMapping.Delete(pullInfo) - } -} - -// Helper methods for testing -func (p *AsyncProjectCommandOutputHandler) GetReceiverBufferForPull(jobID string) map[chan string]bool { - return p.receiverRegistry.GetReceivers(jobID) -} - -func (p *AsyncProjectCommandOutputHandler) GetJobIDMapForPull(pullInfo PullInfo) map[string]bool { - if value, ok := p.pullToJobMapping.Load(pullInfo); ok { - return value.(map[string]bool) - } - return nil -} - -// NoopProjectOutputHandler is a mock that doesn't do anything -type NoopProjectOutputHandler struct{} - -func (p *NoopProjectOutputHandler) Send(ctx command.ProjectContext, msg string) { -} - -func (p *NoopProjectOutputHandler) Handle() { -} - -func (p *NoopProjectOutputHandler) Register(ctx context.Context, jobID string, receiver chan string) { -} - -func (p *NoopProjectOutputHandler) CleanUp(pullInfo PullInfo) { -} - -func (p *NoopProjectOutputHandler) CloseJob(ctx context.Context, jobID string, repo models.Repo) { -} diff --git a/server/legacy/jobs/project_command_output_handler_test.go b/server/legacy/jobs/project_command_output_handler_test.go deleted file mode 100644 index fd64575eb..000000000 --- a/server/legacy/jobs/project_command_output_handler_test.go +++ /dev/null @@ -1,247 +0,0 @@ -package jobs_test - -import ( - "regexp" - "sync" - "testing" - - "github.com/runatlantis/atlantis/server/legacy/events/terraform/filter" - "github.com/stretchr/testify/assert" - - . "github.com/petergtz/pegomock" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/jobs" - "github.com/runatlantis/atlantis/server/legacy/jobs/mocks" - "github.com/runatlantis/atlantis/server/legacy/jobs/mocks/matchers" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - - . 
"github.com/runatlantis/atlantis/testing" -) - -func createTestProjectCmdContext(t *testing.T) command.ProjectContext { - logger := logging.NewNoopCtxLogger(t) - return command.ProjectContext{ - BaseRepo: models.Repo{ - Name: "test-repo", - Owner: "test-org", - }, - HeadRepo: models.Repo{ - Name: "test-repo", - Owner: "test-org", - }, - Pull: models.PullRequest{ - Num: 1, - HeadBranch: "master", - BaseBranch: "master", - Author: "test-user", - HeadCommit: "234r232432", - }, - User: models.User{ - Username: "test-user", - }, - Log: logger, - Workspace: "myworkspace", - RepoRelDir: "test-dir", - ProjectName: "test-project", - JobID: "1234", - } -} - -func createProjectCommandOutputHandler(t *testing.T) (jobs.ProjectCommandOutputHandler, *mocks.MockJobStore) { - logger := logging.NewNoopCtxLogger(t) - prjCmdOutputChan := make(chan *jobs.ProjectCmdOutputLine) - jobStore := mocks.NewMockJobStore() - prjCmdOutputHandler := jobs.NewAsyncProjectCommandOutputHandler( - prjCmdOutputChan, - logger, - jobStore, - filter.LogFilter{ - Regexes: []*regexp.Regexp{regexp.MustCompile("InvalidMessage")}, - }, - ) - - go func() { - prjCmdOutputHandler.Handle() - }() - - return prjCmdOutputHandler, jobStore -} - -func TestProjectCommandOutputHandler(t *testing.T) { - Msg := "Test Terraform Output" - ctx := createTestProjectCmdContext(t) - - t.Run("receive message from main channel", func(t *testing.T) { - var wg sync.WaitGroup - var expectedMsg string - projectOutputHandler, jobStore := createProjectCommandOutputHandler(t) - - When(jobStore.Get(matchers.AnyContextContext(), AnyString())).ThenReturn(&jobs.Job{}, nil) - - // buffered channel to ensure it's not blocking if this goroutine has not started yet. - ch := make(chan string, 1) - - // read from channel - go func() { - for msg := range ch { - expectedMsg = msg - wg.Done() - } - }() - - // register channel and backfill from buffer - // Note: We call this synchronously because otherwise - // there could be a race where we are unable to register the channel - // before sending messages due to the way we lock our buffer memory cache - projectOutputHandler.Register(ctx.RequestCtx, ctx.JobID, ch) - - wg.Add(1) - projectOutputHandler.Send(ctx, Msg) - wg.Wait() - close(ch) - - Equals(t, expectedMsg, Msg) - }) - - t.Run("strip message from main channel", func(t *testing.T) { - var wg sync.WaitGroup - var expectedMsg string - projectOutputHandler, jobStore := createProjectCommandOutputHandler(t) - strippedMessage := "InvalidMessage test" - - When(jobStore.Get(matchers.AnyContextContext(), AnyString())).ThenReturn(&jobs.Job{}, nil) - - // buffered channel to ensure it's not blocking if this goroutine has not started yet. 
- ch := make(chan string, 1) - - // read from channel - go func() { - for msg := range ch { - expectedMsg = msg - wg.Done() - } - }() - - // register channel and backfill from buffer - // Note: We call this synchronously because otherwise - // there could be a race where we are unable to register the channel - // before sending messages due to the way we lock our buffer memory cache - projectOutputHandler.Register(ctx.RequestCtx, ctx.JobID, ch) - - wg.Add(1) - // even if stripped message is sent first, registered channel will never receive it, making expectedMsg == Msg - projectOutputHandler.Send(ctx, strippedMessage) - projectOutputHandler.Send(ctx, Msg) - wg.Wait() - close(ch) - - Equals(t, expectedMsg, Msg) - }) - - t.Run("copies buffer to new channels", func(t *testing.T) { - var wg sync.WaitGroup - var receivedMsg string - - projectOutputHandler, jobStore := createProjectCommandOutputHandler(t) - - // Mocking the job store acts like populating the buffer - When(jobStore.Get(matchers.AnyContextContext(), AnyString())).ThenReturn(&jobs.Job{ - Output: []string{Msg}, - Status: jobs.Processing, - }, nil) - - ch := make(chan string) - go func() { - for msg := range ch { - receivedMsg = msg - wg.Done() - } - }() - - wg.Add(1) - - // Register the channel and wait for msg in the buffer to be read - projectOutputHandler.Register(ctx.RequestCtx, ctx.JobID, ch) - wg.Wait() - - close(ch) - - // Assert received msg is copied from the buffer - assert.Equal(t, receivedMsg, Msg) - }) - - t.Run("clean up all jobs when PR is closed", func(t *testing.T) { - projectOutputHandler, jobStore := createProjectCommandOutputHandler(t) - When(jobStore.Get(matchers.AnyContextContext(), AnyString())).ThenReturn(&jobs.Job{}, nil) - - // buffered channel to ensure it's not blocking if this goroutine has not started yet. - ch := make(chan string, 1) - - // read from channel - var wg sync.WaitGroup - wg.Add(1) - go func() { - <-ch - wg.Done() - }() - - // register channel and backfill from buffer - // Note: We call this synchronously because otherwise - // there could be a race where we are unable to register the channel - // before sending messages due to the way we lock our buffer memory cache - projectOutputHandler.Register(ctx.RequestCtx, ctx.JobID, ch) - projectOutputHandler.Send(ctx, Msg) - - wg.Wait() - - pullContext := jobs.PullInfo{ - PullNum: ctx.Pull.Num, - Repo: ctx.BaseRepo.Name, - ProjectName: ctx.ProjectName, - Workspace: ctx.Workspace, - } - - // Cleanup is called when a PR is closed - projectOutputHandler.CleanUp(pullContext) - - // Check all the resources are cleaned up. 
- dfProjectOutputHandler, ok := projectOutputHandler.(*jobs.AsyncProjectCommandOutputHandler) - assert.True(t, ok) - - job, err := dfProjectOutputHandler.JobStore.Get(ctx.RequestCtx, ctx.JobID) - Ok(t, err) - - assert.Empty(t, job.Output) - assert.Empty(t, dfProjectOutputHandler.GetReceiverBufferForPull(ctx.JobID)) - assert.Empty(t, dfProjectOutputHandler.GetJobIDMapForPull(pullContext)) - }) - - t.Run("close conn buffer after streaming logs for completed operation", func(t *testing.T) { - projectOutputHandler, jobStore := createProjectCommandOutputHandler(t) - job := jobs.Job{ - Output: []string{"a", "b"}, - Status: jobs.Complete, - } - When(jobStore.Get(matchers.AnyContextContext(), AnyString())).ThenReturn(&job, nil) - - ch := make(chan string) - - opComplete := make(chan bool) - // buffer channel will be closed immediately after logs are streamed - go func() { - for range ch { - } - opComplete <- true - }() - - // register channel and backfill from buffer - // Note: We call this synchronously because otherwise - // there could be a race where we are unable to register the channel - // before sending messages due to the way we lock our buffer memory cache - projectOutputHandler.Register(ctx.RequestCtx, ctx.JobID, ch) - - assert.True(t, <-opComplete) - }) -} diff --git a/server/legacy/jobs/receiver_registry.go b/server/legacy/jobs/receiver_registry.go deleted file mode 100644 index a7412c16f..000000000 --- a/server/legacy/jobs/receiver_registry.go +++ /dev/null @@ -1,58 +0,0 @@ -package jobs - -import "sync" - -type receiverRegistry interface { - AddReceiver(jobID string, ch chan string) - RemoveReceiver(jobID string, ch chan string) - GetReceivers(jobID string) map[chan string]bool - CloseAndRemoveReceiversForJob(jobID string) -} - -type ReceiverRegistry struct { - receivers map[string]map[chan string]bool - lock sync.RWMutex -} - -func NewReceiverRegistry() *ReceiverRegistry { - return &ReceiverRegistry{ - receivers: map[string]map[chan string]bool{}, - } -} - -func (r *ReceiverRegistry) AddReceiver(jobID string, ch chan string) { - r.lock.Lock() - defer r.lock.Unlock() - - if r.receivers[jobID] == nil { - r.receivers[jobID] = map[chan string]bool{} - } - - r.receivers[jobID][ch] = true -} - -func (r *ReceiverRegistry) RemoveReceiver(jobID string, ch chan string) { - r.lock.Lock() - defer r.lock.Unlock() - - delete(r.receivers[jobID], ch) -} - -func (r *ReceiverRegistry) GetReceivers(jobID string) map[chan string]bool { - r.lock.RLock() - defer r.lock.RUnlock() - - return r.receivers[jobID] -} - -func (r *ReceiverRegistry) CloseAndRemoveReceiversForJob(jobID string) { - r.lock.Lock() - defer r.lock.Unlock() - - for ch := range r.receivers[jobID] { - close(ch) - delete(r.receivers[jobID], ch) - } - - delete(r.receivers, jobID) -} diff --git a/server/legacy/jobs/storage_backend.go b/server/legacy/jobs/storage_backend.go deleted file mode 100644 index 3767417bc..000000000 --- a/server/legacy/jobs/storage_backend.go +++ /dev/null @@ -1,118 +0,0 @@ -package jobs - -import ( - "context" - "fmt" - "io" - "strings" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - "github.com/runatlantis/atlantis/server/neptune/storage" - "github.com/uber-go/tally/v4" -) - -const PageSize = 100 -const OutputPrefix = "output" - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_storage_backend.go StorageBackend - -type StorageBackend interface { - // Read logs from the 
storage backend. Must close the reader - Read(ctx context.Context, key string) ([]string, error) - - // Write logs to the storage backend - Write(ctx context.Context, key string, logs []string) (bool, error) -} - -func NewStorageBackend(client *storage.Client, logger logging.Logger, featureAllocator feature.Allocator, scope tally.Scope) (StorageBackend, error) { - return &InstrumentedStorageBackend{ - StorageBackend: &storageBackend{ - client: client, - logger: logger, - }, - scope: scope.SubScope("storage_backend"), - }, nil -} - -type storageBackend struct { - client *storage.Client - logger logging.Logger -} - -func (s *storageBackend) Read(ctx context.Context, key string) ([]string, error) { - s.logger.Info(fmt.Sprintf("reading object for job: %s", key)) - reader, err := s.client.Get(ctx, key) - if err != nil { - return nil, errors.Wrap(err, "getting item") - } - defer reader.Close() - - buf := new(strings.Builder) - _, err = io.Copy(buf, reader) - if err != nil { - return []string{}, errors.Wrapf(err, "copying to buffer") - } - - logs := strings.Split(buf.String(), "\n") - return logs, nil -} - -func (s *storageBackend) Write(ctx context.Context, key string, logs []string) (bool, error) { - logString := strings.Join(logs, "\n") - object := []byte(logString) - - err := s.client.Set(ctx, key, object) - if err != nil { - return false, errors.Wrapf(err, "uploading object for job: %s", key) - } - - s.logger.Info(fmt.Sprintf("successfully uploaded object for job: %s", key)) - return true, nil -} - -// Adds instrumentation to storage backend -type InstrumentedStorageBackend struct { - StorageBackend - - scope tally.Scope -} - -func (i *InstrumentedStorageBackend) Read(ctx context.Context, key string) ([]string, error) { - failureCount := i.scope.Counter("read_failure") - latency := i.scope.Timer("read_latency") - span := latency.Start() - defer span.Stop() - logs, err := i.StorageBackend.Read(ctx, key) - if err != nil { - failureCount.Inc(1) - } - return logs, err -} - -func (i *InstrumentedStorageBackend) Write(ctx context.Context, key string, logs []string) (bool, error) { - failureCount := i.scope.Counter("write_failure") - successCount := i.scope.Counter("write_success") - latency := i.scope.Timer("write_latency") - span := latency.Start() - defer span.Stop() - ok, err := i.StorageBackend.Write(ctx, key, logs) - if err != nil { - failureCount.Inc(1) - return ok, err - } - successCount.Inc(1) - return ok, err -} - -// Used when log persistence is not configured -type NoopStorageBackend struct{} - -func (s *NoopStorageBackend) Read(ctx context.Context, key string) ([]string, error) { - return []string{}, nil -} - -func (s *NoopStorageBackend) Write(ctx context.Context, key string, logs []string) (bool, error) { - return false, nil -} diff --git a/server/legacy/lyft/aws/session.go b/server/legacy/lyft/aws/session.go deleted file mode 100644 index 86b0c4e08..000000000 --- a/server/legacy/lyft/aws/session.go +++ /dev/null @@ -1,12 +0,0 @@ -package aws - -import "github.com/aws/aws-sdk-go/aws/session" - -func NewSession() (*session.Session, error) { - awsSession, err := session.NewSession() - if err != nil { - return nil, err - } - - return awsSession, nil -} diff --git a/server/legacy/lyft/aws/sns/mocks/matchers/context_context.go b/server/legacy/lyft/aws/sns/mocks/matchers/context_context.go deleted file mode 100644 index 2e07bf9a5..000000000 --- a/server/legacy/lyft/aws/sns/mocks/matchers/context_context.go +++ /dev/null @@ -1,33 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
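As an aside on the StorageBackend contract removed above (Read returns the persisted log lines for a job key; Write uploads them and reports whether anything was stored), a minimal in-memory implementation can make the interface concrete. This is a hypothetical sketch for tests, not part of the deleted code; the type name and package are assumptions.

```go
package jobs

import (
	"context"
	"sync"
)

// InMemoryStorageBackend is an illustrative, thread-safe StorageBackend
// that keeps job logs in a map instead of an object store.
type InMemoryStorageBackend struct {
	mu   sync.RWMutex
	logs map[string][]string
}

func NewInMemoryStorageBackend() *InMemoryStorageBackend {
	return &InMemoryStorageBackend{logs: map[string][]string{}}
}

func (s *InMemoryStorageBackend) Read(_ context.Context, key string) ([]string, error) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return s.logs[key], nil
}

func (s *InMemoryStorageBackend) Write(_ context.Context, key string, logs []string) (bool, error) {
	s.mu.Lock()
	defer s.mu.Unlock()
	// Copy the slice so later mutations by the caller don't leak in.
	s.logs[key] = append([]string(nil), logs...)
	return true, nil
}
```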
-package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" - - context "context" -) - -func AnyContextContext() context.Context { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(context.Context))(nil)).Elem())) - var nullValue context.Context - return nullValue -} - -func EqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func NotEqContextContext(value context.Context) context.Context { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue context.Context - return nullValue -} - -func ContextContextThat(matcher pegomock.ArgumentMatcher) context.Context { - pegomock.RegisterMatcher(matcher) - var nullValue context.Context - return nullValue -} diff --git a/server/legacy/lyft/aws/sns/mocks/matchers/slice_of_byte.go b/server/legacy/lyft/aws/sns/mocks/matchers/slice_of_byte.go deleted file mode 100644 index 951531345..000000000 --- a/server/legacy/lyft/aws/sns/mocks/matchers/slice_of_byte.go +++ /dev/null @@ -1,31 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "github.com/petergtz/pegomock" - "reflect" -) - -func AnySliceOfByte() []byte { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]byte))(nil)).Elem())) - var nullValue []byte - return nullValue -} - -func EqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func NotEqSliceOfByte(value []byte) []byte { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue []byte - return nullValue -} - -func SliceOfByteThat(matcher pegomock.ArgumentMatcher) []byte { - pegomock.RegisterMatcher(matcher) - var nullValue []byte - return nullValue -} diff --git a/server/legacy/lyft/aws/sns/mocks/mock_writer.go b/server/legacy/lyft/aws/sns/mocks/mock_writer.go deleted file mode 100644 index 81599268f..000000000 --- a/server/legacy/lyft/aws/sns/mocks/mock_writer.go +++ /dev/null @@ -1,151 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/legacy/lyft/aws/sns (interfaces: Writer) - -package mocks - -import ( - context "context" - pegomock "github.com/petergtz/pegomock" - "reflect" - "time" -) - -type MockWriter struct { - fail func(message string, callerSkip ...int) -} - -func NewMockWriter(options ...pegomock.Option) *MockWriter { - mock := &MockWriter{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockWriter) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockWriter) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockWriter) Write(_param0 []byte) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockWriter().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("Write", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWriter) WriteWithContext(ctx context.Context, payload []byte) error { - if mock == nil { - panic("mock must not be nil. 
Use myMock := NewMockWriter().") - } - params := []pegomock.Param{ctx, payload} - result := pegomock.GetGenericMockFrom(mock).Invoke("WriteWithContext", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockWriter) VerifyWasCalledOnce() *VerifierMockWriter { - return &VerifierMockWriter{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockWriter) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockWriter { - return &VerifierMockWriter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockWriter) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockWriter { - return &VerifierMockWriter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockWriter) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockWriter { - return &VerifierMockWriter{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockWriter struct { - mock *MockWriter - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockWriter) Write(_param0 []byte) *MockWriter_Write_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Write", params, verifier.timeout) - return &MockWriter_Write_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWriter_Write_OngoingVerification struct { - mock *MockWriter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWriter_Write_OngoingVerification) GetCapturedArguments() []byte { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockWriter_Write_OngoingVerification) GetAllCapturedArguments() (_param0 [][]byte) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.([]byte) - } - } - return -} - -func (verifier *VerifierMockWriter) WriteWithContext(ctx context.Context, payload []byte) *MockWriter_WriteWithContext_OngoingVerification { - params := []pegomock.Param{ctx, payload} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "WriteWithContext", params, verifier.timeout) - return &MockWriter_WriteWithContext_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockWriter_WriteWithContext_OngoingVerification struct { - mock *MockWriter - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockWriter_WriteWithContext_OngoingVerification) GetCapturedArguments() (context.Context, []byte) { - ctx, payload := c.GetAllCapturedArguments() - return ctx[len(ctx)-1], payload[len(payload)-1] -} - -func (c *MockWriter_WriteWithContext_OngoingVerification) GetAllCapturedArguments() (_param0 []context.Context, _param1 [][]byte) { - params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]context.Context, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(context.Context) - } - _param1 = make([][]byte, len(c.methodInvocations)) - for u, param := range params[1] { - _param1[u] = param.([]byte) - } - } - return -} diff --git a/server/legacy/lyft/aws/sns/writer.go b/server/legacy/lyft/aws/sns/writer.go deleted file mode 100644 index 1082c9226..000000000 --- a/server/legacy/lyft/aws/sns/writer.go +++ /dev/null @@ -1,111 +0,0 @@ -package sns - -import ( - "context" - - "github.com/aws/aws-sdk-go/aws" - "github.com/aws/aws-sdk-go/aws/client" - awsSns "github.com/aws/aws-sdk-go/service/sns" - snsApi "github.com/aws/aws-sdk-go/service/sns/snsiface" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/uber-go/tally/v4" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_writer.go Writer - -type Writer interface { - // Write a message to an SNS topic with the specified string payload - Write([]byte) error - - // WriteWithContext writes a message to an SNS topic with the specific - // string payload and supports context propagation. - // TODO: Actually add ctx propagation support - WriteWithContext(ctx context.Context, payload []byte) error -} - -// IOWriterAdapter allows us to use Writer in place of io.Writer -// Eventually we should just remove Writer and conform our implementations -// to that interface for consistence -type IOWriterAdapter struct { - Writer Writer -} - -func (w *IOWriterAdapter) Write(b []byte) (int, error) { - err := w.Writer.Write(b) - - if err != nil { - return 0, err - } - - return len(b), nil -} - -func NewNoopWriter() Writer { - return &noopWriter{} -} - -// NewWriterWithStats returns a new instance of Writer that will connect to the specifed -// sns topic using the specified session -func NewWriterWithStats( - session client.ConfigProvider, - topicArn string, - scope tally.Scope, -) Writer { - return &writerWithStats{ - scope: scope, - Writer: &writer{ - client: awsSns.New(session), - topicArn: aws.String(topicArn), - }, - } -} - -type writer struct { - client snsApi.SNSAPI - topicArn *string -} - -func (w *writer) Write(payload []byte) error { - _, err := w.client.Publish(&awsSns.PublishInput{ - Message: aws.String(string(payload)), - TopicArn: w.topicArn, - }) - return err -} - -func (w *writer) WriteWithContext(_ context.Context, payload []byte) error { - return w.Write(payload) -} - -// writerWithStats decorator to track writing to sns topic -type writerWithStats struct { - Writer - scope tally.Scope -} - -func (w *writerWithStats) Write(payload []byte) error { - executionTime := w.scope.Timer(metrics.ExecutionTimeMetric).Start() - defer executionTime.Stop() - - if err := w.Writer.Write(payload); err != nil { - w.scope.Counter(metrics.ExecutionErrorMetric).Inc(1) - return err - } - - w.scope.Counter(metrics.ExecutionSuccessMetric).Inc(1) - return nil -} - -func (w *writerWithStats) WriteWithContext(_ context.Context, payload []byte) error { - return w.Write(payload) -} - -type noopWriter struct{} - -func (n *noopWriter) Write(payload []byte) error { - return nil -} - -func (n *noopWriter) WriteWithContext(_ context.Context, payload []byte) error { - return n.Write(payload) -} diff --git a/server/legacy/lyft/aws/sqs/message.go b/server/legacy/lyft/aws/sqs/message.go deleted file mode 100644 index 5dceb15cf..000000000 --- 
a/server/legacy/lyft/aws/sqs/message.go +++ /dev/null @@ -1,75 +0,0 @@ -package sqs - -import ( - "bufio" - "bytes" - "net/http" - - "github.com/aws/aws-sdk-go-v2/service/sqs/types" - "github.com/pkg/errors" - "github.com/uber-go/tally/v4" -) - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_sqs_message_handler.go MessageProcessor -type MessageProcessor interface { - ProcessMessage(types.Message) error -} - -//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_vcs_post_handler.go VCSPostHandler -type VCSPostHandler interface { - Post(w http.ResponseWriter, r *http.Request) -} - -type VCSEventMessageProcessor struct { - PostHandler VCSPostHandler -} - -func (p *VCSEventMessageProcessor) ProcessMessage(msg types.Message) error { - if msg.Body == nil { - return errors.New("message received from sqs has no body") - } - - buffer := bytes.NewBufferString(*msg.Body) - buf := bufio.NewReader(buffer) - req, err := http.ReadRequest(buf) - if err != nil { - return errors.Wrap(err, "reading bytes from sqs into http request") - } - - // using a no-op writer since we shouldn't send response back in worker mode - p.PostHandler.Post(&NoOpResponseWriter{}, req) - return nil -} - -type VCSEventMessageProcessorStats struct { - Scope tally.Scope - VCSEventMessageProcessor -} - -func (s *VCSEventMessageProcessorStats) ProcessMessage(msg types.Message) error { - successCount := s.Scope.Counter(Success) - errorCount := s.Scope.Counter(Error) - - timer := s.Scope.Timer(Latency) - span := timer.Start() - defer span.Stop() - - if err := s.VCSEventMessageProcessor.ProcessMessage(msg); err != nil { - errorCount.Inc(1) - return err - } - successCount.Inc(1) - return nil -} - -type NoOpResponseWriter struct{} - -func (n *NoOpResponseWriter) Header() http.Header { - return nil -} - -func (n *NoOpResponseWriter) Write([]byte) (int, error) { - return 0, nil -} - -func (n *NoOpResponseWriter) WriteHeader(statusCode int) {} diff --git a/server/legacy/lyft/aws/sqs/message_test.go b/server/legacy/lyft/aws/sqs/message_test.go deleted file mode 100644 index 3f30f88bb..000000000 --- a/server/legacy/lyft/aws/sqs/message_test.go +++ /dev/null @@ -1,72 +0,0 @@ -package sqs_test - -import ( - "bytes" - "net/http" - "net/url" - - "github.com/aws/aws-sdk-go-v2/aws" - "github.com/aws/aws-sdk-go-v2/service/sqs/types" - . "github.com/petergtz/pegomock" - controller_mocks "github.com/runatlantis/atlantis/server/legacy/controllers/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/controllers/events/mocks/matchers" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs" - . 
"github.com/runatlantis/atlantis/testing" - "github.com/stretchr/testify/assert" - "github.com/uber-go/tally/v4" - - "testing" -) - -func TestAtlantisMessageHandler_PostSuccess(t *testing.T) { - RegisterMockTestingT(t) - testScope := tally.NewTestScope("test", nil) - req := createExampleRequest(t) - mockPostHandler := controller_mocks.NewMockVCSPostHandler() - handler := &sqs.VCSEventMessageProcessorStats{ - VCSEventMessageProcessor: sqs.VCSEventMessageProcessor{ - PostHandler: mockPostHandler, - }, - Scope: testScope, - } - - err := handler.ProcessMessage(toSqsMessage(t, req)) - assert.NoError(t, err) - mockPostHandler.VerifyWasCalledOnce().Post(matchers.AnyHTTPResponseWriter(), matchers.AnyPtrToHTTPRequest()) - Assert(t, testScope.Snapshot().Counters()["test.success+"].Value() == 1, "message handler was successful") -} - -func TestAtlantisMessageHandler_Error(t *testing.T) { - RegisterMockTestingT(t) - testScope := tally.NewTestScope("test", nil) - mockPostHandler := controller_mocks.NewMockVCSPostHandler() - handler := &sqs.VCSEventMessageProcessorStats{ - VCSEventMessageProcessor: sqs.VCSEventMessageProcessor{ - PostHandler: mockPostHandler, - }, - Scope: testScope, - } - invalidMessage := types.Message{} - err := handler.ProcessMessage(invalidMessage) - assert.Error(t, err) - mockPostHandler.VerifyWasCalled(Never()).Post(matchers.AnyHTTPResponseWriter(), matchers.AnyPtrToHTTPRequest()) - Assert(t, testScope.Snapshot().Counters()["test.error+"].Value() == 1, "message handler was not successful") -} - -func toSqsMessage(t *testing.T, req *http.Request) types.Message { - buffer := bytes.NewBuffer([]byte{}) - err := req.Write(buffer) - assert.NoError(t, err) - return types.Message{ - Body: aws.String(buffer.String()), - } -} - -func createExampleRequest(t *testing.T) *http.Request { - url, err := url.Parse("http://www.atlantis.com") - assert.NoError(t, err) - req := &http.Request{ - URL: url, - } - return req -} diff --git a/server/legacy/lyft/aws/sqs/mocks/matchers/types_message.go b/server/legacy/lyft/aws/sqs/mocks/matchers/types_message.go deleted file mode 100644 index be47cd424..000000000 --- a/server/legacy/lyft/aws/sqs/mocks/matchers/types_message.go +++ /dev/null @@ -1,34 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. -package matchers - -import ( - "reflect" - - "github.com/petergtz/pegomock" - - types "github.com/aws/aws-sdk-go-v2/service/sqs/types" -) - -func AnyTypesMessage() types.Message { - pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(types.Message))(nil)).Elem())) - var nullValue types.Message - return nullValue -} - -func EqTypesMessage(value types.Message) types.Message { - pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value}) - var nullValue types.Message - return nullValue -} - -func NotEqTypesMessage(value types.Message) types.Message { - pegomock.RegisterMatcher(&pegomock.NotEqMatcher{Value: value}) - var nullValue types.Message - return nullValue -} - -func TypesMessageThat(matcher pegomock.ArgumentMatcher) types.Message { - pegomock.RegisterMatcher(matcher) - var nullValue types.Message - return nullValue -} diff --git a/server/legacy/lyft/aws/sqs/mocks/mock_sqs_message_handler.go b/server/legacy/lyft/aws/sqs/mocks/mock_sqs_message_handler.go deleted file mode 100644 index 497e122c7..000000000 --- a/server/legacy/lyft/aws/sqs/mocks/mock_sqs_message_handler.go +++ /dev/null @@ -1,106 +0,0 @@ -// Code generated by pegomock. DO NOT EDIT. 
-// Source: github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs (interfaces: MessageProcessor) - -package mocks - -import ( - "reflect" - "time" - - types "github.com/aws/aws-sdk-go-v2/service/sqs/types" - pegomock "github.com/petergtz/pegomock" -) - -type MockMessageProcessor struct { - fail func(message string, callerSkip ...int) -} - -func NewMockMessageProcessor(options ...pegomock.Option) *MockMessageProcessor { - mock := &MockMessageProcessor{} - for _, option := range options { - option.Apply(mock) - } - return mock -} - -func (mock *MockMessageProcessor) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } -func (mock *MockMessageProcessor) FailHandler() pegomock.FailHandler { return mock.fail } - -func (mock *MockMessageProcessor) ProcessMessage(_param0 types.Message) error { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockMessageProcessor().") - } - params := []pegomock.Param{_param0} - result := pegomock.GetGenericMockFrom(mock).Invoke("ProcessMessage", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].(error) - } - } - return ret0 -} - -func (mock *MockMessageProcessor) VerifyWasCalledOnce() *VerifierMockMessageProcessor { - return &VerifierMockMessageProcessor{ - mock: mock, - invocationCountMatcher: pegomock.Times(1), - } -} - -func (mock *MockMessageProcessor) VerifyWasCalled(invocationCountMatcher pegomock.InvocationCountMatcher) *VerifierMockMessageProcessor { - return &VerifierMockMessageProcessor{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - } -} - -func (mock *MockMessageProcessor) VerifyWasCalledInOrder(invocationCountMatcher pegomock.InvocationCountMatcher, inOrderContext *pegomock.InOrderContext) *VerifierMockMessageProcessor { - return &VerifierMockMessageProcessor{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - inOrderContext: inOrderContext, - } -} - -func (mock *MockMessageProcessor) VerifyWasCalledEventually(invocationCountMatcher pegomock.InvocationCountMatcher, timeout time.Duration) *VerifierMockMessageProcessor { - return &VerifierMockMessageProcessor{ - mock: mock, - invocationCountMatcher: invocationCountMatcher, - timeout: timeout, - } -} - -type VerifierMockMessageProcessor struct { - mock *MockMessageProcessor - invocationCountMatcher pegomock.InvocationCountMatcher - inOrderContext *pegomock.InOrderContext - timeout time.Duration -} - -func (verifier *VerifierMockMessageProcessor) ProcessMessage(_param0 types.Message) *MockMessageProcessor_ProcessMessage_OngoingVerification { - params := []pegomock.Param{_param0} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ProcessMessage", params, verifier.timeout) - return &MockMessageProcessor_ProcessMessage_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockMessageProcessor_ProcessMessage_OngoingVerification struct { - mock *MockMessageProcessor - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockMessageProcessor_ProcessMessage_OngoingVerification) GetCapturedArguments() types.Message { - _param0 := c.GetAllCapturedArguments() - return _param0[len(_param0)-1] -} - -func (c *MockMessageProcessor_ProcessMessage_OngoingVerification) GetAllCapturedArguments() (_param0 []types.Message) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = 
make([]types.Message, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(types.Message) - } - } - return -} diff --git a/server/legacy/lyft/aws/sqs/queue.go b/server/legacy/lyft/aws/sqs/queue.go deleted file mode 100644 index 4191d10a3..000000000 --- a/server/legacy/lyft/aws/sqs/queue.go +++ /dev/null @@ -1,63 +0,0 @@ -package sqs - -import ( - "context" - "errors" - "fmt" - - "github.com/aws/aws-sdk-go-v2/service/sqs" - "github.com/uber-go/tally/v4" -) - -// Queue mirrors a strict set of AWS SQS Interface -type Queue interface { - ReceiveMessage(ctx context.Context, req *sqs.ReceiveMessageInput, optFns ...func(*sqs.Options)) (*sqs.ReceiveMessageOutput, error) - DeleteMessage(ctx context.Context, req *sqs.DeleteMessageInput, optFns ...func(*sqs.Options)) (*sqs.DeleteMessageOutput, error) -} - -// QueueWithStats proxies request to the underlying queue and wraps it with metrics -// and error handling. -type QueueWithStats struct { - Queue - Scope tally.Scope - QueueURL string -} - -func (q *QueueWithStats) ReceiveMessage(ctx context.Context, req *sqs.ReceiveMessageInput, optFns ...func(*sqs.Options)) (*sqs.ReceiveMessageOutput, error) { - scope := q.Scope.SubScope(ReceiveMessageMetricName) - - timer := scope.Timer(Latency).Start() - defer timer.Stop() - - successCount := scope.Counter(Success) - errorCount := scope.Counter(Error) - - response, err := q.Queue.ReceiveMessage(ctx, req, optFns...) - // only consider it a failure if the error isn't due to a context cancellation - if err != nil && !errors.Is(err, context.Canceled) { - errorCount.Inc(1) - return response, fmt.Errorf("receiving messages from queue: %s: %w", q.QueueURL, err) - } - - successCount.Inc(1) - return response, err -} - -func (q *QueueWithStats) DeleteMessage(ctx context.Context, req *sqs.DeleteMessageInput, optFns ...func(*sqs.Options)) (*sqs.DeleteMessageOutput, error) { - scope := q.Scope.SubScope(DeleteMessageMetricName) - - timer := scope.Timer(Latency).Start() - defer timer.Stop() - - successCount := scope.Counter(Success) - errorCount := scope.Counter(Error) - - response, err := q.Queue.DeleteMessage(ctx, req, optFns...) 
- if err != nil { - errorCount.Inc(1) - return response, fmt.Errorf("deleting messages from queue: %s, receipt handle: %s: %w", q.QueueURL, *req.ReceiptHandle, err) - } - - successCount.Inc(1) - return response, err -} diff --git a/server/legacy/lyft/aws/sqs/worker.go b/server/legacy/lyft/aws/sqs/worker.go deleted file mode 100644 index 4b46e31f5..000000000 --- a/server/legacy/lyft/aws/sqs/worker.go +++ /dev/null @@ -1,117 +0,0 @@ -package sqs - -import ( - "context" - "sync" - - "github.com/aws/aws-sdk-go-v2/config" - "github.com/aws/aws-sdk-go-v2/service/sqs" - "github.com/aws/aws-sdk-go-v2/service/sqs/types" - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" - "github.com/uber-go/tally/v4" -) - -const ( - ProcessMessageMetricName = "process" - ReceiveMessageMetricName = "receive" - DeleteMessageMetricName = "delete" - - Latency = "latency" - Success = "success" - Error = "error" -) - -type Worker struct { - Queue Queue - QueueURL string - MessageProcessor MessageProcessor - Logger logging.Logger -} - -func NewGatewaySQSWorker(ctx context.Context, scope tally.Scope, logger logging.Logger, queueURL string, postHandler VCSPostHandler) (*Worker, error) { - cfg, err := config.LoadDefaultConfig(ctx) - if err != nil { - return nil, errors.Wrap(err, "error loading aws config for sqs worker") - } - scope = scope.SubScope("aws.sqs.msg") - sqsQueueWrapper := &QueueWithStats{ - Queue: sqs.NewFromConfig(cfg), - Scope: scope, - QueueURL: queueURL, - } - - handler := &VCSEventMessageProcessorStats{ - VCSEventMessageProcessor: VCSEventMessageProcessor{ - PostHandler: postHandler, - }, - Scope: scope.SubScope(ProcessMessageMetricName), - } - - return &Worker{ - Queue: sqsQueueWrapper, - QueueURL: queueURL, - MessageProcessor: handler, - Logger: logger, - }, nil -} - -func (w *Worker) Work(ctx context.Context) { - messages := make(chan types.Message) - // Used to synchronize stopping message retrieval and processing - var wg sync.WaitGroup - wg.Add(1) - go func() { - defer wg.Done() - w.Logger.InfoContext(ctx, "start processing sqs messages") - w.processMessage(ctx, messages) - }() - request := &sqs.ReceiveMessageInput{ - QueueUrl: &w.QueueURL, - MaxNumberOfMessages: 10, //max number of batch-able messages - WaitTimeSeconds: 20, //max duration long polling - } - w.Logger.InfoContext(ctx, "start receiving sqs messages") - w.receiveMessages(ctx, messages, request) - wg.Wait() -} - -func (w *Worker) receiveMessages(ctx context.Context, messages chan types.Message, request *sqs.ReceiveMessageInput) { - for { - select { - case <-ctx.Done(): - close(messages) - w.Logger.InfoContext(ctx, "closed sqs messages channel") - return - default: - response, err := w.Queue.ReceiveMessage(ctx, request) - if err != nil { - w.Logger.WarnContext(ctx, "unable to receive sqs message", map[string]interface{}{"err": err}) - continue - } - for _, message := range response.Messages { - messages <- message - } - } - } -} - -func (w *Worker) processMessage(ctx context.Context, messages chan types.Message) { - // VisibilityTimeout is 30s, ideally enough time to "processMessage" < 10 messages (i.e. 
spin up goroutine for each) - for message := range messages { - err := w.MessageProcessor.ProcessMessage(message) - if err != nil { - w.Logger.ErrorContext(ctx, "unable to process sqs message", map[string]interface{}{"err": err}) - continue - } - - // Since we've successfully processed the message, let's go ahead and delete it from the queue - _, err = w.Queue.DeleteMessage(ctx, &sqs.DeleteMessageInput{ - QueueUrl: &w.QueueURL, - ReceiptHandle: message.ReceiptHandle, - }) - if err != nil { - w.Logger.WarnContext(ctx, "unable to delete processed sqs message", map[string]interface{}{"err": err}) - } - } -} diff --git a/server/legacy/lyft/aws/sqs/worker_test.go b/server/legacy/lyft/aws/sqs/worker_test.go deleted file mode 100644 index c44672fa5..000000000 --- a/server/legacy/lyft/aws/sqs/worker_test.go +++ /dev/null @@ -1,207 +0,0 @@ -package sqs_test - -import ( - "github.com/aws/aws-sdk-go-v2/aws" - awssqs "github.com/aws/aws-sdk-go-v2/service/sqs" - "github.com/aws/aws-sdk-go-v2/service/sqs/types" - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs/mocks" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs/mocks/matchers" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" - "github.com/uber-go/tally/v4" - - "context" - "errors" - "sync" - "testing" - "time" -) - -type testQueue struct { - receiveError error - - // represents an underlying queue with messages. - // ReceiveMessage retrieves these messages while - // DeleteMessage will remove items from this list. - // Note: This is not threadsafe, tests should only have one thread - // capable of mutating this. - messages []types.Message - - // This should be called during ReceiveMessage so that - // future calls cannot be made which therefore ends the worker. - cancel context.CancelFunc -} - -func (t *testQueue) ReceiveMessage(ctx context.Context, req *awssqs.ReceiveMessageInput, optFns ...func(*awssqs.Options)) (*awssqs.ReceiveMessageOutput, error) { - t.cancel() - if t.receiveError != nil { - return nil, t.receiveError - } - - return &awssqs.ReceiveMessageOutput{Messages: t.messages}, nil -} - -func (t *testQueue) DeleteMessage(ctx context.Context, req *awssqs.DeleteMessageInput, optFns ...func(*awssqs.Options)) (*awssqs.DeleteMessageOutput, error) { - var prunedMsgs []types.Message - - // remove deleted message from array. 
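Editorial aside (not part of the deleted files): a minimal sketch of how the Worker above was intended to be run, assuming the sqs package API shown in this diff. The queue URL is a placeholder and the tally no-op scope stands in for real metrics.

package workerexample

import (
	"context"
	"os/signal"
	"syscall"

	"github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs"
	"github.com/runatlantis/atlantis/server/logging"
	"github.com/uber-go/tally/v4"
)

// runSQSWorker wires up the gateway worker and blocks until the process is signalled.
// Work long-polls ReceiveMessage in the calling goroutine and hands each message to a
// processing goroutine, which deletes the message from the queue on success.
func runSQSWorker(logger logging.Logger, postHandler sqs.VCSPostHandler) error {
	// Cancelling the context stops message retrieval; Work returns once the
	// processing goroutine has drained the messages channel.
	ctx, stop := signal.NotifyContext(context.Background(), syscall.SIGINT, syscall.SIGTERM)
	defer stop()

	worker, err := sqs.NewGatewaySQSWorker(ctx, tally.NoopScope, logger, "https://sqs.us-west-2.amazonaws.com/000000000000/atlantis-events", postHandler)
	if err != nil {
		return err
	}

	worker.Work(ctx) // blocks until ctx is cancelled
	return nil
}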
- for _, msg := range t.messages { - if msg.ReceiptHandle == req.ReceiptHandle { - continue - } - prunedMsgs = append(prunedMsgs, msg) - } - - t.messages = prunedMsgs - return &awssqs.DeleteMessageOutput{}, nil -} - -func TestWorker_Success(t *testing.T) { - RegisterMockTestingT(t) - ctx, cancelFunc := context.WithCancel(context.Background()) - - var wg sync.WaitGroup - testScope := tally.NewTestScope("test", nil) - - expectedMessage := types.Message{ - Body: aws.String("body"), - ReceiptHandle: aws.String("receipt_handle"), - MessageId: aws.String("message_id"), - } - tq := &testQueue{ - messages: []types.Message{ - expectedMessage, - }, - cancel: cancelFunc, - } - queue := &sqs.QueueWithStats{ - Queue: tq, - Scope: testScope, - QueueURL: "testUrl", - } - handler := mocks.NewMockMessageProcessor() - When(handler.ProcessMessage(matchers.AnyTypesMessage())).ThenReturn(nil) - worker := &sqs.Worker{ - Queue: queue, - QueueURL: "testUrl", - MessageProcessor: handler, - Logger: logging.NewNoopCtxLogger(t), - } - - wg.Add(1) - go func() { - worker.Work(ctx) - wg.Done() - }() - - // wait for listen to complete or timeout. - assertCompletes(t, &wg, time.Second) - Assert(t, testScope.Snapshot().Counters()["test.receive.success+"].Value() == 1, "should have received message") - Assert(t, testScope.Snapshot().Counters()["test.delete.success+"].Value() == 1, "should have deleted message") - Assert(t, len(tq.messages) == 0, "should have processed all messages") - handler.VerifyWasCalledOnce().ProcessMessage(matchers.AnyTypesMessage()) -} - -func TestWorker_Error(t *testing.T) { - RegisterMockTestingT(t) - ctx, cancelFunc := context.WithCancel(context.Background()) - - var wg sync.WaitGroup - testScope := tally.NewTestScope("test", nil) - - queue := &sqs.QueueWithStats{ - Queue: &testQueue{ - receiveError: errors.New("reading messages off of SQS queue"), - cancel: cancelFunc, - }, - Scope: testScope, - QueueURL: "foo", - } - handler := mocks.NewMockMessageProcessor() - When(handler.ProcessMessage(matchers.AnyTypesMessage())).ThenReturn(nil) - worker := &sqs.Worker{ - Queue: queue, - QueueURL: "testUrl", - MessageProcessor: handler, - Logger: logging.NewNoopCtxLogger(t), - } - - wg.Add(1) - go func() { - worker.Work(ctx) - wg.Done() - }() - - // wait for listen to complete or timeout. - assertCompletes(t, &wg, time.Second) - Assert(t, testScope.Snapshot().Counters()["test.receive.error+"].Value() == 1, "should have not received message") - handler.VerifyWasCalled(Never()).ProcessMessage(matchers.AnyTypesMessage()) -} - -func TestWorker_HandlerError(t *testing.T) { - RegisterMockTestingT(t) - ctx, cancelFunc := context.WithCancel(context.Background()) - - var wg sync.WaitGroup - testScope := tally.NewTestScope("test", nil) - - expectedMessage := types.Message{ - Body: aws.String("body"), - ReceiptHandle: aws.String("receipt_handle"), - MessageId: aws.String("message_id"), - } - tq := &testQueue{ - messages: []types.Message{ - expectedMessage, - }, - cancel: cancelFunc, - } - - queue := &sqs.QueueWithStats{ - Queue: tq, - Scope: testScope, - QueueURL: "foo", - } - handler := mocks.NewMockMessageProcessor() - When(handler.ProcessMessage(matchers.AnyTypesMessage())).ThenReturn(errors.New("unable to process msg")) - worker := &sqs.Worker{ - Queue: queue, - QueueURL: "testUrl", - MessageProcessor: handler, - Logger: logging.NewNoopCtxLogger(t), - } - - wg.Add(1) - go func() { - worker.Work(ctx) - wg.Done() - }() - - // wait for listen to complete or timeout. 
- assertCompletes(t, &wg, time.Second) - Assert(t, testScope.Snapshot().Counters()["test.receive.success+"].Value() == 1, "should have received message") - Assert(t, len(tq.messages) == 1, "should have not successfully processed message") - handler.VerifyWasCalled(Once()).ProcessMessage(matchers.AnyTypesMessage()) -} - -// assertCompletes places a timeout on a sync.WaitGroup and fails if the -// groups doesn't complete before the timeout occurs -func assertCompletes(t *testing.T, waitGroup *sync.WaitGroup, timeout time.Duration) { - Assert(t, !timedOut(waitGroup, timeout), "wait group timed out after %s", timeout) -} - -func timedOut(waitGroup *sync.WaitGroup, timeout time.Duration) bool { - c := make(chan struct{}) - go func() { - defer close(c) - waitGroup.Wait() - }() - select { - case <-c: - return false - case <-time.After(timeout): - return true - } -} diff --git a/server/legacy/lyft/command/feature_runner.go b/server/legacy/lyft/command/feature_runner.go deleted file mode 100644 index 69d5122df..000000000 --- a/server/legacy/lyft/command/feature_runner.go +++ /dev/null @@ -1,135 +0,0 @@ -package command - -import ( - "fmt" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - "github.com/runatlantis/atlantis/server/neptune/template" -) - -type Commenter interface { - CreateComment(repo models.Repo, pullNum int, comment string, command string) error -} - -type LegacyApplyCommentInput struct{} - -type PlatformModeRunner struct { - command.Runner - Allocator feature.Allocator - Logger logging.Logger - Builder events.ProjectApplyCommandBuilder - TemplateLoader template.Loader[LegacyApplyCommentInput] - VCSClient Commenter -} - -func (a *PlatformModeRunner) Run(ctx *command.Context, cmd *command.Comment) { - if cmd.Name != command.Apply { - a.Runner.Run(ctx, cmd) - return - } - - shouldAllocate, err := a.Allocator.ShouldAllocate(feature.PlatformMode, feature.FeatureContext{RepoName: ctx.HeadRepo.FullName}) - if err != nil { - a.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to allocate for feature: %s, error: %s", feature.PlatformMode, err)) - } - - // if this isn't allocated don't worry about the rest - if !shouldAllocate { - a.Runner.Run(ctx, cmd) - return - } - - // now let's determine whether the repo is configured for platform mode by building commands - var projectCmds []command.ProjectContext - projectCmds, err = a.Builder.BuildApplyCommands(ctx, cmd) - if err != nil { - a.Logger.ErrorContext(ctx.RequestCtx, err.Error()) - return - } - - // is this possible? Not sure, let's be safe tho and just bail into the delegate - if len(projectCmds) == 0 { - a.Logger.WarnContext(ctx.RequestCtx, "no project commands. unable to determine workflow mode type") - a.Runner.Run(ctx, cmd) - return - } - - // at this point we've either commented about this being a legacy apply or not, so let's just proceed with - // the run now. - a.Runner.Run(ctx, cmd) -} - -// DefaultProjectCommandRunner implements ProjectCommandRunner. -type PlatformModeProjectRunner struct { //create object and test - PlatformModeRunner events.ProjectCommandRunner - PrModeRunner events.ProjectCommandRunner - Allocator feature.Allocator - Logger logging.Logger -} - -// Plan runs terraform plan for the project described by ctx. 
-func (p *PlatformModeProjectRunner) Plan(ctx command.ProjectContext) command.ProjectResult { - shouldAllocate, err := p.Allocator.ShouldAllocate(feature.PlatformMode, feature.FeatureContext{RepoName: ctx.HeadRepo.FullName}) - if err != nil { - p.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to allocate for feature: %s, error: %s", feature.PlatformMode, err)) - } - - if shouldAllocate && (ctx.WorkflowModeType == valid.PlatformWorkflowMode) { - return p.PlatformModeRunner.Plan(ctx) - } - - return p.PrModeRunner.Plan(ctx) -} - -// PolicyCheck evaluates policies defined with Rego for the project described by ctx. -func (p *PlatformModeProjectRunner) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - shouldAllocate, err := p.Allocator.ShouldAllocate(feature.PlatformMode, feature.FeatureContext{RepoName: ctx.HeadRepo.FullName}) - if err != nil { - p.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to allocate for feature: %s, error: %s", feature.PlatformMode, err)) - } - - if shouldAllocate && (ctx.WorkflowModeType == valid.PlatformWorkflowMode) { - return p.PlatformModeRunner.PolicyCheck(ctx) - } - - return p.PrModeRunner.PolicyCheck(ctx) -} - -// Apply runs terraform apply for the project described by ctx. -func (p *PlatformModeProjectRunner) Apply(ctx command.ProjectContext) command.ProjectResult { - shouldAllocate, err := p.Allocator.ShouldAllocate(feature.PlatformMode, feature.FeatureContext{RepoName: ctx.HeadRepo.FullName}) - if err != nil { - p.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to allocate for feature: %s, error: %s", feature.PlatformMode, err)) - } - - if shouldAllocate && (ctx.WorkflowModeType == valid.PlatformWorkflowMode) { - return command.ProjectResult{ - Command: command.Apply, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - StatusID: ctx.StatusID, - ApplySuccess: "atlantis apply is disabled for this project. Please track the deployment when the PR is merged. 
", - } - } - - return p.PrModeRunner.Apply(ctx) -} - -func (p *PlatformModeProjectRunner) Version(ctx command.ProjectContext) command.ProjectResult { - shouldAllocate, err := p.Allocator.ShouldAllocate(feature.PlatformMode, feature.FeatureContext{RepoName: ctx.HeadRepo.FullName}) - if err != nil { - p.Logger.ErrorContext(ctx.RequestCtx, fmt.Sprintf("unable to allocate for feature: %s, error: %s", feature.PlatformMode, err)) - } - - if shouldAllocate && (ctx.WorkflowModeType == valid.PlatformWorkflowMode) { - return p.PlatformModeRunner.Version(ctx) - } - - return p.PrModeRunner.Version(ctx) -} diff --git a/server/legacy/lyft/command/feature_runner_test.go b/server/legacy/lyft/command/feature_runner_test.go deleted file mode 100644 index abc69ef58..000000000 --- a/server/legacy/lyft/command/feature_runner_test.go +++ /dev/null @@ -1,647 +0,0 @@ -package command_test - -import ( - "context" - "testing" - - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - lyftCommand "github.com/runatlantis/atlantis/server/legacy/lyft/command" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - "github.com/runatlantis/atlantis/server/neptune/template" - "github.com/stretchr/testify/assert" -) - -type testAllocator struct { - expectedFeatureName feature.Name - expectedCtx feature.FeatureContext - expectedT *testing.T - - expectedResult bool - expectedErr error -} - -func (t *testAllocator) ShouldAllocate(name feature.Name, ctx feature.FeatureContext) (bool, error) { - assert.Equal(t.expectedT, t.expectedFeatureName, name) - assert.Equal(t.expectedT, t.expectedCtx, ctx) - - return t.expectedResult, t.expectedErr -} - -type testRunner struct { - expectedPlanResult command.ProjectResult - expectedPolicyCheckResult command.ProjectResult - expectedApplyResult command.ProjectResult - expectedApprovePoliciesResult command.ProjectResult - expectedVersionResult command.ProjectResult -} - -// Plan runs terraform plan for the project described by ctx. -func (r *testRunner) Plan(ctx command.ProjectContext) command.ProjectResult { - return r.expectedPlanResult -} - -// PolicyCheck evaluates policies defined with Rego for the project described by ctx. -func (r *testRunner) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - return r.expectedPolicyCheckResult -} - -// Apply runs terraform apply for the project described by ctx. 
-func (r *testRunner) Apply(ctx command.ProjectContext) command.ProjectResult { - return r.expectedApplyResult -} - -func (r *testRunner) ApprovePolicies(ctx command.ProjectContext) command.ProjectResult { - return r.expectedApprovePoliciesResult -} - -func (r *testRunner) Version(ctx command.ProjectContext) command.ProjectResult { - return r.expectedVersionResult -} - -type testCMDRunner struct { - expectedCmd *command.Comment - t *testing.T - called bool -} - -func (r *testCMDRunner) Run(ctx *command.Context, cmd *command.Comment) { - r.called = true - assert.Equal(r.t, r.expectedCmd, cmd) -} - -type TestBuilder struct { - Type valid.WorkflowModeType - called bool -} - -func (b *TestBuilder) BuildApplyCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - b.called = true - return []command.ProjectContext{ - { - WorkflowModeType: b.Type, - }, - }, nil -} - -type TestMultiBuilder struct { - called bool -} - -func (b *TestMultiBuilder) BuildApplyCommands(ctx *command.Context, comment *command.Comment) ([]command.ProjectContext, error) { - b.called = true - return []command.ProjectContext{ - { - WorkflowModeType: valid.PlatformWorkflowMode, - }, - { - WorkflowModeType: valid.DefaultWorkflowMode, - }, - }, nil -} - -type TestCommenter struct { - expectedComment string - expectedPullNum int - expectedRepo models.Repo - expectedCommand string - expectedT *testing.T - - called bool -} - -func (c *TestCommenter) CreateComment(repo models.Repo, pullNum int, comment string, command string) error { - c.called = true - assert.Equal(c.expectedT, c.expectedComment, comment) - assert.Equal(c.expectedT, c.expectedPullNum, pullNum) - assert.Equal(c.expectedT, c.expectedRepo, repo) - assert.Equal(c.expectedT, c.expectedCommand, command) - - return nil -} - -func TestPlatformModeRunner_allocatesButNotPlatformMode(t *testing.T) { - ctx := &command.Context{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "owner/repo", - }, - Pull: models.PullRequest{ - Num: 1, - BaseRepo: models.Repo{ - FullName: "owner/base", - }, - }, - } - cmd := &command.Comment{ - Workspace: "hi", - } - - commenter := &TestCommenter{ - expectedT: t, - expectedComment: "Platform mode does not support legacy apply commands. Please merge your PR to apply the changes. 
", - expectedPullNum: 1, - expectedRepo: ctx.Pull.BaseRepo, - } - - builder := &TestBuilder{ - Type: valid.DefaultWorkflowMode, - } - runner := &testCMDRunner{ - t: t, - expectedCmd: cmd, - } - - subject := &lyftCommand.PlatformModeRunner{ - Allocator: &testAllocator{ - expectedFeatureName: feature.PlatformMode, - expectedT: t, - expectedCtx: feature.FeatureContext{RepoName: "owner/repo"}, - expectedResult: true, - }, - Logger: logging.NewNoopCtxLogger(t), - Builder: builder, - TemplateLoader: template.Loader[lyftCommand.LegacyApplyCommentInput]{ - GlobalCfg: valid.GlobalCfg{}, - }, - VCSClient: commenter, - Runner: runner, - } - - subject.Run(ctx, cmd) - - assert.True(t, runner.called) - assert.True(t, builder.called) - assert.False(t, commenter.called) -} - -func TestPlatformModeRunner_allocatesButPartialPlatformMode(t *testing.T) { - ctx := &command.Context{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "owner/repo", - }, - Pull: models.PullRequest{ - Num: 1, - BaseRepo: models.Repo{ - FullName: "owner/base", - }, - }, - } - cmd := &command.Comment{ - Workspace: "hi", - } - - commenter := &TestCommenter{ - expectedT: t, - expectedComment: "Platform mode does not support legacy apply commands. Please merge your PR to apply the changes. ", - expectedPullNum: 1, - expectedRepo: ctx.Pull.BaseRepo, - } - - builder := &TestMultiBuilder{} - runner := &testCMDRunner{ - t: t, - expectedCmd: cmd, - } - - subject := &lyftCommand.PlatformModeRunner{ - Allocator: &testAllocator{ - expectedFeatureName: feature.PlatformMode, - expectedT: t, - expectedCtx: feature.FeatureContext{RepoName: "owner/repo"}, - expectedResult: true, - }, - Logger: logging.NewNoopCtxLogger(t), - Builder: builder, - TemplateLoader: template.Loader[lyftCommand.LegacyApplyCommentInput]{ - GlobalCfg: valid.GlobalCfg{}, - }, - VCSClient: commenter, - Runner: runner, - } - - subject.Run(ctx, cmd) - - assert.True(t, runner.called) - assert.True(t, builder.called) - assert.False(t, commenter.called) -} - -func TestPlatformModeRunner_doesntAllocate(t *testing.T) { - ctx := &command.Context{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "owner/repo", - }, - Pull: models.PullRequest{ - Num: 1, - BaseRepo: models.Repo{ - FullName: "owner/base", - }, - }, - } - cmd := &command.Comment{ - Workspace: "hi", - } - - commenter := &TestCommenter{ - expectedT: t, - expectedComment: "Platform mode does not support legacy apply commands. Please merge your PR to apply the changes. 
", - expectedPullNum: 1, - expectedRepo: ctx.Pull.BaseRepo, - } - - builder := &TestBuilder{ - Type: valid.PlatformWorkflowMode, - } - runner := &testCMDRunner{ - t: t, - expectedCmd: cmd, - } - - subject := &lyftCommand.PlatformModeRunner{ - Allocator: &testAllocator{ - expectedFeatureName: feature.PlatformMode, - expectedT: t, - expectedCtx: feature.FeatureContext{RepoName: "owner/repo"}, - expectedResult: false, - }, - Logger: logging.NewNoopCtxLogger(t), - Builder: builder, - TemplateLoader: template.Loader[lyftCommand.LegacyApplyCommentInput]{ - GlobalCfg: valid.GlobalCfg{}, - }, - VCSClient: commenter, - Runner: runner, - } - - subject.Run(ctx, cmd) - - assert.True(t, runner.called) - assert.False(t, builder.called) - assert.False(t, commenter.called) -} - -func TestPlatformModeRunner_success(t *testing.T) { - ctx := &command.Context{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "owner/repo", - }, - Pull: models.PullRequest{ - Num: 1, - BaseRepo: models.Repo{ - FullName: "owner/base", - }, - }, - } - cmd := &command.Comment{ - Workspace: "hi", - } - - builder := &TestBuilder{ - Type: valid.PlatformWorkflowMode, - } - runner := &testCMDRunner{ - t: t, - expectedCmd: cmd, - } - - commenter := &TestCommenter{} - - subject := &lyftCommand.PlatformModeRunner{ - Allocator: &testAllocator{ - expectedFeatureName: feature.PlatformMode, - expectedT: t, - expectedCtx: feature.FeatureContext{RepoName: "owner/repo"}, - expectedResult: true, - }, - Logger: logging.NewNoopCtxLogger(t), - Builder: builder, - TemplateLoader: template.Loader[lyftCommand.LegacyApplyCommentInput]{ - GlobalCfg: valid.GlobalCfg{}, - }, - VCSClient: commenter, - Runner: runner, - } - - subject.Run(ctx, cmd) - - assert.True(t, runner.called) - assert.True(t, builder.called) - assert.False(t, commenter.called) -} - -func TestPlatformModeProjectRunner_plan(t *testing.T) { - expectedResult := command.ProjectResult{ - JobID: "1234y", - } - - cases := []struct { - description string - shouldAllocate bool - workflowModeType valid.WorkflowModeType - platformRunner events.ProjectCommandRunner - prModeRunner events.ProjectCommandRunner - subject lyftCommand.PlatformModeProjectRunner - }{ - { - description: "allocated and platform mode enabled", - shouldAllocate: true, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{ - expectedPlanResult: expectedResult, - }, - prModeRunner: &testRunner{}, - }, - { - description: "allocated and platform mode not enabled", - shouldAllocate: true, - workflowModeType: valid.DefaultWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedPlanResult: expectedResult, - }, - }, - { - description: "not allocated and platform mode enabled", - shouldAllocate: false, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedPlanResult: expectedResult, - }, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - subject := lyftCommand.PlatformModeProjectRunner{ - PlatformModeRunner: c.platformRunner, - PrModeRunner: c.prModeRunner, - Allocator: &testAllocator{ - expectedResult: c.shouldAllocate, - expectedFeatureName: feature.PlatformMode, - expectedCtx: feature.FeatureContext{ - RepoName: "nish/repo", - }, - expectedT: t, - }, - Logger: logging.NewNoopCtxLogger(t), - } - - result := subject.Plan(command.ProjectContext{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "nish/repo", - }, - WorkflowModeType: 
c.workflowModeType, - }) - - assert.Equal(t, expectedResult, result) - }) - } -} - -func TestPlatformModeProjectRunner_policyCheck(t *testing.T) { - expectedResult := command.ProjectResult{ - JobID: "1234y", - } - - cases := []struct { - description string - shouldAllocate bool - workflowModeType valid.WorkflowModeType - platformRunner events.ProjectCommandRunner - prModeRunner events.ProjectCommandRunner - subject lyftCommand.PlatformModeProjectRunner - }{ - { - description: "allocated and platform mode enabled", - shouldAllocate: true, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{ - expectedPolicyCheckResult: expectedResult, - }, - prModeRunner: &testRunner{}, - }, - { - description: "allocated and platform mode not enabled", - shouldAllocate: true, - workflowModeType: valid.DefaultWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedPolicyCheckResult: expectedResult, - }, - }, - { - description: "not allocated and platform mode enabled", - shouldAllocate: false, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedPolicyCheckResult: expectedResult, - }, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - subject := lyftCommand.PlatformModeProjectRunner{ - PlatformModeRunner: c.platformRunner, - PrModeRunner: c.prModeRunner, - Allocator: &testAllocator{ - expectedResult: c.shouldAllocate, - expectedFeatureName: feature.PlatformMode, - expectedCtx: feature.FeatureContext{ - RepoName: "nish/repo", - }, - expectedT: t, - }, - Logger: logging.NewNoopCtxLogger(t), - } - - result := subject.PolicyCheck(command.ProjectContext{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "nish/repo", - }, - WorkflowModeType: c.workflowModeType, - }) - - assert.Equal(t, expectedResult, result) - }) - } -} - -func TestPlatformModeProjectRunner_apply(t *testing.T) { - cases := []struct { - description string - shouldAllocate bool - workflowModeType valid.WorkflowModeType - platformRunner events.ProjectCommandRunner - prModeRunner events.ProjectCommandRunner - subject lyftCommand.PlatformModeProjectRunner - expectedResult command.ProjectResult - }{ - { - description: "allocated and platform mode enabled", - shouldAllocate: true, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{ - expectedApplyResult: command.ProjectResult{ - RepoRelDir: "reldir", - Workspace: "default", - ProjectName: "project", - StatusID: "id", - Command: command.Apply, - ApplySuccess: "atlantis apply is disabled for this project. Please track the deployment when the PR is merged. ", - }, - }, - expectedResult: command.ProjectResult{ - RepoRelDir: "reldir", - Workspace: "default", - ProjectName: "project", - StatusID: "id", - Command: command.Apply, - ApplySuccess: "atlantis apply is disabled for this project. Please track the deployment when the PR is merged. 
", - }, - prModeRunner: &testRunner{}, - }, - { - description: "allocated and platform mode not enabled", - shouldAllocate: true, - workflowModeType: valid.DefaultWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedApplyResult: command.ProjectResult{ - JobID: "1234y", - }, - }, - expectedResult: command.ProjectResult{ - JobID: "1234y", - }, - }, - { - description: "not allocated and platform mode enabled", - shouldAllocate: false, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedApplyResult: command.ProjectResult{ - JobID: "1234y", - }, - }, - expectedResult: command.ProjectResult{ - JobID: "1234y", - }, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - subject := lyftCommand.PlatformModeProjectRunner{ - PlatformModeRunner: c.platformRunner, - PrModeRunner: c.prModeRunner, - Allocator: &testAllocator{ - expectedResult: c.shouldAllocate, - expectedFeatureName: feature.PlatformMode, - expectedCtx: feature.FeatureContext{ - RepoName: "nish/repo", - }, - expectedT: t, - }, - Logger: logging.NewNoopCtxLogger(t), - } - - result := subject.Apply(command.ProjectContext{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "nish/repo", - }, - RepoRelDir: "reldir", - Workspace: "default", - ProjectName: "project", - StatusID: "id", - WorkflowModeType: c.workflowModeType, - }) - - assert.Equal(t, c.expectedResult, result) - }) - } -} - -func TestPlatformModeProjectRunner_version(t *testing.T) { - expectedResult := command.ProjectResult{ - JobID: "1234y", - } - - cases := []struct { - description string - shouldAllocate bool - workflowModeType valid.WorkflowModeType - platformRunner events.ProjectCommandRunner - prModeRunner events.ProjectCommandRunner - subject lyftCommand.PlatformModeProjectRunner - }{ - { - description: "allocated and platform mode enabled", - shouldAllocate: true, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{ - expectedVersionResult: expectedResult, - }, - prModeRunner: &testRunner{}, - }, - { - description: "allocated and platform mode not enabled", - shouldAllocate: true, - workflowModeType: valid.DefaultWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedVersionResult: expectedResult, - }, - }, - { - description: "not allocated and platform mode enabled", - shouldAllocate: false, - workflowModeType: valid.PlatformWorkflowMode, - platformRunner: &testRunner{}, - prModeRunner: &testRunner{ - expectedVersionResult: expectedResult, - }, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - subject := lyftCommand.PlatformModeProjectRunner{ - PlatformModeRunner: c.platformRunner, - PrModeRunner: c.prModeRunner, - Allocator: &testAllocator{ - expectedResult: c.shouldAllocate, - expectedFeatureName: feature.PlatformMode, - expectedCtx: feature.FeatureContext{ - RepoName: "nish/repo", - }, - expectedT: t, - }, - Logger: logging.NewNoopCtxLogger(t), - } - - result := subject.Version(command.ProjectContext{ - RequestCtx: context.Background(), - HeadRepo: models.Repo{ - FullName: "nish/repo", - }, - WorkflowModeType: c.workflowModeType, - }) - - assert.Equal(t, expectedResult, result) - }) - } -} diff --git a/server/legacy/lyft/core/runtime/destroy_plan_step_runner.go b/server/legacy/lyft/core/runtime/destroy_plan_step_runner.go deleted file mode 100644 index 63687ff01..000000000 --- 
a/server/legacy/lyft/core/runtime/destroy_plan_step_runner.go +++ /dev/null @@ -1,27 +0,0 @@ -package runtime - -import ( - "context" - - "github.com/runatlantis/atlantis/server/legacy/events/command" -) - -const Deprecated = "deprecated" -const Destroy = "-destroy" - -type StepRunner interface { - // Run runs the step. - Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) -} - -type DestroyPlanStepRunner struct { - StepRunner -} - -func (d *DestroyPlanStepRunner) Run(ctx context.Context, prjCtx command.ProjectContext, extraArgs []string, path string, envs map[string]string) (string, error) { - // DestroyPlan tag is true when the Terraform client should construct a destroy plan given a repo config. - if prjCtx.Tags[Deprecated] == Destroy { - extraArgs = append(extraArgs, Destroy) - } - return d.StepRunner.Run(ctx, prjCtx, extraArgs, path, envs) -} diff --git a/server/legacy/lyft/core/runtime/destroy_plan_step_runner_test.go b/server/legacy/lyft/core/runtime/destroy_plan_step_runner_test.go deleted file mode 100644 index e30d0e134..000000000 --- a/server/legacy/lyft/core/runtime/destroy_plan_step_runner_test.go +++ /dev/null @@ -1,132 +0,0 @@ -package runtime_test - -import ( - "context" - "fmt" - "os" - "path/filepath" - "testing" - - "github.com/hashicorp/go-version" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform/mocks" - - "github.com/runatlantis/atlantis/server/legacy/events/command" - lyftRuntime "github.com/runatlantis/atlantis/server/legacy/lyft/core/runtime" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - - . "github.com/petergtz/pegomock" - . "github.com/runatlantis/atlantis/testing" -) - -func TestRun_DestroyPlan(t *testing.T) { - RegisterMockTestingT(t) - - // Create the env/workspace.tfvars file. - tmpDir, cleanup := TempDir(t) - defer cleanup() - err := os.MkdirAll(filepath.Join(tmpDir, "env"), 0700) - Ok(t, err) - - cases := []struct { - description string - expPlanArgs []string - tags map[string]string - }{ - { - description: "uses destroy plan", - expPlanArgs: []string{ - "plan", - "-input=false", - "-refresh", - "-out", - fmt.Sprintf("%q", filepath.Join(tmpDir, "workspace.tfplan")), - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "-destroy", - "comment", - "args", - }, - tags: map[string]string{ - lyftRuntime.Deprecated: lyftRuntime.Destroy, - }, - }, - { - description: "no destroy plan", - expPlanArgs: []string{ - "plan", - "-input=false", - "-refresh", - "-out", - fmt.Sprintf("%q", filepath.Join(tmpDir, "workspace.tfplan")), - "-var", - "atlantis_user=\"username\"", - "-var", - "atlantis_repo=\"owner/repo\"", - "-var", - "atlantis_repo_name=\"repo\"", - "-var", - "atlantis_repo_owner=\"owner\"", - "-var", - "atlantis_pull_num=2", - "extra", - "args", - "comment", - "args", - }, - tags: map[string]string{}, - }, - } - - for _, c := range cases { - t.Run(c.description, func(t *testing.T) { - // Using version >= 0.10 here so we don't expect any env commands. 
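Editorial aside (not part of the deleted files): DestroyPlanStepRunner above turns an ordinary plan into a destroy plan purely off a project tag. A sketch of the effect, assuming the lyft runtime package shown in this diff; the wrapped step runner, extra args, and repo path are placeholders.

package destroyexample

import (
	"context"

	"github.com/runatlantis/atlantis/server/legacy/events/command"
	lyftRuntime "github.com/runatlantis/atlantis/server/legacy/lyft/core/runtime"
)

// runDestroyPlan shows the tag-driven branch: when Tags["deprecated"] == "-destroy",
// the decorator appends "-destroy" to extraArgs before delegating, so the wrapped
// plan step produces a destroy plan; without the tag the call passes through unchanged.
func runDestroyPlan(ctx context.Context, inner lyftRuntime.StepRunner, repoDir string) (string, error) {
	runner := &lyftRuntime.DestroyPlanStepRunner{StepRunner: inner}
	prjCtx := command.ProjectContext{
		Tags: map[string]string{lyftRuntime.Deprecated: lyftRuntime.Destroy},
	}
	return runner.Run(ctx, prjCtx, []string{"extra", "args"}, repoDir, nil)
}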
- terraform := mocks.NewMockClient() - tfVersion, _ := version.NewVersion("0.10.0") - logger := logging.NewNoopCtxLogger(t) - planStepRunner := runtime.PlanStepRunner{ - TerraformExecutor: terraform, - DefaultTFVersion: tfVersion, - } - stepRunner := lyftRuntime.DestroyPlanStepRunner{ - StepRunner: &planStepRunner, - } - ctx := context.Background() - prjCtx := command.ProjectContext{ - Log: logger, - Workspace: "workspace", - RepoRelDir: ".", - User: models.User{Username: "username"}, - EscapedCommentArgs: []string{"comment", "args"}, - Pull: models.PullRequest{ - Num: 2, - }, - BaseRepo: models.Repo{ - FullName: "owner/repo", - Owner: "owner", - Name: "repo", - }, - Tags: c.tags, - } - When(terraform.RunCommandWithVersion(ctx, prjCtx, tmpDir, c.expPlanArgs, map[string]string(nil), tfVersion, "workspace")).ThenReturn("output", nil) - - output, err := stepRunner.Run(ctx, prjCtx, []string{"extra", "args"}, tmpDir, map[string]string(nil)) - Ok(t, err) - - // Verify that we next called for the actual - terraform.VerifyWasCalledOnce().RunCommandWithVersion(ctx, prjCtx, tmpDir, c.expPlanArgs, map[string]string(nil), tfVersion, "workspace") - Equals(t, "output", output) - }) - } -} diff --git a/server/legacy/lyft/decorators/audit_project_commands_wrapper.go b/server/legacy/lyft/decorators/audit_project_commands_wrapper.go deleted file mode 100644 index ddcde78ed..000000000 --- a/server/legacy/lyft/decorators/audit_project_commands_wrapper.go +++ /dev/null @@ -1,139 +0,0 @@ -package decorators - -import ( - "encoding/json" - "fmt" - "strconv" - "time" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sns" -) - -// AtlantisJobState represent current state of the job -// Job can be in 3 states: -// - RUNNING - when the job is initiated -// - FAILURE - when the job fails the execution -// - SUCCESS - when the job runs successfully -type AtlantisJobState string - -// AtlantisJobType represent the type of the job -// Currently only apply is supported -type AtlantisJobType string - -const ( - AtlantisJobStateRunning AtlantisJobState = "RUNNING" - AtlantisJobStateSuccess AtlantisJobState = "SUCCESS" - AtlantisJobStateFailure AtlantisJobState = "FAILURE" - - AtlantisApplyJob AtlantisJobType = "APPLY" -) - -// AuditProjectCommandWrapper is a decorator that notifies sns topic -// about the state of the command. 
It is used for auditing purposes -type AuditProjectCommandWrapper struct { - SnsWriter sns.Writer - events.ProjectCommandRunner -} - -func (p *AuditProjectCommandWrapper) Apply(ctx command.ProjectContext) command.ProjectResult { - id := ctx.JobID - startTime := strconv.FormatInt(time.Now().Unix(), 10) - - atlantisJobEvent := &AtlantisJobEvent{ - Version: 1, - ID: id, - RootName: ctx.ProjectName, - JobType: AtlantisApplyJob, - Repository: ctx.BaseRepo.FullName, - Environment: ctx.Tags["environment"], - PullNumber: ctx.Pull.Num, - InitiatingUser: ctx.User.Username, - Project: ctx.Tags["service_name"], - ForceApply: ctx.ForceApply, - StartTime: startTime, - Revision: ctx.Pull.HeadCommit, - } - - if err := p.emit(AtlantisJobStateRunning, atlantisJobEvent); err != nil { - // return an error if we are not able to write to sns - return command.ProjectResult{ - Error: errors.Wrap(err, "emitting atlantis job event"), - } - } - - result := p.ProjectCommandRunner.Apply(ctx) - - if result.Error != nil || result.Failure != "" { - if err := p.emit(AtlantisJobStateFailure, atlantisJobEvent); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("failed to emit atlantis job event %v", err)) - } - - return result - } - - if err := p.emit(AtlantisJobStateSuccess, atlantisJobEvent); err != nil { - ctx.Log.ErrorContext(ctx.RequestCtx, fmt.Sprintf("failed to emit atlantis job event %v", err)) - } - - return result -} - -func (p *AuditProjectCommandWrapper) emit( - state AtlantisJobState, - atlantisJobEvent *AtlantisJobEvent, -) error { - atlantisJobEvent.State = state - - if state == AtlantisJobStateFailure || state == AtlantisJobStateSuccess { - atlantisJobEvent.EndTime = strconv.FormatInt(time.Now().Unix(), 10) - } - - payload, err := atlantisJobEvent.Marshal() - if err != nil { - return errors.Wrap(err, "marshaling atlantis job event") - } - - if err := p.SnsWriter.Write(payload); err != nil { - return errors.Wrap(err, "writing to sns topic") - } - - return nil -} - -// AtlantisJobEvent contains metadata of the state of the AtlantisJobType command -type AtlantisJobEvent struct { - Version int `json:"version"` - ID string `json:"id"` - State AtlantisJobState `json:"state"` - JobType AtlantisJobType `json:"job_type"` - Revision string `json:"revision"` - Repository string `json:"repository"` - PullNumber int `json:"pull_number"` - Environment string `json:"environment"` - InitiatingUser string `json:"initiating_user"` - StartTime string `json:"start_time"` - EndTime string `json:"end_time"` - ForceApply bool `json:"force_apply"` - - // Service name in the manifest.yaml - Project string `json:"project"` - // ProjectName in the atlantis.yaml - RootName string `json:"root_name"` - - // Currently we do not track approvers metadata. - // ORCA-954 will implement this feature - ApprovedBy string `json:"approved_by"` - ApprovedTime string `json:"approved_time"` -} - -func (a *AtlantisJobEvent) Marshal() ([]byte, error) { - eventPayload, err := json.Marshal(a) - if err != nil { - return nil, errors.Wrap(err, "marshaling atlantis job event") - } - - return eventPayload, nil -} diff --git a/server/legacy/lyft/decorators/audit_project_commands_wrapper_test.go b/server/legacy/lyft/decorators/audit_project_commands_wrapper_test.go deleted file mode 100644 index cbc904a53..000000000 --- a/server/legacy/lyft/decorators/audit_project_commands_wrapper_test.go +++ /dev/null @@ -1,122 +0,0 @@ -package decorators_test - -import ( - "encoding/json" - "errors" - "testing" - - . 
"github.com/runatlantis/atlantis/testing" - - . "github.com/petergtz/pegomock" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/mocks" - "github.com/runatlantis/atlantis/server/legacy/events/mocks/matchers" - snsMocks "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sns/mocks" - "github.com/runatlantis/atlantis/server/legacy/lyft/decorators" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" -) - -func TestAuditProjectCommandsWrapper(t *testing.T) { - RegisterMockTestingT(t) - - cases := []struct { - Description string - Success bool - Failure bool - Error bool - }{ - { - Description: "apply success", - Success: true, - }, - { - Description: "apply error", - Error: true, - }, - { - Description: "apply failure", - Failure: true, - }, - } - - for _, c := range cases { - t.Run(c.Description, func(t *testing.T) { - snsMock := snsMocks.NewMockWriter() - projectCmdRunnerMock := mocks.NewMockProjectCommandRunner() - auditPrjCmds := &decorators.AuditProjectCommandWrapper{ - SnsWriter: snsMock, - ProjectCommandRunner: projectCmdRunnerMock, - } - - prjRslt := command.ProjectResult{} - - if c.Error { - prjRslt.Error = errors.New("oh-no") - } - - if c.Failure { - prjRslt.Failure = "oh-no" - } - - logger := logging.NewNoopCtxLogger(t) - - scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") - - ctx := command.ProjectContext{ - Scope: scope, - Log: logger, - Steps: []valid.Step{}, - ProjectName: "test-project", - User: models.User{ - Username: "test-user", - }, - Workspace: "default", - PullReqStatus: models.PullReqStatus{ - ApprovalStatus: models.ApprovalStatus{ - IsApproved: true, - }, - }, - RepoRelDir: ".", - Tags: map[string]string{ - "environment": "production", - "service_name": "test-service", - }, - } - - When(snsMock.Write(matchers.AnySliceOfByte())).ThenReturn(nil) - When(projectCmdRunnerMock.Apply(matchers.AnyModelsProjectCommandContext())).ThenReturn(prjRslt) - - auditPrjCmds.Apply(ctx) - - eventBefore := &decorators.AtlantisJobEvent{} - eventAfter := &decorators.AtlantisJobEvent{} - eventPayload := snsMock.VerifyWasCalled(Twice()).Write(matchers.AnySliceOfByte()).GetAllCapturedArguments() - - err := json.Unmarshal(eventPayload[0], eventBefore) - Ok(t, err) - err = json.Unmarshal(eventPayload[1], eventAfter) - Ok(t, err) - - Equals(t, eventBefore.State, decorators.AtlantisJobStateRunning) - Equals(t, eventBefore.RootName, "test-project") - Equals(t, eventBefore.Environment, "production") - Equals(t, eventBefore.InitiatingUser, "test-user") - Equals(t, eventBefore.Project, "test-service") - Assert(t, eventBefore.EndTime == "", "end time must be empty") - Assert(t, eventBefore.StartTime != "", "start time must be set") - - if c.Success { - Equals(t, eventAfter.State, decorators.AtlantisJobStateSuccess) - } else { - Equals(t, eventAfter.State, decorators.AtlantisJobStateFailure) - } - - Assert(t, eventBefore.StartTime == eventAfter.StartTime, "start time should not change") - Assert(t, eventAfter.EndTime != "", "end time must be set") - Assert(t, eventBefore.ID == eventAfter.ID, "id should not change") - }) - } -} diff --git a/server/legacy/lyft/gateway/events_controller.go b/server/legacy/lyft/gateway/events_controller.go deleted file mode 100644 index 335060a52..000000000 --- a/server/legacy/lyft/gateway/events_controller.go +++ /dev/null @@ -1,212 +0,0 
@@ -package gateway - -import ( - "context" - "net/http" - - "github.com/runatlantis/atlantis/server/neptune/gateway/pr" - - "github.com/palantir/go-githubapp/githubapp" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/events/command" - - "github.com/runatlantis/atlantis/server/vcs/provider/github" - - events_controllers "github.com/runatlantis/atlantis/server/legacy/controllers/events" - "github.com/runatlantis/atlantis/server/legacy/controllers/events/handlers" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/neptune/gateway/config" - "github.com/runatlantis/atlantis/server/neptune/gateway/deploy" - gateway_handlers "github.com/runatlantis/atlantis/server/neptune/gateway/event" - "github.com/runatlantis/atlantis/server/neptune/gateway/requirement" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - "github.com/runatlantis/atlantis/server/neptune/sync" - converters "github.com/runatlantis/atlantis/server/vcs/provider/github/converter" - "github.com/runatlantis/atlantis/server/vcs/provider/github/request" - "github.com/uber-go/tally/v4" - "go.temporal.io/sdk/client" -) - -type scheduler interface { - Schedule(ctx context.Context, f sync.Executor) error -} - -func NewVCSEventsController( - scope tally.Scope, - webhookSecret []byte, - allowDraftPRs bool, - snsWriter gateway_handlers.Writer, - commentParser events.CommentParsing, - repoAllowlistChecker *events.RepoAllowlistChecker, - vcsClient vcs.Client, - logger logging.Logger, - supportedVCSProviders []models.VCSHostType, - repoConverter converters.RepoConverter, - pullConverter converters.PullConverter, - githubClient converters.PullGetter, - featureAllocator feature.Allocator, - syncScheduler scheduler, - asyncScheduler scheduler, - temporalClient client.Client, - rootDeployer *deploy.RootDeployer, - rootConfigBuilder *config.Builder, - deploySignaler *deploy.WorkflowSignaler, - checkRunFetcher *github.CheckRunsFetcher, - vcsStatusUpdater *command.VCSStatusUpdater, - globalCfg valid.GlobalCfg, - commentCreator *github.CommentCreator, - clientCreator githubapp.ClientCreator, - defaultTFVersion string, -) *VCSEventsController { - pullEventSNSProxy := gateway_handlers.NewSNSWorkerProxy( - snsWriter, logger, - ) - legacyHandler := &gateway_handlers.LegacyPullHandler{ - Logger: logger, - WorkerProxy: pullEventSNSProxy, - VCSStatusUpdater: vcsStatusUpdater, - } - prSignaler := &pr.WorkflowSignaler{TemporalClient: temporalClient, DefaultTFVersion: defaultTFVersion} - prRequirementChecker := requirement.NewPRAggregate(globalCfg) - modifiedPullHandler := gateway_handlers.NewModifiedPullHandler(logger, asyncScheduler, rootConfigBuilder, globalCfg, prRequirementChecker, prSignaler, legacyHandler) - closedPullHandler := &gateway_handlers.ClosedPullRequestHandler{ - WorkerProxy: pullEventSNSProxy, - Logger: logger, - PRCloseSignaler: prSignaler, - Scope: scope.SubScope("pull.closed"), - } - - prHandler := handlers.NewPullRequestEventWithEventTypeHandlers( - repoAllowlistChecker, - modifiedPullHandler, - modifiedPullHandler, - closedPullHandler, - ) - - legacyErrorHandler := gateway_handlers.NewLegacyErrorHandler( - commentCreator, - globalCfg, - logger, - featureAllocator, - ) - - neptuneErrorHandler := gateway_handlers.NewNeptuneErrorHandler( - commentCreator, - globalCfg, 
- logger, - featureAllocator, - ) - - teamMemberFetcher := &github.TeamMemberFetcher{ - ClientCreator: clientCreator, - - // Using the policy set org for now, we should probably bundle team and org together in one struct though - Org: globalCfg.PolicySets.Organization, - } - - reviewFetcher := &github.PRReviewFetcher{ - ClientCreator: clientCreator, - } - - requirementChecker := requirement.NewDeployAggregate(globalCfg, teamMemberFetcher, reviewFetcher, checkRunFetcher, logger) - commentHandler := handlers.NewCommentEventWithCommandHandler( - commentParser, - repoAllowlistChecker, - vcsClient, - gateway_handlers.NewCommentEventWorkerProxy( - logger, - snsWriter, - asyncScheduler, - prSignaler, - deploySignaler, - vcsClient, - vcsStatusUpdater, - globalCfg, - rootConfigBuilder, - legacyErrorHandler, - neptuneErrorHandler, - requirementChecker), - logger, - ) - - pushHandler := &gateway_handlers.PushHandler{ - Scheduler: asyncScheduler, - Logger: logger, - RootDeployer: rootDeployer, - } - - checkRunHandler := &gateway_handlers.CheckRunHandler{ - Logger: logger, - RootDeployer: rootDeployer, - SyncScheduler: syncScheduler, - AsyncScheduler: asyncScheduler, - DeploySignaler: deploySignaler, - } - - checkSuiteHandler := &gateway_handlers.CheckSuiteHandler{ - Logger: logger, - Scheduler: asyncScheduler, - RootDeployer: rootDeployer, - } - - pullRequestReviewHandler := &gateway_handlers.PullRequestReviewWorkerProxy{ - Scheduler: asyncScheduler, - SnsWriter: snsWriter, - Logger: logger, - CheckRunFetcher: checkRunFetcher, - WorkflowSignaler: prSignaler, - Scope: scope.SubScope("pull.review"), - RootConfigBuilder: rootConfigBuilder, - GlobalCfg: globalCfg, - } - pullFetcher := &github.PRFetcher{ - ClientCreator: clientCreator, - } - - // lazy map of resolver providers to their resolver - // laziness ensures we only instantiate the providers we support. - providerResolverInitializer := map[models.VCSHostType]func() events_controllers.RequestResolver{ - models.Github: func() events_controllers.RequestResolver { - return request.NewHandler( - logger, - scope, - webhookSecret, - pullFetcher, - commentHandler, - prHandler, - pushHandler, - pullRequestReviewHandler, - checkRunHandler, - checkSuiteHandler, - allowDraftPRs, - repoConverter, - pullConverter, - githubClient, - ) - }, - } - - router := &events_controllers.RequestRouter{ - Resolvers: events_controllers.NewRequestResolvers(providerResolverInitializer, supportedVCSProviders), - Logger: logger, - } - - return &VCSEventsController{ - router: router, - } -} - -// TODO: remove this once event_controllers.VCSEventsController has the same function -// VCSEventsController handles all webhook requests which signify 'events' in the -// VCS host, ex. GitHub. -type VCSEventsController struct { - router *events_controllers.RequestRouter -} - -// Post handles POST webhook requests. 
-func (g *VCSEventsController) Post(w http.ResponseWriter, r *http.Request) { - g.router.Route(w, r) -} diff --git a/server/legacy/lyft/scheduled/executor_service.go b/server/legacy/lyft/scheduled/executor_service.go deleted file mode 100644 index c6cc7931e..000000000 --- a/server/legacy/lyft/scheduled/executor_service.go +++ /dev/null @@ -1,277 +0,0 @@ -package scheduled - -import ( - "context" - "fmt" - "io" - "os" - "os/signal" - "strconv" - "sync" - "syscall" - "text/template" - "time" - - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/models" - "github.com/uber-go/tally/v4" -) - -type ExecutorService struct { - log logging.Logger - - // jobs - garbageCollector JobDefinition - rateLimitPublisher JobDefinition - runtimeStatsPublisher JobDefinition -} - -func NewExecutorService( - workingDirIterator events.WorkDirIterator, - statsScope tally.Scope, - log logging.Logger, - closedPullCleaner events.PullCleaner, - openPullCleaner events.PullCleaner, - githubClient *vcs.GithubClient, -) *ExecutorService { - scheduledScope := statsScope.SubScope("scheduled") - garbageCollector := &GarbageCollector{ - workingDirIterator: workingDirIterator, - stats: scheduledScope.SubScope("garbagecollector"), - log: log, - closedPullCleaner: closedPullCleaner, - openPullCleaner: openPullCleaner, - } - - garbageCollectorJob := JobDefinition{ - Job: garbageCollector, - - Period: 30 * time.Minute, - } - - rateLimitPublisher := &RateLimitStatsPublisher{ - client: githubClient, - stats: scheduledScope.SubScope("ratelimitpublisher"), - log: log, - } - - rateLimitPublisherJob := JobDefinition{ - Job: rateLimitPublisher, - - // since rate limit api doesn't contribute to the rate limit we can call this every minute - Period: 1 * time.Minute, - } - - runtimeStatsPublisher := NewRuntimeStats(scheduledScope) - - runtimeStatsPublisherJob := JobDefinition{ - Job: runtimeStatsPublisher, - Period: 10 * time.Second, - } - - return &ExecutorService{ - log: log, - garbageCollector: garbageCollectorJob, - rateLimitPublisher: rateLimitPublisherJob, - runtimeStatsPublisher: runtimeStatsPublisherJob, - } -} - -type JobDefinition struct { - Job Job - Period time.Duration -} - -func (s *ExecutorService) Run() { - s.log.Info("Scheduled Executor Service started") - - ctx, cancel := context.WithCancel(context.Background()) - - var wg sync.WaitGroup - - s.runScheduledJob(ctx, &wg, s.garbageCollector) - s.runScheduledJob(ctx, &wg, s.rateLimitPublisher) - s.runScheduledJob(ctx, &wg, s.runtimeStatsPublisher) - - interrupt := make(chan os.Signal, 1) - - // Stop on SIGINTs and SIGTERMs. - signal.Notify(interrupt, os.Interrupt, syscall.SIGTERM) - - <-interrupt - - s.log.Warn("Received interrupt. Attempting to Shut down scheduled executor service") - - cancel() - wg.Wait() - - s.log.Warn("All jobs completed, exiting.") -} - -func (s *ExecutorService) runScheduledJob(ctx context.Context, wg *sync.WaitGroup, jd JobDefinition) { - ticker := time.NewTicker(jd.Period) - wg.Add(1) - - go func() { - defer wg.Done() - defer ticker.Stop() - - // Ensure we recover from any panics to keep the jobs isolated. - // Keep the recovery outside the select to ensure that we don't infinitely panic. 
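- // A consequence of recovering at the goroutine level rather than inside the loop is
- // that a panicking job is logged once and its goroutine exits; the job will not run
- // again until the process is restarted.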
- defer func() { - if r := recover(); r != nil { - s.log.Error(fmt.Sprintf("Recovered from panic: %v", r)) - } - }() - - for { - select { - case <-ctx.Done(): - s.log.Warn("Received interrupt, cancelling job") - return - case <-ticker.C: - jd.Job.Run() - } - } - }() -} - -type Job interface { - Run() -} - -type RateLimitStatsPublisher struct { - log logging.Logger - stats tally.Scope - client *vcs.GithubClient -} - -func (r *RateLimitStatsPublisher) Run() { - errCounter := r.stats.Counter(metrics.ExecutionErrorMetric) - rateLimitRemainingCounter := r.stats.Gauge("ratelimitremaining") - - rateLimits, err := r.client.GetRateLimits() - - if err != nil { - errCounter.Inc(1) - return - } - - rateLimitRemainingCounter.Update(float64(rateLimits.GetCore().Remaining)) -} - -var gcStaleClosedPullTemplate = template.Must(template.New("").Parse( - "Pull Request has been closed for 30 days. Atlantis GC has deleted the locks and plans for the following projects and workspaces:\n" + - "{{ range . }}\n" + - "- dir: `{{ .RepoRelDir }}` {{ .Workspaces }}{{ end }}")) - -var gcStaleOpenPullTemplate = template.Must(template.New("").Parse( - "Pull Request has not been updated for 30 days. Atlantis GC has deleted the locks and plans for the following projects and workspaces:\n" + - "{{ range . }}\n" + - "- dir: `{{ .RepoRelDir }}` {{ .Workspaces }}{{ end }}")) - -type GCStalePullTemplate struct { - template *template.Template -} - -func NewGCStaleClosedPull() events.PullCleanupTemplate { - return &GCStalePullTemplate{ - template: gcStaleClosedPullTemplate, - } -} - -func NewGCStaleOpenPull() events.PullCleanupTemplate { - return &GCStalePullTemplate{ - template: gcStaleOpenPullTemplate, - } -} - -func (t *GCStalePullTemplate) Execute(wr io.Writer, data interface{}) error { - return t.template.Execute(wr, data) -} - -type GarbageCollector struct { - workingDirIterator events.WorkDirIterator - stats tally.Scope - log logging.Logger - closedPullCleaner events.PullCleaner - openPullCleaner events.PullCleaner -} - -func (g *GarbageCollector) Run() { - errCounter := g.stats.Counter(metrics.ExecutionErrorMetric) - - pulls, err := g.workingDirIterator.ListCurrentWorkingDirPulls() - - if err != nil { - g.log.Error(fmt.Sprintf("error listing pulls %s", err)) - errCounter.Inc(1) - } - - openPullsCounter := g.stats.Counter("pulls.open") - updatedthirtyDaysAgoOpenPullsCounter := g.stats.Counter("pulls.open.updated.thirtydaysago") - closedPullsCounter := g.stats.Counter("pulls.closed") - fiveMinutesAgoClosedPullsCounter := g.stats.Counter("pulls.closed.fiveminutesago") - - // we can make this shorter, but this allows us to see trends more clearly - // to determine if there is an issue or not - thirtyDaysAgo := time.Now().Add(-720 * time.Hour) - fiveMinutesAgo := time.Now().Add(-5 * time.Minute) - - for _, pull := range pulls { - if pull.State == models.OpenPullState { - openPullsCounter.Inc(1) - - if pull.UpdatedAt.Before(thirtyDaysAgo) { - updatedthirtyDaysAgoOpenPullsCounter.Inc(1) - - g.log.Warn("Pull hasn't been updated for more than 30 days.", map[string]interface{}{ - "repository": pull.BaseRepo.FullName, - "pull-num": strconv.Itoa(pull.Num), - }) - - err := g.openPullCleaner.CleanUpPull(pull.BaseRepo, pull) - - if err != nil { - g.log.Error("Error cleaning up open pulls that haven't been updated in 30 days", map[string]interface{}{ - "repository": pull.BaseRepo.FullName, - "pull-num": strconv.Itoa(pull.Num), - "err": err, - }) - errCounter.Inc(1) - return - } - } - continue - } - - // assume only other state is closed 
- closedPullsCounter.Inc(1) - - // Let's clean up any closed pulls within 5 minutes of closing to ensure that - // any locks are released. - if pull.ClosedAt.Before(fiveMinutesAgo) { - fiveMinutesAgoClosedPullsCounter.Inc(1) - - g.log.Warn("Pull closed for more than 5 minutes but data still on disk", map[string]interface{}{ - "repository": pull.BaseRepo.FullName, - "pull-num": strconv.Itoa(pull.Num), - }) - - err := g.closedPullCleaner.CleanUpPull(pull.BaseRepo, pull) - - if err != nil { - g.log.Error("Error cleaning up 5 minutes old closed pulls", map[string]interface{}{ - "repository": pull.BaseRepo.FullName, - "pull-num": strconv.Itoa(pull.Num), - "err": err, - }) - errCounter.Inc(1) - return - } - } - } -} diff --git a/server/legacy/lyft/scheduled/runtime_stats.go b/server/legacy/lyft/scheduled/runtime_stats.go deleted file mode 100644 index df7850af8..000000000 --- a/server/legacy/lyft/scheduled/runtime_stats.go +++ /dev/null @@ -1,127 +0,0 @@ -package scheduled - -import ( - "runtime" - - "github.com/uber-go/tally/v4" -) - -type RuntimeStatCollector struct { - runtimeMetrics runtimeMetrics -} - -type runtimeMetrics struct { - cpuGoroutines tally.Gauge - cpuCgoCalls tally.Gauge - - memoryAlloc tally.Gauge - memoryTotal tally.Gauge - memorySys tally.Gauge - memoryLookups tally.Gauge - memoryMalloc tally.Gauge - memoryFrees tally.Gauge - - memoryHeapAlloc tally.Gauge - memoryHeapSys tally.Gauge - memoryHeapIdle tally.Gauge - memoryHeapInuse tally.Gauge - memoryHeapReleased tally.Gauge - memoryHeapObjects tally.Gauge - - memoryStackInuse tally.Gauge - memoryStackSys tally.Gauge - memoryStackMSpanInuse tally.Gauge - memoryStackMSpanSys tally.Gauge - memoryStackMCacheInuse tally.Gauge - memoryStackMCacheSys tally.Gauge - - memoryOtherSys tally.Gauge - - memoryGCSys tally.Gauge - memoryGCNext tally.Gauge - memoryGCLast tally.Gauge - memoryGCPauseTotal tally.Gauge - memoryGCCount tally.Gauge -} - -func NewRuntimeStats(scope tally.Scope) *RuntimeStatCollector { - runtimeScope := scope.SubScope("runtime") - runtimeMetrics := runtimeMetrics{ - // cpu - cpuGoroutines: runtimeScope.Gauge("cpu.goroutines"), - cpuCgoCalls: runtimeScope.Gauge("cpu.cgo_calls"), - // memory - memoryAlloc: runtimeScope.Gauge("memory.alloc"), - memoryTotal: runtimeScope.Gauge("memory.total"), - memorySys: runtimeScope.Gauge("memory.sys"), - memoryLookups: runtimeScope.Gauge("memory.lookups"), - memoryMalloc: runtimeScope.Gauge("memory.malloc"), - memoryFrees: runtimeScope.Gauge("memory.frees"), - // heap - memoryHeapAlloc: runtimeScope.Gauge("memory.heap.alloc"), - memoryHeapSys: runtimeScope.Gauge("memory.heap.sys"), - memoryHeapIdle: runtimeScope.Gauge("memory.heap.idle"), - memoryHeapInuse: runtimeScope.Gauge("memory.heap.inuse"), - memoryHeapReleased: runtimeScope.Gauge("memory.heap.released"), - memoryHeapObjects: runtimeScope.Gauge("memory.heap.objects"), - // stack - memoryStackInuse: runtimeScope.Gauge("memory.stack.inuse"), - memoryStackSys: runtimeScope.Gauge("memory.stack.sys"), - memoryStackMSpanInuse: runtimeScope.Gauge("memory.stack.mspan_inuse"), - memoryStackMSpanSys: runtimeScope.Gauge("memory.stack.mspan_sys"), - memoryStackMCacheInuse: runtimeScope.Gauge("memory.stack.mcache_inuse"), - memoryStackMCacheSys: runtimeScope.Gauge("memory.stack.mcache_sys"), - memoryOtherSys: runtimeScope.Gauge("memory.othersys"), - // GC - memoryGCSys: runtimeScope.Gauge("memory.gc.sys"), - memoryGCNext: runtimeScope.Gauge("memory.gc.next"), - memoryGCLast: runtimeScope.Gauge("memory.gc.last"), - memoryGCPauseTotal:
runtimeScope.Gauge("memory.gc.pause_total"), - memoryGCCount: runtimeScope.Gauge("memory.gc.count"), - } - - return &RuntimeStatCollector{ - runtimeMetrics: runtimeMetrics, - } -} - -func (r *RuntimeStatCollector) Run() { - // cpu stats - r.runtimeMetrics.cpuGoroutines.Update(float64(runtime.NumGoroutine())) - r.runtimeMetrics.cpuCgoCalls.Update(float64(runtime.NumCgoCall())) - - var memStats runtime.MemStats - runtime.ReadMemStats(&memStats) - - // general - r.runtimeMetrics.memoryAlloc.Update(float64(memStats.Alloc)) - r.runtimeMetrics.memoryTotal.Update(float64(memStats.TotalAlloc)) - r.runtimeMetrics.memorySys.Update(float64(memStats.Sys)) - r.runtimeMetrics.memoryLookups.Update(float64(memStats.Lookups)) - r.runtimeMetrics.memoryMalloc.Update(float64(memStats.Mallocs)) - r.runtimeMetrics.memoryFrees.Update(float64(memStats.Frees)) - - // heap - r.runtimeMetrics.memoryHeapAlloc.Update(float64(memStats.HeapAlloc)) - r.runtimeMetrics.memoryHeapSys.Update(float64(memStats.HeapSys)) - r.runtimeMetrics.memoryHeapIdle.Update(float64(memStats.HeapIdle)) - r.runtimeMetrics.memoryHeapInuse.Update(float64(memStats.HeapInuse)) - r.runtimeMetrics.memoryHeapReleased.Update(float64(memStats.HeapReleased)) - r.runtimeMetrics.memoryHeapObjects.Update(float64(memStats.HeapObjects)) - - // stack - r.runtimeMetrics.memoryStackInuse.Update(float64(memStats.StackInuse)) - r.runtimeMetrics.memoryStackSys.Update(float64(memStats.StackSys)) - r.runtimeMetrics.memoryStackMSpanInuse.Update(float64(memStats.MSpanInuse)) - r.runtimeMetrics.memoryStackMSpanSys.Update(float64(memStats.MSpanSys)) - r.runtimeMetrics.memoryStackMCacheInuse.Update(float64(memStats.MCacheInuse)) - r.runtimeMetrics.memoryStackMCacheSys.Update(float64(memStats.MCacheSys)) - r.runtimeMetrics.memoryOtherSys.Update(float64(memStats.OtherSys)) - - // GC - r.runtimeMetrics.memoryGCSys.Update(float64(memStats.GCSys)) - r.runtimeMetrics.memoryGCNext.Update(float64(memStats.NextGC)) - r.runtimeMetrics.memoryGCLast.Update(float64(memStats.LastGC)) - r.runtimeMetrics.memoryGCPauseTotal.Update(float64(memStats.PauseTotalNs)) - r.runtimeMetrics.memoryGCCount.Update(float64(memStats.NumGC)) -} diff --git a/server/legacy/middleware.go b/server/legacy/middleware.go deleted file mode 100644 index 83796192e..000000000 --- a/server/legacy/middleware.go +++ /dev/null @@ -1,35 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package legacy - -import ( - "net/http" - - "github.com/runatlantis/atlantis/server/logging" -) - -// NewRequestLogger creates a RequestLogger. -func NewRequestLogger(logger logging.Logger) *RequestLogger { - return &RequestLogger{logger} -} - -// RequestLogger logs requests and their response codes. -type RequestLogger struct { - logger logging.Logger -} - -// ServeHTTP implements the middleware function. It logs all requests at DEBUG level. 
-func (l *RequestLogger) ServeHTTP(rw http.ResponseWriter, r *http.Request, next http.HandlerFunc) { - next(rw, r) -} diff --git a/server/legacy/router.go b/server/legacy/router.go deleted file mode 100644 index 6dfd55b79..000000000 --- a/server/legacy/router.go +++ /dev/null @@ -1,53 +0,0 @@ -package legacy - -import ( - "fmt" - "net/url" - - "github.com/gorilla/mux" - "github.com/pkg/errors" -) - -// Router can be used to retrieve Atlantis URLs. It acts as an intermediary -// between the underlying router and the rest of Atlantis that might need to -// construct URLs to different resources. -type Router struct { - // Underlying is the router that the routes have been constructed on. - Underlying *mux.Router - // LockViewRouteName is the named route for the lock view that can be Get'd - // from the Underlying router. - LockViewRouteName string - // ProjectJobsViewRouteName is the named route for the projects active jobs - ProjectJobsViewRouteName string - // LockViewRouteIDQueryParam is the query parameter needed to construct the - // lock view: underlying.Get(LockViewRouteName).URL(LockViewRouteIDQueryParam, "my id"). - LockViewRouteIDQueryParam string - // AtlantisURL is the fully qualified URL that Atlantis is - // accessible from externally. - AtlantisURL *url.URL -} - -// GenerateLockURL returns a fully qualified URL to view the lock at lockID. -func (r *Router) GenerateLockURL(lockID string) string { - lockURL, _ := r.Underlying.Get(r.LockViewRouteName).URL(r.LockViewRouteIDQueryParam, url.QueryEscape(lockID)) - // At this point, lockURL will just be a path because r.Underlying isn't - // configured with host or scheme information. So to generate the fully - // qualified LockURL we just append the router's url to our base url. - // We're not doing anything fancy here with the actual url object because - // golang likes to double escape the lockURL path when using url.Parse(). - return r.AtlantisURL.String() + lockURL.String() -} - -func (r *Router) GenerateProjectJobURL(jobID string) (string, error) { - if jobID == "" { - return "", fmt.Errorf("no job id in ctx") - } - jobURL, err := r.Underlying.Get((r.ProjectJobsViewRouteName)).URL( - "job-id", jobID, - ) - if err != nil { - return "", errors.Wrapf(err, "creating job url for %s", jobID) - } - - return r.AtlantisURL.String() + jobURL.String(), nil -} diff --git a/server/legacy/router_test.go b/server/legacy/router_test.go deleted file mode 100644 index 1a29343bd..000000000 --- a/server/legacy/router_test.go +++ /dev/null @@ -1,112 +0,0 @@ -package legacy_test - -import ( - "fmt" - "net/http" - "testing" - - "github.com/google/uuid" - "github.com/gorilla/mux" - server "github.com/runatlantis/atlantis/server/legacy" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" - . 
"github.com/runatlantis/atlantis/testing" - "github.com/stretchr/testify/assert" -) - -func TestRouter_GenerateLockURL(t *testing.T) { - cases := []struct { - AtlantisURL string - ExpURL string - }{ - { - "http://localhost:4141", - "http://localhost:4141/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - }, - { - "https://localhost:4141", - "https://localhost:4141/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - }, - { - "https://localhost:4141/", - "https://localhost:4141/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - }, - { - "https://example.com/basepath", - "https://example.com/basepath/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - }, - { - "https://example.com/basepath/", - "https://example.com/basepath/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - }, - { - "https://example.com/path/1/", - "https://example.com/path/1/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - }, - } - - queryParam := "id" - routeName := "routename" - underlyingRouter := mux.NewRouter() - underlyingRouter.HandleFunc("/lock", func(_ http.ResponseWriter, _ *http.Request) {}).Methods(http.MethodGet).Queries(queryParam, "{id}").Name(routeName) - - for _, c := range cases { - t.Run(c.AtlantisURL, func(t *testing.T) { - atlantisURL, err := server.ParseAtlantisURL(c.AtlantisURL) - Ok(t, err) - - router := &server.Router{ - AtlantisURL: atlantisURL, - LockViewRouteIDQueryParam: queryParam, - LockViewRouteName: routeName, - Underlying: underlyingRouter, - } - Equals(t, c.ExpURL, router.GenerateLockURL("lkysow/atlantis-example/./default")) - }) - } -} - -func setupJobsRouter(t *testing.T) *server.Router { - atlantisURL, err := server.ParseAtlantisURL("http://localhost:4141") - Ok(t, err) - - underlyingRouter := mux.NewRouter() - underlyingRouter.HandleFunc("/jobs/{job-id}", func(_ http.ResponseWriter, _ *http.Request) {}).Methods(http.MethodGet).Name("project-jobs-detail") - - return &server.Router{ - AtlantisURL: atlantisURL, - Underlying: underlyingRouter, - ProjectJobsViewRouteName: "project-jobs-detail", - } -} - -func TestGenerateProjectJobURL_ShouldGenerateURLWhenJobIDSpecified(t *testing.T) { - router := setupJobsRouter(t) - jobID := uuid.New().String() - ctx := command.ProjectContext{ - JobID: jobID, - } - expectedURL := fmt.Sprintf("http://localhost:4141/jobs/%s", jobID) - gotURL, err := router.GenerateProjectJobURL(ctx.JobID) - Ok(t, err) - - Equals(t, expectedURL, gotURL) -} - -func TestGenerateProjectJobURL_ShouldReturnErrorWhenJobIDNotSpecified(t *testing.T) { - router := setupJobsRouter(t) - ctx := command.ProjectContext{ - Pull: models.PullRequest{ - BaseRepo: models.Repo{ - Owner: "test-owner", - Name: "test-repo", - }, - Num: 1, - }, - RepoRelDir: "ops/terraform/", - } - expectedErrString := "no job id in ctx" - gotURL, err := router.GenerateProjectJobURL(ctx.JobID) - assert.EqualError(t, err, expectedErrString) - Equals(t, "", gotURL) -} diff --git a/server/legacy/server.go b/server/legacy/server.go deleted file mode 100644 index 5784a55e3..000000000 --- a/server/legacy/server.go +++ /dev/null @@ -1,1114 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -// Package server handles the web server and executing commands that come in -// via webhooks. -package legacy - -import ( - "context" - "encoding/json" - "fmt" - "io" - "log" - "net/http" - "net/url" - "os" - "os/signal" - "path/filepath" - "sort" - "strings" - "syscall" - "time" - - "github.com/runatlantis/atlantis/server/vcs/provider/github" - - "github.com/palantir/go-githubapp/githubapp" - "github.com/runatlantis/atlantis/server/neptune/template" - middleware "github.com/runatlantis/atlantis/server/neptune/workflows/activities/github" - - "github.com/runatlantis/atlantis/server/legacy/events/terraform/filter" - "github.com/runatlantis/atlantis/server/neptune/storage" - - assetfs "github.com/elazarl/go-bindata-assetfs" - "github.com/runatlantis/atlantis/server/legacy/instrumentation" - "github.com/runatlantis/atlantis/server/static" - - "github.com/mitchellh/go-homedir" - "github.com/runatlantis/atlantis/server/config/valid" - "github.com/runatlantis/atlantis/server/legacy/core/db" - "github.com/runatlantis/atlantis/server/legacy/core/runtime/policy" - "github.com/runatlantis/atlantis/server/legacy/jobs" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sns" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sqs" - lyftCommands "github.com/runatlantis/atlantis/server/legacy/lyft/command" - lyftRuntime "github.com/runatlantis/atlantis/server/legacy/lyft/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/lyft/scheduled" - "github.com/runatlantis/atlantis/server/legacy/wrappers" - "github.com/runatlantis/atlantis/server/metrics" - "github.com/runatlantis/atlantis/server/neptune/lyft/feature" - github_converter "github.com/runatlantis/atlantis/server/vcs/provider/github/converter" - "github.com/uber-go/tally/v4" - - "github.com/gorilla/mux" - "github.com/pkg/errors" - cfgParser "github.com/runatlantis/atlantis/server/config" - "github.com/runatlantis/atlantis/server/legacy/controllers" - events_controllers "github.com/runatlantis/atlantis/server/legacy/controllers/events" - "github.com/runatlantis/atlantis/server/legacy/controllers/templates" - "github.com/runatlantis/atlantis/server/legacy/controllers/websocket" - "github.com/runatlantis/atlantis/server/legacy/core/locking" - "github.com/runatlantis/atlantis/server/legacy/core/runtime" - "github.com/runatlantis/atlantis/server/legacy/core/terraform" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/events/vcs" - lyft_vcs "github.com/runatlantis/atlantis/server/legacy/events/vcs/lyft" - "github.com/runatlantis/atlantis/server/legacy/events/webhooks" - "github.com/runatlantis/atlantis/server/logging" - "github.com/runatlantis/atlantis/server/models" - "github.com/runatlantis/atlantis/server/vcs/markdown" - "github.com/urfave/cli" - "github.com/urfave/negroni" -) - -const ( - // LockViewRouteName is the named route in mux.Router for the lock view. 
- // The route can be retrieved by this name, ex: - // mux.Router.Get(LockViewRouteName) - LockViewRouteName = "lock-detail" - // LockViewRouteIDQueryParam is the query parameter needed to construct the lock view - // route. ex: - // mux.Router.Get(LockViewRouteName).URL(LockViewRouteIDQueryParam, "my id") - LockViewRouteIDQueryParam = "id" - // ProjectJobsViewRouteName is the named route in mux.Router for the log stream view. - ProjectJobsViewRouteName = "project-jobs-detail" - // binDirName is the name of the directory inside our data dir where - // we download binaries. - BinDirName = "bin" - // terraformPluginCacheDir is the name of the dir inside our data dir - // where we tell terraform to cache plugins and modules. - TerraformPluginCacheDirName = "plugin-cache" -) - -// Server runs the Atlantis web server. -type Server struct { - AtlantisVersion string - AtlantisURL *url.URL - Router *mux.Router - Port int - PreWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner - CommandRunner *events.DefaultCommandRunner - CtxLogger logging.Logger - StatsScope tally.Scope - StatsCloser io.Closer - Locker locking.Locker - ApplyLocker locking.ApplyLocker - VCSPostHandler sqs.VCSPostHandler - GithubAppController *controllers.GithubAppController - LocksController *controllers.LocksController - StatusController *controllers.StatusController - JobsController *controllers.JobsController - IndexTemplate templates.TemplateWriter - LockDetailTemplate templates.TemplateWriter - ProjectJobsTemplate templates.TemplateWriter - ProjectJobsErrorTemplate templates.TemplateWriter - SSLCertFile string - SSLKeyFile string - Drainer *events.Drainer - ScheduledExecutorService *scheduled.ExecutorService - ProjectCmdOutputHandler jobs.ProjectCommandOutputHandler - LyftMode Mode - CancelWorker context.CancelFunc -} - -// Config holds config for server that isn't passed in by the user. -type Config struct { - AtlantisURLFlag string - AtlantisVersion string - DefaultTFVersionFlag string - RepoConfigJSONFlag string - AppCfg githubapp.Config -} - -// WebhookConfig is nested within UserConfig. It's used to configure webhooks. -type WebhookConfig struct { - // Event is the type of event we should send this webhook for, ex. apply. - Event string `mapstructure:"event"` - // WorkspaceRegex is a regex that is used to match against the workspace - // that is being modified for this event. If the regex matches, we'll - // send the webhook, ex. "production.*". - WorkspaceRegex string `mapstructure:"workspace-regex"` - // Kind is the type of webhook we should send, ex. slack. - Kind string `mapstructure:"kind"` - // Channel is the channel to send this webhook to. It only applies to - // slack webhooks. Should be without '#'. - Channel string `mapstructure:"channel"` -} - -// NewServer returns a new server. If there are issues starting the server or -// its dependencies an error will be returned. This is like the main() function -// for the server CLI command because it injects all the dependencies. 
-func NewServer(userConfig UserConfig, config Config) (*Server, error) { - ctxLogger, err := logging.NewLoggerFromLevel(userConfig.ToLogLevel()) - if err != nil { - return nil, err - } - - var supportedVCSHosts []models.VCSHostType - - // not to be used directly, currently this is just used - // for reporting rate limits - var rawGithubClient *vcs.GithubClient - - var githubClient vcs.IGithubClient - var githubAppEnabled bool - var githubCredentials vcs.GithubCredentials - var featureAllocator feature.Allocator - - mergeabilityChecker := vcs.NewLyftPullMergeabilityChecker(userConfig.VCSStatusName) - - validator := &cfgParser.ParserValidator{} - - globalCfg := valid.NewGlobalCfg(userConfig.DataDir) - - if userConfig.RepoConfig != "" { - globalCfg, err = validator.ParseGlobalCfg(userConfig.RepoConfig, globalCfg) - if err != nil { - return nil, errors.Wrapf(err, "parsing %s file", userConfig.RepoConfig) - } - } else if userConfig.RepoConfigJSON != "" { - globalCfg, err = validator.ParseGlobalCfgJSON(userConfig.RepoConfigJSON, globalCfg) - if err != nil { - return nil, errors.Wrapf(err, "parsing --%s", config.RepoConfigJSONFlag) - } - } - - statsScope, closer, err := metrics.NewScope(globalCfg.Metrics, ctxLogger, userConfig.StatsNamespace) - if err != nil { - return nil, errors.Wrapf(err, "instantiating metrics scope") - } - - statsScope = statsScope.Tagged(map[string]string{ - "mode": "legacyworker", - }) - - logFilter := filter.LogFilter{ - Regexes: globalCfg.TerraformLogFilter.Regexes, - } - - clientCreator, err := githubapp.NewDefaultCachingClientCreator( - config.AppCfg, - githubapp.WithClientMiddleware( - middleware.ClientMetrics(statsScope.SubScope("github")), - )) - if err != nil { - return nil, errors.Wrap(err, "creating github client creator") - } - - if userConfig.GithubUser != "" || userConfig.GithubAppID != 0 { - supportedVCSHosts = append(supportedVCSHosts, models.Github) - if userConfig.GithubUser != "" { - githubCredentials = &vcs.GithubUserCredentials{ - User: userConfig.GithubUser, - Token: userConfig.GithubToken, - } - } else if userConfig.GithubAppID != 0 && userConfig.GithubAppKeyFile != "" { - privateKey, err := os.ReadFile(userConfig.GithubAppKeyFile) - if err != nil { - return nil, err - } - githubCredentials = &vcs.GithubAppCredentials{ - AppID: userConfig.GithubAppID, - Key: privateKey, - Hostname: userConfig.GithubHostname, - AppSlug: userConfig.GithubAppSlug, - } - githubAppEnabled = true - } else if userConfig.GithubAppID != 0 && userConfig.GithubAppKey != "" { - githubCredentials = &vcs.GithubAppCredentials{ - AppID: userConfig.GithubAppID, - Key: []byte(userConfig.GithubAppKey), - Hostname: userConfig.GithubHostname, - AppSlug: userConfig.GithubAppSlug, - } - githubAppEnabled = true - } - - var err error - - repoConfig := feature.RepoConfig{ - Owner: userConfig.FFOwner, - Repo: userConfig.FFRepo, - Branch: userConfig.FFBranch, - Path: userConfig.FFPath, - } - installationFetcher := &github.InstallationRetriever{ - ClientCreator: clientCreator, - } - fileFetcher := &github.SingleFileContentsFetcher{ - ClientCreator: clientCreator, - } - retriever := &feature.CustomGithubInstallationRetriever{ - InstallationFetcher: installationFetcher, - FileContentsFetcher: fileFetcher, - Cfg: repoConfig, - } - featureAllocator, err = feature.NewGHSourcedAllocator(retriever, ctxLogger) - if err != nil { - return nil, errors.Wrap(err, "initializing feature allocator") - } - - rawGithubClient, err = vcs.NewGithubClient(userConfig.GithubHostname, githubCredentials, ctxLogger, 
featureAllocator, mergeabilityChecker) - if err != nil { - return nil, err - } - - githubClient = vcs.NewInstrumentedGithubClient(rawGithubClient, statsScope, ctxLogger) - } - - if userConfig.WriteGitCreds { - home, err := homedir.Dir() - if err != nil { - return nil, errors.Wrap(err, "getting home dir to write ~/.git-credentials file") - } - if userConfig.GithubUser != "" { - if err := github.WriteGitCreds(userConfig.GithubUser, userConfig.GithubToken, userConfig.GithubHostname, home, ctxLogger, false); err != nil { - return nil, err - } - } - } - - var webhooksConfig []webhooks.Config - for _, c := range userConfig.Webhooks { - config := webhooks.Config{ - Channel: c.Channel, - Event: c.Event, - Kind: c.Kind, - WorkspaceRegex: c.WorkspaceRegex, - } - webhooksConfig = append(webhooksConfig, config) - } - webhooksManager, err := webhooks.NewMultiWebhookSender(webhooksConfig, webhooks.NewSlackClient(userConfig.SlackToken)) - if err != nil { - return nil, errors.Wrap(err, "initializing webhooks") - } - vcsClient := vcs.NewClientProxy(githubClient) - vcsStatusUpdater := &command.VCSStatusUpdater{ - Client: vcsClient, - TitleBuilder: vcs.StatusTitleBuilder{ - TitlePrefix: userConfig.VCSStatusName, - }, - DefaultDetailsURL: userConfig.DefaultCheckrunDetailsURL, - } - - binDir, err := mkSubDir(userConfig.DataDir, BinDirName) - if err != nil { - return nil, err - } - - cacheDir, err := mkSubDir(userConfig.DataDir, TerraformPluginCacheDirName) - if err != nil { - return nil, err - } - - parsedURL, err := ParseAtlantisURL(userConfig.AtlantisURL) - if err != nil { - return nil, errors.Wrapf(err, - "parsing --%s flag %q", config.AtlantisURLFlag, userConfig.AtlantisURL) - } - - underlyingRouter := mux.NewRouter() - router := &Router{ - AtlantisURL: parsedURL, - LockViewRouteIDQueryParam: LockViewRouteIDQueryParam, - LockViewRouteName: LockViewRouteName, - ProjectJobsViewRouteName: ProjectJobsViewRouteName, - Underlying: underlyingRouter, - } - - projectJobsScope := statsScope.SubScope("getprojectjobs") - - storageClient, err := storage.NewClient(globalCfg.PersistenceConfig.Jobs) - if err != nil { - return nil, errors.Wrapf(err, "initializing stow client") - } - - storageBackend, err := jobs.NewStorageBackend(storageClient, ctxLogger, featureAllocator, projectJobsScope) - if err != nil { - return nil, errors.Wrapf(err, "initializing storage backend") - } - - jobStore := jobs.NewJobStore(storageBackend, statsScope.SubScope("jobstore")) - - var projectCmdOutputHandler jobs.ProjectCommandOutputHandler - // When TFE is enabled log streaming is not necessary. 
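- // In this setup the async output handler is constructed unconditionally below; the
- // TFE carve-out mentioned above is informational only.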
- - projectCmdOutput := make(chan *jobs.ProjectCmdOutputLine) - projectCmdOutputHandler = jobs.NewAsyncProjectCommandOutputHandler( - projectCmdOutput, - ctxLogger, - jobStore, - logFilter, - ) - - terraformClient, err := terraform.NewClient( - binDir, - cacheDir, - userConfig.DefaultTFVersion, - config.DefaultTFVersionFlag, - userConfig.TFDownloadURL, - &terraform.DefaultDownloader{}, - true, - projectCmdOutputHandler) - if err != nil { - return nil, errors.Wrap(err, "initializing terraform") - } - - templateResolver := markdown.TemplateResolver{ - DisableMarkdownFolding: userConfig.DisableMarkdownFolding, - GlobalCfg: globalCfg, - LogFilter: logFilter, - } - markdownRenderer := &markdown.Renderer{ - DisableApplyAll: userConfig.DisableApplyAll, - DisableApply: userConfig.DisableApply, - EnableDiffMarkdownFormat: userConfig.EnableDiffMarkdownFormat, - TemplateResolver: templateResolver, - } - - boltdb, err := db.New(userConfig.DataDir) - if err != nil { - return nil, err - } - var lockingClient locking.Locker - var applyLockingClient locking.ApplyLocker - - lockingClient = locking.NewClient(boltdb) - applyLockingClient = locking.NewApplyClient(boltdb, userConfig.DisableApply) - workingDirLocker := events.NewDefaultWorkingDirLocker() - - var workingDir events.WorkingDir = &events.FileWorkspace{ - DataDir: userConfig.DataDir, - GlobalCfg: globalCfg, - } - // provide fresh tokens before clone from the GitHub Apps integration, proxy workingDir - if githubAppEnabled { - if !userConfig.WriteGitCreds { - return nil, errors.New("Github App requires --write-git-creds to support cloning") - } - workingDir = &events.GithubAppWorkingDir{ - WorkingDir: workingDir, - Credentials: githubCredentials, - GithubHostname: userConfig.GithubHostname, - } - } - - projectLocker := &events.DefaultProjectLocker{ - Locker: lockingClient, - VCSClient: vcsClient, - } - - deleteLockCommand := &events.DefaultDeleteLockCommand{ - Locker: lockingClient, - Logger: ctxLogger, - WorkingDir: workingDir, - WorkingDirLocker: workingDirLocker, - DB: boltdb, - } - - pullClosedExecutor := events.NewInstrumentedPullClosedExecutor( - statsScope, - ctxLogger, - &events.PullClosedExecutor{ - Locker: lockingClient, - WorkingDir: workingDir, - Logger: ctxLogger, - DB: boltdb, - PullClosedTemplate: &events.PullClosedEventTemplate{}, - LogStreamResourceCleaner: projectCmdOutputHandler, - VCSClient: vcsClient, - }, - ) - - eventParser := &events.EventParser{ - GithubUser: userConfig.GithubUser, - GithubToken: userConfig.GithubToken, - AllowDraftPRs: userConfig.PlanDrafts, - } - commentParser := &events.CommentParser{ - GithubUser: userConfig.GithubUser, - ApplyDisabled: userConfig.DisableApply, - } - defaultTfVersion := terraformClient.DefaultVersion() - pendingPlanFinder := &events.DefaultPendingPlanFinder{} - - drainer := &events.Drainer{} - statusController := &controllers.StatusController{ - Logger: ctxLogger, - Drainer: drainer, - } - - var preWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner - preWorkflowHooksCommandRunner = &events.DefaultPreWorkflowHooksCommandRunner{ - VCSClient: vcsClient, - GlobalCfg: globalCfg, - WorkingDirLocker: workingDirLocker, - WorkingDir: workingDir, - PreWorkflowHookRunner: runtime.DefaultPreWorkflowHookRunner{}, - } - preWorkflowHooksCommandRunner = &instrumentation.PreWorkflowHookRunner{ - PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, - Logger: ctxLogger, - } - - legacyProjectContextBuilder := wrappers. 
- WrapProjectContext(events.NewProjectCommandContextBuilder(commentParser)). - WithInstrumentation(statsScope) - - projectContextBuilder := wrappers. - WrapProjectContext(events.NewPlatformModeProjectCommandContextBuilder(commentParser, legacyProjectContextBuilder, ctxLogger, featureAllocator)). - WithInstrumentation(statsScope) - - if userConfig.EnablePolicyChecks { - projectContextBuilder = projectContextBuilder.EnablePolicyChecks(commentParser) - } - - projectCommandBuilder := events.NewProjectCommandBuilder( - projectContextBuilder, - validator, - &events.DefaultProjectFinder{}, - vcsClient, - workingDir, - workingDirLocker, - globalCfg, - pendingPlanFinder, - userConfig.EnableRegExpCmd, - userConfig.AutoplanFileList, - ctxLogger, - userConfig.MaxProjectsPerPR, - ) - - initStepRunner := &runtime.InitStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTfVersion, - } - - planStepRunner := &runtime.PlanStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTfVersion, - VCSStatusUpdater: vcsStatusUpdater, - AsyncTFExec: terraformClient, - } - - destroyPlanStepRunner := &lyftRuntime.DestroyPlanStepRunner{ - StepRunner: planStepRunner, - } - - showStepRunner, err := runtime.NewShowStepRunner(terraformClient, defaultTfVersion) - if err != nil { - return nil, errors.Wrap(err, "initializing show step runner") - } - - conftestEnsurer := policy.NewConfTestVersionEnsurer(ctxLogger, binDir, &terraform.DefaultDownloader{}) - conftestExecutor := policy.NewConfTestExecutor(clientCreator, globalCfg.PolicySets, featureAllocator, ctxLogger) - policyCheckStepRunner, err := runtime.NewPolicyCheckStepRunner( - defaultTfVersion, - conftestEnsurer, - conftestExecutor, - ) - if err != nil { - return nil, errors.Wrap(err, "initializing policy check runner") - } - - applyStepRunner := &runtime.ApplyStepRunner{ - TerraformExecutor: terraformClient, - VCSStatusUpdater: vcsStatusUpdater, - AsyncTFExec: terraformClient, - } - - versionStepRunner := &runtime.VersionStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTfVersion, - } - - runStepRunner := &runtime.RunStepRunner{ - TerraformExecutor: terraformClient, - DefaultTFVersion: defaultTfVersion, - TerraformBinDir: binDir, - } - - envStepRunner := &runtime.EnvStepRunner{ - RunStepRunner: runStepRunner, - } - - stepsRunner := runtime.NewStepsRunner( - initStepRunner, - destroyPlanStepRunner, - showStepRunner, - policyCheckStepRunner, - applyStepRunner, - versionStepRunner, - runStepRunner, - envStepRunner, - ) - - dbUpdater := &events.DBUpdater{ - DB: boltdb, - } - - checksOutputUpdater := &events.ChecksOutputUpdater{ - VCSClient: vcsClient, - MarkdownRenderer: markdownRenderer, - TitleBuilder: vcs.StatusTitleBuilder{TitlePrefix: userConfig.VCSStatusName}, - JobURLGenerator: router, - } - - session, err := aws.NewSession() - if err != nil { - return nil, errors.Wrap(err, "initializing new aws session") - } - - var snsWriter sns.Writer - - if userConfig.LyftAuditJobsSnsTopicArn != "" { - snsWriter = sns.NewWriterWithStats( - session, - userConfig.LyftAuditJobsSnsTopicArn, - statsScope.SubScope("aws.sns.jobs"), - ) - } else { - snsWriter = sns.NewNoopWriter() - } - - applyRequirementHandler := &events.AggregateApplyRequirements{ - WorkingDir: workingDir, - } - - unwrappedPrjCmdRunner := events.NewProjectCommandRunner( - stepsRunner, - workingDir, - webhooksManager, - workingDirLocker, - applyRequirementHandler, - ) - - statusUpdater := command.ProjectStatusUpdater{ - ProjectJobURLGenerator: 
router, - JobCloser: projectCmdOutputHandler, - ProjectVCSStatusUpdater: vcsStatusUpdater, - } - - legacyPrjCmdRunner := wrappers. - WrapProjectRunner(unwrappedPrjCmdRunner). - WithSync( - projectLocker, - router, - ). - WithAuditing(snsWriter). - WithInstrumentation(). - WithJobs( - statusUpdater, - ) - - unwrappedPRPrjCmdRunner := events.NewProjectCommandRunner( - stepsRunner, - workingDir, - webhooksManager, - workingDirLocker, - applyRequirementHandler, - ) - - platformModePrjCmdRunner := wrappers. - WrapProjectRunner(unwrappedPRPrjCmdRunner). - WithAuditing(snsWriter). - WithInstrumentation(). - WithJobs( - statusUpdater, - ) - - prjCmdRunner := &lyftCommands.PlatformModeProjectRunner{ - PlatformModeRunner: platformModePrjCmdRunner, - PrModeRunner: legacyPrjCmdRunner, - Allocator: featureAllocator, - Logger: ctxLogger, - } - - pullReqStatusFetcher := lyft_vcs.NewSQBasedPullStatusFetcher( - githubClient, - mergeabilityChecker, - ) - - policyCheckCommandRunner := events.NewPolicyCheckCommandRunner( - dbUpdater, - checksOutputUpdater, - vcsStatusUpdater, - prjCmdRunner, - userConfig.ParallelPoolSize, - ) - - planCommandRunner := events.NewPlanCommandRunner( - vcsClient, - pendingPlanFinder, - workingDir, - vcsStatusUpdater, - projectCommandBuilder, - prjCmdRunner, - dbUpdater, - checksOutputUpdater, - policyCheckCommandRunner, - userConfig.ParallelPoolSize, - ) - - legacyApplyCommandRunner := events.NewApplyCommandRunner( - vcsClient, - userConfig.DisableApplyAll, - applyLockingClient, - vcsStatusUpdater, - projectCommandBuilder, - prjCmdRunner, - checksOutputUpdater, - dbUpdater, - userConfig.ParallelPoolSize, - pullReqStatusFetcher, - ) - - applyCommandRunner := &lyftCommands.PlatformModeRunner{ - Runner: legacyApplyCommandRunner, - Allocator: featureAllocator, - Logger: ctxLogger, - Builder: projectCommandBuilder, - TemplateLoader: template.NewLoader[lyftCommands.LegacyApplyCommentInput](globalCfg), - VCSClient: vcsClient, - } - - unlockCommandRunner := events.NewUnlockCommandRunner( - deleteLockCommand, - vcsClient, - ) - - pullOutputUpdater := &events.PullOutputUpdater{ - VCSClient: vcsClient, - MarkdownRenderer: markdownRenderer, - HidePrevPlanComments: userConfig.HidePrevPlanComments, - } - - // Using pull updater for version commands until we move off of PR comments entirely - versionCommandRunner := events.NewVersionCommandRunner( - pullOutputUpdater, - projectCommandBuilder, - prjCmdRunner, - userConfig.ParallelPoolSize, - ) - - commentCommandRunnerByCmd := map[command.Name]command.Runner{ - command.Plan: planCommandRunner, - command.Apply: applyCommandRunner, - command.Unlock: unlockCommandRunner, - command.Version: versionCommandRunner, - } - cmdStatsScope := statsScope.SubScope("cmd") - staleCommandChecker := &events.StaleCommandHandler{ - StaleStatsScope: cmdStatsScope.SubScope("stale"), - } - prrPolicyCommandRunner := &events.PRRPolicyCheckCommandRunner{ - PrjCmdBuilder: projectCommandBuilder, - PolicyCheckCommandRunner: policyCheckCommandRunner, - } - - commandRunner := &events.DefaultCommandRunner{ - VCSClient: vcsClient, - CommentCommandRunnerByCmd: commentCommandRunnerByCmd, - GlobalCfg: globalCfg, - StatsScope: cmdStatsScope, - DisableAutoplan: userConfig.DisableAutoplan, - Drainer: drainer, - PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, - PullStatusFetcher: boltdb, - StaleCommandChecker: staleCommandChecker, - VCSStatusUpdater: vcsStatusUpdater, - Logger: ctxLogger, - PolicyCommandRunner: prrPolicyCommandRunner, - } - - forceApplyCommandRunner := 
&events.ForceApplyCommandRunner{ - CommandRunner: commandRunner, - VCSClient: vcsClient, - Logger: ctxLogger, - } - - repoAllowlist, err := events.NewRepoAllowlistChecker(userConfig.RepoAllowlist) - if err != nil { - return nil, err - } - locksController := &controllers.LocksController{ - AtlantisVersion: config.AtlantisVersion, - AtlantisURL: parsedURL, - Locker: lockingClient, - ApplyLocker: applyLockingClient, - Logger: ctxLogger, - VCSClient: vcsClient, - LockDetailTemplate: templates.LockTemplate, - WorkingDir: workingDir, - WorkingDirLocker: workingDirLocker, - DB: boltdb, - DeleteLockCommand: deleteLockCommand, - } - - wsMux := websocket.NewInstrumentedMultiplexor( - websocket.NewMultiplexor( - ctxLogger, - controllers.JobIDKeyGenerator{}, - projectCmdOutputHandler, - ), - projectJobsScope, - ) - - jobsController := &controllers.JobsController{ - AtlantisVersion: config.AtlantisVersion, - AtlantisURL: parsedURL, - Logger: ctxLogger, - ProjectJobsTemplate: templates.ProjectJobsTemplate, - ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate, - Db: boltdb, - WsMux: wsMux, - StatsScope: projectJobsScope, - KeyGenerator: controllers.JobIDKeyGenerator{}, - } - githubAppController := &controllers.GithubAppController{ - AtlantisURL: parsedURL, - Logger: ctxLogger, - GithubSetupComplete: githubAppEnabled, - GithubHostname: userConfig.GithubHostname, - GithubOrg: userConfig.GithubOrg, - GithubStatusName: userConfig.VCSStatusName, - Allocator: featureAllocator, - } - - scheduledExecutorService := scheduled.NewExecutorService( - events.NewFileWorkDirIterator( - githubClient, - eventParser, - userConfig.DataDir, - ctxLogger, - ), - statsScope, - ctxLogger, - &events.PullClosedExecutor{ - VCSClient: vcsClient, - Locker: lockingClient, - WorkingDir: workingDir, - Logger: ctxLogger, - DB: boltdb, - LogStreamResourceCleaner: projectCmdOutputHandler, - - // using a specific template to signal that this is from an async process - PullClosedTemplate: scheduled.NewGCStaleClosedPull(), - }, - - // using a pullclosed executor for stale open PRs. Naming is weird, we need to come up with something better. 
- &events.PullClosedExecutor{ - VCSClient: vcsClient, - Locker: lockingClient, - WorkingDir: workingDir, - Logger: ctxLogger, - DB: boltdb, - LogStreamResourceCleaner: projectCmdOutputHandler, - - // using a specific template to signal that this is from an async process - PullClosedTemplate: scheduled.NewGCStaleOpenPull(), - }, - - rawGithubClient, - ) - - ctx, cancel := context.WithCancel(context.Background()) - - repoConverter := github_converter.RepoConverter{ - GithubUser: userConfig.GithubUser, - GithubToken: userConfig.GithubToken, - } - - pullConverter := github_converter.PullConverter{ - RepoConverter: repoConverter, - } - pullFetcher := &github.PRFetcher{ - ClientCreator: clientCreator, - } - - defaultEventsController := events_controllers.NewVCSEventsController( - statsScope, - []byte(userConfig.GithubWebhookSecret), - userConfig.PlanDrafts, - forceApplyCommandRunner, - commentParser, - eventParser, - pullClosedExecutor, - repoAllowlist, - vcsClient, - ctxLogger, - userConfig.DisableApply, - supportedVCSHosts, - repoConverter, - pullConverter, - githubClient, - pullFetcher, - ) - - var vcsPostHandler sqs.VCSPostHandler - lyftMode := userConfig.ToLyftMode() - switch lyftMode { - case Default: // default eventsController handles POST - vcsPostHandler = defaultEventsController - ctxLogger.Info("running Atlantis in default mode") - case Worker: // an SQS worker is set up to handle messages via default eventsController - worker, err := sqs.NewGatewaySQSWorker(ctx, statsScope, ctxLogger, userConfig.LyftWorkerQueueURL, defaultEventsController) - if err != nil { - ctxLogger.Error("unable to set up worker", map[string]interface{}{ - "err": err, - }) - cancel() - return nil, errors.Wrapf(err, "setting up sqs handler for worker mode") - } - go worker.Work(ctx) - ctxLogger.Info("running Atlantis in worker mode", map[string]interface{}{ - "queue": userConfig.LyftWorkerQueueURL, - }) - } - - return &Server{ - AtlantisVersion: config.AtlantisVersion, - AtlantisURL: parsedURL, - Router: underlyingRouter, - Port: userConfig.Port, - PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, - CommandRunner: commandRunner, - CtxLogger: ctxLogger, - StatsScope: statsScope, - StatsCloser: closer, - Locker: lockingClient, - ApplyLocker: applyLockingClient, - VCSPostHandler: vcsPostHandler, - GithubAppController: githubAppController, - LocksController: locksController, - JobsController: jobsController, - StatusController: statusController, - IndexTemplate: templates.IndexTemplate, - LockDetailTemplate: templates.LockTemplate, - ProjectJobsTemplate: templates.ProjectJobsTemplate, - ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate, - SSLKeyFile: userConfig.SSLKeyFile, - SSLCertFile: userConfig.SSLCertFile, - Drainer: drainer, - ScheduledExecutorService: scheduledExecutorService, - ProjectCmdOutputHandler: projectCmdOutputHandler, - LyftMode: lyftMode, - CancelWorker: cancel, - }, nil -} - -// Start creates the routes and starts serving traffic. 
-func (s *Server) Start() error { - s.Router.HandleFunc("/healthz", s.Healthz).Methods(http.MethodGet) - s.Router.HandleFunc("/status", s.StatusController.Get).Methods(http.MethodGet) - if s.LyftMode != Worker { - s.Router.HandleFunc("/events", s.VCSPostHandler.Post).Methods(http.MethodPost) - } - s.Router.HandleFunc("/", s.Index).Methods(http.MethodGet).MatcherFunc(func(r *http.Request, rm *mux.RouteMatch) bool { - return r.URL.Path == "/" || r.URL.Path == "/index.html" - }) - s.Router.PathPrefix("/static/").Handler(http.FileServer(&assetfs.AssetFS{Asset: static.Asset, AssetDir: static.AssetDir, AssetInfo: static.AssetInfo})) - s.Router.HandleFunc("/apply/lock", s.LocksController.LockApply).Methods(http.MethodPost).Queries() - s.Router.HandleFunc("/apply/unlock", s.LocksController.UnlockApply).Methods("DELETE").Queries() - s.Router.HandleFunc("/locks", s.LocksController.DeleteLock).Methods("DELETE").Queries("id", "{id:.*}") - s.Router.HandleFunc("/lock", s.LocksController.GetLock).Methods(http.MethodGet). - Queries(LockViewRouteIDQueryParam, fmt.Sprintf("{%s}", LockViewRouteIDQueryParam)).Name(LockViewRouteName) - s.Router.HandleFunc("/jobs/{job-id}", s.JobsController.GetProjectJobs).Methods(http.MethodGet).Name(ProjectJobsViewRouteName) - s.Router.HandleFunc("/jobs/{job-id}/ws", s.JobsController.GetProjectJobsWS).Methods(http.MethodGet) - s.Router.HandleFunc("/github-app/exchange-code", s.GithubAppController.ExchangeCode).Methods(http.MethodGet) - s.Router.HandleFunc("/github-app/setup", s.GithubAppController.New).Methods(http.MethodGet) - - n := negroni.New(&negroni.Recovery{ - Logger: log.New(os.Stdout, "", log.LstdFlags), - PrintStack: false, - StackAll: false, - StackSize: 1024 * 8, - }, NewRequestLogger(s.CtxLogger)) - n.UseHandler(s.Router) - - defer s.CtxLogger.Close() - - // Ensure server gracefully drains connections when stopped. - stop := make(chan os.Signal, 1) - // Stop on SIGINTs and SIGTERMs. - signal.Notify(stop, os.Interrupt, syscall.SIGTERM) - - go s.ScheduledExecutorService.Run() - - go func() { - s.ProjectCmdOutputHandler.Handle() - }() - - server := &http.Server{Addr: fmt.Sprintf(":%d", s.Port), Handler: n, ReadHeaderTimeout: time.Second * 10} - go func() { - s.CtxLogger.Info(fmt.Sprintf("Atlantis started - listening on port %v", s.Port)) - - var err error - if s.SSLCertFile != "" && s.SSLKeyFile != "" { - err = server.ListenAndServeTLS(s.SSLCertFile, s.SSLKeyFile) - } else { - err = server.ListenAndServe() - } - - if err != nil && err != http.ErrServerClosed { - s.CtxLogger.Error(err.Error()) - } - }() - <-stop - - // Shutdown sqs polling. Any received messages being processed will either succeed/fail depending on if drainer started. - if s.LyftMode == Worker { - s.CtxLogger.Warn("Received interrupt. Shutting down the sqs handler") - s.CancelWorker() - } - - s.CtxLogger.Warn("Received interrupt. Waiting for in-progress operations to complete") - s.waitForDrain() - - // flush stats before shutdown - if err := s.StatsCloser.Close(); err != nil { - s.CtxLogger.Error(err.Error()) - } - - ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second) // nolint: vet - defer cancel() - if err := server.Shutdown(ctx); err != nil { - return cli.NewExitError(fmt.Sprintf("while shutting down: %s", err), 1) - } - return nil -} - -// waitForDrain blocks until draining is complete. 
-func (s *Server) waitForDrain() { - drainComplete := make(chan bool, 1) - go func() { - s.Drainer.ShutdownBlocking() - drainComplete <- true - }() - ticker := time.NewTicker(5 * time.Second) - for { - select { - case <-drainComplete: - s.CtxLogger.Info("All in-progress operations complete, shutting down") - return - case <-ticker.C: - s.CtxLogger.Info(fmt.Sprintf("Waiting for in-progress operations to complete, current in-progress ops: %d", s.Drainer.GetStatus().InProgressOps)) - } - } -} - -// Index is the / route. -func (s *Server) Index(w http.ResponseWriter, _ *http.Request) { - locks, err := s.Locker.List() - if err != nil { - w.WriteHeader(http.StatusServiceUnavailable) - fmt.Fprintf(w, "Could not retrieve locks: %s", err) - return - } - - var lockResults []templates.LockIndexData - for id, v := range locks { - lockURL, _ := s.Router.Get(LockViewRouteName).URL("id", url.QueryEscape(id)) - lockResults = append(lockResults, templates.LockIndexData{ - // NOTE: must use .String() instead of .Path because we need the - // query params as part of the lock URL. - LockPath: lockURL.String(), - RepoFullName: v.Project.RepoFullName, - PullNum: v.Pull.Num, - Path: v.Project.Path, - Workspace: v.Workspace, - Time: v.Time, - TimeFormatted: v.Time.Format("02-01-2006 15:04:05"), - }) - } - - applyCmdLock, err := s.ApplyLocker.CheckApplyLock() - s.CtxLogger.Info(fmt.Sprintf("Apply Lock: %v", applyCmdLock)) - if err != nil { - w.WriteHeader(http.StatusServiceUnavailable) - fmt.Fprintf(w, "Could not retrieve global apply lock: %s", err) - return - } - - applyLockData := templates.ApplyLockData{ - Time: applyCmdLock.Time, - Locked: applyCmdLock.Locked, - TimeFormatted: applyCmdLock.Time.Format("02-01-2006 15:04:05"), - } - // Sort by date - newest to oldest. - sort.SliceStable(lockResults, func(i, j int) bool { return lockResults[i].Time.After(lockResults[j].Time) }) - - err = s.IndexTemplate.Execute(w, templates.IndexData{ - Locks: lockResults, - ApplyLock: applyLockData, - AtlantisVersion: s.AtlantisVersion, - CleanedBasePath: s.AtlantisURL.Path, - }) - if err != nil { - s.CtxLogger.Error(err.Error()) - } -} - -func mkSubDir(parentDir string, subDir string) (string, error) { - fullDir := filepath.Join(parentDir, subDir) - if err := os.MkdirAll(fullDir, 0o700); err != nil { - return "", errors.Wrapf(err, "unable to create dir %q", fullDir) - } - - return fullDir, nil -} - -// Healthz returns the health check response. It always returns a 200 currently. -func (s *Server) Healthz(w http.ResponseWriter, _ *http.Request) { - data, err := json.MarshalIndent(&struct { - Status string `json:"status"` - }{ - Status: "ok", - }, "", " ") - if err != nil { - w.WriteHeader(http.StatusInternalServerError) - fmt.Fprintf(w, "Error creating status json response: %s", err) - return - } - w.Header().Set("Content-Type", "application/json") - w.Write(data) // nolint: errcheck -} - -// ParseAtlantisURL parses the user-passed atlantis URL to ensure it is valid -// and we can use it in our templates. -// It removes any trailing slashes from the path so we can concatenate it -// with other paths without checking. -func ParseAtlantisURL(u string) (*url.URL, error) { - parsed, err := url.Parse(u) - if err != nil { - return nil, err - } - if !(parsed.Scheme == "http" || parsed.Scheme == "https") { - return nil, errors.New("http or https must be specified") - } - // We want the path to end without a trailing slash so we know how to - // use it in the rest of the program. 
- parsed.Path = strings.TrimSuffix(parsed.Path, "/") - return parsed, nil -} diff --git a/server/legacy/server_test.go b/server/legacy/server_test.go deleted file mode 100644 index c95b27a0d..000000000 --- a/server/legacy/server_test.go +++ /dev/null @@ -1,203 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package legacy_test - -import ( - "bytes" - "errors" - "io" - "net/http" - "net/http/httptest" - "net/url" - "testing" - "time" - - "github.com/runatlantis/atlantis/server/logging" - - "github.com/gorilla/mux" - . "github.com/petergtz/pegomock" - server "github.com/runatlantis/atlantis/server/legacy" - "github.com/runatlantis/atlantis/server/legacy/controllers/templates" - tMocks "github.com/runatlantis/atlantis/server/legacy/controllers/templates/mocks" - "github.com/runatlantis/atlantis/server/legacy/core/locking/mocks" - "github.com/runatlantis/atlantis/server/models" - . "github.com/runatlantis/atlantis/testing" -) - -func TestIndex_LockErrorf(t *testing.T) { - t.Log("index should return a 503 if unable to list locks") - RegisterMockTestingT(t) - l := mocks.NewMockLocker() - When(l.List()).ThenReturn(nil, errors.New("err")) - s := server.Server{ - Locker: l, - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - s.Index(w, req) - ResponseContains(t, w, 503, "Could not retrieve locks: err") -} - -func TestIndex_Success(t *testing.T) { - t.Log("Index should render the index template successfully.") - RegisterMockTestingT(t) - l := mocks.NewMockLocker() - al := mocks.NewMockApplyLocker() - // These are the locks that we expect to be rendered. - now := time.Now() - locks := map[string]models.ProjectLock{ - "lkysow/atlantis-example/./default": { - Pull: models.PullRequest{ - Num: 9, - }, - Project: models.Project{ - RepoFullName: "lkysow/atlantis-example", - }, - Time: now, - }, - } - When(l.List()).ThenReturn(locks, nil) - it := tMocks.NewMockTemplateWriter() - r := mux.NewRouter() - atlantisVersion := "0.3.1" - // Need to create a lock route since the server expects this route to exist. - r.NewRoute().Path("/lock"). 
- Queries("id", "{id}").Name(server.LockViewRouteName) - u, err := url.Parse("https://example.com") - Ok(t, err) - s := server.Server{ - Locker: l, - ApplyLocker: al, - IndexTemplate: it, - Router: r, - AtlantisVersion: atlantisVersion, - AtlantisURL: u, - CtxLogger: logging.NewNoopCtxLogger(t), - } - req, _ := http.NewRequest(http.MethodGet, "", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - s.Index(w, req) - it.VerifyWasCalledOnce().Execute(w, templates.IndexData{ - ApplyLock: templates.ApplyLockData{ - Locked: false, - Time: time.Time{}, - TimeFormatted: "01-01-0001 00:00:00", - }, - Locks: []templates.LockIndexData{ - { - LockPath: "/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault", - RepoFullName: "lkysow/atlantis-example", - PullNum: 9, - Time: now, - TimeFormatted: now.Format("02-01-2006 15:04:05"), - }, - }, - AtlantisVersion: atlantisVersion, - }) - ResponseContains(t, w, http.StatusOK, "") -} - -func TestHealthz(t *testing.T) { - s := server.Server{} - req, _ := http.NewRequest(http.MethodGet, "/healthz", bytes.NewBuffer(nil)) - w := httptest.NewRecorder() - s.Healthz(w, req) - Equals(t, http.StatusOK, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) - Equals(t, "application/json", w.Result().Header["Content-Type"][0]) - Equals(t, - `{ - "status": "ok" -}`, string(body)) -} - -func TestParseAtlantisURL(t *testing.T) { - cases := []struct { - In string - ExpErr string - ExpURL string - }{ - // Valid URLs should work. - { - In: "https://example.com", - ExpURL: "https://example.com", - }, - { - In: "http://example.com", - ExpURL: "http://example.com", - }, - { - In: "http://example.com/", - ExpURL: "http://example.com", - }, - { - In: "http://example.com", - ExpURL: "http://example.com", - }, - { - In: "http://example.com:4141", - ExpURL: "http://example.com:4141", - }, - { - In: "http://example.com:4141/", - ExpURL: "http://example.com:4141", - }, - { - In: "http://example.com/baseurl", - ExpURL: "http://example.com/baseurl", - }, - { - In: "http://example.com/baseurl/", - ExpURL: "http://example.com/baseurl", - }, - { - In: "http://example.com/baseurl/test", - ExpURL: "http://example.com/baseurl/test", - }, - - // Must be valid URL. - { - In: "::", - ExpErr: "parse \"::\": missing protocol scheme", - }, - - // Must be absolute. - { - In: "/hi", - ExpErr: "http or https must be specified", - }, - - // Must have http or https scheme.. - { - In: "localhost/test", - ExpErr: "http or https must be specified", - }, - { - In: "http0://localhost/test", - ExpErr: "http or https must be specified", - }, - } - - for _, c := range cases { - t.Run(c.In, func(t *testing.T) { - act, err := server.ParseAtlantisURL(c.In) - if c.ExpErr != "" { - ErrEquals(t, c.ExpErr, err) - } else { - Ok(t, err) - Equals(t, c.ExpURL, act.String()) - } - }) - } -} diff --git a/server/legacy/sync/project.go b/server/legacy/sync/project.go deleted file mode 100644 index b71a50d01..000000000 --- a/server/legacy/sync/project.go +++ /dev/null @@ -1,83 +0,0 @@ -package sync - -import ( - "fmt" - - "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/models" -) - -// ProjectSyncer implements project locks. 
-type ProjectSyncer struct { - events.ProjectCommandRunner - Locker events.ProjectLocker - LockURLGenerator events.LockURLGenerator -} - -func (p *ProjectSyncer) Plan(ctx command.ProjectContext) command.ProjectResult { - result, lockResponse := p.sync(ctx, command.Plan, p.ProjectCommandRunner.Plan) - lockURL := p.LockURLGenerator.GenerateLockURL(lockResponse.LockKey) - - if result.PlanSuccess != nil { - result.PlanSuccess.LockURL = lockURL - } - - return result -} - -func (p *ProjectSyncer) PolicyCheck(ctx command.ProjectContext) command.ProjectResult { - // Acquire Atlantis lock for this repo/dir/workspace. - // This should already be acquired from the prior plan operation. - // if for some reason an unlock happens between the plan and policy check step - // we will attempt to capture the lock here but fail to get the working directory - // at which point we will unlock again to preserve functionality - // If we fail to capture the lock here (super unlikely) then we error out and the user is forced to replan - result, lockResponse := p.sync(ctx, command.PolicyCheck, p.ProjectCommandRunner.PolicyCheck) - lockURL := p.LockURLGenerator.GenerateLockURL(lockResponse.LockKey) - - if result.PolicyCheckSuccess != nil { - result.PolicyCheckSuccess.LockURL = lockURL - } - - return result -} - -func (p *ProjectSyncer) sync( - ctx command.ProjectContext, - cmdName command.Name, - execute func(ctx command.ProjectContext) command.ProjectResult, -) ( - result command.ProjectResult, - lockResponse *events.TryLockResponse, -) { - result = command.ProjectResult{ - Command: cmdName, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, - } - - // Acquire Atlantis lock for this repo/dir/workspace. - lockResponse, err := p.Locker.TryLock(ctx.RequestCtx, ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir)) - if err != nil { - result.Error = errors.Wrap(err, "acquiring lock") - return - } - - if !lockResponse.LockAcquired { - result.Failure = lockResponse.LockFailureReason - return - } - - result = execute(ctx) - if result.Error != nil { - if unlockErr := lockResponse.UnlockFn(); unlockErr != nil { - ctx.Log.Error(fmt.Sprintf("error unlocking state after %s error: %v", cmdName, unlockErr)) - } - return - } - - return -} diff --git a/server/legacy/user_config.go b/server/legacy/user_config.go deleted file mode 100644 index 486e45999..000000000 --- a/server/legacy/user_config.go +++ /dev/null @@ -1,112 +0,0 @@ -package legacy - -import ( - "github.com/runatlantis/atlantis/server/logging" -) - -type Mode int - -const ( - Default Mode = iota - Gateway - Worker - TemporalWorker - Adhoc -) - -// UserConfig holds config values passed in by the user. -// The mapstructure tags correspond to flags in cmd/server.go and are used when -// the config is parsed from a YAML file. 
-type UserConfig struct { - AtlantisURL string `mapstructure:"atlantis-url"` - AutoplanFileList string `mapstructure:"autoplan-file-list"` - CheckoutStrategy string `mapstructure:"checkout-strategy"` - DataDir string `mapstructure:"data-dir"` - DisableApplyAll bool `mapstructure:"disable-apply-all"` - DisableApply bool `mapstructure:"disable-apply"` - DisableAutoplan bool `mapstructure:"disable-autoplan"` - DisableMarkdownFolding bool `mapstructure:"disable-markdown-folding"` - EnablePolicyChecks bool `mapstructure:"enable-policy-checks"` - EnableRegExpCmd bool `mapstructure:"enable-regexp-cmd"` - EnableDiffMarkdownFormat bool `mapstructure:"enable-diff-markdown-format"` - FFOwner string `mapstructure:"ff-owner"` - FFRepo string `mapstructure:"ff-repo"` - FFBranch string `mapstructure:"ff-branch"` - FFPath string `mapstructure:"ff-path"` - GithubHostname string `mapstructure:"gh-hostname"` - GithubToken string `mapstructure:"gh-token"` - GithubUser string `mapstructure:"gh-user"` - GithubWebhookSecret string `mapstructure:"gh-webhook-secret"` - GithubOrg string `mapstructure:"gh-org"` - GithubAppID int64 `mapstructure:"gh-app-id"` - GithubAppKey string `mapstructure:"gh-app-key"` - GithubAppKeyFile string `mapstructure:"gh-app-key-file"` - GithubAppSlug string `mapstructure:"gh-app-slug"` - HidePrevPlanComments bool `mapstructure:"hide-prev-plan-comments"` - LogLevel string `mapstructure:"log-level"` - ParallelPoolSize int `mapstructure:"parallel-pool-size"` - MaxProjectsPerPR int `mapstructure:"max-projects-per-pr"` - StatsNamespace string `mapstructure:"stats-namespace"` - PlanDrafts bool `mapstructure:"allow-draft-prs"` - Port int `mapstructure:"port"` - RepoConfig string `mapstructure:"repo-config"` - RepoConfigJSON string `mapstructure:"repo-config-json"` - RepoAllowlist string `mapstructure:"repo-allowlist"` - // RepoWhitelist is deprecated in favour of RepoAllowlist. - RepoWhitelist string `mapstructure:"repo-whitelist"` - - // RequireUnDiverged is whether to require pull requests to rebase default branch before - // allowing terraform apply's to run. - RequireUnDiverged bool `mapstructure:"require-undiverged"` - // RequireSQUnlocked is whether to require pull requests to be unlocked before running - // terraform apply. - RequireSQUnlocked bool `mapstructure:"require-unlocked"` - SlackToken string `mapstructure:"slack-token"` - SSLCertFile string `mapstructure:"ssl-cert-file"` - SSLKeyFile string `mapstructure:"ssl-key-file"` - TFDownloadURL string `mapstructure:"tf-download-url"` - VCSStatusName string `mapstructure:"vcs-status-name"` - DefaultTFVersion string `mapstructure:"default-tf-version"` - Webhooks []WebhookConfig `mapstructure:"webhooks"` - WriteGitCreds bool `mapstructure:"write-git-creds"` - LyftAuditJobsSnsTopicArn string `mapstructure:"lyft-audit-jobs-sns-topic-arn"` - LyftGatewaySnsTopicArn string `mapstructure:"lyft-gateway-sns-topic-arn"` - LyftMode string `mapstructure:"lyft-mode"` - LyftWorkerQueueURL string `mapstructure:"lyft-worker-queue-url"` - - // Supports adding a default URL to the checkrun UI when details URL is not set - DefaultCheckrunDetailsURL string `mapstructure:"default-checkrun-details-url"` -} - -// ToLogLevel returns the LogLevel object corresponding to the user-passed -// log level. 
-func (u UserConfig) ToLogLevel() logging.LogLevel { - switch u.LogLevel { - case "debug": - return logging.Debug - case "info": - return logging.Info - case "warn": - return logging.Warn - case "error": - return logging.Error - } - return logging.Info -} - -// ToLyftMode returns mode type to run atlantis on. -func (u UserConfig) ToLyftMode() Mode { - switch u.LyftMode { - case "default": - return Default - case "gateway": - return Gateway - case "worker": - return Worker - case "temporalworker": - return TemporalWorker - case "adhoc": - return Adhoc - } - return Default -} diff --git a/server/legacy/user_config_test.go b/server/legacy/user_config_test.go deleted file mode 100644 index 669e27cd7..000000000 --- a/server/legacy/user_config_test.go +++ /dev/null @@ -1,91 +0,0 @@ -package legacy_test - -import ( - "testing" - - server "github.com/runatlantis/atlantis/server/legacy" - "github.com/runatlantis/atlantis/server/logging" - . "github.com/runatlantis/atlantis/testing" -) - -func TestUserConfig_ToLogLevel(t *testing.T) { - cases := []struct { - userLvl string - expLvl logging.LogLevel - }{ - { - "debug", - logging.Debug, - }, - { - "info", - logging.Info, - }, - { - "warn", - logging.Warn, - }, - { - "error", - logging.Error, - }, - { - "unknown", - logging.Info, - }, - } - - for _, c := range cases { - t.Run(c.userLvl, func(t *testing.T) { - u := server.UserConfig{ - LogLevel: c.userLvl, - } - Equals(t, c.expLvl, u.ToLogLevel()) - }) - } -} - -func TestUserConfig_ToLyftMode(t *testing.T) { - cases := []struct { - userMode string - expMode server.Mode - }{ - { - "default", - server.Default, - }, - { - "gateway", - server.Gateway, - }, - { - "worker", - server.Worker, - }, - { - "unknown", - server.Default, - }, - { - "", - server.Default, - }, - { - "adhoc", - server.Adhoc, - }, - { - "temporalworker", - server.TemporalWorker, - }, - } - - for _, c := range cases { - t.Run(c.userMode, func(t *testing.T) { - u := server.UserConfig{ - LyftMode: c.userMode, - } - Equals(t, c.expMode, u.ToLyftMode()) - }) - } -} diff --git a/server/legacy/wrappers/project_context.go b/server/legacy/wrappers/project_context.go deleted file mode 100644 index e125414d3..000000000 --- a/server/legacy/wrappers/project_context.go +++ /dev/null @@ -1,36 +0,0 @@ -package wrappers - -import ( - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/uber-go/tally/v4" -) - -type projectContext struct { - events.ProjectCommandContextBuilder -} - -func WrapProjectContext( - projectCtxBuilder events.ProjectCommandContextBuilder, -) *projectContext { //nolint:revive // avoiding refactor while adding linter action - return &projectContext{ - projectCtxBuilder, - } -} - -func (p *projectContext) EnablePolicyChecks( - commentBuilder events.CommentBuilder, -) *projectContext { - p.ProjectCommandContextBuilder = &events.PolicyCheckProjectContextBuilder{ - ProjectCommandContextBuilder: p.ProjectCommandContextBuilder, - CommentBuilder: commentBuilder, - } - return p -} - -func (p *projectContext) WithInstrumentation(scope tally.Scope) *projectContext { - p.ProjectCommandContextBuilder = &events.InstrumentedProjectCommandContextBuilder{ - ProjectCommandContextBuilder: p.ProjectCommandContextBuilder, - ProjectCounter: scope.Counter("projects"), - } - return p -} diff --git a/server/legacy/wrappers/project_runners.go b/server/legacy/wrappers/project_runners.go deleted file mode 100644 index b183312dc..000000000 --- a/server/legacy/wrappers/project_runners.go +++ /dev/null @@ -1,67 +0,0 @@ -package wrappers - -import 
( - "github.com/runatlantis/atlantis/server/legacy/events" - "github.com/runatlantis/atlantis/server/legacy/events/command" - "github.com/runatlantis/atlantis/server/legacy/lyft/aws/sns" - "github.com/runatlantis/atlantis/server/legacy/lyft/decorators" - "github.com/runatlantis/atlantis/server/legacy/sync" - "github.com/runatlantis/atlantis/server/models" -) - -type projectStatusUpdater interface { - UpdateProjectStatus(ctx command.ProjectContext, status models.VCSStatus) (string, error) -} - -type projectCommand struct { - events.ProjectCommandRunner -} - -func WrapProjectRunner(projectRunner events.ProjectCommandRunner) *projectCommand { //nolint:revive // avoiding refactor while adding linter action - return &projectCommand{ - projectRunner, - } -} - -// WithSync add project level locking to projects -func (d *projectCommand) WithSync( - projectLocker events.ProjectLocker, - projectLockURL events.LockURLGenerator, -) *projectCommand { - d.ProjectCommandRunner = &sync.ProjectSyncer{ - ProjectCommandRunner: d.ProjectCommandRunner, - Locker: projectLocker, - LockURLGenerator: projectLockURL, - } - - return d -} - -// WithJobs adds streaming capabilities to terraform output. With it end user -// can see their terraform command's execution in real time. -func (d *projectCommand) WithJobs( - projectStatusUpdater projectStatusUpdater, -) *projectCommand { - d.ProjectCommandRunner = &events.ProjectOutputWrapper{ - ProjectCommandRunner: d.ProjectCommandRunner, - ProjectStatusUpdater: projectStatusUpdater, - } - return d -} - -func (d *projectCommand) WithAuditing( - snsWriter sns.Writer, -) *projectCommand { - d.ProjectCommandRunner = &decorators.AuditProjectCommandWrapper{ - ProjectCommandRunner: d.ProjectCommandRunner, - SnsWriter: snsWriter, - } - return d -} - -func (d *projectCommand) WithInstrumentation() *projectCommand { - d.ProjectCommandRunner = &events.InstrumentedProjectCommandRunner{ - ProjectCommandRunner: d.ProjectCommandRunner, - } - return d -}