From 5ff99510d0714e5d6bd06c5c4f6bcad27fdfee76 Mon Sep 17 00:00:00 2001
From: Alex-Mussell
Date: Mon, 18 Sep 2023 20:49:53 +0100
Subject: [PATCH 01/12] docs: Update Terragrunt custom workflows docs to implement target plans, imports, and state removals (#3776)

---
 runatlantis.io/docs/custom-workflows.md | 25 ++++++++++++++++++++++---
 1 file changed, 22 insertions(+), 3 deletions(-)

diff --git a/runatlantis.io/docs/custom-workflows.md b/runatlantis.io/docs/custom-workflows.md
index a6ee35ab6a..f46981a620 100644
--- a/runatlantis.io/docs/custom-workflows.md
+++ b/runatlantis.io/docs/custom-workflows.md
@@ -279,9 +279,11 @@ workflows:
             name: TF_IN_AUTOMATION
             value: 'true'
         - run:
-            command: terragrunt plan -input=false -out=$PLANFILE
-            output: strip_refreshing
-        - run: terragrunt show -json $PLANFILE > $SHOWFILE
+            # Allow for targeted plans/applies, as these are not supported by default for Terraform wrappers
+            command: terragrunt plan -input=false $(printf '%s' $COMMENT_ARGS | sed 's/,/ /g' | tr -d '\\') -no-color -out $PLANFILE
+            output: hide
+        - run: |
+            terragrunt show $PLANFILE
     apply:
       steps:
         - env:
@@ -292,6 +294,23 @@ workflows:
             name: TF_IN_AUTOMATION
             value: 'true'
         - run: terragrunt apply -input=false $PLANFILE
+    import:
+      steps:
+        - env:
+            name: TERRAGRUNT_TFPATH
+            command: 'echo "terraform${DEFAULT_TERRAFORM_VERSION}"'
+        - env:
+            name: TF_VAR_author
+            command: 'git show -s --format="%ae" $HEAD_COMMIT'
+        # Allow for imports, as these are not supported by default for Terraform wrappers
+        - run: terragrunt import -input=false $(printf '%s' $COMMENT_ARGS | sed 's/,/ /' | tr -d '\\')
+    state_rm:
+      steps:
+        - env:
+            name: TERRAGRUNT_TFPATH
+            command: 'echo "terraform${DEFAULT_TERRAFORM_VERSION}"'
+        # Allow for state removals, as these are not supported by default for Terraform wrappers
+        - run: terragrunt state rm $(printf '%s' $COMMENT_ARGS | sed 's/,/ /' | tr -d '\\')
 ```
 
 If using the repo's `atlantis.yaml` file you would use the following config:

From 16d080be03b5ea275caa57733543c390286afe43 Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Tue, 19 Sep 2023 01:56:00 +0000
Subject: [PATCH 02/12] chore(deps): update dependency node to v18.18.0 in .node-version (#3779)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 .node-version | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.node-version b/.node-version
index 4a1f488b6c..02c8b485ed 100644
--- a/.node-version
+++ b/.node-version
@@ -1 +1 @@
-18.17.1
+18.18.0

From 7cf6a670bf0b63c423cca4e8d42e8f2995f5972c Mon Sep 17 00:00:00 2001
From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com>
Date: Thu, 21 Sep 2023 00:33:39 +0000
Subject: [PATCH 03/12] fix(deps): update module github.com/redis/go-redis/v9 to v9.2.0 in go.mod (#3786)

Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
---
 go.mod |  2 +-
 go.sum | 12 ++++++------
 2 files changed, 7 insertions(+), 7 deletions(-)

diff --git a/go.mod b/go.mod
index 671ef113f2..e503311d3c 100644
--- a/go.mod
+++ b/go.mod
@@ -31,7 +31,7 @@ require (
 	github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826
 	github.com/petergtz/pegomock/v4 v4.0.0
 	github.com/pkg/errors v0.9.1
-	github.com/redis/go-redis/v9 v9.1.0
+	github.com/redis/go-redis/v9 v9.2.0
 	github.com/remeh/sizedwaitgroup v1.0.0
 	github.com/shurcooL/githubv4 v0.0.0-20230704064427-599ae7bbf278
 	github.com/slack-go/slack v0.12.3
diff --git a/go.sum b/go.sum
index c92c746f8a..d09bea7b4a 100644
--- a/go.sum
+++ b/go.sum
@@ -77,10 +77,10 @@
github.com/bradleyfalzon/ghinstallation/v2 v2.7.0 h1:ranXaC3Zz/F6G/f0Joj3LrFp2Oz github.com/bradleyfalzon/ghinstallation/v2 v2.7.0/go.mod h1:ymxfmloxXBFXvvF1KpeUhOQM6Dfz9NYtfvTiJyk82UE= github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A= github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE= -github.com/bsm/ginkgo/v2 v2.9.5 h1:rtVBYPs3+TC5iLUVOis1B9tjLTup7Cj5IfzosKtvTJ0= -github.com/bsm/ginkgo/v2 v2.9.5/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= -github.com/bsm/gomega v1.26.0 h1:LhQm+AFcgV2M0WyKroMASzAzCAJVpAxQXv4SaI9a69Y= -github.com/bsm/gomega v1.26.0/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= +github.com/bsm/ginkgo/v2 v2.12.0 h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= +github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= +github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= +github.com/bsm/gomega v1.27.10/go.mod h1:JyEr/xRbxbtgWNi8tIEVPUYZ5Dzef52k01W3YH0H+O0= github.com/bwesterb/go-ristretto v1.2.0/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/bwesterb/go-ristretto v1.2.3/go.mod h1:fUIoIZaG73pV5biE2Blr2xEzDoMj7NFEuV9ekS419A0= github.com/cactus/go-statsd-client/v5 v5.0.0/go.mod h1:COEvJ1E+/E2L4q6QE5CkjWPi4eeDw9maJBMIuMPBZbY= @@ -388,8 +388,8 @@ github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1 github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= -github.com/redis/go-redis/v9 v9.1.0 h1:137FnGdk+EQdCbye1FW+qOEcY5S+SpY9T0NiuqvtfMY= -github.com/redis/go-redis/v9 v9.1.0/go.mod h1:urWj3He21Dj5k4TK1y59xH8Uj6ATueP8AH1cY3lZl4c= +github.com/redis/go-redis/v9 v9.2.0 h1:zwMdX0A4eVzse46YN18QhuDiM4uf3JmkOB4VZrdt5uI= +github.com/redis/go-redis/v9 v9.2.0/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E= github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= From 83964cc05c529ddc71c25a3761dae890b7e52ae2 Mon Sep 17 00:00:00 2001 From: PePe Amengual Date: Thu, 21 Sep 2023 14:49:33 -0700 Subject: [PATCH 04/12] Updating curl package (#3787) --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 66a2a06c89..2e741ce186 100644 --- a/Dockerfile +++ b/Dockerfile @@ -172,7 +172,7 @@ COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh # We place this last as it will bust less docker layer caches when packages update RUN apk add --no-cache \ ca-certificates~=20230506 \ - curl~=8.2 \ + curl~=8.3 \ git~=2.40 \ unzip~=6.0 \ bash~=5.2 \ From c35ba0d69f8643aa218900c2651059d3ab4d9996 Mon Sep 17 00:00:00 2001 From: wolmi Date: Fri, 22 Sep 2023 23:42:58 +0200 Subject: [PATCH 05/12] feat: Add use plugin cache flag (#3720) * feat: Add use plugin cache bool flag * feat: Added use-plugin-cache doc * feat: refactor to reflect terrafrom in plugin cache flag Co-authored-by: Simon Heather <32168619+X-Guardian@users.noreply.github.com> * fix: missing closing in bash block documentation * fix: typo in flag example * feat: improve docs * feat: performance implications documented * feat: increase terraform minor version to 1.3.10 
---------

Co-authored-by: Simon Heather <32168619+X-Guardian@users.noreply.github.com>
---
 Dockerfile                                  |  2 +-
 cmd/server.go                               |  5 +++++
 runatlantis.io/docs/server-configuration.md | 15 +++++++++++++++
 server/server.go                            |  2 +-
 server/user_config.go                       |  1 +
 5 files changed, 23 insertions(+), 2 deletions(-)

diff --git a/Dockerfile b/Dockerfile
index 2e741ce186..b1c6406426 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -125,7 +125,7 @@ ENV DEFAULT_TERRAFORM_VERSION=1.5.7
 
 # In the official Atlantis image, we only have the latest of each Terraform version.
 # Each binary is about 80 MB so we limit it to the 4 latest minor releases or fewer
-RUN AVAILABLE_TERRAFORM_VERSIONS="1.2.9 1.3.9 1.4.6 ${DEFAULT_TERRAFORM_VERSION}" && \
+RUN AVAILABLE_TERRAFORM_VERSIONS="1.2.9 1.3.10 1.4.6 ${DEFAULT_TERRAFORM_VERSION}" && \
     case "${TARGETPLATFORM}" in \
         "linux/amd64") TERRAFORM_ARCH=amd64 ;; \
         "linux/arm64") TERRAFORM_ARCH=arm64 ;; \
diff --git a/cmd/server.go b/cmd/server.go
index e79012408c..19ed98763e 100644
--- a/cmd/server.go
+++ b/cmd/server.go
@@ -134,6 +134,7 @@ const (
 	RestrictFileList = "restrict-file-list"
 	TFDownloadFlag = "tf-download"
 	TFDownloadURLFlag = "tf-download-url"
+	UseTFPluginCache = "use-tf-plugin-cache"
 	VarFileAllowlistFlag = "var-file-allowlist"
 	VCSStatusName = "vcs-status-name"
 	TFEHostnameFlag = "tfe-hostname"
@@ -568,6 +569,10 @@ var boolFlags = map[string]boolFlag{
 		description:  "Remove no-changes plan comments from the pull request.",
 		defaultValue: false,
 	},
+	UseTFPluginCache: {
+		description:  "Enable the use of the Terraform plugin cache",
+		defaultValue: true,
+	},
 }
 var intFlags = map[string]intFlag{
 	CheckoutDepthFlag: {
diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md
index ccdd328330..2e9cfe52fd 100644
--- a/runatlantis.io/docs/server-configuration.md
+++ b/runatlantis.io/docs/server-configuration.md
@@ -989,6 +989,21 @@ Setting this to `false` can be useful in an air-gapped environment where a downl
 ```
 A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.html) for more details.
 
+### `--use-tf-plugin-cache`
+```bash
+atlantis server --use-tf-plugin-cache=false
+# or
+ATLANTIS_USE_TF_PLUGIN_CACHE=false
+```
+Set this to `false` to disable the Terraform plugin cache.
+
+This flag is useful when several projects in the same PR need to plan and apply concurrently, because it avoids Terraform's known race condition on a shared `plugin_cache_dir`. More info:
+
+- [plugin_cache_dir concurrency discussion](https://github.com/hashicorp/terraform/issues/31964)
+- [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479)
+
+The race is most visible when plans and applies run in parallel. Disabling the plugin cache makes each plan and apply slower to start, because providers have to be downloaded again, but in large Atlantis deployments with many projects and shared modules `--parallel_plan` and `--parallel_apply` are essential for managing PRs efficiently, so the trade-off is usually worth it.
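To make the scenario above concrete, here is a minimal sketch of a repo-level `atlantis.yaml` that enables parallel runs, which is the setup where the `plugin_cache_dir` race appears and where disabling the cache pays off. The file and its directory names are illustrative only and are not part of this change:

```yaml
# Illustrative repo-level atlantis.yaml (directory names invented for the example).
# With parallel runs and the plugin cache enabled, concurrent `terraform init`
# calls can race on the shared plugin_cache_dir; starting the server with
# `atlantis server --use-tf-plugin-cache=false` avoids the race at the cost of
# re-downloading providers for each plan and apply.
version: 3
parallel_plan: true
parallel_apply: true
projects:
  - dir: networking
  - dir: databases
  - dir: applications
```

Whether to keep the cache then comes down to how often providers change versus how often parallel runs collide on the cache directory.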
+ ### `--var-file-allowlist` ```bash atlantis server --var-file-allowlist='/path/to/tfvars/dir' diff --git a/server/server.go b/server/server.go index c93ed170d7..a2410a1315 100644 --- a/server/server.go +++ b/server/server.go @@ -413,7 +413,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { userConfig.TFDownloadURL, &terraform.DefaultDownloader{}, userConfig.TFDownload, - true, + userConfig.UseTFPluginCache, projectCmdOutputHandler) // The flag.Lookup call is to detect if we're running in a unit test. If we // are, then we don't error out because we don't have/want terraform diff --git a/server/user_config.go b/server/user_config.go index 7104b2df5a..edfc6dd1da 100644 --- a/server/user_config.go +++ b/server/user_config.go @@ -125,6 +125,7 @@ type UserConfig struct { WebPassword string `mapstructure:"web-password"` WriteGitCreds bool `mapstructure:"write-git-creds"` WebsocketCheckOrigin bool `mapstructure:"websocket-check-origin"` + UseTFPluginCache bool `mapstructure:"use-tf-plugin-cache"` } // ToAllowCommandNames parse AllowCommands into a slice of CommandName From fad6f0f9562d74509cc69cb8e8a1f169936652a4 Mon Sep 17 00:00:00 2001 From: Luke Massa Date: Mon, 25 Sep 2023 12:01:16 -0400 Subject: [PATCH 06/12] fix: Do not unnecessarily update apply check if it doesn't exist yet (#3747) * Do not unnecessarily create apply pipeline if it doesn't exist yet * Updates * Fix remaining * Fix test logic * Cleanup more tests * Fix test --------- Co-authored-by: PePe Amengual --- server/events/command_runner_internal_test.go | 38 +++++++------ server/events/plan_command_runner.go | 5 +- server/events/plan_command_runner_test.go | 55 +++++++++++-------- 3 files changed, 54 insertions(+), 44 deletions(-) diff --git a/server/events/command_runner_internal_test.go b/server/events/command_runner_internal_test.go index 4cf3076a0f..f80c718802 100644 --- a/server/events/command_runner_internal_test.go +++ b/server/events/command_runner_internal_test.go @@ -166,11 +166,12 @@ func TestPlanUpdatePlanCommitStatus(t *testing.T) { func TestPlanUpdateApplyCommitStatus(t *testing.T) { cases := map[string]struct { - cmd command.Name - pullStatus models.PullStatus - expStatus models.CommitStatus - expNumSuccess int - expNumTotal int + cmd command.Name + pullStatus models.PullStatus + expStatus models.CommitStatus + doNotCallUpdateApply bool // In certain situations, we don't expect updateCommitStatus to call the underlying commitStatusUpdater code at all + expNumSuccess int + expNumTotal int }{ "all plans success with no changes": { cmd: command.Apply, @@ -200,9 +201,7 @@ func TestPlanUpdateApplyCommitStatus(t *testing.T) { }, }, }, - expStatus: models.PendingCommitStatus, - expNumSuccess: 1, - expNumTotal: 2, + doNotCallUpdateApply: true, }, "one plan, one apply, one plan success with no changes": { cmd: command.Apply, @@ -219,9 +218,7 @@ func TestPlanUpdateApplyCommitStatus(t *testing.T) { }, }, }, - expStatus: models.PendingCommitStatus, - expNumSuccess: 2, - expNumTotal: 3, + doNotCallUpdateApply: true, }, "one apply error, one apply, one plan success with no changes": { cmd: command.Apply, @@ -251,12 +248,17 @@ func TestPlanUpdateApplyCommitStatus(t *testing.T) { commitStatusUpdater: csu, } cr.updateCommitStatus(&command.Context{}, c.pullStatus, command.Apply) - Equals(t, models.Repo{}, csu.CalledRepo) - Equals(t, models.PullRequest{}, csu.CalledPull) - Equals(t, c.expStatus, csu.CalledStatus) - Equals(t, c.cmd, csu.CalledCommand) - Equals(t, c.expNumSuccess, csu.CalledNumSuccess) - 
Equals(t, c.expNumTotal, csu.CalledNumTotal) + if c.doNotCallUpdateApply { + Equals(t, csu.Called, false) + } else { + Equals(t, csu.Called, true) + Equals(t, models.Repo{}, csu.CalledRepo) + Equals(t, models.PullRequest{}, csu.CalledPull) + Equals(t, c.expStatus, csu.CalledStatus) + Equals(t, c.cmd, csu.CalledCommand) + Equals(t, c.expNumSuccess, csu.CalledNumSuccess) + Equals(t, c.expNumTotal, csu.CalledNumTotal) + } }) } } @@ -268,9 +270,11 @@ type MockCSU struct { CalledCommand command.Name CalledNumSuccess int CalledNumTotal int + Called bool } func (m *MockCSU) UpdateCombinedCount(repo models.Repo, pull models.PullRequest, status models.CommitStatus, command command.Name, numSuccess int, numTotal int) error { + m.Called = true m.CalledRepo = repo m.CalledPull = pull m.CalledStatus = status diff --git a/server/events/plan_command_runner.go b/server/events/plan_command_runner.go index 9313f14d4c..8563cee13e 100644 --- a/server/events/plan_command_runner.go +++ b/server/events/plan_command_runner.go @@ -315,9 +315,8 @@ func (p *PlanCommandRunner) updateCommitStatus(ctx *command.Context, pullStatus if numErrored > 0 { status = models.FailedCommitStatus } else if numSuccess < len(pullStatus.Projects) { - // If there are plans that haven't been applied yet, we'll use a pending - // status. - status = models.PendingCommitStatus + // If there are plans that haven't been applied yet, no need to update the status + return } } diff --git a/server/events/plan_command_runner_test.go b/server/events/plan_command_runner_test.go index e765f434c5..8cc9f6c851 100644 --- a/server/events/plan_command_runner_test.go +++ b/server/events/plan_command_runner_test.go @@ -517,11 +517,12 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { ProjectContexts []command.ProjectContext ProjectResults []command.ProjectResult PrevPlanStored bool // stores a previous "No changes" plan in the backend + DoNotUpdateApply bool // certain circumtances we want to skip the call to update apply ExpVCSApplyStatusTotal int ExpVCSApplyStatusSucc int }{ { - Description: "When planning with changes, set the 0/1 apply status", + Description: "When planning with changes, do not change the apply status", ProjectContexts: []command.ProjectContext{ { CommandName: command.Plan, @@ -537,8 +538,7 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { }, }, }, - ExpVCSApplyStatusTotal: 1, - ExpVCSApplyStatusSucc: 0, + DoNotUpdateApply: true, }, { Description: "When planning with no changes, set the 1/1 apply status", @@ -561,7 +561,7 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { ExpVCSApplyStatusSucc: 1, }, { - Description: "When planning with no changes and previous plan with no changes, set the 1/2 apply status", + Description: "When planning with no changes and previous plan with no changes do not set the apply status", ProjectContexts: []command.ProjectContext{ { CommandName: command.Plan, @@ -577,9 +577,8 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { }, }, }, - PrevPlanStored: true, - ExpVCSApplyStatusTotal: 2, - ExpVCSApplyStatusSucc: 1, + DoNotUpdateApply: true, + PrevPlanStored: true, }, { Description: "When planning with no changes and previous 'No changes' plan, set the 2/2 apply status", @@ -603,7 +602,7 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { ExpVCSApplyStatusSucc: 2, }, { - Description: "When planning again with changes following a previous 'No changes' plan, set the 0/1 apply status", + Description: "When planning again with changes 
following a previous 'No changes' plan do not set the apply status", ProjectContexts: []command.ProjectContext{ { CommandName: command.Plan, @@ -621,12 +620,11 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { }, }, }, - PrevPlanStored: true, - ExpVCSApplyStatusTotal: 1, - ExpVCSApplyStatusSucc: 0, + DoNotUpdateApply: true, + PrevPlanStored: true, }, { - Description: "When planning again with changes following a previous 'No changes' plan, while another plan with 'No changes', set the 1/2 apply status.", + Description: "When planning again with changes following a previous 'No changes' plan, while another plan with 'No changes' do not set the apply status.", ProjectContexts: []command.ProjectContext{ { CommandName: command.Plan, @@ -655,9 +653,8 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { }, }, }, - PrevPlanStored: true, - ExpVCSApplyStatusTotal: 2, - ExpVCSApplyStatusSucc: 1, + DoNotUpdateApply: true, + PrevPlanStored: true, }, { Description: "When planning again with no changes following a previous 'No changes' plan, while another plan also with 'No changes', set the 2/2 apply status.", @@ -748,15 +745,25 @@ func TestPlanCommandRunner_AtlantisApplyStatus(t *testing.T) { if c.ExpVCSApplyStatusSucc != c.ExpVCSApplyStatusTotal { ExpCommitStatus = models.PendingCommitStatus } - - commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( - Any[models.Repo](), - Any[models.PullRequest](), - Eq[models.CommitStatus](ExpCommitStatus), - Eq[command.Name](command.Apply), - Eq(c.ExpVCSApplyStatusSucc), - Eq(c.ExpVCSApplyStatusTotal), - ) + if c.DoNotUpdateApply { + commitUpdater.VerifyWasCalled(Never()).UpdateCombinedCount( + Any[models.Repo](), + Any[models.PullRequest](), + Any[models.CommitStatus](), + Eq[command.Name](command.Apply), + AnyInt(), + AnyInt(), + ) + } else { + commitUpdater.VerifyWasCalledOnce().UpdateCombinedCount( + Any[models.Repo](), + Any[models.PullRequest](), + Eq[models.CommitStatus](ExpCommitStatus), + Eq[command.Name](command.Apply), + Eq(c.ExpVCSApplyStatusSucc), + Eq(c.ExpVCSApplyStatusTotal), + ) + } }) } } From 80ecc3827354b7feaa7281f9c7aa03d523eff406 Mon Sep 17 00:00:00 2001 From: Ghais Zaher Date: Mon, 25 Sep 2023 19:43:35 +0200 Subject: [PATCH 07/12] feat: disable autoplan label (#3649) * feat: disable autoplan label * documentation * revert unrelated change * fix property * gitlab and github * dd more test * small fixes * add tests for github and gitlab clients * fix: remove unrelated comments * fmt --------- Co-authored-by: PePe Amengual --- cmd/server.go | 5 + cmd/server_test.go | 1 + runatlantis.io/docs/server-configuration.md | 10 ++ server/events/command_runner.go | 10 ++ server/events/command_runner_test.go | 56 ++++++++- server/events/vcs/azuredevops_client.go | 4 + server/events/vcs/bitbucketcloud/client.go | 4 + server/events/vcs/bitbucketserver/client.go | 4 + server/events/vcs/client.go | 3 + server/events/vcs/github_client.go | 15 +++ server/events/vcs/github_client_test.go | 107 ++++++++++++++++++ server/events/vcs/gitlab_client.go | 10 ++ server/events/vcs/gitlab_client_test.go | 59 ++++++++++ server/events/vcs/mocks/mock_client.go | 50 ++++++++ .../events/vcs/not_configured_vcs_client.go | 4 + server/events/vcs/proxy.go | 4 + server/server.go | 1 + server/user_config.go | 1 + 18 files changed, 345 insertions(+), 3 deletions(-) diff --git a/cmd/server.go b/cmd/server.go index 19ed98763e..5aeecbb909 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -72,6 +72,7 @@ const ( DisableApplyAllFlag = "disable-apply-all" 
DisableApplyFlag = "disable-apply" DisableAutoplanFlag = "disable-autoplan" + DisableAutoplanLabelFlag = "disable-autoplan-label" DisableMarkdownFoldingFlag = "disable-markdown-folding" DisableRepoLockingFlag = "disable-repo-locking" DiscardApprovalOnPlanFlag = "discard-approval-on-plan" @@ -257,6 +258,10 @@ var stringFlags = map[string]stringFlag{ description: "Path to directory to store Atlantis data.", defaultValue: DefaultDataDir, }, + DisableAutoplanLabelFlag: { + description: "Pull request label to disable atlantis auto planning feature only if present.", + defaultValue: "", + }, EmojiReaction: { description: "Emoji Reaction to use to react to comments", defaultValue: DefaultEmojiReaction, diff --git a/cmd/server_test.go b/cmd/server_test.go index a597fced70..b5bfba2574 100644 --- a/cmd/server_test.go +++ b/cmd/server_test.go @@ -115,6 +115,7 @@ var testFlags = map[string]interface{}{ VCSStatusName: "my-status", WriteGitCredsFlag: true, DisableAutoplanFlag: true, + DisableAutoplanLabelFlag: "no-auto-plan", EnablePolicyChecksFlag: false, EnableRegExpCmdFlag: false, EnableDiffMarkdownFormat: false, diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md index 2e9cfe52fd..e4370c7046 100644 --- a/runatlantis.io/docs/server-configuration.md +++ b/runatlantis.io/docs/server-configuration.md @@ -355,6 +355,16 @@ and set `--autoplan-modules` to `false`. ``` Disable atlantis auto planning. +### `--disable-autoplan-label` + ```bash + atlantis server --disable-autoplan-label="no-autoplan" + # or + ATLANTIS_DISABLE_AUTOPLAN_LABEL="no-autoplan" + ``` + Disable atlantis auto planning only on pull requests with the specified label. + + If `disable-autoplan` property is `true`, this flag has no effect. + ### `--disable-markdown-folding` ```bash atlantis server --disable-markdown-folding diff --git a/server/events/command_runner.go b/server/events/command_runner.go index a61d63ee20..24d697392b 100644 --- a/server/events/command_runner.go +++ b/server/events/command_runner.go @@ -15,6 +15,7 @@ package events import ( "fmt" + "github.com/runatlantis/atlantis/server/utils" "strconv" "github.com/google/go-github/v54/github" @@ -98,6 +99,7 @@ type DefaultCommandRunner struct { GitlabMergeRequestGetter GitlabMergeRequestGetter // User config option: Disables autoplan when a pull request is opened or updated. DisableAutoplan bool + DisableAutoplanLabel string EventParser EventParsing // User config option: Fail and do not run the Atlantis command request if any of the pre workflow hooks error FailOnPreWorkflowHookError bool @@ -165,6 +167,14 @@ func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo if c.DisableAutoplan { return } + if len(c.DisableAutoplanLabel) > 0 { + labels, err := c.VCSClient.GetPullLabels(baseRepo, pull) + if err != nil { + ctx.Log.Err("Unable to get pull labels. Proceeding with %s command.", err, command.Plan) + } else if utils.SlicesContains(labels, c.DisableAutoplanLabel) { + return + } + } cmd := &CommentCommand{ Name: command.Autoplan, diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go index 90995ba296..59b6a6b21d 100644 --- a/server/events/command_runner_test.go +++ b/server/events/command_runner_test.go @@ -496,9 +496,11 @@ func TestRunCommentCommand_DisableApplyAllDisabled(t *testing.T) { vcsClient.VerifyWasCalledOnce().CreateComment(testdata.GithubRepo, modelPull.Num, "**Error:** Running `atlantis apply` without flags is disabled. 
You must specify which project to apply via the `-d `, `-w ` or `-p ` flags.", "apply") } -func TestRunCommentCommand_DisableDisableAutoplan(t *testing.T) { - t.Log("if \"DisableAutoplan is true\" are disabled and we are silencing return and do not comment with error") +func TestRunCommentCommand_DisableAutoplan(t *testing.T) { + t.Log("if \"DisableAutoplan\" is true, auto plans are disabled and we are silencing return and do not comment with error") setup(t) + modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, BaseBranch: "main"} + ch.DisableAutoplan = true defer func() { ch.DisableAutoplan = false }() @@ -512,8 +514,56 @@ func TestRunCommentCommand_DisableDisableAutoplan(t *testing.T) { }, }, nil) - ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, testdata.Pull, testdata.User) + ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, modelPull, testdata.User) + projectCommandBuilder.VerifyWasCalled(Never()).BuildAutoplanCommands(Any[*command.Context]()) +} + +func TestRunCommentCommand_DisableAutoplanLabel(t *testing.T) { + t.Log("if \"DisableAutoplanLabel\" is present and pull request has that label, auto plans are disabled and we are silencing return and do not comment with error") + vcsClient := setup(t) + modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, BaseBranch: "main"} + + ch.DisableAutoplanLabel = "disable-auto-plan" + defer func() { ch.DisableAutoplanLabel = "" }() + + When(projectCommandBuilder.BuildAutoplanCommands(Any[*command.Context]())). + ThenReturn([]command.ProjectContext{ + { + CommandName: command.Plan, + }, + { + CommandName: command.Plan, + }, + }, nil) + When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn([]string{"disable-auto-plan", "need-help"}, nil) + + ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, modelPull, testdata.User) projectCommandBuilder.VerifyWasCalled(Never()).BuildAutoplanCommands(Any[*command.Context]()) + vcsClient.VerifyWasCalledOnce().GetPullLabels(testdata.GithubRepo, modelPull) +} + +func TestRunCommentCommand_DisableAutoplanLabel_PullNotLabeled(t *testing.T) { + t.Log("if \"DisableAutoplanLabel\" is present but pull request doesn't have that label, auto plans run") + vcsClient := setup(t) + modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, BaseBranch: "main"} + + ch.DisableAutoplanLabel = "disable-auto-plan" + defer func() { ch.DisableAutoplanLabel = "" }() + + When(projectCommandBuilder.BuildAutoplanCommands(Any[*command.Context]())). 
+ ThenReturn([]command.ProjectContext{ + { + CommandName: command.Plan, + }, + { + CommandName: command.Plan, + }, + }, nil) + When(ch.VCSClient.GetPullLabels(testdata.GithubRepo, modelPull)).ThenReturn(nil, nil) + + ch.RunAutoplanCommand(testdata.GithubRepo, testdata.GithubRepo, modelPull, testdata.User) + projectCommandBuilder.VerifyWasCalled(Once()).BuildAutoplanCommands(Any[*command.Context]()) + vcsClient.VerifyWasCalledOnce().GetPullLabels(testdata.GithubRepo, modelPull) } func TestRunCommentCommand_ClosedPull(t *testing.T) { diff --git a/server/events/vcs/azuredevops_client.go b/server/events/vcs/azuredevops_client.go index 2d48e5aba2..5ff3f3ff38 100644 --- a/server/events/vcs/azuredevops_client.go +++ b/server/events/vcs/azuredevops_client.go @@ -424,3 +424,7 @@ func GitStatusContextFromSrc(src string) *azuredevops.GitStatusContext { func (g *AzureDevopsClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { return "", fmt.Errorf("not yet implemented") } + +func (g *AzureDevopsClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + return nil, fmt.Errorf("not yet implemented") +} diff --git a/server/events/vcs/bitbucketcloud/client.go b/server/events/vcs/bitbucketcloud/client.go index 38179590fa..fa1751db19 100644 --- a/server/events/vcs/bitbucketcloud/client.go +++ b/server/events/vcs/bitbucketcloud/client.go @@ -281,3 +281,7 @@ func (b *Client) GetFileContent(pull models.PullRequest, fileName string) (bool, func (b *Client) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { return "", fmt.Errorf("not yet implemented") } + +func (b *Client) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + return nil, fmt.Errorf("not yet implemented") +} diff --git a/server/events/vcs/bitbucketserver/client.go b/server/events/vcs/bitbucketserver/client.go index 6df5c7c9a2..943fd3b6c8 100644 --- a/server/events/vcs/bitbucketserver/client.go +++ b/server/events/vcs/bitbucketserver/client.go @@ -365,3 +365,7 @@ func (b *Client) GetFileContent(pull models.PullRequest, fileName string) (bool, func (b *Client) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { return "", fmt.Errorf("not yet implemented") } + +func (b *Client) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + return nil, fmt.Errorf("not yet implemented") +} diff --git a/server/events/vcs/client.go b/server/events/vcs/client.go index 6ab283c8a7..dd2e489f6f 100644 --- a/server/events/vcs/client.go +++ b/server/events/vcs/client.go @@ -49,4 +49,7 @@ type Client interface { GetFileContent(pull models.PullRequest, fileName string) (bool, []byte, error) SupportsSingleFileDownload(repo models.Repo) bool GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) + + // GetPullLabels returns the labels of a pull request + GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) } diff --git a/server/events/vcs/github_client.go b/server/events/vcs/github_client.go index 608deda5d7..76ad3bf421 100644 --- a/server/events/vcs/github_client.go +++ b/server/events/vcs/github_client.go @@ -724,3 +724,18 @@ func (g *GithubClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) } return repository.GetCloneURL(), nil } + +func (g *GithubClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + pullDetails, _, err := g.client.PullRequests.Get(g.ctx, repo.Owner, repo.Name, pull.Num) + if err != nil { + return nil, err + } + + 
var labels []string + + for _, label := range pullDetails.Labels { + labels = append(labels, *label.Name) + } + + return labels, nil +} diff --git a/server/events/vcs/github_client_test.go b/server/events/vcs/github_client_test.go index e42e353027..4f2f8fd3e7 100644 --- a/server/events/vcs/github_client_test.go +++ b/server/events/vcs/github_client_test.go @@ -1474,3 +1474,110 @@ func TestGithubClient_DiscardReviews(t *testing.T) { }) } } + +func TestGithubClient_GetPullLabels(t *testing.T) { + logger := logging.NewNoopLogger(t) + resp := `{ + "url": "https://api.github.com/repos/runatlantis/atlantis/pulls/1", + "id": 167530667, + "merge_commit_sha": "3fe6aa34bc25ac3720e639fcad41b428e83bdb37", + "labels": [ + { + "id": 1303230720, + "node_id": "MDU6TGFiZWwxMzAzMjMwNzIw", + "url": "https://api.github.com/repos/runatlantis/atlantis/labels/docs", + "name": "docs", + "color": "d87165", + "default": false, + "description": "Documentation" + }, + { + "id": 2552271640, + "node_id": "MDU6TGFiZWwyNTUyMjcxNjQw", + "url": "https://api.github.com/repos/runatlantis/atlantis/labels/go", + "name": "go", + "color": "16e2e2", + "default": false, + "description": "Pull requests that update Go code" + }, + { + "id": 2696098981, + "node_id": "MDU6TGFiZWwyNjk2MDk4OTgx", + "url": "https://api.github.com/repos/runatlantis/atlantis/labels/needs%20tests", + "name": "needs tests", + "color": "FBB1DE", + "default": false, + "description": "Change requires tests" + }, + { + "id": 4439792681, + "node_id": "LA_kwDOBy76Zc8AAAABCKHcKQ", + "url": "https://api.github.com/repos/runatlantis/atlantis/labels/work-in-progress", + "name": "work-in-progress", + "color": "B1E20A", + "default": false, + "description": "" + } + ] + }` + testServer := httptest.NewTLSServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v3/repos/runatlantis/atlantis/pulls/1": + w.Write([]byte(resp)) // nolint: errcheck + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return + } + })) + testServerURL, err := url.Parse(testServer.URL) + Ok(t, err) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logger) + Ok(t, err) + defer disableSSLVerification()() + + labels, err := client.GetPullLabels(models.Repo{ + Owner: "runatlantis", + Name: "atlantis", + }, models.PullRequest{ + Num: 1, + }) + Ok(t, err) + Equals(t, []string{"docs", "go", "needs tests", "work-in-progress"}, labels) +} + +func TestGithubClient_GetPullLabels_EmptyResponse(t *testing.T) { + logger := logging.NewNoopLogger(t) + resp := `{ + "url": "https://api.github.com/repos/runatlantis/atlantis/pulls/1", + "id": 167530667, + "merge_commit_sha": "3fe6aa34bc25ac3720e639fcad41b428e83bdb37", + "labels": [] + }` + testServer := httptest.NewTLSServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v3/repos/runatlantis/atlantis/pulls/1": + w.Write([]byte(resp)) // nolint: errcheck + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return + } + })) + testServerURL, err := url.Parse(testServer.URL) + Ok(t, err) + client, err := vcs.NewGithubClient(testServerURL.Host, &vcs.GithubUserCredentials{"user", "pass"}, vcs.GithubConfig{}, logger) + Ok(t, err) + defer disableSSLVerification()() + + labels, err := client.GetPullLabels(models.Repo{ + Owner: "runatlantis", + Name: "atlantis", + }, 
models.PullRequest{ + Num: 1, + }) + Ok(t, err) + Equals(t, 0, len(labels)) +} diff --git a/server/events/vcs/gitlab_client.go b/server/events/vcs/gitlab_client.go index 030629c8b8..b98c4513c8 100644 --- a/server/events/vcs/gitlab_client.go +++ b/server/events/vcs/gitlab_client.go @@ -515,3 +515,13 @@ func (g *GitlabClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) } return project.HTTPURLToRepo, nil } + +func (g *GitlabClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + mr, _, err := g.Client.MergeRequests.GetMergeRequest(repo.FullName, pull.Num, nil) + + if err != nil { + return nil, err + } + + return mr.Labels, nil +} diff --git a/server/events/vcs/gitlab_client_test.go b/server/events/vcs/gitlab_client_test.go index a7f1a77be6..fdadf5da8b 100644 --- a/server/events/vcs/gitlab_client_test.go +++ b/server/events/vcs/gitlab_client_test.go @@ -604,6 +604,65 @@ func TestGitlabClient_HideOldComments(t *testing.T) { Equals(t, summaryFooter, gotNotePutCalls[1].comment[2]) } +func TestGithubClient_GetPullLabels(t *testing.T) { + var mergeSuccessWithLabel = strings.ReplaceAll(mergeSuccess, `"labels":[]`, `"labels":["work in progress"]`) + testServer := httptest.NewServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v4/projects/runatlantis%2Fatlantis/merge_requests/1": + w.WriteHeader(http.StatusOK) + w.Write([]byte(mergeSuccessWithLabel)) // nolint: errcheck + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + } + })) + + internalClient, err := gitlab.NewClient("token", gitlab.WithBaseURL(testServer.URL)) + Ok(t, err) + client := &GitlabClient{ + Client: internalClient, + Version: nil, + } + + labels, err := client.GetPullLabels(models.Repo{ + FullName: "runatlantis/atlantis", + }, models.PullRequest{ + Num: 1, + }) + Ok(t, err) + Equals(t, []string{"work in progress"}, labels) +} + +func TestGithubClient_GetPullLabels_EmptyResponse(t *testing.T) { + testServer := httptest.NewServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v4/projects/runatlantis%2Fatlantis/merge_requests/1": + w.WriteHeader(http.StatusOK) + w.Write([]byte(pipelineSuccess)) // nolint: errcheck + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + } + })) + + internalClient, err := gitlab.NewClient("token", gitlab.WithBaseURL(testServer.URL)) + Ok(t, err) + client := &GitlabClient{ + Client: internalClient, + Version: nil, + } + + labels, err := client.GetPullLabels(models.Repo{ + FullName: "runatlantis/atlantis", + }, models.PullRequest{ + Num: 1, + }) + Ok(t, err) + Equals(t, 0, len(labels)) +} + var mergeSuccess = `{"id":22461274,"iid":13,"project_id":4580910,"title":"Update main.tf","description":"","state":"merged","created_at":"2019-01-15T18:27:29.375Z","updated_at":"2019-01-25T17:28:01.437Z","merged_by":{"id":1755902,"name":"Luke Kysow","username":"lkysow","state":"active","avatar_url":"https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80\u0026d=identicon","web_url":"https://gitlab.com/lkysow"},"merged_at":"2019-01-25T17:28:01.459Z","closed_by":null,"closed_at":null,"target_branch":"patch-1","source_branch":"patch-1-merger","upvotes":0,"downvotes":0,"author":{"id":1755902,"name":"Luke 
Kysow","username":"lkysow","state":"active","avatar_url":"https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80\u0026d=identicon","web_url":"https://gitlab.com/lkysow"},"assignee":null,"source_project_id":4580910,"target_project_id":4580910,"labels":[],"work_in_progress":false,"milestone":null,"merge_when_pipeline_succeeds":false,"merge_status":"can_be_merged","detailed_merge_status":"mergeable","sha":"cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0","merge_commit_sha":"c9b336f1c71d3e64810b8cfa2abcfab232d6bff6","user_notes_count":0,"discussion_locked":null,"should_remove_source_branch":null,"force_remove_source_branch":false,"web_url":"https://gitlab.com/lkysow/atlantis-example/merge_requests/13","time_stats":{"time_estimate":0,"total_time_spent":0,"human_time_estimate":null,"human_total_time_spent":null},"squash":false,"subscribed":true,"changes_count":"1","latest_build_started_at":null,"latest_build_finished_at":null,"first_deployed_to_production_at":null,"pipeline":null,"diff_refs":{"base_sha":"67cb91d3f6198189f433c045154a885784ba6977","head_sha":"cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0","start_sha":"67cb91d3f6198189f433c045154a885784ba6977"},"merge_error":null,"approvals_before_merge":null}` var pipelineSuccess = `{"id": 22461274,"iid": 13,"project_id": 4580910,"title": "Update main.tf","description": "","state": "opened","created_at": "2019-01-15T18:27:29.375Z","updated_at": "2019-01-25T17:28:01.437Z","merged_by": null,"merged_at": null,"closed_by": null,"closed_at": null,"target_branch": "patch-1","source_branch": "patch-1-merger","user_notes_count": 0,"upvotes": 0,"downvotes": 0,"author": {"id": 1755902,"name": "Luke Kysow","username": "lkysow","state": "active","avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80\u0026d=identicon","web_url": "https://gitlab.com/lkysow"},"assignee": null,"reviewers": [],"source_project_id": 4580910,"target_project_id": 4580910,"labels": [],"work_in_progress": false,"milestone": null,"merge_when_pipeline_succeeds": false,"merge_status": "can_be_merged","detailed_merge_status": "mergeable","sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0","merge_commit_sha": null,"squash_commit_sha": null,"discussion_locked": null,"should_remove_source_branch": null,"force_remove_source_branch": true,"reference": "!13","references": {"short": "!13","relative": "!13","full": "lkysow/atlantis-example!13"},"web_url": "https://gitlab.com/lkysow/atlantis-example/merge_requests/13","time_stats": {"time_estimate": 0,"total_time_spent": 0,"human_time_estimate": null,"human_total_time_spent": null},"squash": true,"task_completion_status": {"count": 0,"completed_count": 0},"has_conflicts": false,"blocking_discussions_resolved": true,"approvals_before_merge": null,"subscribed": false,"changes_count": "1","latest_build_started_at": "2019-01-15T18:27:29.375Z","latest_build_finished_at": "2019-01-25T17:28:01.437Z","first_deployed_to_production_at": null,"pipeline": {"id": 488598,"sha": "67cb91d3f6198189f433c045154a885784ba6977","ref": "patch-1-merger","status": "success","created_at": "2019-01-15T18:27:29.375Z","updated_at": "2019-01-25T17:28:01.437Z","web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598"},"head_pipeline": {"id": 488598,"sha": "67cb91d3f6198189f433c045154a885784ba6977","ref": "patch-1-merger","status": "success","created_at": "2019-01-15T18:27:29.375Z","updated_at": "2019-01-25T17:28:01.437Z","web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598","before_sha": 
"0000000000000000000000000000000000000000","tag": false,"yaml_errors": null,"user": {"id": 1755902,"name": "Luke Kysow","username": "lkysow","state": "active","avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80\u0026d=identicon","web_url": "https://gitlab.com/lkysow"},"started_at": "2019-01-15T18:27:29.375Z","finished_at": "2019-01-25T17:28:01.437Z","committed_at": null,"duration": 31,"coverage": null,"detailed_status": {"icon": "status_success","text": "passed","label": "passed","group": "success","tooltip": "passed","has_details": true,"details_path": "/lkysow/atlantis-example/-/pipelines/488598","illustration": null,"favicon": "/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png"}},"diff_refs": {"base_sha": "67cb91d3f6198189f433c045154a885784ba6977","head_sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0","start_sha": "67cb91d3f6198189f433c045154a885784ba6977"},"merge_error": null,"first_contribution": false,"user": {"can_merge": true}}` var projectSuccess = `{"id": 4580910,"description": "","name": "atlantis-example","name_with_namespace": "lkysow / atlantis-example","path": "atlantis-example","path_with_namespace": "lkysow/atlantis-example","created_at": "2018-04-30T13:44:28.367Z","default_branch": "patch-1","tag_list": [],"ssh_url_to_repo": "git@gitlab.com:lkysow/atlantis-example.git","http_url_to_repo": "https://gitlab.com/lkysow/atlantis-example.git","web_url": "https://gitlab.com/lkysow/atlantis-example","readme_url": "https://gitlab.com/lkysow/atlantis-example/-/blob/main/README.md","avatar_url": "https://gitlab.com/uploads/-/system/project/avatar/4580910/avatar.png","forks_count": 0,"star_count": 7,"last_activity_at": "2021-06-29T21:10:43.968Z","namespace": {"id": 1,"name": "lkysow","path": "lkysow","kind": "group","full_path": "lkysow","parent_id": 1,"avatar_url": "/uploads/-/system/group/avatar/1651/platform.png","web_url": "https://gitlab.com/groups/lkysow"},"_links": {"self": "https://gitlab.com/api/v4/projects/4580910","issues": "https://gitlab.com/api/v4/projects/4580910/issues","merge_requests": "https://gitlab.com/api/v4/projects/4580910/merge_requests","repo_branches": "https://gitlab.com/api/v4/projects/4580910/repository/branches","labels": "https://gitlab.com/api/v4/projects/4580910/labels","events": "https://gitlab.com/api/v4/projects/4580910/events","members": "https://gitlab.com/api/v4/projects/4580910/members"},"packages_enabled": false,"empty_repo": false,"archived": false,"visibility": "private","resolve_outdated_diff_discussions": false,"container_registry_enabled": false,"container_expiration_policy": {"cadence": "1d","enabled": false,"keep_n": 10,"older_than": "90d","name_regex": ".*","name_regex_keep": null,"next_run_at": "2021-05-01T13:44:28.397Z"},"issues_enabled": true,"merge_requests_enabled": true,"wiki_enabled": false,"jobs_enabled": true,"snippets_enabled": true,"service_desk_enabled": false,"service_desk_address": null,"can_create_merge_request_in": true,"issues_access_level": "private","repository_access_level": "enabled","merge_requests_access_level": "enabled","forking_access_level": "enabled","wiki_access_level": "disabled","builds_access_level": "enabled","snippets_access_level": "enabled","pages_access_level": "private","operations_access_level": "disabled","analytics_access_level": "enabled","emails_disabled": null,"shared_runners_enabled": true,"lfs_enabled": false,"creator_id": 818,"import_status": "none","import_error": null,"open_issues_count": 
0,"runners_token": "1234456","ci_default_git_depth": 50,"ci_forward_deployment_enabled": true,"public_jobs": true,"build_git_strategy": "fetch","build_timeout": 3600,"auto_cancel_pending_pipelines": "enabled","build_coverage_regex": null,"ci_config_path": "","shared_with_groups": [],"only_allow_merge_if_pipeline_succeeds": true,"allow_merge_on_skipped_pipeline": false,"restrict_user_defined_variables": false,"request_access_enabled": true,"only_allow_merge_if_all_discussions_are_resolved": true,"remove_source_branch_after_merge": true,"printing_merge_request_link_enabled": true,"merge_method": "merge","suggestion_commit_message": "","auto_devops_enabled": false,"auto_devops_deploy_strategy": "continuous","autoclose_referenced_issues": true,"repository_storage": "default","approvals_before_merge": 0,"mirror": false,"external_authorization_classification_label": null,"marked_for_deletion_at": null,"marked_for_deletion_on": null,"requirements_enabled": false,"compliance_frameworks": [],"permissions": {"project_access": null,"group_access": {"access_level": 50,"notification_level": 3}}}` diff --git a/server/events/vcs/mocks/mock_client.go b/server/events/vcs/mocks/mock_client.go index 9f80f46def..7583e22fac 100644 --- a/server/events/vcs/mocks/mock_client.go +++ b/server/events/vcs/mocks/mock_client.go @@ -116,6 +116,25 @@ func (mock *MockClient) GetModifiedFiles(repo models.Repo, pull models.PullReque return ret0, ret1 } +func (mock *MockClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockClient().") + } + params := []pegomock.Param{repo, pull} + result := pegomock.GetGenericMockFrom(mock).Invoke("GetPullLabels", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 []string + var ret1 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].([]string) + } + if result[1] != nil { + ret1 = result[1].(error) + } + } + return ret0, ret1 +} + func (mock *MockClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockClient().") @@ -467,6 +486,37 @@ func (c *MockClient_GetModifiedFiles_OngoingVerification) GetAllCapturedArgument return } +func (verifier *VerifierMockClient) GetPullLabels(repo models.Repo, pull models.PullRequest) *MockClient_GetPullLabels_OngoingVerification { + params := []pegomock.Param{repo, pull} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetPullLabels", params, verifier.timeout) + return &MockClient_GetPullLabels_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockClient_GetPullLabels_OngoingVerification struct { + mock *MockClient + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockClient_GetPullLabels_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { + repo, pull := c.GetAllCapturedArguments() + return repo[len(repo)-1], pull[len(pull)-1] +} + +func (c *MockClient_GetPullLabels_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]models.Repo, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(models.Repo) + } + _param1 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(models.PullRequest) + } + } + return +} + func (verifier *VerifierMockClient) GetTeamNamesForUser(repo models.Repo, user models.User) *MockClient_GetTeamNamesForUser_OngoingVerification { params := []pegomock.Param{repo, user} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetTeamNamesForUser", params, verifier.timeout) diff --git a/server/events/vcs/not_configured_vcs_client.go b/server/events/vcs/not_configured_vcs_client.go index 8ab7e03e89..6ce6f2da56 100644 --- a/server/events/vcs/not_configured_vcs_client.go +++ b/server/events/vcs/not_configured_vcs_client.go @@ -73,3 +73,7 @@ func (a *NotConfiguredVCSClient) GetFileContent(pull models.PullRequest, fileNam func (a *NotConfiguredVCSClient) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { return "", a.err() } + +func (a *NotConfiguredVCSClient) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + return nil, a.err() +} diff --git a/server/events/vcs/proxy.go b/server/events/vcs/proxy.go index 64fb8fa8ef..25637bcd0f 100644 --- a/server/events/vcs/proxy.go +++ b/server/events/vcs/proxy.go @@ -107,3 +107,7 @@ func (d *ClientProxy) SupportsSingleFileDownload(repo models.Repo) bool { func (d *ClientProxy) GetCloneURL(VCSHostType models.VCSHostType, repo string) (string, error) { return d.clients[VCSHostType].GetCloneURL(VCSHostType, repo) } + +func (d *ClientProxy) GetPullLabels(repo models.Repo, pull models.PullRequest) ([]string, error) { + return d.clients[repo.VCSHost.Type].GetPullLabels(repo, pull) +} diff --git a/server/server.go b/server/server.go index a2410a1315..1fbbce19f3 100644 --- a/server/server.go +++ b/server/server.go @@ -805,6 +805,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) { SilenceForkPRErrors: userConfig.SilenceForkPRErrors, SilenceForkPRErrorsFlag: config.SilenceForkPRErrorsFlag, DisableAutoplan: userConfig.DisableAutoplan, + DisableAutoplanLabel: userConfig.DisableAutoplanLabel, Drainer: drainer, 
PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, diff --git a/server/user_config.go b/server/user_config.go index edfc6dd1da..81fb7bef7a 100644 --- a/server/user_config.go +++ b/server/user_config.go @@ -34,6 +34,7 @@ type UserConfig struct { DisableApplyAll bool `mapstructure:"disable-apply-all"` DisableApply bool `mapstructure:"disable-apply"` DisableAutoplan bool `mapstructure:"disable-autoplan"` + DisableAutoplanLabel string `mapstructure:"disable-autoplan-label"` DisableMarkdownFolding bool `mapstructure:"disable-markdown-folding"` DisableRepoLocking bool `mapstructure:"disable-repo-locking"` DiscardApprovalOnPlanFlag bool `mapstructure:"discard-approval-on-plan"` From 078af7037652aaec8b4b348b3a4952d933706d98 Mon Sep 17 00:00:00 2001 From: Finn Arne Gangstad Date: Mon, 25 Sep 2023 21:17:27 +0200 Subject: [PATCH 08/12] fix: safer re-merging with updated upstream (#3499) * Safer handling of merging with an updated upstream. We used to call forceClone() to update with upstream, but this deletes the checked out directory. This is inefficient, can delete existing plan files, and is very surprising if you are working manually in the working directory. We now fetch an updated upstream, and re-do the merge operation. This leaves any working files intact. * Rename SafeToReClone -> CheckForUpstreamChanges It's never safe to clone again. But sometimes we need to check for upstream changes to avoid reverting changes. The flag is now used to know when we need to merge again non-destructively with new changes. * Update fixtures.go * Add test to make sure plans are not wiped out As long as the branch itself has not been updated, plans should be kept. Even if upstream has changed. * renamed HasDiverged to MergedAgain in PlanResult and from Clone() This flag was only set to true in case a call to Clone() ended up merging with an updated upstream, so the new name better represents what it means. * Test that Clone on branch update wipes old plans This complements the test that Clone with unmodified branch but modified upstream does _not_ wipe plans. 
* runGit now runs git instead of returning a function that runs git * Updated template to merged again instead of diverged This is no longer a warning, but expected behavior in merge chekout mode * Rename git wrapper to wrappedGit, add a type for static config Every call to wrappedGit for the same PR uses identical setup for directory, head repo and PR, so passing the --------- Co-authored-by: nitrocode <7775707+nitrocode@users.noreply.github.com> Co-authored-by: PePe Amengual --- server/events/markdown_renderer_test.go | 8 +- server/events/mock_workingdir_test.go | 16 +- server/events/mocks/mock_working_dir.go | 16 +- server/events/models/models.go | 8 +- server/events/project_command_runner.go | 6 +- server/events/templates/diverged.tmpl | 5 - server/events/templates/merged_again.tmpl | 5 + .../templates/plan_success_unwrapped.tmpl | 2 +- .../templates/plan_success_wrapped.tmpl | 2 +- server/events/working_dir.go | 150 +++++++++++------- server/events/working_dir_test.go | 75 +++++---- 11 files changed, 173 insertions(+), 120 deletions(-) delete mode 100644 server/events/templates/diverged.tmpl create mode 100644 server/events/templates/merged_again.tmpl diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index 9cbc4e3df5..548ce03cdb 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -191,7 +191,7 @@ $$$ LockURL: "lock-url", RePlanCmd: "atlantis plan -d path -w workspace", ApplyCmd: "atlantis apply -d path -w workspace", - HasDiverged: true, + MergedAgain: true, }, Workspace: "workspace", RepoRelDir: "path", @@ -210,7 +210,7 @@ $$$ * :repeat: To **plan** this project again, comment: * $atlantis plan -d path -w workspace$ -:warning: The branch we're merging into is ahead, it is recommended to pull new commits first. +:twisted_rightwards_arrows: Upstream was modified, a new merge was performed. --- * :fast_forward: To **apply** all unapplied plans from this pull request, comment: @@ -1974,7 +1974,7 @@ $$$ LockURL: "lock-url", RePlanCmd: "atlantis plan -d path -w workspace", ApplyCmd: "atlantis apply -d path -w workspace", - HasDiverged: true, + MergedAgain: true, }, Workspace: "workspace", RepoRelDir: "path", @@ -1992,7 +1992,7 @@ $$$ * :repeat: To **plan** this project again, comment: * $atlantis plan -d path -w workspace$ -:warning: The branch we're merging into is ahead, it is recommended to pull new commits first. +:twisted_rightwards_arrows: Upstream was modified, a new merge was performed. --- * :fast_forward: To **apply** all unapplied plans from this pull request, comment: diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index 27a0695ea5..30b344ea3a 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -165,12 +165,12 @@ func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { return ret0 } -func (mock *MockWorkingDir) SetSafeToReClone() { +func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetSafeToReClone", params, []reflect.Type{}) + pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -482,19 +482,19 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetSafeToReClone() *MockWorkingDir_SetSafeToReClone_OngoingVerification { +func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetSafeToReClone", params, verifier.timeout) - return &MockWorkingDir_SetSafeToReClone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) + return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetSafeToReClone_OngoingVerification struct { +type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { } diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 8f051f1937..55ecc1ca4c 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -165,12 +165,12 @@ func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { return ret0 } -func (mock *MockWorkingDir) SetSafeToReClone() { +func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetSafeToReClone", params, []reflect.Type{}) + pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -482,19 +482,19 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetSafeToReClone() *MockWorkingDir_SetSafeToReClone_OngoingVerification { +func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetSafeToReClone", params, verifier.timeout) - return &MockWorkingDir_SetSafeToReClone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) + return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetSafeToReClone_OngoingVerification struct { +type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { } diff --git a/server/events/models/models.go b/server/events/models/models.go index 492fc36fea..bdc821c285 100644 --- a/server/events/models/models.go +++ b/server/events/models/models.go @@ -361,10 +361,10 @@ type PlanSuccess struct { RePlanCmd string // ApplyCmd is the command that users should run to apply this plan. ApplyCmd string - // HasDiverged is true if we're using the checkout merge strategy and the - // branch we're merging into has been updated since we cloned and merged - // it. - HasDiverged bool + // MergedAgain is true if we're using the checkout merge strategy and the + // branch we're merging into had been updated, and we had to merge again + // before planning + MergedAgain bool } type PolicySetResult struct { diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 276a9ef81d..6b938b2b95 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -543,9 +543,9 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model } defer unlockFn() - p.WorkingDir.SetSafeToReClone() + p.WorkingDir.SetCheckForUpstreamChanges() // Clone is idempotent so okay to run even if the repo was already cloned. 
- repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -576,7 +576,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model TerraformOutput: strings.Join(outputs, "\n"), RePlanCmd: ctx.RePlanCmd, ApplyCmd: ctx.ApplyCmd, - HasDiverged: hasDiverged, + MergedAgain: mergedAgain, }, "", nil } diff --git a/server/events/templates/diverged.tmpl b/server/events/templates/diverged.tmpl deleted file mode 100644 index f0f4be0ed2..0000000000 --- a/server/events/templates/diverged.tmpl +++ /dev/null @@ -1,5 +0,0 @@ -{{ define "diverged" -}} -{{ if .HasDiverged }} -:warning: The branch we're merging into is ahead, it is recommended to pull new commits first. -{{ end -}} -{{ end -}} diff --git a/server/events/templates/merged_again.tmpl b/server/events/templates/merged_again.tmpl new file mode 100644 index 0000000000..796afe552a --- /dev/null +++ b/server/events/templates/merged_again.tmpl @@ -0,0 +1,5 @@ +{{ define "mergedAgain" -}} +{{ if .MergedAgain }} +:twisted_rightwards_arrows: Upstream was modified, a new merge was performed. +{{ end -}} +{{ end -}} diff --git a/server/events/templates/plan_success_unwrapped.tmpl b/server/events/templates/plan_success_unwrapped.tmpl index 1f34216a0d..6bd81de233 100644 --- a/server/events/templates/plan_success_unwrapped.tmpl +++ b/server/events/templates/plan_success_unwrapped.tmpl @@ -16,5 +16,5 @@ This plan was not saved because one or more projects failed and automerge requir * :repeat: To **plan** this project again, comment: * `{{ .RePlanCmd }}` {{ end -}} -{{ template "diverged" . }} +{{ template "mergedAgain" . }} {{ end -}} diff --git a/server/events/templates/plan_success_wrapped.tmpl b/server/events/templates/plan_success_wrapped.tmpl index 9630de1b2e..cef96d0609 100644 --- a/server/events/templates/plan_success_wrapped.tmpl +++ b/server/events/templates/plan_success_wrapped.tmpl @@ -20,5 +20,5 @@ This plan was not saved because one or more projects failed and automerge requir {{ end -}} {{ .PlanSummary -}} -{{ template "diverged" . -}} +{{ template "mergedAgain" . -}} {{ end -}} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 65fd28a304..a6b2c73656 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -53,7 +53,7 @@ type WorkingDir interface { // Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if // the upstream branch has been modified. This is only safe after grabbing the project lock // and before running any plans - SetSafeToReClone() + SetCheckForUpstreamChanges() // DeletePlan deletes the plan for this repo, pull, workspace path and project name DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error // GetGitUntrackedFiles returns a list of Git untracked files in the working dir. 
@@ -84,9 +84,9 @@ type FileWorkspace struct { GithubAppEnabled bool // use the global setting without overriding GpgNoSigningEnabled bool - // flag indicating if a re-clone will be safe (project lock held, about to run plan) - SafeToReClone bool - Logger logging.SimpleLogging + // flag indicating if we have to merge with potential new changes upstream (directly after grabbing project lock) + CheckForUpstreamChanges bool + Logger logging.SimpleLogging } // Clone git clones headRepo, checks out the branch and then returns the absolute @@ -100,9 +100,9 @@ func (w *FileWorkspace) Clone( p models.PullRequest, workspace string) (string, bool, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) - hasDiverged := false - defer func() { w.SafeToReClone = false }() + defer func() { w.CheckForUpstreamChanges = false }() + c := wrappedGitContext{cloneDir, headRepo, p} // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. if _, err := os.Stat(cloneDir); err == nil { @@ -122,16 +122,16 @@ func (w *FileWorkspace) Clone( outputRevParseCmd, err := revParseCmd.CombinedOutput() if err != nil { w.Logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(cloneDir, headRepo, p) + return cloneDir, false, w.forceClone(c) } currCommit := strings.Trim(string(outputRevParseCmd), "\n") // We're prefix matching here because BitBucket doesn't give us the full // commit, only a 12 character prefix. if strings.HasPrefix(currCommit, p.HeadCommit) { - if w.SafeToReClone && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) { + if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) { w.Logger.Info("base branch has been updated, using merge strategy and will clone again") - hasDiverged = true + return cloneDir, true, w.mergeAgain(c) } else { w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) return cloneDir, false, nil @@ -143,7 +143,7 @@ func (w *FileWorkspace) Clone( } // Otherwise we clone the repo. - return cloneDir, hasDiverged, w.forceClone(cloneDir, headRepo, p) + return cloneDir, false, w.forceClone(c) } // recheckDiverged returns true if the branch we're merging into has diverged @@ -212,8 +212,8 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool { return hasDiverged } -func (w *FileWorkspace) forceClone(cloneDir string, headRepo models.Repo, p models.PullRequest) error { - value, _ := cloneLocks.LoadOrStore(cloneDir, new(sync.Mutex)) +func (w *FileWorkspace) forceClone(c wrappedGitContext) error { + value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) mutex := value.(*sync.Mutex) defer mutex.Unlock() @@ -222,97 +222,131 @@ func (w *FileWorkspace) forceClone(cloneDir string, headRepo models.Repo, p mode return nil } - err := os.RemoveAll(cloneDir) + err := os.RemoveAll(c.dir) if err != nil { - return errors.Wrapf(err, "deleting dir %q before cloning", cloneDir) + return errors.Wrapf(err, "deleting dir %q before cloning", c.dir) } // Create the directory and parents if necessary. - w.Logger.Info("creating dir %q", cloneDir) - if err := os.MkdirAll(cloneDir, 0700); err != nil { + w.Logger.Info("creating dir %q", c.dir) + if err := os.MkdirAll(c.dir, 0700); err != nil { return errors.Wrap(err, "creating new workspace") } // During testing, we mock some of this out. 
- headCloneURL := headRepo.CloneURL + headCloneURL := c.head.CloneURL if w.TestingOverrideHeadCloneURL != "" { headCloneURL = w.TestingOverrideHeadCloneURL } - baseCloneURL := p.BaseRepo.CloneURL + baseCloneURL := c.pr.BaseRepo.CloneURL if w.TestingOverrideBaseCloneURL != "" { baseCloneURL = w.TestingOverrideBaseCloneURL } - runGit := func(args ...string) error { - cmd := exec.Command("git", args...) // nolint: gosec - cmd.Dir = cloneDir - // The git merge command requires these env vars are set. - cmd.Env = append(os.Environ(), []string{ - "EMAIL=atlantis@runatlantis.io", - "GIT_AUTHOR_NAME=atlantis", - "GIT_COMMITTER_NAME=atlantis", - }...) - - cmdStr := w.sanitizeGitCredentials(strings.Join(cmd.Args, " "), p.BaseRepo, headRepo) - output, err := cmd.CombinedOutput() - sanitizedOutput := w.sanitizeGitCredentials(string(output), p.BaseRepo, headRepo) - if err != nil { - sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), p.BaseRepo, headRepo) - return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) - } - w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) - return nil - } - // if branch strategy, use depth=1 if !w.CheckoutMerge { - return runGit("clone", "--depth=1", "--branch", p.HeadBranch, "--single-branch", headCloneURL, cloneDir) + return w.wrappedGit(c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir) } // if merge strategy... // if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := runGit("clone", "--branch", p.BaseBranch, "--single-branch", baseCloneURL, cloneDir); err != nil { + if err := w.wrappedGit(c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } else { - if err := runGit("clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", p.BaseBranch, "--single-branch", baseCloneURL, cloneDir); err != nil { + if err := w.wrappedGit(c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } - if err := runGit("remote", "add", "head", headCloneURL); err != nil { + if err := w.wrappedGit(c, "remote", "add", "head", headCloneURL); err != nil { return err } + if w.GpgNoSigningEnabled { + if err := w.wrappedGit(c, "config", "--local", "commit.gpgsign", "false"); err != nil { + return err + } + } + + return w.mergeToBaseBranch(c) +} + +// There is a new upstream update that we need, and we want to update to it +// without deleting any existing plans +func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error { + value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) + mutex := value.(*sync.Mutex) - fetchRef := fmt.Sprintf("+refs/heads/%s:", p.HeadBranch) + defer mutex.Unlock() + if locked := mutex.TryLock(); !locked { + mutex.Lock() + return nil + } + + // Reset branch as if it was cloned again + if err := w.wrappedGit(c, "reset", "--hard", fmt.Sprintf("refs/remotes/head/%s", c.pr.BaseBranch)); err != nil { + return err + } + + return w.mergeToBaseBranch(c) +} + +// wrappedGitContext is the configuration for wrappedGit that is typically unchanged +// for a series of calls to wrappedGit +type wrappedGitContext struct { + dir string + head models.Repo + pr models.PullRequest +} + +// wrappedGit runs git with additional environment settings required for git merge, +// and with sanitized error logging to avoid leaking git credentials +func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) 
error { + cmd := exec.Command("git", args...) // nolint: gosec + cmd.Dir = c.dir + // The git merge command requires these env vars are set. + cmd.Env = append(os.Environ(), []string{ + "EMAIL=atlantis@runatlantis.io", + "GIT_AUTHOR_NAME=atlantis", + "GIT_COMMITTER_NAME=atlantis", + }...) + cmdStr := w.sanitizeGitCredentials(strings.Join(cmd.Args, " "), c.pr.BaseRepo, c.head) + output, err := cmd.CombinedOutput() + sanitizedOutput := w.sanitizeGitCredentials(string(output), c.pr.BaseRepo, c.head) + if err != nil { + sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), c.pr.BaseRepo, c.head) + return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) + } + w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) + return nil +} + +// Merge the PR into the base branch. +func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { + fetchRef := fmt.Sprintf("+refs/heads/%s:", c.pr.HeadBranch) fetchRemote := "head" if w.GithubAppEnabled { - fetchRef = fmt.Sprintf("pull/%d/head:", p.Num) + fetchRef = fmt.Sprintf("pull/%d/head:", c.pr.Num) fetchRemote = "origin" } // if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := runGit("fetch", fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(c, "fetch", fetchRemote, fetchRef); err != nil { return err } } else { - if err := runGit("fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { return err } } - if w.GpgNoSigningEnabled { - if err := runGit("config", "--local", "commit.gpgsign", "false"); err != nil { - return err - } - } - if err := runGit("merge-base", p.BaseBranch, "FETCH_HEAD"); err != nil { + if err := w.wrappedGit(c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil { // git merge-base returning error means that we did not receive enough commits in shallow clone. // Fall back to retrieving full repo history. - if err := runGit("fetch", "--unshallow"); err != nil { + if err := w.wrappedGit(c, "fetch", "--unshallow"); err != nil { return err } } @@ -323,7 +357,7 @@ func (w *FileWorkspace) forceClone(cloneDir string, headRepo models.Repo, p mode // git rev-parse HEAD^2 to get the head commit because it will // always succeed whereas without --no-ff, if the merge was fast // forwarded then git rev-parse HEAD^2 would fail. - return runGit("merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") + return w.wrappedGit(c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") } // GetWorkingDir returns the path to the workspace for this repo and pull. 
@@ -374,9 +408,9 @@ func (w *FileWorkspace) sanitizeGitCredentials(s string, base models.Repo, head return strings.Replace(baseReplaced, head.CloneURL, head.SanitizedCloneURL, -1) } -// Set the flag that indicates it is safe to re-clone if necessary -func (w *FileWorkspace) SetSafeToReClone() { - w.SafeToReClone = true +// Set the flag that indicates we need to check for upstream changes (if using merge checkout strategy) +func (w *FileWorkspace) SetCheckForUpstreamChanges() { + w.CheckForUpstreamChanges = true } func (w *FileWorkspace) DeletePlan(r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error { diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index 225f299291..8eae7c730d 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -3,6 +3,7 @@ package events_test import ( "crypto/tls" "fmt" + "github.com/stretchr/testify/assert" "net/http" "os" "path/filepath" @@ -97,13 +98,13 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check the commits. actBaseCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD~1") @@ -149,25 +150,25 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { Logger: logger, } - _, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -202,25 +203,25 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { Logger: logger, } - _, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. 
_, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -320,13 +321,13 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in shallow repo") @@ -351,13 +352,13 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in full repo") @@ -387,12 +388,12 @@ func TestClone_NoReclone(t *testing.T) { GpgNoSigningEnabled: true, Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check that our proof file is still there. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -416,6 +417,13 @@ func TestClone_RecloneWrongCommit(t *testing.T) { runCmd(t, repoDir, "git", "commit", "-m", "newfile") expCommit := runCmd(t, repoDir, "git", "rev-parse", "HEAD") + // Pretend that terraform has created a plan file, we'll check for it later + planFile := filepath.Join(dataDir, "repos/0/default/default.tfplan") + assert.NoFileExists(t, planFile) + _, err := os.Create(planFile) + Assert(t, err == nil, "creating plan file: %v", err) + assert.FileExists(t, planFile) + logger := logging.NewNoopLogger(t) wd := &events.FileWorkspace{ @@ -425,13 +433,14 @@ func TestClone_RecloneWrongCommit(t *testing.T) { GpgNoSigningEnabled: true, Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", HeadCommit: expCommit, }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) + assert.NoFileExists(t, planFile, "Plan file should have been wiped out by Clone") // Use rev-parse to verify at correct commit. actCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD") @@ -499,37 +508,47 @@ func TestClone_MasterHasDiverged(t *testing.T) { Logger: logger, } + // Pretend terraform has created a plan file, we'll check for it later + planFile := filepath.Join(repoDir, "repos/0/default/default.tfplan") + assert.NoFileExists(t, planFile) + _, err := os.Create(planFile) + Assert(t, err == nil, "creating plan file: %v", err) + assert.FileExists(t, planFile) + // Run the clone without the checkout merge strategy. 
It should return - // false for hasDiverged - _, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + // false for mergedAgain + _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, hasDiverged == false, "Clone with CheckoutMerge=false should not merge") + Assert(t, mergedAgain == false, "Clone with CheckoutMerge=false should not merge") + assert.FileExists(t, planFile, "Existing plan file should not be deleted by Clone with merge disabled") wd.CheckoutMerge = true - wd.SetSafeToReClone() + wd.SetCheckForUpstreamChanges() // Run the clone twice with the merge strategy, the first run should - // return true for hasDiverged, subsequent runs should + // return true for mergedAgain, subsequent runs should // return false since the first call is supposed to merge. - _, hasDiverged, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ + _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, hasDiverged == true, "First clone with CheckoutMerge=true with diverged base should have merged") + assert.FileExists(t, planFile, "Existing plan file should not be deleted by merging again") + Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged") - wd.SetSafeToReClone() - _, hasDiverged, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ + wd.SetCheckForUpstreamChanges() + _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, hasDiverged == false, "Second clone with CheckoutMerge=true and initially diverged base should not merge again") + Assert(t, mergedAgain == false, "Second clone with CheckoutMerge=true and initially diverged base should not merge again") + assert.FileExists(t, planFile, "Existing plan file should not have been deleted") } func TestHasDiverged_MasterHasDiverged(t *testing.T) { From 352bbedfbf385c69b22c13f0e53dcb14dabd1baa Mon Sep 17 00:00:00 2001 From: JSNortal <124635010+JSNortal@users.noreply.github.com> Date: Mon, 25 Sep 2023 20:53:06 +0100 Subject: [PATCH 09/12] fix: issue with GH App credential not writing if lines already exist (#3679) * Fix issue with GH App credential not writing if lines already exist * Fix lint issue of unused variable. --------- Co-authored-by: PePe Amengual Co-authored-by: Dylan Page --- server/events/vcs/git_cred_writer.go | 34 ++++++++++++++++++++--- server/events/vcs/git_cred_writer_test.go | 19 +++++++++++++ 2 files changed, 49 insertions(+), 4 deletions(-) diff --git a/server/events/vcs/git_cred_writer.go b/server/events/vcs/git_cred_writer.go index 6d6cf85317..eca5dc00d7 100644 --- a/server/events/vcs/git_cred_writer.go +++ b/server/events/vcs/git_cred_writer.go @@ -38,11 +38,23 @@ func WriteGitCreds(gitUser string, gitToken string, gitHostname string, home str } if ghAccessToken { - // Need to replace the line. - if err := fileLineReplace(config, gitUser, gitHostname, credsFile); err != nil { - return errors.Wrap(err, "replacing git credentials line for github app") + hasGHToken, err := fileHasGHToken(gitUser, gitHostname, credsFile) + if err != nil { + return err + } + if hasGHToken { + // Need to replace the line. 
+ if err := fileLineReplace(config, gitUser, gitHostname, credsFile); err != nil { + return errors.Wrap(err, "replacing git credentials line for github app") + } + logger.Info("updated git app credentials in %s", credsFile) + } else { + if err := fileAppend(config, credsFile); err != nil { + return err + } + logger.Info("wrote git credentials to %s", credsFile) } - logger.Info("updated git app credentials in %s", credsFile) + } else { // Otherwise we need to append the line. if err := fileAppend(config, credsFile); err != nil { @@ -113,3 +125,17 @@ func fileLineReplace(line, user, host, filename string) error { return os.WriteFile(filename, []byte(toWrite), 0600) } + +func fileHasGHToken(user, host, filename string) (bool, error) { + currContents, err := os.ReadFile(filename) // nolint: gosec + if err != nil { + return false, err + } + prevLines := strings.Split(string(currContents), "\n") + for _, l := range prevLines { + if strings.HasPrefix(l, "https://"+user) && strings.HasSuffix(l, host) { + return true, nil + } + } + return false, nil +} diff --git a/server/events/vcs/git_cred_writer_test.go b/server/events/vcs/git_cred_writer_test.go index 082f732475..64e7588672 100644 --- a/server/events/vcs/git_cred_writer_test.go +++ b/server/events/vcs/git_cred_writer_test.go @@ -86,6 +86,25 @@ func TestWriteGitCreds_ReplaceApp(t *testing.T) { Equals(t, expContets, string(actContents)) } +// Test that the github app credential gets added even if there are other credentials. +func TestWriteGitCreds_AppendAppWhenFileNotEmpty(t *testing.T) { + logger := logging.NewNoopLogger(t) + tmp := t.TempDir() + t.Setenv("HOME", tmp) + + credsFile := filepath.Join(tmp, ".git-credentials") + contents := "line1\nhttps://user:token@host.com\nline2" + err := os.WriteFile(credsFile, []byte(contents), 0600) + Ok(t, err) + + err = vcs.WriteGitCreds("x-access-token", "token", "github.com", tmp, logger, true) + Ok(t, err) + expContets := "line1\nhttps://user:token@host.com\nline2\nhttps://x-access-token:token@github.com" + actContents, err := os.ReadFile(filepath.Join(tmp, ".git-credentials")) + Ok(t, err) + Equals(t, expContets, string(actContents)) +} + // Test that the github app credentials get updated when cred file is empty. 
func TestWriteGitCreds_AppendApp(t *testing.T) { logger := logging.NewNoopLogger(t) From dd9d0362b189c70093ceccd4f8871b926e9d5e4e Mon Sep 17 00:00:00 2001 From: Dylan Page Date: Mon, 25 Sep 2023 17:09:54 -0400 Subject: [PATCH 10/12] ci: auto generated release notes (#3790) --- .github/release.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/release.yml b/.github/release.yml index 96a4e9bf8b..df2619416f 100644 --- a/.github/release.yml +++ b/.github/release.yml @@ -2,7 +2,6 @@ changelog: exclude: labels: - ignore-for-release - - docs - github-actions authors: - octocat @@ -22,9 +21,3 @@ changelog: - title: Other Changes labels: - "*" - - title: Dependencies - labels: - - dependencies - - title: Build - labels: - - build From f83609a51b47f8ace314f2ea9a45d4c391c88554 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Tue, 26 Sep 2023 01:46:33 +0000 Subject: [PATCH 11/12] fix(deps): update github.com/hashicorp/terraform-config-inspect digest to 5a6f8d1 in go.mod (#3792) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index e503311d3c..0e46a373cf 100644 --- a/go.mod +++ b/go.mod @@ -21,7 +21,7 @@ require ( github.com/hashicorp/go-multierror v1.1.1 github.com/hashicorp/go-version v1.6.0 github.com/hashicorp/golang-lru/v2 v2.0.6 - github.com/hashicorp/terraform-config-inspect v0.0.0-20230825013512-b800820f61b8 + github.com/hashicorp/terraform-config-inspect v0.0.0-20230925220900-5a6f8d18746d github.com/kr/pretty v0.3.1 github.com/mcdafydd/go-azuredevops v0.12.1 github.com/microcosm-cc/bluemonday v1.0.25 diff --git a/go.sum b/go.sum index d09bea7b4a..d7364b2013 100644 --- a/go.sum +++ b/go.sum @@ -267,8 +267,8 @@ github.com/hashicorp/hcl v1.0.0 h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= github.com/hashicorp/hcl/v2 v2.18.0 h1:wYnG7Lt31t2zYkcquwgKo6MWXzRUDIeIVU5naZwHLl8= github.com/hashicorp/hcl/v2 v2.18.0/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE= -github.com/hashicorp/terraform-config-inspect v0.0.0-20230825013512-b800820f61b8 h1:SzE5lAYh9XXR3b1q3p3uBNqEY+syiiLZiFCIvr/JTsg= -github.com/hashicorp/terraform-config-inspect v0.0.0-20230825013512-b800820f61b8/go.mod h1:l8HcFPm9cQh6Q0KSWoYPiePqMvRFenybP1CH2MjKdlg= +github.com/hashicorp/terraform-config-inspect v0.0.0-20230925220900-5a6f8d18746d h1:g6kHlvZrFPFKeWRj5q/zyJA5gu7rlJGPf17h8hX7LHY= +github.com/hashicorp/terraform-config-inspect v0.0.0-20230925220900-5a6f8d18746d/go.mod h1:l8HcFPm9cQh6Q0KSWoYPiePqMvRFenybP1CH2MjKdlg= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= From 9f1175d59218d7b9bcc2e78ca0016362c5558748 Mon Sep 17 00:00:00 2001 From: "renovate[bot]" <29139614+renovate[bot]@users.noreply.github.com> Date: Wed, 27 Sep 2023 01:02:08 +0000 Subject: [PATCH 12/12] fix(deps): update module github.com/xanzy/go-gitlab to v0.92.1 in go.mod (#3794) Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com> --- go.mod | 2 +- go.sum | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/go.mod b/go.mod index 0e46a373cf..96a7805cce 100644 --- a/go.mod +++ b/go.mod @@ -42,7 +42,7 @@ require ( 
github.com/uber-go/tally/v4 v4.1.7 github.com/urfave/negroni/v3 v3.0.0 github.com/warrensbox/terraform-switcher v0.1.1-0.20221027055942-201c8e92e997 - github.com/xanzy/go-gitlab v0.91.1 + github.com/xanzy/go-gitlab v0.92.1 go.etcd.io/bbolt v1.3.7 go.uber.org/zap v1.26.0 golang.org/x/term v0.12.0 diff --git a/go.sum b/go.sum index d7364b2013..d66e918f22 100644 --- a/go.sum +++ b/go.sum @@ -453,8 +453,8 @@ github.com/urfave/negroni/v3 v3.0.0 h1:Vo8CeZfu1lFR9gW8GnAb6dOGCJyijfil9j/jKKc/J github.com/urfave/negroni/v3 v3.0.0/go.mod h1:jWvnX03kcSjDBl/ShB0iHvx5uOs7mAzZXW+JvJ5XYAs= github.com/warrensbox/terraform-switcher v0.1.1-0.20221027055942-201c8e92e997 h1:be5WC0FHdhimAhe2G3DPhduX117RM8qdTMYCMHDt4DM= github.com/warrensbox/terraform-switcher v0.1.1-0.20221027055942-201c8e92e997/go.mod h1:saryXNaL624mlulV138FP+HhVw7IpvETUXLS3nTvH1g= -github.com/xanzy/go-gitlab v0.91.1 h1:gnV57IPGYywWer32oXKBcdmc8dVxeKl3AauV8Bu17rw= -github.com/xanzy/go-gitlab v0.91.1/go.mod h1:5ryv+MnpZStBH8I/77HuQBsMbBGANtVpLWC15qOjWAw= +github.com/xanzy/go-gitlab v0.92.1 h1:4HfRQtGtGd1M/Xn3G6hOikfWaysL7/G6y4EEzVKINPs= +github.com/xanzy/go-gitlab v0.92.1/go.mod h1:5ryv+MnpZStBH8I/77HuQBsMbBGANtVpLWC15qOjWAw= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74=