diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index b72bdf6676dae5..9cfb7c0528f506 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1 +1 @@ -FROM ghcr.io/containerbase/devcontainer:9.30.0 +FROM ghcr.io/containerbase/devcontainer:9.31.3 diff --git a/.github/actions/setup-node/action.yml b/.github/actions/setup-node/action.yml index 28ad2e1b14dd04..ca92594ff0fcdf 100644 --- a/.github/actions/setup-node/action.yml +++ b/.github/actions/setup-node/action.yml @@ -52,7 +52,7 @@ runs: run: corepack enable - name: Setup Node - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 with: node-version: ${{ inputs.node-version }} cache: ${{ env.CACHE_HIT != 'true' && 'pnpm' || '' }} diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml index 5c036f6b76743c..fc2cdd3076fb87 100644 --- a/.github/workflows/build.yml +++ b/.github/workflows/build.yml @@ -511,7 +511,7 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Setup Node.js - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 46141798cb498f..0f8bfb56c0837c 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -39,7 +39,7 @@ jobs: # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@c0d1daa7f7e14667747d73a7dbbe8c074bc8bfe2 # v2.22.9 + uses: github/codeql-action/init@e5f05b81d5b6ff8cfa111c80c22c5fd02a384118 # v3.23.0 with: languages: javascript @@ -49,7 +49,7 @@ jobs: # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
# If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild - uses: github/codeql-action/autobuild@c0d1daa7f7e14667747d73a7dbbe8c074bc8bfe2 # v2.22.9 + uses: github/codeql-action/autobuild@e5f05b81d5b6ff8cfa111c80c22c5fd02a384118 # v3.23.0 # ℹ️ Command-line programs to run using the OS shell. # 📚 https://git.io/JvXDl @@ -63,4 +63,4 @@ jobs: # make release - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@c0d1daa7f7e14667747d73a7dbbe8c074bc8bfe2 # v2.22.9 + uses: github/codeql-action/analyze@e5f05b81d5b6ff8cfa111c80c22c5fd02a384118 # v3.23.0 diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml index b3d1eaf354c805..2df475006e109d 100644 --- a/.github/workflows/dependency-review.yml +++ b/.github/workflows/dependency-review.yml @@ -12,4 +12,4 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: 'Dependency Review' - uses: actions/dependency-review-action@01bc87099ba56df1e897b6874784491ea6309bc4 # v3.1.4 + uses: actions/dependency-review-action@c74b580d73376b7750d3d2a50bfb8adc2c937507 # v3.1.5 diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml index 77f35cee730547..e71f5b0cdb0a6f 100644 --- a/.github/workflows/devcontainer.yml +++ b/.github/workflows/devcontainer.yml @@ -21,6 +21,6 @@ jobs: uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1 - name: Build and run dev container task - uses: devcontainers/ci@c3e31cc561800ac318ed000e22ffc6713c93d009 # v0.3.1900000338 + uses: devcontainers/ci@3d462823359c481c587cb7426f39775f24257115 # v0.3.1900000339 with: runCmd: pnpm build diff --git a/.github/workflows/release-npm.yml b/.github/workflows/release-npm.yml index 8ce17cbb547ca5..675102e557102c 100644 --- a/.github/workflows/release-npm.yml +++ b/.github/workflows/release-npm.yml @@ -48,7 +48,7 @@ jobs: run: corepack enable - name: Set up Node.js ${{ env.NODE_VERSION }} - uses: 
actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 with: node-version: ${{ env.NODE_VERSION }} cache: pnpm diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 59eb40cbdcf8de..d19ded7344caa5 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -50,6 +50,6 @@ jobs: # Upload the results to GitHub's code scanning dashboard. - name: 'Upload to code-scanning' - uses: github/codeql-action/upload-sarif@c0d1daa7f7e14667747d73a7dbbe8c074bc8bfe2 # v2.22.9 + uses: github/codeql-action/upload-sarif@e5f05b81d5b6ff8cfa111c80c22c5fd02a384118 # v3.23.0 with: sarif_file: results.sarif diff --git a/.github/workflows/update-data.yml b/.github/workflows/update-data.yml index 6fa631fbfb56ba..42e31279da666e 100644 --- a/.github/workflows/update-data.yml +++ b/.github/workflows/update-data.yml @@ -24,7 +24,7 @@ jobs: run: corepack enable - name: Set up Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@8f152de45cc393bb48ce5d89d36b731f54556e65 # v4.0.0 + uses: actions/setup-node@b39b52d1213e96004bfcb1c61a8a6fa8ab84f3e8 # v4.0.1 with: node-version: ${{ env.NODE_VERSION }} cache: pnpm diff --git a/.vscode/settings.json b/.vscode/settings.json index 42a102533e72a0..9e9e9e57616939 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -9,7 +9,7 @@ "editor.defaultFormatter": "esbenp.prettier-vscode" }, "[md]": { - "editor.wordBasedSuggestions": false + "editor.wordBasedSuggestions": "off" }, "files.associations": { "Dockerfile.*": "dockerfile", @@ -21,6 +21,6 @@ "npm.packageManager": "pnpm", "editor.formatOnSave": true, "editor.codeActionsOnSave": { - "source.fixAll.eslint": true + "source.fixAll.eslint": "explicit" } } diff --git a/data/debian-distro-info.json b/data/debian-distro-info.json index 0df090a94b72a0..a650201981946e 100644 --- a/data/debian-distro-info.json +++ b/data/debian-distro-info.json @@ -39,7 +39,7 
@@ "series": "potato", "created": "1999-03-09", "release": "2000-08-15", - "eol": "2003-07-30" + "eol": "2003-06-30" }, "v3": { "codename": "Woody", diff --git a/data/kubernetes-api.json5 b/data/kubernetes-api.json5 index f84031941bcc94..19aa2e192e6307 100644 --- a/data/kubernetes-api.json5 +++ b/data/kubernetes-api.json5 @@ -110,17 +110,52 @@ // https://kubernetes.io/docs/reference/using-api/deprecation-guide/#v1-27 // https://kubernetes.io/docs/reference/using-api/deprecation-guide/#csistoragecapacity-v127 CSIStorageCapacity: ['storage.k8s.io/v1beta1', 'storage.k8s.io/v1'], - // https://github.com/fluxcd/flux2/releases/tag/v2.0.0 + + // https://fluxcd.io + Alert: [ + 'notification.toolkit.fluxcd.io/v1beta2', + 'notification.toolkit.fluxcd.io/v1beta3', + ], + Bucket: [ + 'source.toolkit.fluxcd.io/v1alpha1', + 'source.toolkit.fluxcd.io/v1beta1', + 'source.toolkit.fluxcd.io/v1beta2', + ], GitRepository: [ + 'source.toolkit.fluxcd.io/v1alpha1', + 'source.toolkit.fluxcd.io/v1beta1', 'source.toolkit.fluxcd.io/v1beta2', 'source.toolkit.fluxcd.io/v1', ], - Kustomization: [ - 'kustomize.toolkit.fluxcd.io/v1beta2', - 'kustomize.toolkit.fluxcd.io/v1', + HelmChart: [ + 'source.toolkit.fluxcd.io/v1alpha1', + 'source.toolkit.fluxcd.io/v1beta1', + ], + HelmRelease: [ + 'helm.toolkit.fluxcd.io/v2beta1', + 'helm.toolkit.fluxcd.io/v2beta2', + ], + HelmRepository: [ + 'source.toolkit.fluxcd.io/v1alpha1', + 'source.toolkit.fluxcd.io/v1beta1', + 'source.toolkit.fluxcd.io/v1beta2', + ], + ImageRepository: ['image.toolkit.fluxcd.io/v1beta2'], + OCIRepository: ['source.toolkit.fluxcd.io/v1beta2'], + Provider: [ + 'notification.toolkit.fluxcd.io/v1beta2', + 'notification.toolkit.fluxcd.io/v1beta3', ], Receiver: [ 'notification.toolkit.fluxcd.io/v1beta2', 'notification.toolkit.fluxcd.io/v1', ], + + // https://fluxcd.io/flux/components/kustomize/kustomizations + // https://kubectl.docs.kubernetes.io/references/kustomize/kustomization + Kustomization: [ + 
'kustomize.toolkit.fluxcd.io/v1beta2', + 'kustomize.toolkit.fluxcd.io/v1', + 'kustomize.config.k8s.io/v1beta1', + ], } diff --git a/docs/development/style-guide.md b/docs/development/style-guide.md index 82c5939dc4ee93..628697993fd0d8 100644 --- a/docs/development/style-guide.md +++ b/docs/development/style-guide.md @@ -21,6 +21,24 @@ Second sentence on a new line. And so on. ``` +## Avoid manually ordering numbered lists + +Avoid: + +```markdown +1. First item +2. Second item +3. Third item +``` + +Do: + +```markdown +1. First item +1. Second item +1. Third item +``` + ## Avoid punctuation at the end of list items In Markdown files, avoid punctuation at the end of a list item. diff --git a/docs/usage/assets/images/gerrit-http-password.png b/docs/usage/assets/images/gerrit-http-password.png new file mode 100644 index 00000000000000..fcbb6226f36756 Binary files /dev/null and b/docs/usage/assets/images/gerrit-http-password.png differ diff --git a/docs/usage/config-presets.md b/docs/usage/config-presets.md index 89ccd6fc4cfdf0..54dad726f10e56 100644 --- a/docs/usage/config-presets.md +++ b/docs/usage/config-presets.md @@ -217,13 +217,12 @@ Create a [discussion](https://github.com/renovatebot/renovate/discussions) to pr The maintainers can also help improve the preset, and let you know where to put it in the code. If you are proposing a "monorepo" preset addition then it's OK to raise a PR directly as that can be more efficient than a GitHub Discussion. -## Organization level presets +## Group/Organization level presets -Whenever repository onboarding happens, Renovate checks if the current user/group/org has a default config to extend. -It looks for: - -- A repository called `renovate-config` under the same user/group/org with a `default.json` file or -- A repository named like `.{{platform}}` (e.g. `.github`) under the same user/group/org with `renovate-config.json` +Whenever repository onboarding happens, Renovate checks for a default config to extend. 
+Renovate will check for a repository called `renovate-config` with a `default.json` file in the parent user/group/org of the repository. +On platforms that support nested groups (e.g. GitLab), Renovate will check for this repository at each level of grouping, from nearest to furthest, and use the first one it finds. +On all platforms, it will then look for a repository named like `.{{platform}}` (e.g. `.github`) with a `renovate-config.json`, under the same top-level user/group/org. If found, that repository's preset will be suggested as the sole extended preset, and any existing `onboardingConfig` config will be ignored/overridden. For example the result may be: diff --git a/docs/usage/configuration-options.md b/docs/usage/configuration-options.md index dae3bfa13345b4..1cfc1ca727120d 100644 --- a/docs/usage/configuration-options.md +++ b/docs/usage/configuration-options.md @@ -410,9 +410,21 @@ This is an advance field and it's recommend you seek a config review before appl ## bumpVersion -Currently this setting supports `helmv3`, `npm`, `nuget`, `maven` and `sbt` only, so raise a feature request if you have a use for it with other package managers. -Its purpose is if you want Renovate to update the `version` field within your package file any time it updates dependencies within. -Usually this is for automatic release purposes, so that you don't need to add another step after Renovate before you can release a new version. +Currently, this config option only works with these managers: + +- `helmv3` +- `npm` +- `nuget` +- `maven` +- `ocb` +- `pep621` +- `poetry` +- `sbt` + +Raise a feature request if you want to use this config option with other package managers. + +Use `bumpVersion` if you want Renovate to update the `version` field in your package file when it updates the dependencies in that file. +This can be handy when you have automated your package's release, as you don't need extra steps after the Renovate upgrade, you can just release a new version. 
Configure this value to `"prerelease"`, `"patch"`, `"minor"` or `"major"` to have Renovate update the version in your edited package file. e.g. if you wish Renovate to always increase the target `package.json` version with a patch update, configure this to `"patch"`. @@ -1768,6 +1780,26 @@ Example config: } ``` +### maxRetryAfter + +A remote host may return a `4xx` response with a `Retry-After` header value, which indicates that Renovate has been rate-limited. +Renovate may try to contact the host again after waiting a certain time, that's set by the host. +By default, Renovate tries again after the `Retry-After` header value has passed, up to a maximum of 60 seconds. +If the `Retry-After` value is more than 60 seconds, Renovate will abort the request instead of waiting. + +You can configure a different maximum value in seconds using `maxRetryAfter`: + +```json +{ + "hostRules": [ + { + "matchHost": "api.github.com", + "maxRetryAfter": 25 + } + ] +} +``` + ### dnsCache Enable got [dnsCache](https://github.com/sindresorhus/got/blob/v11.5.2/readme.md#dnsCache) support. @@ -3585,6 +3617,23 @@ Configure this to `true` if you wish to get one PR for every separate major vers e.g. if you are on webpack@v1 currently then default behavior is a PR for upgrading to webpack@v3 and not for webpack@v2. If this setting is true then you would get one PR for webpack@v2 and one for webpack@v3. +## statusCheckNames + +You can customize the name/context of status checks that Renovate adds to commits/branches/PRs. + +This option enables you to modify any existing status checks name/context, but adding new status checks this way is _not_ supported. +Setting the value to `null` or an empty string, effectively disables or skips that status check. +This option is mergeable, which means you only have to specify the status checks that you want to modify. 
+ +```json title="Example of overriding status check strings" +{ + "statusCheckNames": { + "minimumReleaseAge": "custom/stability-days", + "mergeConfidence": "custom/merge-confidence-level" + } +} +``` + ## stopUpdatingLabel This feature only works on supported platforms, check the table above. diff --git a/docs/usage/configuration-templates.md b/docs/usage/configuration-templates.md index c995e940791d0c..c37f45a28e2bce 100644 --- a/docs/usage/configuration-templates.md +++ b/docs/usage/configuration-templates.md @@ -32,7 +32,7 @@ Be careful, and consider creating a new "config help" post at the [discussions t Renovate uses one commit per branch. The `commitMessage` reflects the contents of the branch and is usually the same as the PR title. -`commitMessage` has a default value of `{{commitMessagePrefix}} {{commitMessageAction}} {{commitMessageTopic}} {{commitMessageExtra}} {{commitMessageSuffix}}`, with the intention that you only edit some of those subcomponents. +`commitMessage` has a default value of `{{{commitMessagePrefix}}} {{{commitMessageAction}}} {{{commitMessageTopic}}} {{{commitMessageExtra}}} {{{commitMessageSuffix}}}`, with the intention that you only edit some of those subcomponents. You usually don't need to edit `commitMessagePrefix`, this option is used by Renovate if it needs to add a prefix to conform to the Semantic Commit convention. Avoid editing the commit message, unless you know what you're doing. diff --git a/docs/usage/docker.md b/docs/usage/docker.md index cb823e09145cbd..9e250ee1b7113a 100644 --- a/docs/usage/docker.md +++ b/docs/usage/docker.md @@ -383,7 +383,7 @@ To get access to the token a custom Renovate Docker image is needed that include The Dockerfile to create such an image can look like this: ```Dockerfile -FROM renovate/renovate:37.83.4 +FROM renovate/renovate:37.126.2 # Include the "Docker tip" which you can find here https://cloud.google.com/sdk/docs/install # under "Installation" for "Debian/Ubuntu" RUN ... 
diff --git a/docs/usage/gitlab-bot-security.md b/docs/usage/gitlab-bot-security.md index f44ad1bf7a3d02..1fd31351cc129c 100644 --- a/docs/usage/gitlab-bot-security.md +++ b/docs/usage/gitlab-bot-security.md @@ -4,25 +4,33 @@ title: GitLab bot security # GitLab bot security -You should understand GitLab's security model, before deciding to run a "bot" service like Renovate on GitLab, particularly the pipeline credentials. +Make sure you understand GitLab's security model, before you run a "bot" service like Renovate on GitLab, particularly the pipeline credentials. -**Important**: If you have any doubts or concerns about this content that could affect other users, please follow our [Security Policy](https://github.com/renovatebot/renovate/security/policy) and report them confidentially. + +!!! warning + If you have any doubts or concerns about this content that could affect other users, please follow our [Security Policy](https://github.com/renovatebot/renovate/security/policy) and report them confidentially. ## `CI_JOB_TOKEN` permissions -The concept of `CI_JOB_TOKEN` permissions was [overhauled in GitLab release 8.12](https://about.gitlab.com/releases/2016/09/22/gitlab-8-12-released/), jobs are now run with the permissions of the user account which _triggered_ the pipeline. +The concept of `CI_JOB_TOKEN` permissions was [overhauled in GitLab release 8.12](https://about.gitlab.com/releases/2016/09/22/gitlab-8-12-released/), jobs now run with the permissions of the user account which _triggered_ the pipeline. For security reasons the token was limited to read-only permissions and a limited set of API endpoints, but it’s been extended to allow [write access to the GitLab Package Registry](https://docs.gitlab.com/ee/api/index.html#gitlab-ci-job-token). -Any pipeline triggered by a user account thus has permissions to read any repository which that account has access to as well as publish packages to them. 
+Any pipeline triggered by a user account thus has permissions to: -With the current GitLab CI permissions model, you should avoid committing to any project which you don’t trust completely, because that project could maliciously steal repository data, publish fake releases, or spam releases. +- read any repository which that account has access to +- publish packages to them + +With the current GitLab CI permissions model, you should only commit to a project which you trust completely. +Because that project could maliciously steal repository data, publish fake releases, or spam releases. ## Risks of hosting a Renovate GitLab app/bot/service -The GitLab security model means that the risks of running a _public_ bot service on GitLab are too high, which is why the existing service has been suspended until an alternate security model is ready. +With GitLab's current security model, we find the risks of running a _public_ bot service like Renovate are too high. +Therefore we stopped hosting Renovate on GitLab, and are waiting for a better security model. -It's also important to remember that when accounts are invited into projects or groups on GitLab, acceptance happens automatically (which was a useful feature to leverage for a shared service). +You should remember that when accounts are invited into projects or groups on GitLab, acceptance happens automatically. +This was a useful feature to leverage for a shared service. 
-If you are running a self-hosted Renovate service, it is advisable to: +If you are running a self-hosted Renovate service, we recommend you: - Run a shared service only within projects which have shared visibility/security within the users, or which have a low risk that a user would try to gain access to a private project they don't otherwise have access to - If running with `autodiscover`, also configure a value for `autodiscoverFilter` so that the bot can't be invited to projects or groups you don't intend @@ -33,33 +41,34 @@ The following research notes may help you to assess the GitLab bot security risk ### Public projects only -If a bot service is run on public projects only, then the risk of private project data being accessed by unauthorized users is zero. -But malicious users can still spoof or spam packages to any other public project they are not a member of, so that rules out this approach for a public hosted service. +If you only run a bot service on _public_ projects, the risk of unauthorized users accessing private project data is zero. +But malicious users can still spoof or spam packages to any other public project they are not a member of, this rules out this approach for a public hosted service. A public-visibility-only bot service should be low risk for most self-hosted GitLab instances. -There is still a small problem that you can't _prevent_ users from inviting the bot into private projects if they are not aware of the risks of doing so. +But you _can't stop users_ from inviting the bot into _private_ projects by accident, which is risky. ### Project Access Tokens -[Project Access Tokens](https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html) are a recently added feature for GitLab. -The main downsides to their use for a shared bot service are: +[Project Access Tokens](https://docs.gitlab.com/ee/user/project/settings/project_access_tokens.html) (PATs) are a recently added feature for GitLab. 
+The main downsides to using PATs for a shared bot service are: -- It is not yet possible to [provision them through the API](https://gitlab.com/gitlab-org/gitlab/-/issues/238991), so project maintainers would need to provision a project bot account and then save it to Renovate manually and per-project -- Project Access Tokens are a paid-only feature for gitlab.com, which excludes a large percentage of the public service user base -- At the time of writing, there are still some issues with getting Project Access Tokens to trigger and authenticate CI -- Any service using such tokens would get MRs from a user like `@project_123_bot` which is less intuitive than `@renovate-bot` +- You can not [provision PATs through the API](https://gitlab.com/gitlab-org/gitlab/-/issues/238991), so project maintainers would need to provision a project bot account and then save it to Renovate manually and per-project +- PATs are a paid-only feature for gitlab.com, which prevents users on the free plan from using them +- At the time of writing, there are still some issues with getting PATs to trigger and authenticate CI +- Any service using PATs would get MRs from a user like `@project_123_bot` instead of `@renovate-bot` -The big benefit of Project Access Tokens is their limited scope, users with write access to one project cannot read/write to other projects. +The big benefit of PATs is their limited scope: users with write access to one project cannot read/write to other projects. ### Group Access Tokens Group Access Tokens are still in the planning stage, but may offer a more scalable way to manage a Renovate service. -Tokens could be provisioned into Renovate per-group and permissions/visibility would need to be kept uniform throughout the group to ensure escalation of privileges is not possible. +Tokens could be provisioned into Renovate per-group. +Permissions and visibility _must_ be kept uniform throughout the group to prevent a privilege escalation. 
-It should be noted though that many GitLab users _do not_ have uniform permissions/visibility throughout groups today, so this is a risk of Group Access Tokens in general. -Even [https://gitlab.com/gitlab-org](https://gitlab.com/gitlab-org) is a good example of how common it is to mix project visibility within a same group. +Many GitLab users _do not_ have uniform permissions and visibility throughout groups today, so this is a risk of Group Access Tokens in general. +The [`gitlab-org` organization on GitLab](https://gitlab.com/gitlab-org) shows how common it is to mix project visibility within a same group. -Similarly with Project Access Tokens, if they are a paid-only feature then it would exclude free users from using such a service. +And the same as with PATs, if Group Access Tokens becomes a paid feature then users on a free plan can't use the feature. ### Skipping CI @@ -77,13 +86,13 @@ Bot services are better if they are provisioned with a "bot identity" so that us ## Recommended migration -Until the hosted app can be reactivated, we recommend users migrate to use self-hosted pipelines to run Renovate. -Please see the [renovate-bot/renovate-runner README on GitLab](https://gitlab.com/renovate-bot/renovate-runner/-/blob/HEAD/README.md) for instructions on how to set this up as easily as possible. +Until we can safely reactivate the hosted app, we recommend users migrate to use self-hosted pipelines to run Renovate. +Read the [renovate-bot/renovate-runner README on GitLab](https://gitlab.com/renovate-bot/renovate-runner/-/blob/HEAD/README.md) to learn how. ## Status of the Renovate app for GitLab We're trying to find a workable design for the GitLab app, so we can enable it safely again. -If you have any ideas, open a [discussion](https://github.com/renovatebot/renovate/discussions) and let us know! +If you have any ideas, please open a [discussion](https://github.com/renovatebot/renovate/discussions) and let us know! 
GitLab introduced Group Access Tokens & API for paid & self-hosted instances, but a good permission setup/flow is still not possible. Check out [GitLab issue #346298](https://gitlab.com/gitlab-org/gitlab/-/issues/346298). diff --git a/docs/usage/key-concepts/.pages b/docs/usage/key-concepts/.pages index b6ede935c13f2f..37704ea70abd4a 100644 --- a/docs/usage/key-concepts/.pages +++ b/docs/usage/key-concepts/.pages @@ -1,6 +1,8 @@ nav: + - 'How Renovate works': 'how-renovate-works.md' - 'Presets': 'presets.md' - 'Dependency Dashboard': 'dashboard.md' - 'Pull Requests': 'pull-requests.md' - 'Renovate Scheduling': 'scheduling.md' - 'Automerge': 'automerge.md' + - 'Renovate and changelogs': 'changelogs.md' diff --git a/docs/usage/key-concepts/changelogs.md b/docs/usage/key-concepts/changelogs.md new file mode 100644 index 00000000000000..8af4280e864286 --- /dev/null +++ b/docs/usage/key-concepts/changelogs.md @@ -0,0 +1,109 @@ +--- +title: Renovate and changelogs +description: Learn how Renovate fetches changelogs +--- + +This page explains how Renovate fetches changelogs, when it can display them, and more. + +## How Renovate detects changelogs + +Renovate detects and populates changelogs by: + +1. Identifying a source URL from the datasource response for a package, and saving that internally as `sourceUrl` if found +1. Checking if Renovate's internal [_manual_ metadata](https://github.com/renovatebot/renovate/blob/main/lib/modules/datasource/metadata-manual.ts) for the package includes a source URL +1. Looking up the source URL, if it resides on a supported platform (e.g. GitHub) +1. Checking for both "Releases" metadata in the repository and any commonly known "changelog" file names +1. Filtering the found releases to only include those versions being updated by the current PR +1. 
Formatting and embedding the results into the PR body + +## Changelogs for private packages + +For private packages, the algorithm is mostly the same as described above, with the additional considerations: + +- Renovate must be able to access the private package in the first place +- The private registry must include the source URL in its response +- Renovate must be able to detect and authenticate with whatever private repository corresponds to the source URL + +For more details, see [Private packages, looking up changelogs](../getting-started/private-packages.md#looking-up-changelogs). + +## Relevant configuration options + +### [`fetchChangelogs`](../configuration-options.md#fetchchangelogs) + +Set to `off` if changelog fetching is causing a problem. + +Set to `branch` if you have an advanced use case where you're embedding changelogs in the Git commit itself, we don't recommend this due to its potential size. + +### [`customChangelogUrl`](../configuration-options.md#customchangelogurl) + +This doesn't help with _fetching_ the changelogs, but if you configure it then Renovate will include a link to this URL in the PR body, so users can click through to read the changelog. + +## Platforms that Renovate can fetch changelogs from + +See the list of platforms in the [`fetchChangelogs` config option docs](../configuration-options.md#fetchchangelogs). + +### Running Renovate on a non-GitHub platform + +Most Open Source packages are hosted on github.com, which means most changelogs are hosted there too. +Fetching changelogs from github.com requires a GitHub token because GitHub blocks unauthenticated GraphQL API use. + +This means that if you run Renovate on self-hosted GitHub Enterprise Server, or any non-GitHub platform which Renovate supports, then you need to configure a github.com Personal Access Token in Renovate in order to fetch changelogs. 
+ +Read [Running Renovate, GitHub.com token for changelogs](../getting-started/running.md#githubcom-token-for-changelogs) to learn more. + +## Troubleshooting missing changelogs + +Follow these steps to find out why Renovate does not find a changelog: + +1. The datasource for this package does not support sourceUrls. + - If the registry fundamentally does not provide this data, then the only possibility is for it to be manually populated through PRs to Renovate's source code + - If the registry provides source URLs in its response but Renovate does not understand the required fields, then raise a feature request with examples, or better yet a Pull Request to implement support for the source URL parsing/mapping yourself + - Sometimes self-hosted versions of registries don't include the full metadata compared to what the public registries do +1. The package was published without source URL information being included. + - For example, occasionally `npm` packages don't have `repository` fields included + - For example, Docker images regularly do not have the required `LABEL` entry +1. Renovate cannot access the source repository + - This is typically a concern for private repositories only + - Check if the token Renovate uses has access rights to the repository you need it to access +1. Renovate cannot detect the file names or release name convention within the repository + - In this case an enhancement to Renovate might be needed to better detect the releases/formats, assuming the package/repository has a reasonable convention to follow + +If none of this helps, search the Renovate issues and discussions to see if this is a known problem. + +## Advice for package maintainers + +This section is for package maintainers that want to make sure Renovate can see their changelogs. + +There isn't much information to add other than what's already written above. 
+ +Make sure that you have the required source URL in your package metadata, not just in your repository but also in the final data which the registry returns. +For example, we have seen cases where the `repository` field in npm's `package.json` is populated correctly in the repository, but stripped out as part of the publishing process. + +### Let Renovate understand your versioning and changelogs + +In general, Renovate can understand your versions and changelogs best when you: + +- Use SemVer versioning, so `major.minor.patch` +- Use the [`semantic-release` bot](https://github.com/semantic-release/semantic-release) to automate the release process + +Try to avoid things like: + +- Stripping out the trailing `.0` unnecessarily (e.g. having a package `3.1.0` on a registry but using only `3.1` in your changelogs) +- Using "Release names" in a way which makes the actual version hard to discern (e.g. instead of `3.0.0` you title your release notes `Big news! v3 is here` + +### npm package maintainers + +As maintainer, make sure the `package.json` has a filled in `repository` field, read the [npm Docs, configuring npm `repository` field](https://docs.npmjs.com/cli/v10/configuring-npm/package-json#repository) to learn more. +If your repository uses the monorepo pattern make sure _each_ `package.json` file has a `repository` field. + +### maven package maintainers + +Read [`maven` datasource, making your changelogs fetchable](https://docs.renovatebot.com/modules/datasource/maven/#making-your-changelogs-fetchable). + +### Docker image maintainers + +Read the [Docker datasource](https://docs.renovatebot.com/modules/datasource/docker/) docs. + +### Nuget package maintainers + +See [Renovate issue #14128 about using NuGet's changelogs](https://github.com/renovatebot/renovate/issues/14128). 
diff --git a/docs/usage/key-concepts/how-renovate-works.md b/docs/usage/key-concepts/how-renovate-works.md new file mode 100644 index 00000000000000..bed2d0e12a0b62 --- /dev/null +++ b/docs/usage/key-concepts/how-renovate-works.md @@ -0,0 +1,113 @@ +--- +title: How Renovate works +description: Learn how Renovate works +--- + +# Introduction + +Renovate usually performs these steps: + +- Cloning the repository +- Scanning package files to extract dependencies +- Looking up registries to check for updates +- Applying any grouping rules defined +- Pushing branches and raising Pull Requests + +Because Renovate must support a lot of dependency naming and versioning conventions, it has modules for each known convention. +You can contribute your own modules, if you want. + +## Modules + +Renovate's modules are: + +- [datasource](../modules/datasource/index.md) +- [manager](../modules/manager/index.md) +- [platform](../modules/platform/index.md) +- [versioning](../modules/versioning.md) + +Renovate uses these modules in order: + +1. The platform module interacts with the source control platform and clones the repository +1. The manager module looks for files based on their name and extracts the dependencies (each dependency has a datasource) +1. The datasource module looks up versions of the dependency +1. The versioning module validates and sorts the returned versions + +For example: + +1. The `gitlabci` manager finds a dependency: `python:3.10-alpine` which has the `docker` datasource +2. The `docker` datasource looks for versions and finds: `[python:3.9,python:3.9-alpine,python:3.10,python:3.10-alpine,python:3.11,python:3.11-alpine]` +3. 
The `docker` versioning returns `python:3.11-alpine`, because that version is compatible with `python:3.10-alpine` + +# Workflow + +Here's an overview of the workflow: + +```mermaid +flowchart TB + subgraph INITIALIZATION + direction TB + MC[Merge configurations \n most important to least: \n cli > env > file > default] + MC --> IP[Initialize platform] + IP --> AD[Query the platform for repositories] + AD --> NFIL[Narrow the list with filters] + end + + subgraph REPOSITORY + direction TB + FER{{For each repository}} + + subgraph EXTRACTD[EXTRACT DEPENDENCIES] + direction TB + CLBRANCH[Extract base branches] + CLBRANCH --> VULN[Check for vulnerabilities] + VULN --> CC{{For each manager}} + CC -->|manager A| CD["..."] + CC -->|manager B| CCF["match files"] + CCF --> CFEF{{For each file}} + CFEF -->|file 1| CCD1[Extract dependency] + CFEF -->|file 2| CCD2[...] + end + + subgraph LOOKUP[LOOK UP UPDATES] + direction TB + UC{{For each manager}} + UC -->|manager A| UD["..."] + UC -->|manager B| UFEF{{For each file}} + UFEF -->|file 1| FED{{For each dependency}} + UFEF -->|file 2| FED2[...] + FED -->|dep 1| D1[...] 
+ D1 -..-> CU + FED -->|dep 2| D2[use datasource to \n fetch versions] + D2 --> J[use versioning to find \n next valid update] + FED2 -...-> CU + UD -....-> CU + J --> CU[Look up updates] + end + + subgraph WRITEU[WRITE UPDATES] + direction TB + FEU{{For each update}} + FEU --> AUCOND[Check if branch needed: \n existing/rebase/concurrent amount] + AUCOND --> AU[Create branch\nApply update\nCreate PR] + end + + subgraph FINALIZE[FINALIZE] + direction TB + CM[Check for config migration] + CM --> CSB[Clean stale branches] + + end + + FER --> IRPO[Initialize repository] + + IRPO --> EXTRACTD + EXTRACTD --> LOOKUP + + LOOKUP --> WRITEU + + WRITEU --> FINALIZE + + end + + INITIALIZATION --> REPOSITORY +``` diff --git a/docs/usage/known-limitations.md b/docs/usage/known-limitations.md index 0ea36edb92a2ad..5912b1556964e8 100644 --- a/docs/usage/known-limitations.md +++ b/docs/usage/known-limitations.md @@ -30,7 +30,8 @@ This makes it likely that Renovate bot checks your repository at least once duri ## Automerge limitations -- Renovate automerges at most one branch per run +- Renovate automerges at most one branch/PR per run +- If an automerge happened, the repository run will be restarted at most once. The second run can also potentially automerge, so it may appear like two automerges in one run. 
- Renovate will only automerge a branch when it is up-to-date with the target branch - Renovate may not be able to automerge as many branches as you expect, especially if your base branch is receiving regular commits at the same time diff --git a/docs/usage/merge-confidence.md b/docs/usage/merge-confidence.md index 76e361aa616b1c..01a24c4633abbb 100644 --- a/docs/usage/merge-confidence.md +++ b/docs/usage/merge-confidence.md @@ -29,6 +29,7 @@ Renovate will show Merge Confidence badges for these languages: | Language | Datasource | | ---------- | ----------- | +| Golang | `go` | | JavaScript | `npm` | | Java | `maven` | | Python | `pypi` | diff --git a/docs/usage/modules/versioning.md b/docs/usage/modules/versioning.md index 75832b13604b1a..eab2373bcb1b40 100644 --- a/docs/usage/modules/versioning.md +++ b/docs/usage/modules/versioning.md @@ -6,24 +6,25 @@ title: Versioning Once Managers have extracted dependencies, and Datasources have located available versions, then Renovate will use a "Versioning" scheme to perform sorting and filtering of results. The "versioning" is different for each package manager, because different package managers use different versioning schemes. -For example, `npm` uses`1.0.0-beta.1` and `pip` uses `1.0.0b1`. +For example, `npm` uses `1.0.0-beta.1` while `pip` uses `1.0.0b1`. ## Why you might need to manually configure versioning Renovate interprets versions correctly out-of-the-box most of the time. -It's impossible to automatically detect **all** versioning schemes, so sometimes you need to tell the bot what versioning scheme it should use. +But Renovate can't automatically detect **all** versioning schemes. +So sometimes you need to tell the bot what versioning scheme it should use. -You can manually configure/override the `versioning` value for a particular dependency. +You can manually configure or override the `versioning` value for a particular dependency. 
You generally won't need to override the defaults for ecosystems which enforce a strict version scheme like `npm`. -Configuring or overriding the default `versioning` can be particularly helpful for ecosystems like Docker/Kubernetes/Helm, where versioning is barely a "convention". +Configuring or overriding the default `versioning` can be extra helpful for ecosystems like Docker, Kubernetes or Helm, where versioning is barely a "convention". ## General concepts behind overriding versioning -- Although you can reconfigure versioning per-manager or per-datasource, it's unlikely that such a broad change would ever be needed +- Although you can reconfigure versioning per-manager or per-datasource, you probably don't need such a broad change - More commonly you would need to configure `versioning` for individual packages or potentially package patterns - The best way to do this is with `packageRules`, with a combination of `matchManagers`, `matchDatasources`, `matchPackageNames` and `matchPackagePatterns`. - Avoid configuring `versioning` in a rule that also uses `matchUpdateTypes`, as the update types aren't known at the time the `versioning` is applied. + Avoid configuring `versioning` in a rule that also uses `matchUpdateTypes`, as the update types aren't known at the time the `versioning` is applied ## Examples of versioning overrides diff --git a/docs/usage/self-hosted-configuration.md b/docs/usage/self-hosted-configuration.md index e2c1d9a4df4784..c1093b0b608d12 100644 --- a/docs/usage/self-hosted-configuration.md +++ b/docs/usage/self-hosted-configuration.md @@ -806,6 +806,11 @@ Used as an alternative to `privateKeyOld`, if you want the key to be read from d Override this object if you want to change the URLs that Renovate links to, e.g. if you have an internal forum for asking for help. +## redisPrefix + +If this value is set then Renovate will prepend this string to the name of all Redis cache entries used in Renovate. 
+It's only used if `redisUrl` is configured. + ## redisUrl If this value is set then Renovate will use Redis for its global cache instead of the local file system. diff --git a/docs/usage/self-hosted-experimental.md b/docs/usage/self-hosted-experimental.md index 45d7c8fffc7003..7ee6a8ee1f39df 100644 --- a/docs/usage/self-hosted-experimental.md +++ b/docs/usage/self-hosted-experimental.md @@ -96,7 +96,7 @@ If set, Renovate will terminate the whole process group of a terminated child pr ## `RENOVATE_X_GITLAB_AUTO_MERGEABLE_CHECK_ATTEMPS` If set to an positive integer, Renovate will use this as the number of attempts to check if a merge request on GitLab is mergable before trying to automerge. -The formula for the delay between attempts is `250 * attempt * attempt` milliseconds. +The formula for the delay between attempts is `RENOVATE_X_GITLAB_MERGE_REQUEST_DELAY * attempt * attempt` milliseconds. Default value: `5` (attempts results in max. 13.75 seconds timeout). @@ -108,6 +108,12 @@ Can be useful for slow-running, self-hosted GitLab instances that don't react fa Default value: `1000` (milliseconds). +## `RENOVATE_X_GITLAB_MERGE_REQUEST_DELAY` + +If set, Renovate will use this as a delay to proceed with an automerge. + +Default value: `250` (milliseconds). + ## `RENOVATE_X_HARD_EXIT` If set to any value, Renovate will use a "hard" `process.exit()` once all work is done, even if a sub-process is otherwise delaying Node.js from exiting. @@ -126,6 +132,11 @@ Skip initializing `RE2` for regular expressions and instead use Node-native `Reg If set, Renovate will query this API for Merge Confidence data. This feature is in private beta. +## `RENOVATE_X_MERGE_CONFIDENCE_SUPPORTED_DATASOURCES` + +If set, Renovate will query the merge-confidence JSON API only for datasources that are part of this list. +The expected value for this environment variable is a JSON array of strings. 
+ ## `RENOVATE_X_PLATFORM_VERSION` If set, Renovate will use this string as GitLab server version instead of checking via the GitLab API. diff --git a/docs/usage/templates.md b/docs/usage/templates.md index e72e7182576e45..f355135754ea02 100644 --- a/docs/usage/templates.md +++ b/docs/usage/templates.md @@ -59,6 +59,14 @@ In the example above `depName` is the string you want to decode. Read the [MDN Web Docs, decodeURIComponent()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/decodeURIComponent) to learn more. +### encodeBase64 + +If you want to convert a string to Base64, use the built-in function `encodeBase64` like this: + +`{{{encodeBase64 body}}}` + +In the example above `body` is the string you want to transform into a Base64-encoded value. + ### replace The `replace` helper replaces _all_ found strings matching the given regex with the replacement string. diff --git a/lib/config/options/index.ts b/lib/config/options/index.ts index 7d3d460d975941..30503cc6ca4565 100644 --- a/lib/config/options/index.ts +++ b/lib/config/options/index.ts @@ -179,6 +179,19 @@ const options: RenovateOptions[] = [ type: 'string', }, }, + { + name: 'statusCheckNames', + description: 'Custom strings to use as status check names.', + type: 'object', + mergeable: true, + advancedUse: true, + default: { + artifactError: 'renovate/artifacts', + configValidation: 'renovate/config-validation', + mergeConfidence: 'renovate/merge-confidence', + minimumReleaseAge: 'renovate/stability-days', + }, + }, { name: 'extends', description: 'Configuration presets to use or extend.', @@ -330,6 +343,13 @@ const options: RenovateOptions[] = [ type: 'string', globalOnly: true, }, + { + name: 'redisPrefix', + description: 'Key prefix for redis cache entries.', + stage: 'global', + type: 'string', + globalOnly: true, + }, { name: 'baseDir', description: @@ -389,7 +409,7 @@ const options: RenovateOptions[] = [ description: 'Change this value to override the default 
Renovate sidecar image.', type: 'string', - default: 'ghcr.io/containerbase/sidecar:9.30.0', + default: 'ghcr.io/containerbase/sidecar:9.31.3', globalOnly: true, }, { @@ -1038,7 +1058,7 @@ const options: RenovateOptions[] = [ description: 'Set to `true` to automatically approve PRs.', type: 'boolean', default: false, - supportedPlatforms: ['azure', 'gitlab'], + supportedPlatforms: ['azure', 'gerrit', 'gitlab'], }, // depType { @@ -1824,7 +1844,7 @@ const options: RenovateOptions[] = [ type: 'string', allowedValues: ['auto', 'fast-forward', 'merge-commit', 'rebase', 'squash'], default: 'auto', - supportedPlatforms: ['bitbucket', 'gitea'], + supportedPlatforms: ['azure', 'bitbucket', 'gitea'], }, { name: 'automergeComment', @@ -2760,6 +2780,17 @@ const options: RenovateOptions[] = [ globalOnly: true, default: [], }, + { + name: 'maxRetryAfter', + description: + 'Maximum retry-after header value to wait for before retrying a failed request.', + type: 'integer', + default: 60, + stage: 'package', + parent: 'hostRules', + cli: false, + env: false, + }, ]; export function getOptions(): RenovateOptions[] { diff --git a/lib/config/presets/internal/group.ts b/lib/config/presets/internal/group.ts index b3f8bfe3482f3e..713a538adf5998 100644 --- a/lib/config/presets/internal/group.ts +++ b/lib/config/presets/internal/group.ts @@ -318,7 +318,7 @@ const staticGroups = { { groupName: 'PHPStan packages', matchDatasources: ['packagist'], - matchPackagePatterns: ['^phpstan/phpstan$', '/phpstan-'], + matchPackagePatterns: ['^phpstan/phpstan$', '/phpstan-', '/larastan'], }, ], }, diff --git a/lib/config/presets/internal/helpers.ts b/lib/config/presets/internal/helpers.ts index 805d0de92de292..cfe7ba427f9e1d 100644 --- a/lib/config/presets/internal/helpers.ts +++ b/lib/config/presets/internal/helpers.ts @@ -30,4 +30,15 @@ export const presets: Record = { }, ], }, + pinGitHubActionDigestsToSemver: { + description: 'Convert pinned GitHub Action digests to SemVer.', + packageRules: [ + { 
+ extends: ['helpers:pinGitHubActionDigests'], + extractVersion: '^(?v\\d+\\.\\d+\\.\\d+)$', + versioning: + 'regex:^v(?\\d+)(\\.(?\\d+)\\.(?\\d+))?$', + }, + ], + }, }; diff --git a/lib/config/presets/internal/merge-confidence.ts b/lib/config/presets/internal/merge-confidence.ts index e238f30197e483..f2e900c8bf621b 100644 --- a/lib/config/presets/internal/merge-confidence.ts +++ b/lib/config/presets/internal/merge-confidence.ts @@ -1,18 +1,21 @@ import type { Preset } from '../types'; +export const supportedDatasources = [ + 'go', + 'maven', + 'npm', + 'nuget', + 'packagist', + 'pypi', + 'rubygems', +]; + export const presets: Record = { 'all-badges': { description: 'Show all Merge Confidence badges for pull requests.', packageRules: [ { - matchDatasources: [ - 'maven', - 'npm', - 'nuget', - 'packagist', - 'pypi', - 'rubygems', - ], + matchDatasources: supportedDatasources, matchUpdateTypes: ['patch', 'minor', 'major'], prBodyColumns: [ 'Package', @@ -30,14 +33,7 @@ export const presets: Record = { 'Show only the Age and Confidence Merge Confidence badges for pull requests.', packageRules: [ { - matchDatasources: [ - 'maven', - 'npm', - 'nuget', - 'packagist', - 'pypi', - 'rubygems', - ], + matchDatasources: supportedDatasources, matchUpdateTypes: ['patch', 'minor', 'major'], prBodyColumns: ['Package', 'Change', 'Age', 'Confidence'], }, diff --git a/lib/config/presets/internal/monorepo.ts b/lib/config/presets/internal/monorepo.ts index 00fda759abf9bb..be75e781267c41 100644 --- a/lib/config/presets/internal/monorepo.ts +++ b/lib/config/presets/internal/monorepo.ts @@ -16,6 +16,7 @@ const repoGroups = { 'algoliasearch-autocomplete': 'https://github.com/algolia/autocomplete', 'algoliasearch-client-javascript': 'https://github.com/algolia/algoliasearch-client-javascript', + analog: 'https://github.com/analogjs/analog', angular: 'https://github.com/angular/angular', 'angular-cli': 'https://github.com/angular/angular-cli', 'angular-eslint': 
'https://github.com/angular-eslint/angular-eslint', @@ -287,6 +288,7 @@ const repoGroups = { gitbeaker: 'https://github.com/jdalrymple/gitbeaker', 'github-workflows-kt': 'https://github.com/typesafegithub/github-workflows-kt', + 'go-cloud': 'https://github.com/google/go-cloud', 'google-api-dotnet-client': 'https://github.com/googleapis/google-api-dotnet-client', grafana: 'https://github.com/grafana/grafana', @@ -299,6 +301,7 @@ const repoGroups = { 'https://github.com/dotansimha/graphql-codegen', ], groovy: 'https://github.com/apache/groovy', + 'grpc-dotnet': 'https://github.com/grpc/grpc-dotnet', guava: 'https://github.com/google/guava', Hangfire: 'https://github.com/HangfireIO/Hangfire', 'hickory-dns': 'https://github.com/hickory-dns/hickory-dns', @@ -315,6 +318,7 @@ const repoGroups = { 'https://github.com/jestjs/jest', ], jna: 'https://github.com/java-native-access/jna', + 'json-smart-v2': 'https://github.com/netplex/json-smart-v2', jsplumb: 'https://github.com/jsplumb/jsplumb', junit5: 'https://github.com/junit-team/junit5', kotlin: 'https://github.com/JetBrains/kotlin', @@ -337,11 +341,13 @@ const repoGroups = { 'material-ui': [ 'https://github.com/mui-org/material-ui', // Previous organization name (see: https://github.com/mui/material-ui/pull/30944) 'https://github.com/mui/material-ui', + 'https://github.com/mui/mui-x', ], 'mdc-react': 'material-components/material-components-web-react', mdx: 'https://github.com/mdx-js/mdx', 'middy-js': 'https://github.com/middyjs/middy', 'mikro-orm': 'https://github.com/mikro-orm/mikro-orm', + 'ml-dotnet': 'https://github.com/dotnet/machinelearning', mockito: 'https://github.com/mockito/mockito', 'mongo-csharp-driver': 'https://github.com/mongodb/mongo-csharp-driver', mstest: 'https://github.com/microsoft/testfx', @@ -374,8 +380,11 @@ const repoGroups = { 'https://github.com/nuxt/nuxt', ], okhttp: 'https://github.com/square/okhttp', + openiddict: 'https://github.com/openiddict/openiddict-core', 'opentelemetry-dotnet': 
'https://github.com/open-telemetry/opentelemetry-dotnet', + 'opentelemetry-dotnet-contrib': + 'https://github.com/open-telemetry/opentelemetry-dotnet-contrib', 'opentelemetry-erlang': 'https://github.com/open-telemetry/opentelemetry-erlang', 'opentelemetry-erlang-contrib': @@ -383,6 +392,7 @@ const repoGroups = { 'opentelemetry-go': 'https://github.com/open-telemetry/opentelemetry-go', 'opentelemetry-js': 'https://github.com/open-telemetry/opentelemetry-js', orleans: 'https://github.com/dotnet/orleans', + 'panda-css': 'https://github.com/chakra-ui/panda', parcel: 'https://github.com/parcel-bundler/parcel', 'percy-cli': 'https://github.com/percy/cli', picassojs: 'https://github.com/qlik-oss/picasso.js', @@ -390,7 +400,8 @@ const repoGroups = { 'https://github.com/pixijs/pixi.js', // old repo 'https://github.com/pixijs/pixijs', ], - playwright: 'https://github.com/Microsoft/playwright', + playwright: 'https://github.com/microsoft/playwright', + 'playwright-dotnet': 'https://github.com/microsoft/playwright-dotnet', pnpjs: 'https://github.com/pnp/pnpjs', pollyjs: 'https://github.com/Netflix/pollyjs', pouchdb: 'https://github.com/pouchdb/pouchdb', @@ -426,6 +437,7 @@ const repoGroups = { 'sentry-dotnet': 'https://github.com/getsentry/sentry-dotnet', 'sentry-javascript': 'https://github.com/getsentry/sentry-javascript', 'sentry-ruby': 'https://github.com/getsentry/sentry-ruby', + 'sentry-rust': 'https://github.com/getsentry/sentry-rust', serde: 'https://github.com/serde-rs/serde', shedlock: 'https://github.com/lukas-krecan/ShedLock', 'shopify-app-bridge': 'https://github.com/Shopify/app-bridge', @@ -449,6 +461,7 @@ const repoGroups = { 'telus-tds': 'https://github.com/telusdigital/tds', 'telus-tds-core': 'https://github.com/telus/tds-core', 'temporalio-ts': 'https://github.com/temporalio/sdk-typescript', + 'testcontainers-go': 'https://github.com/testcontainers/testcontainers-go', 'testcontainers-java': 'https://github.com/testcontainers/testcontainers-java', 
'testcontainers-node': @@ -486,6 +499,7 @@ const repoGroups = { 'https://github.com/xunit/xunit.analyzers', ], yarn: 'https://github.com/yarnpkg/berry', + 'zag-js': 'https://github.com/chakra-ui/zag', 'zxing-net': 'https://github.com/micjahn/ZXing.Net', }; diff --git a/lib/config/presets/local/index.ts b/lib/config/presets/local/index.ts index 96b1d4d4690211..6b7e39a3738f2f 100644 --- a/lib/config/presets/local/index.ts +++ b/lib/config/presets/local/index.ts @@ -21,6 +21,7 @@ const resolvers = { bitbucket: local, 'bitbucket-server': local, codecommit: null, + gerrit: local, gitea, github, gitlab, diff --git a/lib/config/types.ts b/lib/config/types.ts index ce565e8844cb2c..d696b554878eee 100644 --- a/lib/config/types.ts +++ b/lib/config/types.ts @@ -113,6 +113,7 @@ export interface GlobalOnlyConfig { privateKeyPath?: string; privateKeyPathOld?: string; redisUrl?: string; + redisPrefix?: string; repositories?: RenovateRepository[]; platform?: PlatformId; endpoint?: string; @@ -191,6 +192,14 @@ export type RenovateRepository = export type UseBaseBranchConfigType = 'merge' | 'none'; export type ConstraintsFilter = 'strict' | 'none'; +export const allowedStatusCheckStrings = [ + 'minimumReleaseAge', + 'mergeConfidence', + 'configValidation', + 'artifactError', +] as const; +export type StatusCheckKey = (typeof allowedStatusCheckStrings)[number]; + // TODO: Proper typings export interface RenovateConfig extends LegacyAdminConfig, @@ -201,6 +210,7 @@ export interface RenovateConfig Record { depName?: string; baseBranches?: string[]; + commitBody?: string; useBaseBranchConfig?: UseBaseBranchConfigType; baseBranch?: string; defaultBranch?: string; @@ -262,6 +272,8 @@ export interface RenovateConfig checkedBranches?: string[]; customizeDashboard?: Record; + + statusCheckNames?: Record; } const CustomDatasourceFormats = ['json', 'plain', 'yaml', 'html'] as const; diff --git a/lib/config/validation.spec.ts b/lib/config/validation.spec.ts index 65c2847d736382..fb7d0ac9b0809e 
100644 --- a/lib/config/validation.spec.ts +++ b/lib/config/validation.spec.ts @@ -145,6 +145,30 @@ describe('config/validation', () => { ]); }); + it('validates invalid statusCheckNames', async () => { + const config = { + statusCheckNames: { + randomKey: '', + mergeConfidence: 10, + configValidation: '', + artifactError: null, + }, + }; + // @ts-expect-error invalid options + const { errors } = await configValidation.validateConfig(config); + expect(errors).toMatchObject([ + { + message: + 'Invalid `statusCheckNames.mergeConfidence` configuration: status check is not a string.', + }, + { + message: + 'Invalid `statusCheckNames.statusCheckNames.randomKey` configuration: key is not allowed.', + }, + ]); + expect(errors).toHaveLength(2); + }); + it('catches invalid customDatasources record type', async () => { const config = { customDatasources: { diff --git a/lib/config/validation.ts b/lib/config/validation.ts index 6a045221e1b242..ffa29bd142a8d0 100644 --- a/lib/config/validation.ts +++ b/lib/config/validation.ts @@ -1,4 +1,5 @@ import is from '@sindresorhus/is'; +import { logger } from '../logger'; import { allManagersList, getManagerList } from '../modules/manager'; import { isCustomManager } from '../modules/manager/custom'; import type { @@ -18,11 +19,13 @@ import { GlobalConfig } from './global'; import { migrateConfig } from './migration'; import { getOptions } from './options'; import { resolveConfigPresets } from './presets'; -import type { - RenovateConfig, - RenovateOptions, - ValidationMessage, - ValidationResult, +import { + type RenovateConfig, + type RenovateOptions, + type StatusCheckKey, + type ValidationMessage, + type ValidationResult, + allowedStatusCheckStrings, } from './types'; import * as managerValidator from './validation-helpers/managers'; @@ -566,6 +569,30 @@ export async function validateConfig( message: `Invalid \`${currentPath}.${key}.${res}\` configuration: value is not a string`, }); } + } else if (key === 'statusCheckNames') { + 
for (const [statusCheckKey, statusCheckValue] of Object.entries( + val, + )) { + if ( + !allowedStatusCheckStrings.includes( + statusCheckKey as StatusCheckKey, + ) + ) { + errors.push({ + topic: 'Configuration Error', + message: `Invalid \`${currentPath}.${key}.${statusCheckKey}\` configuration: key is not allowed.`, + }); + } + if ( + !(is.string(statusCheckValue) || is.null_(statusCheckValue)) + ) { + errors.push({ + topic: 'Configuration Error', + message: `Invalid \`${currentPath}.${statusCheckKey}\` configuration: status check is not a string.`, + }); + continue; + } + } } else if (key === 'customDatasources') { const allowedKeys = [ 'description', @@ -694,7 +721,11 @@ function validateRegexManagerFields( for (const matchString of customManager.matchStrings) { try { regEx(matchString); - } catch (e) { + } catch (err) { + logger.debug( + { err }, + 'customManager.matchStrings regEx validation error', + ); errors.push({ topic: 'Configuration Error', message: `Invalid regExp for ${currentPath}: \`${matchString}\``, diff --git a/lib/constants/platforms.ts b/lib/constants/platforms.ts index c43a9ca9fc7281..d1ee8156312ba6 100644 --- a/lib/constants/platforms.ts +++ b/lib/constants/platforms.ts @@ -3,6 +3,7 @@ export type PlatformId = | 'codecommit' | 'bitbucket' | 'bitbucket-server' + | 'gerrit' | 'gitea' | 'github' | 'gitlab' diff --git a/lib/modules/datasource/artifactory/index.ts b/lib/modules/datasource/artifactory/index.ts index 2668182e800fa6..86fe6918b04b44 100644 --- a/lib/modules/datasource/artifactory/index.ts +++ b/lib/modules/datasource/artifactory/index.ts @@ -69,7 +69,7 @@ export class ArtifactoryDatasource extends Datasource { : node.innerHTML; const published = ArtifactoryDatasource.parseReleaseTimestamp( - node.nextSibling?.text, + node.nextSibling!.text, // TODO: can be null (#22198) ); const thisRelease: Release = { diff --git a/lib/modules/datasource/docker/index.ts b/lib/modules/datasource/docker/index.ts index b18db9417eb3f0..ea9e6dd897098e 
100644 --- a/lib/modules/datasource/docker/index.ts +++ b/lib/modules/datasource/docker/index.ts @@ -176,7 +176,7 @@ export class DockerDatasource extends Datasource { ) => `${registryHost}:${dockerRepository}@${configDigest}`, ttlMinutes: 1440 * 28, }) - public async getImageConfig( + async getImageConfig( registryHost: string, dockerRepository: string, configDigest: string, @@ -221,7 +221,7 @@ export class DockerDatasource extends Datasource { ) => `${registryHost}:${dockerRepository}@${configDigest}`, ttlMinutes: 1440 * 28, }) - public async getHelmConfig( + async getHelmConfig( registryHost: string, dockerRepository: string, configDigest: string, @@ -336,7 +336,7 @@ export class DockerDatasource extends Datasource { ) => `${registryHost}:${dockerRepository}@${currentDigest}`, ttlMinutes: 1440 * 28, }) - public async getImageArchitecture( + async getImageArchitecture( registryHost: string, dockerRepository: string, currentDigest: string, @@ -434,7 +434,7 @@ export class DockerDatasource extends Datasource { `${registryHost}:${dockerRepository}:${tag}`, ttlMinutes: 24 * 60, }) - public async getLabels( + async getLabels( registryHost: string, dockerRepository: string, tag: string, @@ -687,7 +687,7 @@ export class DockerDatasource extends Datasource { key: (registryHost: string, dockerRepository: string) => `${registryHost}:${dockerRepository}`, }) - public async getTags( + async getTags( registryHost: string, dockerRepository: string, ): Promise { @@ -913,6 +913,10 @@ export class DockerDatasource extends Datasource { return digest; } + @cache({ + namespace: 'datasource-docker-hub-tags', + key: (dockerRepository: string) => `${dockerRepository}`, + }) async getDockerHubTags(dockerRepository: string): Promise { const result: Release[] = []; let url: null | string = diff --git a/lib/modules/datasource/hex/__fixtures__/certifi.json b/lib/modules/datasource/hex/__fixtures__/certifi.json index ae3248783ea01a..4d52475d6f5c38 100644 --- 
a/lib/modules/datasource/hex/__fixtures__/certifi.json +++ b/lib/modules/datasource/hex/__fixtures__/certifi.json @@ -148,7 +148,12 @@ } ], "repository": "hexpm", - "retirements": {}, + "retirements": { + "0.1.1": { + "message": "Used for testing", + "reason": "Not really retired" + } +}, "updated_at": "2020-03-04T14:54:16.279054Z", "url": "https://hex.pm/api/packages/certifi" } diff --git a/lib/modules/datasource/hex/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/hex/__snapshots__/index.spec.ts.snap index f1dba45a9e7558..4067953c19a138 100644 --- a/lib/modules/datasource/hex/__snapshots__/index.spec.ts.snap +++ b/lib/modules/datasource/hex/__snapshots__/index.spec.ts.snap @@ -6,6 +6,7 @@ exports[`modules/datasource/hex/index getReleases process public repo without au "registryUrl": "https://hex.pm/", "releases": [ { + "isDeprecated": true, "releaseTimestamp": "2015-09-10T13:58:55.620Z", "version": "0.1.1", }, @@ -112,6 +113,7 @@ exports[`modules/datasource/hex/index getReleases processes real data 1`] = ` "registryUrl": "https://hex.pm/", "releases": [ { + "isDeprecated": true, "releaseTimestamp": "2015-09-10T13:58:55.620Z", "version": "0.1.1", }, diff --git a/lib/modules/datasource/hex/index.spec.ts b/lib/modules/datasource/hex/index.spec.ts index 70556645629f5e..cf9cbb9b565b5e 100644 --- a/lib/modules/datasource/hex/index.spec.ts +++ b/lib/modules/datasource/hex/index.spec.ts @@ -131,6 +131,19 @@ describe('modules/datasource/hex/index', () => { expect(res).toBeDefined(); }); + it('extracts depreceated info', async () => { + httpMock + .scope(baseUrl) + .get('/packages/certifi') + .reply(200, certifiResponse); + hostRules.find.mockReturnValueOnce({}); + const res = await getPkgReleases({ + datasource, + packageName: 'certifi', + }); + expect(res?.releases.some((rel) => rel.isDeprecated)).toBeTrue(); + }); + it('processes a private repo with auth', async () => { httpMock .scope(baseUrl, { diff --git a/lib/modules/datasource/hex/schema.ts 
b/lib/modules/datasource/hex/schema.ts index b1015ae664d065..fea85b0919da78 100644 --- a/lib/modules/datasource/hex/schema.ts +++ b/lib/modules/datasource/hex/schema.ts @@ -1,3 +1,4 @@ +import is from '@sindresorhus/is'; import { z } from 'zod'; import { LooseArray } from '../../../util/schema-utils'; import type { Release, ReleaseResult } from '../types'; @@ -19,6 +20,15 @@ export const HexRelease = z inserted_at: z.string().optional(), }), ).refine((releases) => releases.length > 0, 'No releases found'), + retirements: z + .record( + z.string(), + z.object({ + message: z.string(), + reason: z.string(), + }), + ) + .optional(), }) .transform((hexResponse): ReleaseResult => { const releases: Release[] = hexResponse.releases.map( @@ -29,6 +39,10 @@ export const HexRelease = z release.releaseTimestamp = releaseTimestamp; } + if (is.plainObject(hexResponse.retirements?.[version])) { + release.isDeprecated = true; + } + return release; }, ); diff --git a/lib/modules/datasource/nuget/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/nuget/__snapshots__/index.spec.ts.snap index bdbf086ec83c9a..bdce30f14a4718 100644 --- a/lib/modules/datasource/nuget/__snapshots__/index.spec.ts.snap +++ b/lib/modules/datasource/nuget/__snapshots__/index.spec.ts.snap @@ -11,6 +11,9 @@ exports[`modules/datasource/nuget/index getReleases handles paginated results (v "version": "2.0.0", }, ], + "tags": { + "latest": "2.0.0", + }, } `; @@ -200,6 +203,9 @@ exports[`modules/datasource/nuget/index getReleases processes real data (v2) 1`] }, ], "sourceUrl": "https://nunit.org/", + "tags": { + "latest": "3.12.0", + }, } `; @@ -1692,6 +1698,9 @@ exports[`modules/datasource/nuget/index getReleases processes real data with no }, ], "sourceUrl": "https://nunit.org", + "tags": { + "latest": "3.11.0", + }, } `; @@ -1829,6 +1838,9 @@ exports[`modules/datasource/nuget/index getReleases processes real data without "version": "3.11.0", }, ], + "tags": { + "latest": "3.11.0", + }, } `; diff --git 
a/lib/modules/datasource/nuget/index.spec.ts b/lib/modules/datasource/nuget/index.spec.ts index 6dc521ac76322a..db7bbaa7b52d61 100644 --- a/lib/modules/datasource/nuget/index.spec.ts +++ b/lib/modules/datasource/nuget/index.spec.ts @@ -585,7 +585,7 @@ describe('modules/datasource/nuget/index', () => { expect(res?.sourceUrl).toBeDefined(); }); - it('processes real data no relase (v2)', async () => { + it('processes real data no release (v2)', async () => { httpMock .scope('https://www.nuget.org') .get( @@ -627,6 +627,20 @@ describe('modules/datasource/nuget/index', () => { expect(res).toMatchSnapshot(); }); + it('extracts latest tag (v2)', async () => { + httpMock + .scope('https://www.nuget.org') + .get( + '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published', + ) + .reply(200, pkgListV2NoGitHubProjectUrl); + const res = await getPkgReleases({ + ...configV2, + }); + expect(res).not.toBeNull(); + expect(res?.tags?.latest).toBe('3.11.0'); + }); + it('handles paginated results (v2)', async () => { httpMock .scope('https://www.nuget.org') diff --git a/lib/modules/datasource/nuget/v2.ts b/lib/modules/datasource/nuget/v2.ts index 79c19161a1ab8b..71629f90f8c436 100644 --- a/lib/modules/datasource/nuget/v2.ts +++ b/lib/modules/datasource/nuget/v2.ts @@ -40,6 +40,7 @@ export async function getReleases( try { const pkgIsLatestVersion = getPkgProp(pkgInfo, 'IsLatestVersion'); if (pkgIsLatestVersion === 'true') { + dep['tags'] = { latest: removeBuildMeta(`${version}`) }; const projectUrl = getPkgProp(pkgInfo, 'ProjectUrl'); if (projectUrl) { dep.sourceUrl = massageUrl(projectUrl); diff --git a/lib/modules/manager/ansible-galaxy/index.ts b/lib/modules/manager/ansible-galaxy/index.ts index 5b468378b37828..85e0d274ca0cd7 100644 --- a/lib/modules/manager/ansible-galaxy/index.ts +++ b/lib/modules/manager/ansible-galaxy/index.ts @@ -8,7 +8,7 @@ export { extractPackageFile } from './extract'; export const categories: Category[] = ['ansible', 
'iac']; export const defaultConfig = { - fileMatch: ['(^|/)requirements\\.ya?ml$', '(^|/)galaxy\\.ya?ml$'], + fileMatch: ['(^|/)(galaxy|requirements)(\\.ansible)?\\.ya?ml$'], }; export const supportedDatasources = [ diff --git a/lib/modules/manager/api.ts b/lib/modules/manager/api.ts index ba08a398818dbd..6f618abaee49d2 100644 --- a/lib/modules/manager/api.ts +++ b/lib/modules/manager/api.ts @@ -61,6 +61,7 @@ import * as nodenv from './nodenv'; import * as npm from './npm'; import * as nuget from './nuget'; import * as nvm from './nvm'; +import * as ocb from './ocb'; import * as osgi from './osgi'; import * as pep621 from './pep621'; import * as pipCompile from './pip-compile'; @@ -153,6 +154,7 @@ api.set('nodenv', nodenv); api.set('npm', npm); api.set('nuget', nuget); api.set('nvm', nvm); +api.set('ocb', ocb); api.set('osgi', osgi); api.set('pep621', pep621); api.set('pip-compile', pipCompile); diff --git a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml b/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml deleted file mode 100644 index c75ec9374b0ff6..00000000000000 --- a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml +++ /dev/null @@ -1,6 +0,0 @@ -jobs: - - job: job_one - steps: - - task: Bash@3 - inputs: - script: 'echo Hello World' diff --git a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml b/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml deleted file mode 100644 index 8dbff5f31f5897..00000000000000 --- a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml +++ /dev/null @@ -1,8 +0,0 @@ -stages: - - stage: stage_one - jobs: - - job: job_one - steps: - - task: Bash@3 - inputs: - script: 'echo Hello World' diff --git a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml b/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml deleted file mode 100644 index 
28c203d03b993e..00000000000000 --- a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml +++ /dev/null @@ -1,4 +0,0 @@ -steps: - - task: Bash@3 - inputs: - script: 'echo Hello World' diff --git a/lib/modules/manager/azure-pipelines/extract.spec.ts b/lib/modules/manager/azure-pipelines/extract.spec.ts index 7e4f6e40423e03..e53b41639e5495 100644 --- a/lib/modules/manager/azure-pipelines/extract.spec.ts +++ b/lib/modules/manager/azure-pipelines/extract.spec.ts @@ -1,3 +1,4 @@ +import { codeBlock } from 'common-tags'; import { Fixtures } from '../../../../test/fixtures'; import { GlobalConfig } from '../../../config/global'; import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks'; @@ -15,9 +16,6 @@ const azurePipelines = Fixtures.get('azure-pipelines.yaml'); const azurePipelinesNoDependency = Fixtures.get( 'azure-pipelines-no-dependency.yaml', ); -const azurePipelinesStages = Fixtures.get('azure-pipelines-stages.yaml'); -const azurePipelinesJobs = Fixtures.get('azure-pipelines-jobs.yaml'); -const azurePipelinesSteps = Fixtures.get('azure-pipelines-steps.yaml'); describe('modules/manager/azure-pipelines/extract', () => { afterEach(() => { @@ -58,12 +56,11 @@ describe('modules/manager/azure-pipelines/extract', () => { ).toBeNull(); }); - it('should return null when reference is not defined', () => { + it('should return null when reference is not defined specified', () => { expect( extractRepository({ type: 'github', name: 'user/repo', - ref: null, }), ).toBeNull(); }); @@ -138,10 +135,6 @@ describe('modules/manager/azure-pipelines/extract', () => { datasource: 'docker', }); }); - - it('should return null if image field is missing', () => { - expect(extractContainer({ image: null })).toBeNull(); - }); }); describe('extractAzurePipelinesTasks()', () => { @@ -191,11 +184,196 @@ describe('modules/manager/azure-pipelines/extract', () => { ).toBeNull(); }); + it('should extract deployment jobs runonce', () => { + const 
packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + runOnce: + deploy: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs on failure', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + runOnce: + on: + failure: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs on success', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + runOnce: + on: + success: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs postroute', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + runOnce: + postRouteTraffic: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs predeploy', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + runOnce: + preDeploy: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = 
extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs route', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + runOnce: + routeTraffic: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs rolling', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + rolling: + deploy: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + + it('should extract deployment jobs canary', () => { + const packageFile = codeBlock` + jobs: + - deployment: deployment_one + strategy: + canary: + deploy: + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); + expect(res?.deps).toEqual([ + { + depName: 'Bash', + currentValue: '3', + datasource: AzurePipelinesTasksDatasource.id, + }, + ]); + }); + it('should extract stages', () => { - const res = extractPackageFile( - azurePipelinesStages, - azurePipelinesFilename, - ); + const packageFile = codeBlock` + stages: + - stage: stage_one + jobs: + - job: job_one + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); expect(res?.deps).toEqual([ { depName: 'Bash', @@ -206,10 +384,15 @@ 
describe('modules/manager/azure-pipelines/extract', () => { }); it('should extract jobs', () => { - const res = extractPackageFile( - azurePipelinesJobs, - azurePipelinesFilename, - ); + const packageFile = codeBlock` + jobs: + - job: job_one + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); expect(res?.deps).toEqual([ { depName: 'Bash', @@ -220,10 +403,13 @@ describe('modules/manager/azure-pipelines/extract', () => { }); it('should extract steps', () => { - const res = extractPackageFile( - azurePipelinesSteps, - azurePipelinesFilename, - ); + const packageFile = codeBlock` + steps: + - task: Bash@3 + inputs: + script: 'echo Hello World' + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); expect(res?.deps).toEqual([ { depName: 'Bash', @@ -234,10 +420,11 @@ describe('modules/manager/azure-pipelines/extract', () => { }); it('should return null when task alias used', () => { - const content = ` - steps: - - bash: 'echo Hello World'`; - const res = extractPackageFile(content, azurePipelinesFilename); + const packageFile = codeBlock` + steps: + - bash: 'echo Hello World'; + `; + const res = extractPackageFile(packageFile, azurePipelinesFilename); expect(res).toBeNull(); }); }); diff --git a/lib/modules/manager/azure-pipelines/extract.ts b/lib/modules/manager/azure-pipelines/extract.ts index 5f2a1c7f2c81a5..433d4b7c2f987f 100644 --- a/lib/modules/manager/azure-pipelines/extract.ts +++ b/lib/modules/manager/azure-pipelines/extract.ts @@ -3,12 +3,21 @@ import { logger } from '../../../logger'; import { coerceArray } from '../../../util/array'; import { regEx } from '../../../util/regex'; import { joinUrlParts } from '../../../util/url'; -import { parseSingleYaml } from '../../../util/yaml'; import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks'; import { GitTagsDatasource } from '../../datasource/git-tags'; import { getDep } 
from '../dockerfile/extract'; import type { PackageDependency, PackageFileContent } from '../types'; -import type { AzurePipelines, Container, Repository } from './types'; +import { + AzurePipelines, + AzurePipelinesYaml, + Container, + Deploy, + Deployment, + Job, + Jobs, + Repository, + Step, +} from './schema'; const AzurePipelinesTaskRegex = regEx(/^(?[^@]+)@(?.*)$/); @@ -68,10 +77,6 @@ export function extractRepository( export function extractContainer( container: Container, ): PackageDependency | null { - if (!container.image) { - return null; - } - const dep = getDep(container.image); logger.debug( { @@ -104,15 +109,60 @@ export function parseAzurePipelines( content: string, packageFile: string, ): AzurePipelines | null { - let pkg: AzurePipelines | null = null; - try { - pkg = parseSingleYaml(content, { json: true }) as AzurePipelines; - } catch (err) /* istanbul ignore next */ { - logger.debug({ packageFile, err }, 'Error parsing azure-pipelines content'); - return null; + const res = AzurePipelinesYaml.safeParse(content); + if (res.success) { + return res.data; + } else { + logger.debug( + { err: res.error, packageFile }, + 'Error parsing pubspec lockfile.', + ); + } + return null; +} + +function extractSteps( + steps: Step[] | undefined, +): PackageDependency>[] { + const deps = []; + for (const step of coerceArray(steps)) { + const task = extractAzurePipelinesTasks(step.task); + if (task) { + deps.push(task); + } } + return deps; +} + +function extractJob(job: Job | undefined): PackageDependency[] { + return extractSteps(job?.steps); +} + +function extractDeploy(deploy: Deploy | undefined): PackageDependency[] { + const deps = extractJob(deploy?.deploy); + deps.push(...extractJob(deploy?.postRouteTraffic)); + deps.push(...extractJob(deploy?.preDeploy)); + deps.push(...extractJob(deploy?.routeTraffic)); + deps.push(...extractJob(deploy?.on?.failure)); + deps.push(...extractJob(deploy?.on?.success)); + return deps; +} + +function extractJobs(jobs: Jobs | 
undefined): PackageDependency[] { + const deps: PackageDependency[] = []; + for (const jobOrDeployment of coerceArray(jobs)) { + const deployment = jobOrDeployment as Deployment; + if (deployment.strategy) { + deps.push(...extractDeploy(deployment.strategy.canary)); + deps.push(...extractDeploy(deployment.strategy.rolling)); + deps.push(...extractDeploy(deployment.strategy.runOnce)); + continue; + } - return pkg; + const job = jobOrDeployment as Job; + deps.push(...extractJob(job)); + } + return deps; } export function extractPackageFile( @@ -142,31 +192,11 @@ export function extractPackageFile( } for (const { jobs } of coerceArray(pkg.stages)) { - for (const { steps } of coerceArray(jobs)) { - for (const step of coerceArray(steps)) { - const task = extractAzurePipelinesTasks(step.task); - if (task) { - deps.push(task); - } - } - } - } - - for (const { steps } of coerceArray(pkg.jobs)) { - for (const step of coerceArray(steps)) { - const task = extractAzurePipelinesTasks(step.task); - if (task) { - deps.push(task); - } - } + deps.push(...extractJobs(jobs)); } - for (const step of coerceArray(pkg.steps)) { - const task = extractAzurePipelinesTasks(step.task); - if (task) { - deps.push(task); - } - } + deps.push(...extractJobs(pkg.jobs)); + deps.push(...extractSteps(pkg.steps)); if (!deps.length) { return null; diff --git a/lib/modules/manager/azure-pipelines/schema.ts b/lib/modules/manager/azure-pipelines/schema.ts new file mode 100644 index 00000000000000..1a1c3252546b95 --- /dev/null +++ b/lib/modules/manager/azure-pipelines/schema.ts @@ -0,0 +1,81 @@ +import { z } from 'zod'; +import { LooseArray, Yaml } from '../../../util/schema-utils'; + +export const Step = z.object({ + task: z.string(), +}); +export type Step = z.infer; + +export const Job = z.object({ + steps: LooseArray(Step), +}); +export type Job = z.infer; + +export const Deploy = z + .object({ + deploy: Job, + preDeploy: Job, + routeTraffic: Job, + postRouteTraffic: Job, + on: z + .object({ + failure: 
Job, + success: Job, + }) + .partial(), + }) + .partial(); +export type Deploy = z.infer; + +export const Deployment = z + .object({ + strategy: z + .object({ + runOnce: Deploy, + rolling: Deploy, + canary: Deploy, + }) + .partial(), + }) + .partial(); +export type Deployment = z.infer; + +export const Jobs = LooseArray(z.union([Job, Deployment])); +export type Jobs = z.infer; + +export const Stage = z.object({ + jobs: Jobs, +}); +export type Stage = z.infer; + +export const Container = z.object({ + image: z.string(), +}); +export type Container = z.infer; + +export const Repository = z.object({ + type: z.enum(['git', 'github', 'bitbucket']), + name: z.string(), + ref: z.string().optional(), +}); +export type Repository = z.infer; + +export const Resources = z + .object({ + repositories: LooseArray(Repository), + containers: LooseArray(Container), + }) + .partial(); +export type Resources = z.infer; + +export const AzurePipelines = z + .object({ + resources: Resources, + stages: LooseArray(Stage), + jobs: Jobs, + steps: LooseArray(Step), + }) + .partial(); +export type AzurePipelines = z.infer; + +export const AzurePipelinesYaml = Yaml.pipe(AzurePipelines); diff --git a/lib/modules/manager/azure-pipelines/types.ts b/lib/modules/manager/azure-pipelines/types.ts deleted file mode 100644 index f532a4fb83eb9a..00000000000000 --- a/lib/modules/manager/azure-pipelines/types.ts +++ /dev/null @@ -1,30 +0,0 @@ -export interface Container { - image?: string | null; -} -export interface Repository { - type: 'git' | 'github' | 'bitbucket'; - name: string; - ref?: string | null; -} -export interface Resources { - repositories?: Repository[]; - containers?: Container[]; -} -export interface AzurePipelines { - resources?: Resources; - stages?: Stage[]; - jobs?: Job[]; - steps?: Step[]; -} - -export interface Stage { - jobs?: Job[]; -} - -export interface Job { - steps?: Step[]; -} - -export interface Step { - task: string; -} diff --git a/lib/modules/manager/cargo/artifacts.ts 
b/lib/modules/manager/cargo/artifacts.ts index 4ca7aabc5f1ca4..7fa9ced9db4f4b 100644 --- a/lib/modules/manager/cargo/artifacts.ts +++ b/lib/modules/manager/cargo/artifacts.ts @@ -59,6 +59,7 @@ async function cargoUpdatePrecise( } const execOptions: ExecOptions = { + extraEnv: { ...getGitEnvironmentVariables(['cargo']) }, docker: {}, toolConstraints: [{ toolName: 'rust', constraint }], }; diff --git a/lib/modules/manager/git-submodules/extract.ts b/lib/modules/manager/git-submodules/extract.ts index 6e0ac14ec4b9f8..64cc0806cd0679 100644 --- a/lib/modules/manager/git-submodules/extract.ts +++ b/lib/modules/manager/git-submodules/extract.ts @@ -70,7 +70,7 @@ async function getBranch( ).trim(); return branchFromConfig === '.' - ? (await git.branch(['--show-current'])).current.trim() + ? (await git.branch(['--list'])).current.trim() : branchFromConfig || (await getDefaultBranch(subModuleUrl)).trim(); } diff --git a/lib/modules/manager/gradle/parser.spec.ts b/lib/modules/manager/gradle/parser.spec.ts index 2fb70034c1abc1..a6d35c004757f1 100644 --- a/lib/modules/manager/gradle/parser.spec.ts +++ b/lib/modules/manager/gradle/parser.spec.ts @@ -925,6 +925,7 @@ describe('modules/manager/gradle/parser', () => { ${''} | ${'unknown { toolVersion = "1.2.3" }'} | ${null} ${''} | ${'composeOptions { kotlinCompilerExtensionVersion = "1.2.3" }'} | ${{ depName: 'composeOptions', packageName: GRADLE_PLUGINS['composeOptions'][1], currentValue: '1.2.3' }} ${''} | ${'jmh { jmhVersion = "1.2.3" }'} | ${{ depName: 'jmh', packageName: GRADLE_PLUGINS['jmh'][1], currentValue: '1.2.3' }} + ${''} | ${'micronaut { version = "1.2.3" }'} | ${{ depName: 'micronaut', packageName: GRADLE_PLUGINS['micronaut'][1], currentValue: '1.2.3' }} `('$def | $input', ({ def, input, output }) => { const { deps } = parseGradle([def, input].join('\n')); expect(deps).toMatchObject([output].filter(is.truthy)); diff --git a/lib/modules/manager/gradle/parser/common.ts b/lib/modules/manager/gradle/parser/common.ts index 
b564fd30c86e61..9505df5137cf77 100644 --- a/lib/modules/manager/gradle/parser/common.ts +++ b/lib/modules/manager/gradle/parser/common.ts @@ -31,6 +31,7 @@ export const GRADLE_PLUGINS = { jacoco: ['toolVersion', 'org.jacoco:jacoco'], jmh: ['jmhVersion', 'org.openjdk.jmh:jmh-core'], lombok: ['version', 'org.projectlombok:lombok'], + micronaut: ['version', 'io.micronaut.platform:micronaut-platform'], pmd: ['toolVersion', 'net.sourceforge.pmd:pmd-java'], spotbugs: ['toolVersion', 'com.github.spotbugs:spotbugs'], }; diff --git a/lib/modules/manager/helmv3/__snapshots__/artifacts.spec.ts.snap b/lib/modules/manager/helmv3/__snapshots__/artifacts.spec.ts.snap index 974ad178fb2648..61454159f84dc4 100644 --- a/lib/modules/manager/helmv3/__snapshots__/artifacts.spec.ts.snap +++ b/lib/modules/manager/helmv3/__snapshots__/artifacts.spec.ts.snap @@ -3,7 +3,7 @@ exports[`modules/manager/helmv3/artifacts alias name is picked, when repository is as alias and dependency defined 1`] = ` [ { - "cmd": "helm repo add repo1 https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --username basicUser --password secret", + "cmd": "helm repo add repo1 https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update --username basicUser --password secret", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -52,7 +52,7 @@ exports[`modules/manager/helmv3/artifacts alias name is picked, when repository exports[`modules/manager/helmv3/artifacts do not add registryAliases to repository list 1`] = ` [ { - "cmd": "helm repo add jetstack https://charts.jetstack.io", + "cmd": "helm repo add jetstack https://charts.jetstack.io --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -74,7 +74,7 @@ exports[`modules/manager/helmv3/artifacts do not add registryAliases to reposito }, }, { - "cmd": "helm repo add nginx https://kubernetes.github.io/ingress-nginx", + "cmd": "helm repo add nginx https://kubernetes.github.io/ingress-nginx 
--force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -145,7 +145,7 @@ exports[`modules/manager/helmv3/artifacts log into private registries and reposi }, }, { - "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --username basicUser --password secret", + "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update --username basicUser --password secret", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -216,7 +216,7 @@ exports[`modules/manager/helmv3/artifacts log into private registries and reposi }, }, { - "cmd": "helm repo add stable the_stable_url", + "cmd": "helm repo add stable the_stable_url --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -238,7 +238,7 @@ exports[`modules/manager/helmv3/artifacts log into private registries and reposi }, }, { - "cmd": "helm repo add repo1 https://the_repo1_url --username basicUser --password secret", + "cmd": "helm repo add repo1 https://the_repo1_url --force-update --username basicUser --password secret", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -260,7 +260,7 @@ exports[`modules/manager/helmv3/artifacts log into private registries and reposi }, }, { - "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable", + "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -309,7 +309,7 @@ exports[`modules/manager/helmv3/artifacts log into private registries and reposi exports[`modules/manager/helmv3/artifacts returns null if unchanged 1`] = ` [ { - "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable", + "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update", 
"options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -358,7 +358,7 @@ exports[`modules/manager/helmv3/artifacts returns null if unchanged 1`] = ` exports[`modules/manager/helmv3/artifacts returns updated Chart.lock 1`] = ` [ { - "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable", + "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -407,7 +407,7 @@ exports[`modules/manager/helmv3/artifacts returns updated Chart.lock 1`] = ` exports[`modules/manager/helmv3/artifacts returns updated Chart.lock for lockfile maintenance 1`] = ` [ { - "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable", + "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -468,7 +468,7 @@ exports[`modules/manager/helmv3/artifacts returns updated Chart.lock with docker }, }, { - "cmd": "docker run --rm --name=renovate_sidecar --label=renovate_child -v "/tmp/github/some/repo":"/tmp/github/some/repo" -v "/tmp/renovate/cache":"/tmp/renovate/cache" -e HELM_EXPERIMENTAL_OCI -e HELM_REGISTRY_CONFIG -e HELM_REPOSITORY_CONFIG -e HELM_REPOSITORY_CACHE -e CONTAINERBASE_CACHE_DIR -w "/tmp/github/some/repo" ghcr.io/containerbase/sidecar bash -l -c "install-tool helm v3.7.2 && helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable && helm dependency update ''"", + "cmd": "docker run --rm --name=renovate_sidecar --label=renovate_child -v "/tmp/github/some/repo":"/tmp/github/some/repo" -v "/tmp/renovate/cache":"/tmp/renovate/cache" -e HELM_EXPERIMENTAL_OCI -e HELM_REGISTRY_CONFIG -e HELM_REPOSITORY_CONFIG -e HELM_REPOSITORY_CACHE -e CONTAINERBASE_CACHE_DIR -w "/tmp/github/some/repo" ghcr.io/containerbase/sidecar bash -l -c 
"install-tool helm v3.7.2 && helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update && helm dependency update ''"", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -496,7 +496,7 @@ exports[`modules/manager/helmv3/artifacts returns updated Chart.lock with docker exports[`modules/manager/helmv3/artifacts sets repositories from registryAliases 1`] = ` [ { - "cmd": "helm repo add stable the_stable_url", + "cmd": "helm repo add stable the_stable_url --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -518,7 +518,7 @@ exports[`modules/manager/helmv3/artifacts sets repositories from registryAliases }, }, { - "cmd": "helm repo add repo1 the_repo1_url", + "cmd": "helm repo add repo1 the_repo1_url --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -540,7 +540,7 @@ exports[`modules/manager/helmv3/artifacts sets repositories from registryAliases }, }, { - "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable", + "cmd": "helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", @@ -601,7 +601,7 @@ exports[`modules/manager/helmv3/artifacts sets repositories from registryAliases }, }, { - "cmd": "docker run --rm --name=renovate_sidecar --label=renovate_child -v "/tmp/github/some/repo":"/tmp/github/some/repo" -v "/tmp/renovate/cache":"/tmp/renovate/cache" -e HELM_EXPERIMENTAL_OCI -e HELM_REGISTRY_CONFIG -e HELM_REPOSITORY_CONFIG -e HELM_REPOSITORY_CACHE -e CONTAINERBASE_CACHE_DIR -w "/tmp/github/some/repo" ghcr.io/containerbase/sidecar bash -l -c "install-tool helm v3.7.2 && helm repo add stable the_stable_url && helm repo add repo1 the_repo1_url && helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable && helm dependency update ''"", + "cmd": "docker run --rm 
--name=renovate_sidecar --label=renovate_child -v "/tmp/github/some/repo":"/tmp/github/some/repo" -v "/tmp/renovate/cache":"/tmp/renovate/cache" -e HELM_EXPERIMENTAL_OCI -e HELM_REGISTRY_CONFIG -e HELM_REPOSITORY_CONFIG -e HELM_REPOSITORY_CACHE -e CONTAINERBASE_CACHE_DIR -w "/tmp/github/some/repo" ghcr.io/containerbase/sidecar bash -l -c "install-tool helm v3.7.2 && helm repo add stable the_stable_url --force-update && helm repo add repo1 the_repo1_url --force-update && helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update && helm dependency update ''"", "options": { "cwd": "/tmp/github/some/repo", "encoding": "utf-8", diff --git a/lib/modules/manager/helmv3/artifacts.spec.ts b/lib/modules/manager/helmv3/artifacts.spec.ts index dcc9351d49a27f..2a551f1c4b6898 100644 --- a/lib/modules/manager/helmv3/artifacts.spec.ts +++ b/lib/modules/manager/helmv3/artifacts.spec.ts @@ -290,7 +290,7 @@ describe('modules/manager/helmv3/artifacts', () => { ]); expect(execSnapshots).toMatchObject([ { - cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable', + cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update', }, { cmd: "helm dependency update ''", @@ -343,7 +343,7 @@ describe('modules/manager/helmv3/artifacts', () => { ]); expect(execSnapshots).toMatchObject([ { - cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable', + cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update', options: { env: { HELM_EXPERIMENTAL_OCI: '1', @@ -411,7 +411,7 @@ describe('modules/manager/helmv3/artifacts', () => { ]); expect(execSnapshots).toMatchObject([ { - cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable', + cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update', 
options: { env: { HELM_EXPERIMENTAL_OCI: '1', @@ -486,7 +486,7 @@ describe('modules/manager/helmv3/artifacts', () => { ]); expect(execSnapshots).toMatchObject([ { - cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable', + cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update', options: { env: { HELM_EXPERIMENTAL_OCI: '1', @@ -546,7 +546,7 @@ describe('modules/manager/helmv3/artifacts', () => { ).toBeNull(); expect(execSnapshots).toMatchObject([ { - cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable', + cmd: 'helm repo add repo-test https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable --force-update', options: { env: { HELM_EXPERIMENTAL_OCI: '1', diff --git a/lib/modules/manager/helmv3/artifacts.ts b/lib/modules/manager/helmv3/artifacts.ts index 0228d5f9d82da3..f7d18c092efe0b 100644 --- a/lib/modules/manager/helmv3/artifacts.ts +++ b/lib/modules/manager/helmv3/artifacts.ts @@ -70,7 +70,7 @@ async function helmCommands( // add helm repos if an alias or credentials for the url are defined classicRepositories.forEach((value) => { const { username, password } = value.hostRule; - const parameters = [`${value.repository}`]; + const parameters = [`${value.repository}`, `--force-update`]; const isPrivateRepo = username && password; if (isPrivateRepo) { parameters.push(`--username ${quote(username)}`); diff --git a/lib/modules/manager/nuget/extract.spec.ts b/lib/modules/manager/nuget/extract.spec.ts index c60eec0e467813..69c2cb01e1b005 100644 --- a/lib/modules/manager/nuget/extract.spec.ts +++ b/lib/modules/manager/nuget/extract.spec.ts @@ -1,3 +1,4 @@ +import { codeBlock } from 'common-tags'; import upath from 'upath'; import { Fixtures } from '../../../../test/fixtures'; import { GlobalConfig } from '../../../config/global'; @@ -67,6 +68,59 @@ describe('modules/manager/nuget/extract', () => { 
expect(res?.deps).toHaveLength(17); }); + it('extracts ContainerBaseImage', async () => { + const contents = codeBlock` + + + 0.1.0 + mcr.microsoft.com/dotnet/runtime:7.0.10 + + `; + + expect(await extractPackageFile(contents, contents, config)).toEqual({ + deps: [ + { + autoReplaceStringTemplate: + '{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}', + depName: 'mcr.microsoft.com/dotnet/runtime', + depType: 'docker', + datasource: 'docker', + currentValue: '7.0.10', + replaceString: 'mcr.microsoft.com/dotnet/runtime:7.0.10', + }, + ], + packageFileVersion: '0.1.0', + }); + }); + + it('extracts ContainerBaseImage with pinned digest', async () => { + const contents = codeBlock` + + + 0.1.0 + mcr.microsoft.com/dotnet/runtime:7.0.10@sha256:181067029e094856691ee1ce3782ea3bd3fda01bb5b6d19411d0f673cab1ab19 + + `; + + expect(await extractPackageFile(contents, contents, config)).toEqual({ + deps: [ + { + autoReplaceStringTemplate: + '{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}', + depName: 'mcr.microsoft.com/dotnet/runtime', + depType: 'docker', + datasource: 'docker', + currentValue: '7.0.10', + currentDigest: + 'sha256:181067029e094856691ee1ce3782ea3bd3fda01bb5b6d19411d0f673cab1ab19', + replaceString: + 'mcr.microsoft.com/dotnet/runtime:7.0.10@sha256:181067029e094856691ee1ce3782ea3bd3fda01bb5b6d19411d0f673cab1ab19', + }, + ], + packageFileVersion: '0.1.0', + }); + }); + it('considers NuGet.config', async () => { const packageFile = 'with-config-file/with-config-file.csproj'; const contents = Fixtures.get(packageFile); diff --git a/lib/modules/manager/nuget/extract.ts b/lib/modules/manager/nuget/extract.ts index 4cd519bf63833e..bded64973137c0 100644 --- a/lib/modules/manager/nuget/extract.ts +++ b/lib/modules/manager/nuget/extract.ts @@ -5,14 +5,15 @@ import { getSiblingFileName, localPathExists } from '../../../util/fs'; import { hasKey } from '../../../util/object'; import { regEx } from 
'../../../util/regex'; import { NugetDatasource } from '../../datasource/nuget'; +import { getDep } from '../dockerfile/extract'; import type { ExtractConfig, PackageDependency, PackageFileContent, } from '../types'; import { extractMsbuildGlobalManifest } from './extract/global-manifest'; -import type { DotnetToolsManifest } from './types'; -import { findVersion, getConfiguredRegistries } from './util'; +import type { DotnetToolsManifest, NugetPackageDependency } from './types'; +import { applyRegistries, findVersion, getConfiguredRegistries } from './util'; /** * https://docs.microsoft.com/en-us/nuget/concepts/package-versioning @@ -39,13 +40,21 @@ function isXmlElem(node: XmlNode): boolean { return hasKey('name', node); } -function extractDepsFromXml(xmlNode: XmlDocument): PackageDependency[] { - const results: PackageDependency[] = []; +function extractDepsFromXml(xmlNode: XmlDocument): NugetPackageDependency[] { + const results: NugetPackageDependency[] = []; const todo: XmlElement[] = [xmlNode]; while (todo.length) { const child = todo.pop()!; const { name, attr } = child; + if (name === 'ContainerBaseImage') { + const { depName, ...dep } = getDep(child.val, true); + + if (is.nonEmptyStringAndNotWhitespace(depName)) { + results.push({ ...dep, depName, depType: 'docker' }); + } + } + if (elemNames.has(name)) { const depName = attr?.Include || attr?.Update; const version = @@ -79,9 +88,6 @@ export async function extractPackageFile( logger.trace(`nuget.extractPackageFile(${packageFile})`); const registries = await getConfiguredRegistries(packageFile); - const registryUrls = registries - ? registries.map((registry) => registry.url) - : undefined; if (packageFile.endsWith('dotnet-tools.json')) { const deps: PackageDependency[] = []; @@ -102,15 +108,14 @@ export async function extractPackageFile( for (const depName of Object.keys(manifest.tools ?? 
{})) { const tool = manifest.tools[depName]; const currentValue = tool.version; - const dep: PackageDependency = { + const dep: NugetPackageDependency = { depType: 'nuget', depName, currentValue, datasource: NugetDatasource.id, }; - if (registryUrls) { - dep.registryUrls = registryUrls; - } + + applyRegistries(dep, registries); deps.push(dep); } @@ -119,17 +124,16 @@ export async function extractPackageFile( } if (packageFile.endsWith('global.json')) { - return extractMsbuildGlobalManifest(content, packageFile); + return extractMsbuildGlobalManifest(content, packageFile, registries); } let deps: PackageDependency[] = []; let packageFileVersion: string | undefined; try { const parsedXml = new XmlDocument(content); - deps = extractDepsFromXml(parsedXml).map((dep) => ({ - ...dep, - ...(registryUrls && { registryUrls }), - })); + deps = extractDepsFromXml(parsedXml).map((dep) => + applyRegistries(dep, registries), + ); packageFileVersion = findVersion(parsedXml)?.val; } catch (err) { logger.debug({ err, packageFile }, `Failed to parse XML`); diff --git a/lib/modules/manager/nuget/extract/global-manifest.ts b/lib/modules/manager/nuget/extract/global-manifest.ts index 45a61aba1f1e20..45c66a29e0682c 100644 --- a/lib/modules/manager/nuget/extract/global-manifest.ts +++ b/lib/modules/manager/nuget/extract/global-manifest.ts @@ -2,11 +2,17 @@ import { logger } from '../../../../logger'; import { DotnetVersionDatasource } from '../../../datasource/dotnet-version'; import { NugetDatasource } from '../../../datasource/nuget'; import type { PackageDependency, PackageFileContent } from '../../types'; -import type { MsbuildGlobalManifest } from '../types'; +import type { + MsbuildGlobalManifest, + NugetPackageDependency, + Registry, +} from '../types'; +import { applyRegistries } from '../util'; export function extractMsbuildGlobalManifest( content: string, packageFile: string, + registries: Registry[] | undefined, ): PackageFileContent | null { const deps: PackageDependency[] = 
[]; let manifest: MsbuildGlobalManifest; @@ -35,13 +41,15 @@ export function extractMsbuildGlobalManifest( if (manifest['msbuild-sdks']) { for (const depName of Object.keys(manifest['msbuild-sdks'])) { const currentValue = manifest['msbuild-sdks'][depName]; - const dep: PackageDependency = { + const dep: NugetPackageDependency = { depType: 'msbuild-sdk', depName, currentValue, datasource: NugetDatasource.id, }; + applyRegistries(dep, registries); + deps.push(dep); } } diff --git a/lib/modules/manager/nuget/index.ts b/lib/modules/manager/nuget/index.ts index 88e1ccdc01066b..2f439599b7f6c1 100644 --- a/lib/modules/manager/nuget/index.ts +++ b/lib/modules/manager/nuget/index.ts @@ -1,4 +1,5 @@ import type { Category } from '../../../constants'; +import { DockerDatasource } from '../../datasource/docker'; import { DotnetVersionDatasource } from '../../datasource/dotnet-version'; import { NugetDatasource } from '../../datasource/nuget'; @@ -18,6 +19,7 @@ export const defaultConfig = { export const categories: Category[] = ['dotnet']; export const supportedDatasources = [ + DockerDatasource.id, DotnetVersionDatasource.id, NugetDatasource.id, ]; diff --git a/lib/modules/manager/nuget/types.ts b/lib/modules/manager/nuget/types.ts index 30b5d63547b1d4..0eef41beaf9885 100644 --- a/lib/modules/manager/nuget/types.ts +++ b/lib/modules/manager/nuget/types.ts @@ -1,3 +1,5 @@ +import type { PackageDependency } from '../types'; + export interface DotnetToolsManifest { readonly version: number; readonly isRoot: boolean; @@ -41,3 +43,7 @@ export interface PackageSourceMap { readonly name: string; readonly patterns: string[]; } + +export interface NugetPackageDependency extends PackageDependency { + depName: string; +} diff --git a/lib/modules/manager/nuget/util.spec.ts b/lib/modules/manager/nuget/util.spec.ts index 80655a1ecd94dc..13097f69016a5f 100644 --- a/lib/modules/manager/nuget/util.spec.ts +++ b/lib/modules/manager/nuget/util.spec.ts @@ -1,8 +1,9 @@ import { codeBlock } from 
'common-tags'; import { XmlDocument } from 'xmldoc'; import { fs } from '../../../../test/util'; +import type { Registry } from './types'; import { bumpPackageVersion } from './update'; -import { findVersion, getConfiguredRegistries } from './util'; +import { applyRegistries, findVersion, getConfiguredRegistries } from './util'; jest.mock('../../../util/fs'); @@ -106,4 +107,121 @@ describe('modules/manager/nuget/util', () => { ]); }); }); + + describe('applyRegistries', () => { + it('applies registry to package name via source mapping', () => { + const registries: Registry[] = [ + { + name: 'nuget.org', + url: 'https://api.nuget.org/v3/index.json', + sourceMappedPackagePatterns: ['*'], + }, + { + name: 'contoso.com', + url: 'https://contoso.com/packages/', + sourceMappedPackagePatterns: ['Contoso.*', 'NuGet.Common'], + }, + { + name: 'contoso.test', + url: 'https://contoso.test/packages/', + sourceMappedPackagePatterns: [ + 'Contoso.*', + 'Contoso.Test.*', + 'NuGet.*', + 'NuGet.Common*', + ], + }, + ]; + + expect( + applyRegistries({ depName: 'Newtonsoft.Json' }, registries), + ).toEqual({ + depName: 'Newtonsoft.Json', + registryUrls: ['https://api.nuget.org/v3/index.json'], + }); + + expect( + applyRegistries({ depName: 'Contoso.SomePackage' }, registries), + ).toEqual({ + depName: 'Contoso.SomePackage', + registryUrls: [ + 'https://contoso.com/packages/', + 'https://contoso.test/packages/', + ], + }); + + expect(applyRegistries({ depName: 'NuGet.Some' }, registries)).toEqual({ + depName: 'NuGet.Some', + registryUrls: ['https://contoso.test/packages/'], + }); + + expect( + applyRegistries({ depName: 'Contoso.Test.SomePackage' }, registries), + ).toEqual({ + depName: 'Contoso.Test.SomePackage', + registryUrls: ['https://contoso.test/packages/'], + }); + }); + + it('applies registry to package name case insensitive', () => { + const registries: Registry[] = [ + { + name: 'nuget.org', + url: 'https://api.nuget.org/v3/index.json', + sourceMappedPackagePatterns: ['*'], 
+ }, + { + name: 'contoso.com', + url: 'https://contoso.com/packages/', + sourceMappedPackagePatterns: ['Contoso.*', 'Nuget.common'], + }, + ]; + + expect(applyRegistries({ depName: 'NuGet.Common' }, registries)).toEqual({ + depName: 'NuGet.Common', + registryUrls: ['https://contoso.com/packages/'], + }); + }); + + it('applies all registries to package name', () => { + const registries: Registry[] = [ + { + name: 'nuget.org', + url: 'https://api.nuget.org/v3/index.json', + }, + { + name: 'contoso.com', + url: 'https://contoso.com/packages/', + }, + ]; + + expect( + applyRegistries( + { + depName: 'Newtonsoft.Json', + }, + registries, + ), + ).toEqual({ + depName: 'Newtonsoft.Json', + registryUrls: [ + 'https://api.nuget.org/v3/index.json', + 'https://contoso.com/packages/', + ], + }); + }); + + it('applies nothing', () => { + expect( + applyRegistries( + { + depName: 'Newtonsoft.Json', + }, + undefined, + ), + ).toEqual({ + depName: 'Newtonsoft.Json', + }); + }); + }); }); diff --git a/lib/modules/manager/nuget/util.ts b/lib/modules/manager/nuget/util.ts index 8511c91b07f0d3..54cf05773aaf97 100644 --- a/lib/modules/manager/nuget/util.ts +++ b/lib/modules/manager/nuget/util.ts @@ -2,9 +2,10 @@ import upath from 'upath'; import { XmlDocument, XmlElement } from 'xmldoc'; import { logger } from '../../../logger'; import { findUpLocal, readLocalFile } from '../../../util/fs'; +import { minimatch } from '../../../util/minimatch'; import { regEx } from '../../../util/regex'; import { nugetOrg } from '../../datasource/nuget'; -import type { Registry } from './types'; +import type { NugetPackageDependency, Registry } from './types'; export async function readFileAsXmlDocument( file: string, @@ -122,3 +123,61 @@ export function findVersion(parsedXml: XmlDocument): XmlElement | null { } return null; } + +export function applyRegistries( + dep: NugetPackageDependency, + registries: Registry[] | undefined, +): NugetPackageDependency { + if (registries) { + if 
(!registries.some((reg) => reg.sourceMappedPackagePatterns)) { + dep.registryUrls = registries.map((reg) => reg.url); + return dep; + } + + const regs = registries.filter((r) => r.sourceMappedPackagePatterns); + const map = new Map( + regs.flatMap((r) => r.sourceMappedPackagePatterns!.map((p) => [p, []])), + ); + const depName = dep.depName; + + for (const reg of regs) { + for (const pattern of reg.sourceMappedPackagePatterns!) { + map.get(pattern)!.push(reg); + } + } + + const urls: string[] = []; + + for (const [pattern, regs] of [...map].sort(sortPatterns)) { + if (minimatch(pattern, { nocase: true }).match(depName)) { + urls.push(...regs.map((r) => r.url)); + break; + } + } + + if (urls.length) { + dep.registryUrls = urls; + } + } + return dep; +} + +/* + * Sorts patterns by specificity: + * 1. Exact match patterns + * 2. Wildcard match patterns + */ +function sortPatterns( + a: [string, Registry[]], + b: [string, Registry[]], +): number { + if (a[0].endsWith('*') && !b[0].endsWith('*')) { + return 1; + } + + if (!a[0].endsWith('*') && b[0].endsWith('*')) { + return -1; + } + + return a[0].localeCompare(b[0]) * -1; +} diff --git a/lib/modules/manager/ocb/extract.spec.ts b/lib/modules/manager/ocb/extract.spec.ts new file mode 100644 index 00000000000000..99fd13ec2a1f71 --- /dev/null +++ b/lib/modules/manager/ocb/extract.spec.ts @@ -0,0 +1,74 @@ +import { codeBlock } from 'common-tags'; +import { extractPackageFile } from '.'; + +describe('modules/manager/ocb/extract', () => { + describe('extractPackageFile', () => { + it('run successfully with full example', () => { + const content = codeBlock` + dist: + name: otelcol-custom + description: Local OpenTelemetry Collector binary + module: github.com/open-telemetry/opentelemetry-collector + otelcol_version: 0.40.0 + version: 1.0.0 + output_path: /tmp/dist + exporters: + - gomod: github.com/open-telemetry/opentelemetry-collector-contrib/exporter/alibabacloudlogserviceexporter v0.86.0 + - gomod: 
go.opentelemetry.io/collector/exporter/debugexporter v0.86.0 + + receivers: + - gomod: go.opentelemetry.io/collector/receiver/otlpreceiver v0.86.0 + + processors: + - gomod: go.opentelemetry.io/collector/processor/batchprocessor v0.86.0 + `; + const result = extractPackageFile(content, 'builder-config.yaml'); + expect(result?.deps).toEqual([ + { + currentValue: '0.40.0', + datasource: 'go', + depName: 'github.com/open-telemetry/opentelemetry-collector', + depType: 'collector', + extractVersion: '^v(?\\S+)', + }, + { + currentValue: 'v0.86.0', + datasource: 'go', + depName: + 'github.com/open-telemetry/opentelemetry-collector-contrib/exporter/alibabacloudlogserviceexporter', + depType: 'exports', + }, + { + currentValue: 'v0.86.0', + datasource: 'go', + depName: 'go.opentelemetry.io/collector/exporter/debugexporter', + depType: 'exports', + }, + { + currentValue: 'v0.86.0', + datasource: 'go', + depName: 'go.opentelemetry.io/collector/processor/batchprocessor', + depType: 'processors', + }, + ]); + }); + + it('return null for unknown content', () => { + expect(extractPackageFile('foo', 'bar.yaml')).toBeNull(); + }); + + it('return null for content which is not YAML', () => { + expect( + extractPackageFile( + codeBlock` + myObject: + aString: value + --- + foo: bar + `, + 'bar.yaml', + ), + ).toBeNull(); + }); + }); +}); diff --git a/lib/modules/manager/ocb/extract.ts b/lib/modules/manager/ocb/extract.ts new file mode 100644 index 00000000000000..99416851bce64f --- /dev/null +++ b/lib/modules/manager/ocb/extract.ts @@ -0,0 +1,81 @@ +import is from '@sindresorhus/is'; +import { logger } from '../../../logger'; +import { regEx } from '../../../util/regex'; +import { parseSingleYaml } from '../../../util/yaml'; +import { GoDatasource } from '../../datasource/go'; +import type { + ExtractConfig, + PackageDependency, + PackageFileContent, +} from '../types'; +import { type Module, type OCBConfig, OCBConfigSchema } from './schema'; + +export function extractPackageFile( + 
content: string, + packageFile: string, + _config?: ExtractConfig, +): PackageFileContent | null { + let definition: OCBConfig | null = null; + try { + const yaml = parseSingleYaml(content); + const parsed = OCBConfigSchema.safeParse(yaml); + if (!parsed.success) { + logger.trace( + { packageFile, error: parsed.error }, + 'Failed to parse OCB schema', + ); + return null; + } + + definition = parsed.data; + } catch (error) { + logger.debug( + { packageFile, error }, + 'OCB manager failed to parse file as YAML', + ); + return null; + } + + const deps: PackageDependency[] = []; + if (definition.dist.module && definition.dist.otelcol_version) { + deps.push({ + datasource: GoDatasource.id, + depType: 'collector', + depName: definition.dist.module, + currentValue: definition.dist.otelcol_version, + extractVersion: '^v(?\\S+)', + }); + } + + deps.push(...processModule(definition.connectors, 'connectors')); + deps.push(...processModule(definition.exporters, 'exports')); + deps.push(...processModule(definition.extension, 'extensions')); + deps.push(...processModule(definition.processors, 'processors')); + + return { + packageFileVersion: definition.dist.version, + deps, + }; +} + +export function processModule( + module: Module, + depType: string, +): PackageDependency[] { + const deps: PackageDependency[] = []; + if (is.nullOrUndefined(module)) { + return deps; + } + + for (const element of module) { + const [depName, currentValue] = element.gomod.trim().split(regEx(/\s+/)); + deps.push({ + datasource: GoDatasource.id, + depType, + depName, + currentValue, + }); + } + + return deps; +} diff --git a/lib/modules/manager/ocb/index.ts b/lib/modules/manager/ocb/index.ts new file mode 100644 index 00000000000000..013a7f62965a02 --- /dev/null +++ b/lib/modules/manager/ocb/index.ts @@ -0,0 +1,13 @@ +import type { Category } from '../../../constants'; +import { GoDatasource } from '../../datasource/go'; + +export { extractPackageFile } from './extract'; +export { bumpPackageVersion 
} from './update'; + +export const supportedDatasources = [GoDatasource.id]; + +export const categories: Category[] = ['golang']; + +export const defaultConfig = { + fileMatch: [], +}; diff --git a/lib/modules/manager/ocb/readme.md b/lib/modules/manager/ocb/readme.md new file mode 100644 index 00000000000000..2ca2119d6aedd0 --- /dev/null +++ b/lib/modules/manager/ocb/readme.md @@ -0,0 +1,23 @@ +Renovate uses this manager to update dependencies defined in the build definitions for the [OpenTelemetry Collector Builder (ocb)](https://github.com/open-telemetry/opentelemetry-collector/tree/main/cmd/builder). + +By default, the `ocb` manager has no `fileMatch` patterns. +This means you must set a `fileMatch` pattern for the `ocb` manager, in order for Renovate to update your `ocb` files. +Here's a configuration example: + +```json title="If your builder files are named like foo-builder.yml or builder.yaml" +{ + "ocb": { + "fileMatch": ["builder.ya?ml$"] + } +} +``` + +Supported dependencies and their respective `depType`s are: + +| Name | depType | +| -------------- | ------------ | +| base collector | `collector` | +| connectors | `connectors` | +| exports | `exports` | +| extensions | `extensions` | +| processors | `processors` | diff --git a/lib/modules/manager/ocb/schema.ts b/lib/modules/manager/ocb/schema.ts new file mode 100644 index 00000000000000..0b8bd927b469a5 --- /dev/null +++ b/lib/modules/manager/ocb/schema.ts @@ -0,0 +1,22 @@ +import { z } from 'zod'; + +const Entry = z.object({ + gomod: z.string(), +}); + +const ModuleSchema = z.array(Entry).optional(); +export type Module = z.infer; + +export const OCBConfigSchema = z.object({ + dist: z.object({ + otelcol_version: z.string().optional(), + module: z.string().optional(), + version: z.string().optional(), + }), + extension: ModuleSchema, + exporters: ModuleSchema, + receivers: ModuleSchema, + processors: ModuleSchema, + connectors: ModuleSchema, +}); +export type OCBConfig = z.infer; diff --git 
a/lib/modules/manager/ocb/update.spec.ts b/lib/modules/manager/ocb/update.spec.ts new file mode 100644 index 00000000000000..dc1e8cda5f9463 --- /dev/null +++ b/lib/modules/manager/ocb/update.spec.ts @@ -0,0 +1,77 @@ +import { codeBlock } from 'common-tags'; +import { bumpPackageVersion } from '.'; + +describe('modules/manager/ocb/update', () => { + describe('bumpPackageVersion()', () => { + it('increments with all fields', () => { + const content = codeBlock` + dist: + name: otelcol-custom + description: Local OpenTelemetry Collector binary + module: github.com/open-telemetry/opentelemetry-collector + otelcol_version: 0.40.0 + version: 1.0.0 + output_path: /tmp/dist + `; + const expected = content.replace('1.0.0', '1.0.1'); + + const { bumpedContent } = bumpPackageVersion(content, '1.0.0', 'patch'); + expect(bumpedContent).toEqual(expected); + }); + + it('increments with double quotes', () => { + const content = codeBlock` + dist: + version: "1.0.0" + `; + const expected = content.replace('1.0.0', '1.0.1'); + + const { bumpedContent } = bumpPackageVersion(content, '1.0.0', 'patch'); + expect(bumpedContent).toEqual(expected); + }); + + it('increments with single quotes', () => { + const content = codeBlock` + dist: + version: '1.0.0' + `; + const expected = content.replace('1.0.0', '1.0.1'); + + const { bumpedContent } = bumpPackageVersion(content, '1.0.0', 'patch'); + expect(bumpedContent).toEqual(expected); + }); + + it('no ops', () => { + const content = codeBlock` + dist: + version: '0.0.2' + `; + const { bumpedContent } = bumpPackageVersion(content, '0.0.1', 'patch'); + expect(bumpedContent).toEqual(content); + }); + + it('updates', () => { + const content = codeBlock` + dist: + version: '0.0.2' + `; + const { bumpedContent } = bumpPackageVersion(content, '0.0.1', 'minor'); + const expected = content.replace('0.0.2', '0.1.0'); + expect(bumpedContent).toEqual(expected); + }); + + it('returns content if bumping errors', () => { + const content = codeBlock` + 
dist: + version: '1.0.0' + `; + const { bumpedContent } = bumpPackageVersion( + content, + '0.0.2', + // @ts-expect-error supplying a wrong parameter to trigger an exception + true, + ); + expect(bumpedContent).toEqual(content); + }); + }); +}); diff --git a/lib/modules/manager/ocb/update.ts b/lib/modules/manager/ocb/update.ts new file mode 100644 index 00000000000000..7e893c699b0f03 --- /dev/null +++ b/lib/modules/manager/ocb/update.ts @@ -0,0 +1,47 @@ +import { type ReleaseType, inc } from 'semver'; +import { logger } from '../../../logger'; +import { regEx } from '../../../util/regex'; +import type { BumpPackageVersionResult } from '../types'; + +export function bumpPackageVersion( + content: string, + currentValue: string, + bumpVersion: ReleaseType, +): BumpPackageVersionResult { + logger.debug( + { bumpVersion, currentValue }, + 'Checking if we should bump OCB version', + ); + + let bumpedContent = content; + try { + const newProjectVersion = inc(currentValue, bumpVersion); + if (!newProjectVersion) { + throw new Error('semver inc failed'); + } + + logger.debug(`newProjectVersion: ${newProjectVersion}`); + bumpedContent = content.replace( + regEx(/\b(?version:\s+["']?)(?[^'"\s]*)/), + `$${newProjectVersion}`, + ); + + if (bumpedContent === content) { + logger.debug('Version was already bumped'); + } else { + logger.debug('Bumped OCB version'); + } + } catch (err) { + logger.warn( + { + content, + currentValue, + bumpVersion, + manager: 'ocb', + }, + 'Failed to bumpVersion', + ); + } + + return { bumpedContent }; +} diff --git a/lib/modules/manager/pep621/extract.spec.ts b/lib/modules/manager/pep621/extract.spec.ts index 31b6edbb20d580..4911bc35b732ad 100644 --- a/lib/modules/manager/pep621/extract.spec.ts +++ b/lib/modules/manager/pep621/extract.spec.ts @@ -281,6 +281,13 @@ describe('modules/manager/pep621/extract', () => { depType: 'project.dependencies', packageName: 'requests', }, + { + datasource: 'pypi', + depName: 'hatchling', + depType: 
'build-system.requires', + packageName: 'hatchling', + skipReason: 'unspecified-version', + }, { currentValue: '==6.5', datasource: 'pypi', @@ -311,5 +318,55 @@ describe('modules/manager/pep621/extract', () => { }, ]); }); + + it('should extract project version', () => { + const content = codeBlock` + [project] + name = "test" + version = "0.0.2" + dependencies = [ "requests==2.30.0" ] + `; + + const res = extractPackageFile(content, 'pyproject.toml'); + expect(res?.packageFileVersion).toBe('0.0.2'); + }); + + it('should extract dependencies from build-system.requires', function () { + const content = codeBlock` + [build-system] + requires = ["hatchling==1.18.0", "setuptools==69.0.3"] + build-backend = "hatchling.build" + + [project] + name = "test" + version = "0.0.2" + dependencies = [ "requests==2.30.0" ] + `; + const result = extractPackageFile(content, 'pyproject.toml'); + + expect(result?.deps).toEqual([ + { + currentValue: '==2.30.0', + datasource: 'pypi', + depName: 'requests', + depType: 'project.dependencies', + packageName: 'requests', + }, + { + currentValue: '==1.18.0', + datasource: 'pypi', + depName: 'hatchling', + depType: 'build-system.requires', + packageName: 'hatchling', + }, + { + currentValue: '==69.0.3', + datasource: 'pypi', + depName: 'setuptools', + depType: 'build-system.requires', + packageName: 'setuptools', + }, + ]); + }); }); }); diff --git a/lib/modules/manager/pep621/extract.ts b/lib/modules/manager/pep621/extract.ts index f5c5a607f64574..66648663dcddaf 100644 --- a/lib/modules/manager/pep621/extract.ts +++ b/lib/modules/manager/pep621/extract.ts @@ -26,6 +26,8 @@ export function extractPackageFile( if (is.nullOrUndefined(def)) { return null; } + + const packageFileVersion = def.project?.version; const pythonConstraint = def.project?.['requires-python']; const extractedConstraints = is.nonEmptyString(pythonConstraint) ? 
{ extractedConstraints: { python: pythonConstraint } } @@ -41,6 +43,12 @@ export function extractPackageFile( def.project?.['optional-dependencies'], ), ); + deps.push( + ...parseDependencyList( + depTypes.buildSystemRequires, + def['build-system']?.requires, + ), + ); // process specific tool sets let processedDeps = deps; @@ -49,6 +57,6 @@ export function extractPackageFile( } return processedDeps.length - ? { ...extractedConstraints, deps: processedDeps } + ? { ...extractedConstraints, deps: processedDeps, packageFileVersion } : null; } diff --git a/lib/modules/manager/pep621/index.ts b/lib/modules/manager/pep621/index.ts index 1b4326edb1d24b..d71206781f7f95 100644 --- a/lib/modules/manager/pep621/index.ts +++ b/lib/modules/manager/pep621/index.ts @@ -1,5 +1,6 @@ import type { Category } from '../../../constants'; import { PypiDatasource } from '../../datasource/pypi'; +export { bumpPackageVersion } from './update'; export { extractPackageFile } from './extract'; export { updateArtifacts } from './artifacts'; diff --git a/lib/modules/manager/pep621/readme.md b/lib/modules/manager/pep621/readme.md index 344e3948559e4e..3d3009e5b55856 100644 --- a/lib/modules/manager/pep621/readme.md +++ b/lib/modules/manager/pep621/readme.md @@ -9,5 +9,6 @@ Available `depType`s: - `project.dependencies` - `project.optional-dependencies` +- `build-system.requires` - `tool.pdm.dev-dependencies` - `tool.hatch.envs.` diff --git a/lib/modules/manager/pep621/schema.ts b/lib/modules/manager/pep621/schema.ts index dbf9e1f4f2345b..212a1301c1b257 100644 --- a/lib/modules/manager/pep621/schema.ts +++ b/lib/modules/manager/pep621/schema.ts @@ -10,11 +10,17 @@ const DependencyRecordSchema = z export const PyProjectSchema = z.object({ project: z .object({ + version: z.string().optional().catch(undefined), 'requires-python': z.string().optional(), dependencies: DependencyListSchema, 'optional-dependencies': DependencyRecordSchema, }) .optional(), + 'build-system': z + .object({ + requires: 
DependencyListSchema, + }) + .optional(), tool: z .object({ pdm: z diff --git a/lib/modules/manager/pep621/update.spec.ts b/lib/modules/manager/pep621/update.spec.ts new file mode 100644 index 00000000000000..5b33267b330dc6 --- /dev/null +++ b/lib/modules/manager/pep621/update.spec.ts @@ -0,0 +1,51 @@ +import { codeBlock } from 'common-tags'; +import * as projectUpdater from '.'; + +describe('modules/manager/pep621/update', () => { + describe('bumpPackageVersion()', () => { + const content = codeBlock` + [project] + name = "test" + version = "0.0.2" + description = "test" + `; + + it('increments', () => { + const { bumpedContent } = projectUpdater.bumpPackageVersion( + content, + '0.0.2', + 'patch', + ); + const expected = content.replace('0.0.2', '0.0.3'); + expect(bumpedContent).toEqual(expected); + }); + + it('no ops', () => { + const { bumpedContent } = projectUpdater.bumpPackageVersion( + content, + '0.0.1', + 'patch', + ); + expect(bumpedContent).toEqual(content); + }); + + it('updates', () => { + const { bumpedContent } = projectUpdater.bumpPackageVersion( + content, + '0.0.1', + 'minor', + ); + const expected = content.replace('0.0.2', '0.1.0'); + expect(bumpedContent).toEqual(expected); + }); + + it('returns content if bumping errors', () => { + const { bumpedContent } = projectUpdater.bumpPackageVersion( + content, + '0.0.2', + true as any, + ); + expect(bumpedContent).toEqual(content); + }); + }); +}); diff --git a/lib/modules/manager/pep621/update.ts b/lib/modules/manager/pep621/update.ts new file mode 100644 index 00000000000000..d19bb143ed717f --- /dev/null +++ b/lib/modules/manager/pep621/update.ts @@ -0,0 +1,48 @@ +import { inc } from '@renovatebot/pep440'; +import type { ReleaseType } from 'semver'; +import { logger } from '../../../logger'; +import { regEx } from '../../../util/regex'; +import type { BumpPackageVersionResult } from '../types'; + +export function bumpPackageVersion( + content: string, + currentValue: string, + bumpVersion: 
ReleaseType, +): BumpPackageVersionResult { + logger.debug( + { bumpVersion, currentValue }, + 'Checking if we should bump pyproject.toml version', + ); + + let bumpedContent = content; + try { + const newProjectVersion = inc(currentValue, bumpVersion); + if (!newProjectVersion) { + throw new Error('pep440 inc failed'); + } + + logger.debug(`newProjectVersion: ${newProjectVersion}`); + bumpedContent = content.replace( + regEx(`^(?version[ \\t]*=[ \\t]*['"])[^'"]*`, 'm'), + `$${newProjectVersion}`, + ); + + if (bumpedContent === content) { + logger.debug('Version was already bumped'); + } else { + logger.debug('Bumped pyproject.toml version'); + } + } catch (err) { + logger.warn( + { + content, + currentValue, + bumpVersion, + manager: 'pep621', + }, + 'Failed to bumpVersion', + ); + } + + return { bumpedContent }; +} diff --git a/lib/modules/manager/pep621/utils.ts b/lib/modules/manager/pep621/utils.ts index 4420992bcb4f43..5b28541993508f 100644 --- a/lib/modules/manager/pep621/utils.ts +++ b/lib/modules/manager/pep621/utils.ts @@ -15,6 +15,7 @@ export const depTypes = { dependencies: 'project.dependencies', optionalDependencies: 'project.optional-dependencies', pdmDevDependencies: 'tool.pdm.dev-dependencies', + buildSystemRequires: 'build-system.requires', }; export function parsePEP508( diff --git a/lib/modules/manager/pip-compile/index.ts b/lib/modules/manager/pip-compile/index.ts index dac650f2bcb360..98e616010896be 100644 --- a/lib/modules/manager/pip-compile/index.ts +++ b/lib/modules/manager/pip-compile/index.ts @@ -1,4 +1,5 @@ import type { Category } from '../../../constants'; +import { GitTagsDatasource } from '../../datasource/git-tags'; import { PypiDatasource } from '../../datasource/pypi'; export { extractPackageFile } from '../pip_requirements/extract'; @@ -6,8 +7,6 @@ export { updateArtifacts } from './artifacts'; export const supportsLockFileMaintenance = true; -export const supportedDatasources = [PypiDatasource.id]; - export const defaultConfig = 
{ fileMatch: [], lockFileMaintenance: { @@ -18,3 +17,5 @@ export const defaultConfig = { }; export const categories: Category[] = ['python']; + +export const supportedDatasources = [PypiDatasource.id, GitTagsDatasource.id]; diff --git a/lib/modules/manager/poetry/__snapshots__/extract.spec.ts.snap b/lib/modules/manager/poetry/__snapshots__/extract.spec.ts.snap index fb65b2995258b2..81b6571687665d 100644 --- a/lib/modules/manager/poetry/__snapshots__/extract.spec.ts.snap +++ b/lib/modules/manager/poetry/__snapshots__/extract.spec.ts.snap @@ -345,6 +345,7 @@ exports[`modules/manager/poetry/extract extractPackageFile() extracts mixed vers }, ], "extractedConstraints": {}, + "packageFileVersion": "0.1.0", } `; @@ -358,6 +359,7 @@ exports[`modules/manager/poetry/extract extractPackageFile() extracts multiple d "depType": "dependencies", "managerData": { "nestedVersion": true, + "sourceName": undefined, }, "versioning": "poetry", }, @@ -368,6 +370,7 @@ exports[`modules/manager/poetry/extract extractPackageFile() extracts multiple d "depType": "dependencies", "managerData": { "nestedVersion": true, + "sourceName": undefined, }, "versioning": "poetry", }, @@ -425,6 +428,7 @@ exports[`modules/manager/poetry/extract extractPackageFile() extracts multiple d }, ], "extractedConstraints": {}, + "packageFileVersion": "0.1.0", } `; @@ -542,14 +546,6 @@ exports[`modules/manager/poetry/extract extractPackageFile() extracts multiple d ] `; -exports[`modules/manager/poetry/extract extractPackageFile() extracts registries 1`] = ` -[ - "https://foo.bar/simple/", - "https://bar.baz/+simple/", - "https://pypi.org/pypi/", -] -`; - exports[`modules/manager/poetry/extract extractPackageFile() handles multiple constraint dependencies 1`] = ` { "deps": [ @@ -561,6 +557,7 @@ exports[`modules/manager/poetry/extract extractPackageFile() handles multiple co }, ], "extractedConstraints": {}, + "packageFileVersion": "0.1.0", } `; @@ -595,5 +592,6 @@ exports[`modules/manager/poetry/extract 
extractPackageFile() resolves lockedVers "extractedConstraints": { "python": "^3.9", }, + "packageFileVersion": undefined, } `; diff --git a/lib/modules/manager/poetry/extract.spec.ts b/lib/modules/manager/poetry/extract.spec.ts index b0796d40434e59..62f43868f7287a 100644 --- a/lib/modules/manager/poetry/extract.spec.ts +++ b/lib/modules/manager/poetry/extract.spec.ts @@ -1,8 +1,10 @@ import { codeBlock } from 'common-tags'; import { Fixtures } from '../../../../test/fixtures'; import { fs } from '../../../../test/util'; +import { GitRefsDatasource } from '../../datasource/git-refs'; import { GithubReleasesDatasource } from '../../datasource/github-releases'; import { GithubTagsDatasource } from '../../datasource/github-tags'; +import { PypiDatasource } from '../../datasource/pypi'; import { extractPackageFile } from '.'; jest.mock('../../../util/fs'); @@ -78,29 +80,6 @@ describe('modules/manager/poetry/extract', () => { expect(res?.deps).toHaveLength(3); }); - it('extracts registries', async () => { - const res = await extractPackageFile(pyproject6toml, filename); - expect(res?.registryUrls).toMatchSnapshot(); - expect(res?.registryUrls).toHaveLength(3); - }); - - it('can parse empty registries', async () => { - const res = await extractPackageFile(pyproject7toml, filename); - expect(res?.registryUrls).toBeUndefined(); - }); - - it('can parse missing registries', async () => { - const res = await extractPackageFile(pyproject1toml, filename); - expect(res?.registryUrls).toBeUndefined(); - }); - - it('dedupes registries', async () => { - const res = await extractPackageFile(pyproject8toml, filename); - expect(res).toMatchObject({ - registryUrls: ['https://pypi.org/pypi/', 'https://bar.baz/+simple/'], - }); - }); - it('extracts mixed versioning types', async () => { const res = await extractPackageFile(pyproject9toml, filename); expect(res).toMatchSnapshot({ @@ -192,6 +171,107 @@ describe('modules/manager/poetry/extract', () => { }); }); + it('parses git dependencies 
long commit hashs on http urls', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + fastapi = {git = "https://github.com/tiangolo/fastapi.git", rev="6f5aa81c076d22e38afbe7d602db6730e28bc3cc"} + dep = "^2.0" + `; + const res = await extractPackageFile(content, filename); + expect(res?.deps).toMatchObject([ + { + depType: 'dependencies', + depName: 'fastapi', + datasource: GitRefsDatasource.id, + currentDigest: '6f5aa81c076d22e38afbe7d602db6730e28bc3cc', + replaceString: '6f5aa81c076d22e38afbe7d602db6730e28bc3cc', + packageName: 'https://github.com/tiangolo/fastapi.git', + }, + { + depType: 'dependencies', + depName: 'dep', + datasource: PypiDatasource.id, + currentValue: '^2.0', + }, + ]); + }); + + it('parses git dependencies short commit hashs on http urls', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + fastapi = {git = "https://github.com/tiangolo/fastapi.git", rev="6f5aa81"} + dep = "^2.0" + `; + const res = await extractPackageFile(content, filename); + expect(res?.deps).toMatchObject([ + { + depType: 'dependencies', + depName: 'fastapi', + datasource: GitRefsDatasource.id, + currentDigest: '6f5aa81', + replaceString: '6f5aa81', + packageName: 'https://github.com/tiangolo/fastapi.git', + }, + { + depType: 'dependencies', + depName: 'dep', + datasource: PypiDatasource.id, + currentValue: '^2.0', + }, + ]); + }); + + it('parses git dependencies long commit hashs on ssh urls', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + fastapi = {git = "git@github.com:tiangolo/fastapi.git", rev="6f5aa81c076d22e38afbe7d602db6730e28bc3cc"} + dep = "^2.0" + `; + const res = await extractPackageFile(content, filename); + expect(res?.deps).toMatchObject([ + { + depType: 'dependencies', + depName: 'fastapi', + datasource: GitRefsDatasource.id, + currentDigest: '6f5aa81c076d22e38afbe7d602db6730e28bc3cc', + replaceString: '6f5aa81c076d22e38afbe7d602db6730e28bc3cc', + packageName: 
'git@github.com:tiangolo/fastapi.git', + }, + { + depType: 'dependencies', + depName: 'dep', + datasource: PypiDatasource.id, + currentValue: '^2.0', + }, + ]); + }); + + it('parses git dependencies long commit hashs on http urls with branch marker', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + fastapi = {git = "https://github.com/tiangolo/fastapi.git", branch="develop", rev="6f5aa81c076d22e38afbe7d602db6730e28bc3cc"} + dep = "^2.0" + `; + const res = await extractPackageFile(content, filename); + expect(res?.deps).toMatchObject([ + { + depType: 'dependencies', + depName: 'fastapi', + datasource: GitRefsDatasource.id, + currentValue: 'develop', + currentDigest: '6f5aa81c076d22e38afbe7d602db6730e28bc3cc', + replaceString: '6f5aa81c076d22e38afbe7d602db6730e28bc3cc', + packageName: 'https://github.com/tiangolo/fastapi.git', + }, + { + depType: 'dependencies', + depName: 'dep', + datasource: PypiDatasource.id, + currentValue: '^2.0', + }, + ]); + }); + it('parses github dependencies tags on ssh urls', async () => { const content = codeBlock` [tool.poetry.dependencies] @@ -305,5 +385,99 @@ describe('modules/manager/poetry/extract', () => { registryUrls: null, }); }); + + describe('registry URLs', () => { + it('can parse empty registries', async () => { + const res = await extractPackageFile(pyproject7toml, filename); + expect(res?.registryUrls).toBeUndefined(); + }); + + it('can parse missing registries', async () => { + const res = await extractPackageFile(pyproject1toml, filename); + expect(res?.registryUrls).toBeUndefined(); + }); + + it('extracts registries', async () => { + const res = await extractPackageFile(pyproject6toml, filename); + expect(res?.registryUrls).toMatchObject([ + 'https://foo.bar/simple/', + 'https://bar.baz/+simple/', + 'https://pypi.org/pypi/', + ]); + }); + + it('dedupes registries', async () => { + const res = await extractPackageFile(pyproject8toml, filename); + expect(res?.registryUrls).toMatchObject([ + 
'https://pypi.org/pypi/', + 'https://bar.baz/+simple/', + ]); + }); + + it('source with priority="default" and implicit PyPI priority="primary"', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + python = "^3.11" + + [[tool.poetry.source]] + name = "foo" + url = "https://foo.bar/simple/" + priority = "default" + + [[tool.poetry.source]] + name = "PyPI" + `; + const res = await extractPackageFile(content, filename); + expect(res?.registryUrls).toMatchObject([ + 'https://foo.bar/simple/', + 'https://pypi.org/pypi/', + ]); + }); + + it('source with implicit priority and PyPI with priority="explicit"', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + python = "^3.11" + + [[tool.poetry.source]] + name = "foo" + url = "https://foo.bar/simple/" + + [[tool.poetry.source]] + name = "PyPI" + priority = "explicit" + `; + const res = await extractPackageFile(content, filename); + expect(res?.registryUrls).toMatchObject(['https://foo.bar/simple/']); + }); + + it('supports dependencies with explicit source', async () => { + const content = codeBlock` + [tool.poetry.dependencies] + attrs = "^23.1.0" + typer = { version = "^0.9.0", source = "pypi" } + requests-cache = { version = "^1.1.0", source = "artifactory" } + + [[tool.poetry.source]] + name = "artifactory" + url = "https://example.com" + priority = "explicit" + `; + const res = await extractPackageFile(content, filename); + expect(res?.deps).toMatchObject([ + { depName: 'attrs', currentValue: '^23.1.0' }, + { + depName: 'typer', + currentValue: '^0.9.0', + registryUrls: ['https://pypi.org/pypi/'], + }, + { + depName: 'requests-cache', + currentValue: '^1.1.0', + registryUrls: ['https://example.com'], + }, + ]); + }); + }); }); }); diff --git a/lib/modules/manager/poetry/index.ts b/lib/modules/manager/poetry/index.ts index ba5cdee9a8e951..82a131c8e26ea9 100644 --- a/lib/modules/manager/poetry/index.ts +++ b/lib/modules/manager/poetry/index.ts @@ -1,8 +1,10 @@ import type { 
Category } from '../../../constants'; +import { GitRefsDatasource } from '../../datasource/git-refs'; import { GithubReleasesDatasource } from '../../datasource/github-releases'; import { GithubTagsDatasource } from '../../datasource/github-tags'; import { PypiDatasource } from '../../datasource/pypi'; +export { bumpPackageVersion } from '../pep621/update'; export { extractPackageFile } from './extract'; export { updateArtifacts } from './artifacts'; export { updateLockedDependency } from './update-locked'; @@ -11,6 +13,7 @@ export const supportedDatasources = [ PypiDatasource.id, GithubTagsDatasource.id, GithubReleasesDatasource.id, + GitRefsDatasource.id, ]; export const supportsLockFileMaintenance = true; diff --git a/lib/modules/manager/poetry/schema.spec.ts b/lib/modules/manager/poetry/schema.spec.ts new file mode 100644 index 00000000000000..373aa68d44c8cb --- /dev/null +++ b/lib/modules/manager/poetry/schema.spec.ts @@ -0,0 +1,371 @@ +import { PoetrySectionSchema, PoetrySources } from './schema'; + +describe('modules/manager/poetry/schema', () => { + it('parses project version', () => { + expect( + PoetrySectionSchema.parse({ version: '1.2.3' }).packageFileVersion, + ).toBe('1.2.3'); + + expect( + PoetrySectionSchema.parse({ version: { some: 'value' } }) + .packageFileVersion, + ).toBeUndefined(); + }); + + describe('PoetrySources', () => { + it('parses default values', () => { + expect(PoetrySources.parse([])).toBeEmptyArray(); + expect(PoetrySources.parse([null])).toBeEmptyArray(); + }); + + it('parses unordered sources', () => { + expect( + PoetrySources.parse([ + { name: 'missing-url' }, + { name: 'missing-priority', url: 'https://some-source.com' }, + { + name: 'foo-Secondary', + priority: 'secondary', + url: 'https://some-vcs.com/secondary', + }, + { name: 'PyPI', priority: 'primary' }, + { + name: 'foo-Primary', + priority: 'primary', + url: 'https://some-vcs.com/primary', + }, + { + name: 'foo-Default', + priority: 'default', + url: 
'https://some-vcs.com/default', + }, + { + name: 'foo-Explicit', + priority: 'explicit', + url: 'https://some-vcs.com/explicit', + }, + { + name: 'foo-Supplemental', + priority: 'supplemental', + url: 'https://some-vcs.com/supplemental', + }, + ]), + ).toEqual([ + { + name: 'foo-default', + priority: 'default', + url: 'https://some-vcs.com/default', + }, + { + name: 'missing-priority', + priority: 'primary', + url: 'https://some-source.com', + }, + { + name: 'pypi', + priority: 'primary', + url: 'https://pypi.org/pypi/', + }, + { + name: 'foo-primary', + priority: 'primary', + url: 'https://some-vcs.com/primary', + }, + { + name: 'foo-secondary', + priority: 'secondary', + url: 'https://some-vcs.com/secondary', + }, + { + name: 'foo-supplemental', + priority: 'supplemental', + url: 'https://some-vcs.com/supplemental', + }, + { + name: 'foo-explicit', + priority: 'explicit', + url: 'https://some-vcs.com/explicit', + }, + ]); + }); + + it('implicit use of PyPI source', () => { + expect( + PoetrySources.parse([ + { + name: 'foo-Supplemental', + priority: 'supplemental', + url: 'https://some-vcs.com/supplemental', + }, + ]), + ).toEqual([ + { + name: 'pypi', + priority: 'default', + url: 'https://pypi.org/pypi/', + }, + { + name: 'foo-supplemental', + priority: 'supplemental', + url: 'https://some-vcs.com/supplemental', + }, + ]); + + expect( + PoetrySources.parse([ + { + name: 'foo-Primary', + priority: 'primary', + url: 'https://some-vcs.com/primary', + }, + ]), + ).toEqual([ + { + name: 'foo-primary', + priority: 'primary', + url: 'https://some-vcs.com/primary', + }, + { + name: 'pypi', + priority: 'secondary', + url: 'https://pypi.org/pypi/', + }, + ]); + }); + + it('source with priority="default"', () => { + expect( + PoetrySources.parse([ + { + name: 'foo', + priority: 'default', + url: 'https://foo.bar/simple/', + }, + ]), + ).toEqual([ + { + name: 'foo', + priority: 'default', + url: 'https://foo.bar/simple/', + }, + ]); + }); + + it('PyPI source with 
priority="default"', () => { + expect( + PoetrySources.parse([ + { + name: 'PyPI', + priority: 'default', + }, + ]), + ).toEqual([ + { + name: 'pypi', + priority: 'default', + url: 'https://pypi.org/pypi/', + }, + ]); + }); + + it('source with priority="primary"', () => { + expect( + PoetrySources.parse([ + { + name: 'foo', + priority: 'primary', + url: 'https://foo.bar/simple/', + }, + ]), + ).toEqual([ + { + name: 'foo', + priority: 'primary', + url: 'https://foo.bar/simple/', + }, + { + name: 'pypi', + priority: 'secondary', + url: 'https://pypi.org/pypi/', + }, + ]); + }); + + it('source with implicit priority="primary"', () => { + expect( + PoetrySources.parse([ + { + name: 'foo', + url: 'https://foo.bar/simple/', + }, + ]), + ).toEqual([ + { + name: 'foo', + priority: 'primary', + url: 'https://foo.bar/simple/', + }, + { + name: 'pypi', + priority: 'secondary', + url: 'https://pypi.org/pypi/', + }, + ]); + }); + + it('sources with priority="secondary"', () => { + expect( + PoetrySources.parse([ + { + name: 'foo', + priority: 'secondary', + url: 'https://foo.bar/simple/', + }, + { + name: 'bar', + priority: 'secondary', + url: 'https://bar.baz/simple/', + }, + ]), + ).toEqual([ + { + name: 'pypi', + priority: 'default', + url: 'https://pypi.org/pypi/', + }, + { + name: 'foo', + priority: 'secondary', + url: 'https://foo.bar/simple/', + }, + { + name: 'bar', + priority: 'secondary', + url: 'https://bar.baz/simple/', + }, + ]); + }); + + it('unordered sources and implicit PyPI priority="primary"', () => { + expect( + PoetrySources.parse([ + { + name: 'foo', + priority: 'secondary', + url: 'https://foo.bar/simple/', + }, + { + name: 'bar', + url: 'https://bar.baz/simple/', + }, + { + name: 'PyPI', + }, + { + name: 'baz', + url: 'https://baz.bar/simple/', + }, + ]), + ).toEqual([ + { + name: 'bar', + priority: 'primary', + url: 'https://bar.baz/simple/', + }, + { + name: 'pypi', + priority: 'primary', + url: 'https://pypi.org/pypi/', + }, + { + name: 'baz', + 
priority: 'primary', + url: 'https://baz.bar/simple/', + }, + { + name: 'foo', + priority: 'secondary', + url: 'https://foo.bar/simple/', + }, + ]); + }); + + it('unordered sources with implicit PyPI priority="secondary"', () => { + expect( + PoetrySources.parse([ + { + name: 'foo', + priority: 'secondary', + url: 'https://foo.bar/simple/', + }, + { + name: 'bar', + url: 'https://bar.baz/simple/', + }, + ]), + ).toEqual([ + { + name: 'bar', + priority: 'primary', + url: 'https://bar.baz/simple/', + }, + { + name: 'foo', + priority: 'secondary', + url: 'https://foo.bar/simple/', + }, + { + name: 'pypi', + priority: 'secondary', + url: 'https://pypi.org/pypi/', + }, + ]); + }); + + it('source with priority="supplemental"', () => { + expect( + PoetrySources.parse([ + { + name: 'supplemental', + priority: 'supplemental', + url: 'https://supplemental.com/simple/', + }, + ]), + ).toEqual([ + { + name: 'pypi', + priority: 'default', + url: 'https://pypi.org/pypi/', + }, + { + name: 'supplemental', + priority: 'supplemental', + url: 'https://supplemental.com/simple/', + }, + ]); + }); + + it('source with priority="explicit"', () => { + expect( + PoetrySources.parse([ + { + name: 'explicit', + priority: 'explicit', + url: 'https://explicit.com/simple/', + }, + ]), + ).toEqual([ + { + name: 'pypi', + priority: 'default', + url: 'https://pypi.org/pypi/', + }, + { + name: 'explicit', + priority: 'explicit', + url: 'https://explicit.com/simple/', + }, + ]); + }); + }); +}); diff --git a/lib/modules/manager/poetry/schema.ts b/lib/modules/manager/poetry/schema.ts index afe3f8ec6db36d..621f844b7c5a48 100644 --- a/lib/modules/manager/poetry/schema.ts +++ b/lib/modules/manager/poetry/schema.ts @@ -1,4 +1,5 @@ import { ZodEffects, ZodType, ZodTypeDef, z } from 'zod'; +import { logger } from '../../../logger'; import { parseGitUrl } from '../../../util/git/url'; import { regEx } from '../../../util/regex'; import { LooseArray, LooseRecord, Toml } from '../../../util/schema-utils'; @@ 
-6,6 +7,7 @@ import { uniq } from '../../../util/uniq'; import { GitRefsDatasource } from '../../datasource/git-refs'; import { GithubTagsDatasource } from '../../datasource/github-tags'; import { PypiDatasource } from '../../datasource/pypi'; +import * as gitVersioning from '../../versioning/git'; import * as pep440Versioning from '../../versioning/pep440'; import * as poetryVersioning from '../../versioning/poetry'; import { dependencyPattern } from '../pip_requirements/extract'; @@ -34,52 +36,58 @@ const PoetryGitDependency = z git: z.string(), tag: z.string().optional().catch(undefined), version: z.string().optional().catch(undefined), + branch: z.string().optional().catch(undefined), + rev: z.string().optional().catch(undefined), }) - .transform(({ git, tag, version }): PackageDependency => { - if (!tag) { - const res: PackageDependency = { - datasource: GitRefsDatasource.id, - packageName: git, - skipReason: 'git-dependency', - }; - - if (version) { - res.currentValue = version; + .transform(({ git, tag, version, branch, rev }): PackageDependency => { + if (tag) { + const { source, owner, name } = parseGitUrl(git); + if (source === 'github.com') { + const repo = `${owner}/${name}`; + return { + datasource: GithubTagsDatasource.id, + currentValue: tag, + packageName: repo, + }; + } else { + return { + datasource: GitRefsDatasource.id, + currentValue: tag, + packageName: git, + skipReason: 'git-dependency', + }; } - - return res; } - const parsedUrl = parseGitUrl(git); - if (parsedUrl.source !== 'github.com') { + if (rev) { + return { + datasource: GitRefsDatasource.id, + currentValue: branch, + currentDigest: rev, + replaceString: rev, + packageName: git, + }; + } else { return { datasource: GitRefsDatasource.id, - currentValue: tag, + currentValue: version, packageName: git, skipReason: 'git-dependency', }; } - - const { owner, name } = parsedUrl; - const repo = `${owner}/${name}`; - return { - datasource: GithubTagsDatasource.id, - currentValue: tag, - 
packageName: repo, - }; }); const PoetryPypiDependency = z.union([ z - .object({ version: z.string().optional() }) - .transform(({ version: currentValue }): PackageDependency => { + .object({ version: z.string().optional(), source: z.string().optional() }) + .transform(({ version: currentValue, source }): PackageDependency => { if (!currentValue) { return { datasource: PypiDatasource.id }; } return { datasource: PypiDatasource.id, - managerData: { nestedVersion: true }, + managerData: { nestedVersion: true, sourceName: source?.toLowerCase() }, currentValue, }; }), @@ -113,6 +121,11 @@ export const PoetryDependencies = LooseRecord( return dep; } + if (dep.datasource === GitRefsDatasource.id && dep.currentDigest) { + dep.versioning = gitVersioning.id; + return dep; + } + // istanbul ignore if: normaly should not happen if (!dep.currentValue) { dep.skipReason = 'unspecified-version'; @@ -178,8 +191,73 @@ export const PoetryGroupDependencies = LooseRecord( return deps; }); +const PoetrySourceOrder = [ + 'default', + 'primary', + 'secondary', + 'supplemental', + 'explicit', +] as const; + +export const PoetrySource = z.object({ + name: z.string().toLowerCase(), + url: z.string().optional(), + priority: z.enum(PoetrySourceOrder).default('primary'), +}); +export type PoetrySource = z.infer; + +export const PoetrySources = LooseArray(PoetrySource, { + onError: ({ error: err }) => { + logger.debug({ err }, 'Poetry: error parsing sources array'); + }, +}) + .transform((sources) => { + const pypiUrl = process.env.PIP_INDEX_URL ?? 
'https://pypi.org/pypi/'; + const result: PoetrySource[] = []; + + let overridesPyPi = false; + let hasDefaultSource = false; + let hasPrimarySource = false; + for (const source of sources) { + if (source.name === 'pypi') { + source.url = pypiUrl; + overridesPyPi = true; + } + + if (!source.url) { + continue; + } + + if (source.priority === 'default') { + hasDefaultSource = true; + } else if (source.priority === 'primary') { + hasPrimarySource = true; + } + + result.push(source); + } + + if (sources.length && !hasDefaultSource && !overridesPyPi) { + result.push({ + name: 'pypi', + priority: hasPrimarySource ? 'secondary' : 'default', + url: pypiUrl, + }); + } + + result.sort( + (a, b) => + PoetrySourceOrder.indexOf(a.priority) - + PoetrySourceOrder.indexOf(b.priority), + ); + + return result; + }) + .catch([]); + export const PoetrySectionSchema = z .object({ + version: z.string().optional().catch(undefined), dependencies: withDepType(PoetryDependencies, 'dependencies').optional(), 'dev-dependencies': withDepType( PoetryDependencies, @@ -187,29 +265,16 @@ export const PoetrySectionSchema = z ).optional(), extras: withDepType(PoetryDependencies, 'extras').optional(), group: PoetryGroupDependencies.optional(), - source: LooseArray( - z - .object({ - url: z.string(), - }) - .transform(({ url }) => url), - ) - .refine((urls) => urls.length > 0) - .transform((urls) => [ - ...urls, - process.env.PIP_INDEX_URL ?? 
'https://pypi.org/pypi/', - ]) - .transform((urls) => uniq(urls)) - .optional() - .catch(undefined), + source: PoetrySources, }) .transform( ({ + version, dependencies = [], 'dev-dependencies': devDependencies = [], extras: extraDependencies = [], group: groupDependencies = [], - source: registryUrls, + source: sourceUrls, }) => { const deps: PackageDependency[] = [ ...dependencies, @@ -218,10 +283,24 @@ export const PoetrySectionSchema = z ...groupDependencies, ]; - const res: PackageFileContent = { deps }; + const res: PackageFileContent = { deps, packageFileVersion: version }; + + if (sourceUrls.length) { + for (const dep of res.deps) { + if (dep.managerData?.sourceName) { + const sourceUrl = sourceUrls.find( + ({ name }) => name === dep.managerData?.sourceName, + ); + if (sourceUrl?.url) { + dep.registryUrls = [sourceUrl.url]; + } + } + } - if (registryUrls) { - res.registryUrls = registryUrls; + const sourceUrlsFiltered = sourceUrls.filter( + ({ priority }) => priority !== 'explicit', + ); + res.registryUrls = uniq(sourceUrlsFiltered.map(({ url }) => url!)); } return res; diff --git a/lib/modules/manager/poetry/types.ts b/lib/modules/manager/poetry/types.ts index d88e278b78dbac..97191116dc18e6 100644 --- a/lib/modules/manager/poetry/types.ts +++ b/lib/modules/manager/poetry/types.ts @@ -22,6 +22,8 @@ export interface PoetryDependency { git?: string; tag?: string; version?: string; + branch?: string; + rev?: string; } export interface PoetrySource { diff --git a/lib/modules/manager/terraform/extractors/others/modules.spec.ts b/lib/modules/manager/terraform/extractors/others/modules.spec.ts index 82be37043c1387..845e7b7c46e008 100644 --- a/lib/modules/manager/terraform/extractors/others/modules.spec.ts +++ b/lib/modules/manager/terraform/extractors/others/modules.spec.ts @@ -148,6 +148,9 @@ describe('modules/manager/terraform/extractors/others/modules', () => { const subfolderWithDoubleSlash = bitbucketRefMatchRegex.exec( 
'bitbucket.org/hashicorp/example.git//terraform?ref=v1.0.0', )?.groups; + const subfolderWithGitInName = bitbucketRefMatchRegex.exec( + 'bitbucket.org/hashicorp/example.git//terraform-git?ref=v1.0.0', + )?.groups; const depth = bitbucketRefMatchRegex.exec( 'git::https://git@bitbucket.org/hashicorp/example.git?depth=1&ref=v1.0.0', )?.groups; @@ -180,6 +183,11 @@ describe('modules/manager/terraform/extractors/others/modules', () => { project: 'example', tag: 'v1.0.0', }); + expect(subfolderWithGitInName).toMatchObject({ + workspace: 'hashicorp', + project: 'example', + tag: 'v1.0.0', + }); expect(depth).toMatchObject({ workspace: 'hashicorp', project: 'example', diff --git a/lib/modules/manager/terraform/extractors/others/modules.ts b/lib/modules/manager/terraform/extractors/others/modules.ts index c6d129dcf5cf5e..5ba4f31c3ca122 100644 --- a/lib/modules/manager/terraform/extractors/others/modules.ts +++ b/lib/modules/manager/terraform/extractors/others/modules.ts @@ -13,7 +13,7 @@ export const githubRefMatchRegex = regEx( /github\.com([/:])(?[^/]+\/[a-z0-9-_.]+).*\?(depth=\d+&)?ref=(?.*?)(&depth=\d+)?$/i, ); export const bitbucketRefMatchRegex = regEx( - /(?:git::)?(?(?:http|https|ssh)?(?::\/\/)?(?:.*@)?(?bitbucket\.org\/(?.*)\/(?.*).git\/?(?.*)))\?(depth=\d+&)?ref=(?.*?)(&depth=\d+)?$/, + /(?:git::)?(?(?:http|https|ssh)?(?::\/\/)?(?:.*@)?(?bitbucket\.org\/(?.*)\/(?.*)\.git\/?(?.*)))\?(depth=\d+&)?ref=(?.*?)(&depth=\d+)?$/, ); export const gitTagsRefMatchRegex = regEx( /(?:git::)?(?(?:(?:http|https|ssh):\/\/)?(?:.*@)?(?.*\/(?.*\/.*)))\?(depth=\d+&)?ref=(?.*?)(&depth=\d+)?$/, diff --git a/lib/modules/manager/terraform/lockfile/index.spec.ts b/lib/modules/manager/terraform/lockfile/index.spec.ts index fb1eb92b9f3e2b..a49a2dadd1e6de 100644 --- a/lib/modules/manager/terraform/lockfile/index.spec.ts +++ b/lib/modules/manager/terraform/lockfile/index.spec.ts @@ -1117,5 +1117,18 @@ describe('modules/manager/terraform/lockfile/index', () => { ), ).toBe('>= 2.36.0, 2.46.0'); 
}); + + it('create constraint with full version', () => { + expect( + getNewConstraint( + { + currentValue: '>= 4.0, <4.12', + newValue: '< 4.21', + newVersion: '4.20.0', + }, + '>= 4.0.0, < 4.12.0', + ), + ).toBe('< 4.21.0'); + }); }); }); diff --git a/lib/modules/manager/terraform/lockfile/index.ts b/lib/modules/manager/terraform/lockfile/index.ts index 53c6ae7e3388a5..70f9e8739ee9d4 100644 --- a/lib/modules/manager/terraform/lockfile/index.ts +++ b/lib/modules/manager/terraform/lockfile/index.ts @@ -17,6 +17,7 @@ import { extractLocks, findLockFile, isPinnedVersion, + massageNewValue, readLockFile, writeLockUpdates, } from './util'; @@ -70,8 +71,15 @@ export function getNewConstraint( dep: Upgrade>, oldConstraint: string | undefined, ): string | undefined { - const { currentValue, currentVersion, newValue, newVersion, packageName } = - dep; + const { + currentValue, + currentVersion, + newValue: rawNewValue, + newVersion, + packageName, + } = dep; + + const newValue = massageNewValue(rawNewValue); if (oldConstraint && currentValue && newValue && currentValue === newValue) { logger.debug( diff --git a/lib/modules/manager/terraform/lockfile/util.ts b/lib/modules/manager/terraform/lockfile/util.ts index b3193ec1e0f479..cb3c0079baa335 100644 --- a/lib/modules/manager/terraform/lockfile/util.ts +++ b/lib/modules/manager/terraform/lockfile/util.ts @@ -1,3 +1,4 @@ +import is from '@sindresorhus/is'; import { findLocalSiblingOrParent, readLocalFile } from '../../../../util/fs'; import { newlineRegex, regEx } from '../../../../util/regex'; import { get as getVersioning } from '../../../versioning'; @@ -225,3 +226,30 @@ export function writeLockUpdates( }, }; } + +export function massageNewValue(value: string | undefined): string | undefined { + if (is.nullOrUndefined(value)) { + return value; + } + + const elements = value.split(','); + const massagedElements: string[] = []; + for (const element of elements) { + // these constraints are allowed to miss precision + if 
(element.includes('~>')) { + massagedElements.push(element); + continue; + } + + const missing0s = 3 - element.split('.').length; + + let massagedElement = element; + + for (let i = 0; i < missing0s; i++) { + massagedElement = `${massagedElement}.0`; + } + massagedElements.push(massagedElement); + } + + return massagedElements.join(','); +} diff --git a/lib/modules/manager/terragrunt/__fixtures__/1.hcl b/lib/modules/manager/terragrunt/__fixtures__/1.hcl new file mode 100644 index 00000000000000..61164ff13f4e93 --- /dev/null +++ b/lib/modules/manager/terragrunt/__fixtures__/1.hcl @@ -0,0 +1,39 @@ +#real +terraform { + extra_arguments "common_vars" { + commands = ["plan", "apply"] + + arguments = [ + "-var-file=../../common.tfvars", + "-var-file=../region.tfvars" + ] + } + + before_hook "before_hook" { + commands = ["apply", "plan"] + execute = ["echo", "Running Terraform"] + } + + source = "tfr:///myuser/myrepo/cloud//folder/modules/moduleone?ref=v0.0.9" + + after_hook "after_hook" { + commands = ["apply", "plan"] + execute = ["echo", "Finished running Terraform"] + run_on_error = true + } +} + +#submodule +terraform { + source = "tfr:///terraform-google-modules/kubernetes-engine/google//modules/private-cluster?version=1.2.3" +} + +#bar +terraform { + source = "tfr:///terraform-aws-modules/vpc/aws?version=3.3.0" +} + +#missing third backslash +terraform { + source = "tfr://terraform-aws-modules/vpc/aws?version=3.3.0" +} diff --git a/lib/modules/manager/terragrunt/extract.spec.ts b/lib/modules/manager/terragrunt/extract.spec.ts index 5b7af54845281c..7dfae48e2f8f59 100644 --- a/lib/modules/manager/terragrunt/extract.spec.ts +++ b/lib/modules/manager/terragrunt/extract.spec.ts @@ -7,6 +7,33 @@ describe('modules/manager/terragrunt/extract', () => { expect(extractPackageFile('nothing here')).toBeNull(); }); + it('extracts terragrunt sources using tfr protocol', () => { + const res = extractPackageFile(Fixtures.get('1.hcl')); + expect(res).toEqual({ + deps: [ + { + 
currentValue: 'v0.0.9', + datasource: 'terraform-module', + depName: 'myuser/myrepo/cloud', + depType: 'terragrunt', + }, + { + currentValue: '1.2.3', + datasource: 'terraform-module', + depName: 'terraform-google-modules/kubernetes-engine/google', + depType: 'terragrunt', + }, + { + currentValue: '3.3.0', + datasource: 'terraform-module', + depName: 'terraform-aws-modules/vpc/aws', + depType: 'terragrunt', + }, + {}, + ], + }); + }); + it('extracts terragrunt sources', () => { const res = extractPackageFile(Fixtures.get('2.hcl')); expect(res).toEqual({ diff --git a/lib/modules/manager/terragrunt/modules.ts b/lib/modules/manager/terragrunt/modules.ts index 56722fe7a12323..2e22f7d3c1bbc8 100644 --- a/lib/modules/manager/terragrunt/modules.ts +++ b/lib/modules/manager/terragrunt/modules.ts @@ -13,6 +13,9 @@ export const githubRefMatchRegex = regEx( export const gitTagsRefMatchRegex = regEx( /(?:git::)?(?(?:http|https|ssh):\/\/(?:.*@)?(?.*.*\/(?.*\/.*)))\?(depth=\d+&)?ref=(?.*?)(&depth=\d+)?$/, ); +export const tfrVersionMatchRegex = regEx( + /tfr:\/\/(?.*?)\/(?[^/]+?)\/(?[^/]+?)\/(?[^/?]+).*\?(?:ref|version)=(?.*?)$/, +); const hostnameMatchRegex = regEx(/^(?([\w|\d]+\.)+[\w|\d]+)/); export function extractTerragruntModule( @@ -35,6 +38,7 @@ export function analyseTerragruntModule( const source = dep.managerData!.source; const githubRefMatch = githubRefMatchRegex.exec(source ?? ''); const gitTagsRefMatch = gitTagsRefMatchRegex.exec(source ?? ''); + const tfrVersionMatch = tfrVersionMatchRegex.exec(source ?? 
''); if (githubRefMatch?.groups) { dep.depType = 'github'; @@ -58,6 +62,16 @@ export function analyseTerragruntModule( } dep.currentValue = gitTagsRefMatch.groups.tag; dep.datasource = GitTagsDatasource.id; + } else if (tfrVersionMatch?.groups) { + dep.depType = 'terragrunt'; + dep.depName = + tfrVersionMatch.groups.org + + '/' + + tfrVersionMatch.groups.name + + '/' + + tfrVersionMatch.groups.cloud; + dep.currentValue = tfrVersionMatch.groups.currentValue; + dep.datasource = TerraformModuleDatasource.id; } else if (source) { const moduleParts = source.split('//')[0].split('/'); if (moduleParts[0] === '..') { diff --git a/lib/modules/manager/types.ts b/lib/modules/manager/types.ts index 8435b4e9b9d467..f3743f241a50a7 100644 --- a/lib/modules/manager/types.ts +++ b/lib/modules/manager/types.ts @@ -235,6 +235,7 @@ export interface ManagerApi extends ModuleApi { content: string, currentValue: string, bumpVersion: ReleaseType, + packageFile: string, ): Result; detectGlobalConfig?(): Result; diff --git a/lib/modules/platform/api.ts b/lib/modules/platform/api.ts index 973f6b75d39db6..7d0ab19cee6bcb 100644 --- a/lib/modules/platform/api.ts +++ b/lib/modules/platform/api.ts @@ -3,6 +3,7 @@ import * as azure from './azure'; import * as bitbucket from './bitbucket'; import * as bitbucketServer from './bitbucket-server'; import * as codecommit from './codecommit'; +import * as gerrit from './gerrit'; import * as gitea from './gitea'; import * as github from './github'; import * as gitlab from './gitlab'; @@ -16,6 +17,7 @@ api.set(azure.id, azure); api.set(bitbucket.id, bitbucket); api.set(bitbucketServer.id, bitbucketServer); api.set(codecommit.id, codecommit); +api.set(gerrit.id, gerrit); api.set(gitea.id, gitea); api.set(github.id, github); api.set(gitlab.id, gitlab); diff --git a/lib/modules/platform/azure/__snapshots__/index.spec.ts.snap b/lib/modules/platform/azure/__snapshots__/index.spec.ts.snap index ecd7882d4abbc2..14750606605808 100644 --- 
a/lib/modules/platform/azure/__snapshots__/index.spec.ts.snap +++ b/lib/modules/platform/azure/__snapshots__/index.spec.ts.snap @@ -60,7 +60,7 @@ exports[`modules/platform/azure/index createPr() when usePlatformAutomerge is se "completionOptions": { "deleteSourceBranch": true, "mergeCommitMessage": "The Title", - "squashMerge": true, + "mergeStrategy": 2, }, "createdAt": undefined, "createdBy": { diff --git a/lib/modules/platform/azure/index.spec.ts b/lib/modules/platform/azure/index.spec.ts index 3924120dd94b7e..9aaa23a87ef0fa 100644 --- a/lib/modules/platform/azure/index.spec.ts +++ b/lib/modules/platform/azure/index.spec.ts @@ -2,6 +2,7 @@ import { Readable } from 'node:stream'; import is from '@sindresorhus/is'; import type { IGitApi } from 'azure-devops-node-api/GitApi'; import { + GitPullRequest, GitPullRequestMergeStrategy, GitStatusState, PullRequestStatus, @@ -911,7 +912,7 @@ describe('modules/platform/azure/index', () => { id: prResult.createdBy.id, }, completionOptions: { - squashMerge: true, + mergeStrategy: GitPullRequestMergeStrategy.Squash, deleteSourceBranch: true, mergeCommitMessage: 'The Title', }, @@ -936,7 +937,7 @@ describe('modules/platform/azure/index', () => { expect(pr).toMatchSnapshot(); }); - it('should only call getMergeMethod once per run', async () => { + it('should only call getMergeMethod once per run when automergeStrategy is auto', async () => { await initRepo({ repository: 'some/repo' }); const prResult = [ { @@ -961,7 +962,7 @@ describe('modules/platform/azure/index', () => { id: prResult[0].createdBy.id, }, completionOptions: { - squashMerge: true, + mergeStrategy: GitPullRequestMergeStrategy.Squash, deleteSourceBranch: true, mergeCommitMessage: 'The Title', }, @@ -972,7 +973,7 @@ describe('modules/platform/azure/index', () => { id: prResult[1].createdBy.id, }, completionOptions: { - squashMerge: true, + mergeStrategy: GitPullRequestMergeStrategy.Squash, deleteSourceBranch: true, mergeCommitMessage: 'The Second Title', }, @@ 
-1001,7 +1002,10 @@ describe('modules/platform/azure/index', () => { prTitle: 'The Title', prBody: 'Hello world', labels: ['deps', 'renovate'], - platformOptions: { usePlatformAutomerge: true }, + platformOptions: { + automergeStrategy: 'auto', + usePlatformAutomerge: true, + }, }); await azure.createPr({ @@ -1010,12 +1014,127 @@ describe('modules/platform/azure/index', () => { prTitle: 'The Second Title', prBody: 'Hello world', labels: ['deps', 'renovate'], - platformOptions: { usePlatformAutomerge: true }, + platformOptions: { + automergeStrategy: 'auto', + usePlatformAutomerge: true, + }, }); expect(updateFn).toHaveBeenCalledTimes(2); expect(azureHelper.getMergeMethod).toHaveBeenCalledTimes(1); }); + + it.each` + automergeStrategy + ${'fast-forward'} + ${'merge-commit'} + ${'rebase'} + ${'squash'} + `( + 'should not call getMergeMethod when automergeStrategy is $automergeStrategy', + async (automergeStrategy) => { + await initRepo({ repository: 'some/repo' }); + const prResult = { + pullRequestId: 123, + title: 'The Title', + createdBy: { + id: '123', + }, + }; + const prUpdateResults = { + ...prResult, + autoCompleteSetBy: { + id: prResult.createdBy.id, + }, + completionOptions: { + mergeStrategy: GitPullRequestMergeStrategy.Squash, + deleteSourceBranch: true, + mergeCommitMessage: 'The Title', + }, + }; + const updateFn = jest.fn(() => Promise.resolve(prUpdateResults)); + + azureApi.gitApi.mockResolvedValue( + partial({ + createPullRequest: jest.fn(() => Promise.resolve(prResult)), + createPullRequestLabel: jest.fn().mockResolvedValue({}), + updatePullRequest: updateFn, + }), + ); + await azure.createPr({ + sourceBranch: 'some-branch', + targetBranch: 'dev', + prTitle: 'The Title', + prBody: 'Hello world', + labels: ['deps', 'renovate'], + platformOptions: { + automergeStrategy, + usePlatformAutomerge: true, + }, + }); + + expect(azureHelper.getMergeMethod).toHaveBeenCalledTimes(0); + }, + ); + + it.each` + automergeStrategy | prMergeStrategy + 
${'fast-forward'} | ${GitPullRequestMergeStrategy.Rebase} + ${'merge-commit'} | ${GitPullRequestMergeStrategy.NoFastForward} + ${'rebase'} | ${GitPullRequestMergeStrategy.Rebase} + ${'squash'} | ${GitPullRequestMergeStrategy.Squash} + `( + 'should create PR with mergeStrategy $prMergeStrategy', + async ({ automergeStrategy, prMergeStrategy }) => { + await initRepo({ repository: 'some/repo' }); + const prResult = { + pullRequestId: 456, + title: 'The Title', + createdBy: { + id: '123', + }, + }; + const prUpdateResult = { + ...prResult, + autoCompleteSetBy: { + id: prResult.createdBy.id, + }, + completionOptions: { + mergeStrategy: prMergeStrategy, + deleteSourceBranch: true, + mergeCommitMessage: 'The Title', + }, + }; + const updateFn = jest.fn().mockResolvedValue(prUpdateResult); + azureApi.gitApi.mockResolvedValueOnce( + partial({ + createPullRequest: jest.fn().mockResolvedValue(prResult), + createPullRequestLabel: jest.fn().mockResolvedValue({}), + updatePullRequest: updateFn, + }), + ); + const pr = await azure.createPr({ + sourceBranch: 'some-branch', + targetBranch: 'dev', + prTitle: 'The Title', + prBody: 'Hello world', + labels: ['deps', 'renovate'], + platformOptions: { + automergeStrategy, + usePlatformAutomerge: true, + }, + }); + + expect((pr as GitPullRequest).completionOptions?.mergeStrategy).toBe( + prMergeStrategy, + ); + expect(updateFn).toHaveBeenCalled(); + expect( + updateFn.mock.calls[0][0].completionOptions.mergeStrategy, + ).toBe(prMergeStrategy); + expect(azureHelper.getMergeMethod).toHaveBeenCalledTimes(0); + }, + ); }); it('should create and return an approved PR object', async () => { @@ -1528,6 +1647,7 @@ describe('modules/platform/azure/index', () => { const res = await azure.mergePr({ branchName: branchNameMock, id: pullRequestIdMock, + strategy: 'auto', }); expect(updatePullRequestMock).toHaveBeenCalledWith( @@ -1546,6 +1666,59 @@ describe('modules/platform/azure/index', () => { expect(res).toBeTrue(); }); + it.each` + 
automergeStrategy | prMergeStrategy + ${'fast-forward'} | ${GitPullRequestMergeStrategy.Rebase} + ${'merge-commit'} | ${GitPullRequestMergeStrategy.NoFastForward} + ${'rebase'} | ${GitPullRequestMergeStrategy.Rebase} + ${'squash'} | ${GitPullRequestMergeStrategy.Squash} + `( + 'should complete PR with mergeStrategy $prMergeStrategy', + async ({ automergeStrategy, prMergeStrategy }) => { + await initRepo({ repository: 'some/repo' }); + const pullRequestIdMock = 12345; + const branchNameMock = 'test'; + const lastMergeSourceCommitMock = { commitId: 'abcd1234' }; + const updatePullRequestMock = jest.fn(() => ({ + status: 3, + })); + azureApi.gitApi.mockImplementationOnce( + () => + ({ + getPullRequestById: jest.fn(() => ({ + lastMergeSourceCommit: lastMergeSourceCommitMock, + targetRefName: 'refs/heads/ding', + title: 'title', + })), + updatePullRequest: updatePullRequestMock, + }) as any, + ); + + azureHelper.getMergeMethod = jest.fn().mockReturnValue(prMergeStrategy); + + const res = await azure.mergePr({ + branchName: branchNameMock, + id: pullRequestIdMock, + strategy: automergeStrategy, + }); + + expect(updatePullRequestMock).toHaveBeenCalledWith( + { + status: PullRequestStatus.Completed, + lastMergeSourceCommit: lastMergeSourceCommitMock, + completionOptions: { + mergeStrategy: prMergeStrategy, + deleteSourceBranch: true, + mergeCommitMessage: 'title', + }, + }, + '1', + pullRequestIdMock, + ); + expect(res).toBeTrue(); + }, + ); + it('should return false if the PR does not update successfully', async () => { await initRepo({ repository: 'some/repo' }); const pullRequestIdMock = 12345; @@ -1593,10 +1766,12 @@ describe('modules/platform/azure/index', () => { await azure.mergePr({ branchName: 'test-branch-1', id: 1234, + strategy: 'auto', }); await azure.mergePr({ branchName: 'test-branch-2', id: 5678, + strategy: 'auto', }); expect(azureHelper.getMergeMethod).toHaveBeenCalledTimes(1); diff --git a/lib/modules/platform/azure/index.ts 
b/lib/modules/platform/azure/index.ts index c154d4cd6b8c7b..fe9fa26c19d833 100644 --- a/lib/modules/platform/azure/index.ts +++ b/lib/modules/platform/azure/index.ts @@ -52,6 +52,7 @@ import { getRenovatePRFormat, getRepoByName, getStorageExtraCloneOpts, + mapMergeStrategy, max4000Chars, } from './util'; @@ -491,7 +492,10 @@ export async function createPr({ config.repoId, ); if (platformOptions?.usePlatformAutomerge) { - const mergeStrategy = await getMergeStrategy(pr.targetRefName!); + const mergeStrategy = + platformOptions.automergeStrategy === 'auto' + ? await getMergeStrategy(pr.targetRefName!) + : mapMergeStrategy(platformOptions.automergeStrategy); pr = await azureApiGit.updatePullRequest( { autoCompleteSetBy: { @@ -736,13 +740,17 @@ export async function setBranchStatus({ export async function mergePr({ branchName, id: pullRequestId, + strategy, }: MergePRConfig): Promise { logger.debug(`mergePr(${pullRequestId}, ${branchName!})`); const azureApiGit = await azureApi.gitApi(); let pr = await azureApiGit.getPullRequestById(pullRequestId, config.project); - const mergeStrategy = await getMergeStrategy(pr.targetRefName!); + const mergeStrategy = + strategy === 'auto' + ? await getMergeStrategy(pr.targetRefName!) + : mapMergeStrategy(strategy); const objToUpdate: GitPullRequest = { status: PullRequestStatus.Completed, lastMergeSourceCommit: pr.lastMergeSourceCommit, diff --git a/lib/modules/platform/azure/readme.md b/lib/modules/platform/azure/readme.md index b65aaa7b87f3e3..cb06f4b7a66080 100644 --- a/lib/modules/platform/azure/readme.md +++ b/lib/modules/platform/azure/readme.md @@ -18,10 +18,6 @@ Permissions for your PAT should be at minimum: Remember to set `platform=azure` somewhere in your Renovate config file. -## Features awaiting implementation - -- The `automergeStrategy` configuration option has not been implemented for this platform, and all values behave as if the value `auto` was used. 
Renovate will use the merge strategy configured in the Azure Repos repository itself, and this cannot be overridden yet - ## Running Renovate in Azure Pipelines ### Setting up a new pipeline diff --git a/lib/modules/platform/azure/util.ts b/lib/modules/platform/azure/util.ts index adbb2560a6ff86..9ca3563961bcb5 100644 --- a/lib/modules/platform/azure/util.ts +++ b/lib/modules/platform/azure/util.ts @@ -1,9 +1,11 @@ import { GitPullRequest, + GitPullRequestMergeStrategy, GitRepository, GitStatusContext, PullRequestStatus, } from 'azure-devops-node-api/interfaces/GitInterfaces.js'; +import type { MergeStrategy } from '../../../config/types'; import { logger } from '../../../logger'; import type { HostRule, PrState } from '../../../types'; import type { GitOptions } from '../../../types/git'; @@ -181,3 +183,19 @@ export function getRepoByName( } return foundRepo ?? null; } + +export function mapMergeStrategy( + mergeStrategy?: MergeStrategy, +): GitPullRequestMergeStrategy { + switch (mergeStrategy) { + case 'rebase': + case 'fast-forward': + return GitPullRequestMergeStrategy.Rebase; + case 'merge-commit': + return GitPullRequestMergeStrategy.NoFastForward; + case 'squash': + return GitPullRequestMergeStrategy.Squash; + default: + return GitPullRequestMergeStrategy.NoFastForward; + } +} diff --git a/lib/modules/platform/bitbucket-server/index.spec.ts b/lib/modules/platform/bitbucket-server/index.spec.ts index f925b30d298974..117bce84ef4045 100644 --- a/lib/modules/platform/bitbucket-server/index.spec.ts +++ b/lib/modules/platform/bitbucket-server/index.spec.ts @@ -1306,6 +1306,50 @@ describe('modules/platform/bitbucket-server/index', () => { }), ).toBeNull(); }); + + it('finds pr from other authors', async () => { + const scope = await initRepo(); + scope + .get( + `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=OPEN&direction=outgoing&at=refs/heads/branch&limit=1`, + ) + .reply(200, { + isLastPage: true, + values: [prMock(url, 'SOME', 'repo')], 
+ }); + expect( + await bitbucket.findPr({ + branchName: 'branch', + state: 'open', + includeOtherAuthors: true, + }), + ).toMatchObject({ + number: 5, + sourceBranch: 'userName1/pullRequest5', + targetBranch: 'master', + title: 'title', + state: 'open', + }); + }); + + it('returns null if no pr found - (includeOtherAuthors)', async () => { + const scope = await initRepo(); + scope + .get( + `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=OPEN&direction=outgoing&at=refs/heads/branch&limit=1`, + ) + .reply(200, { + isLastPage: true, + values: [], + }); + + const pr = await bitbucket.findPr({ + branchName: 'branch', + state: 'open', + includeOtherAuthors: true, + }); + expect(pr).toBeNull(); + }); }); describe('createPr()', () => { diff --git a/lib/modules/platform/bitbucket-server/index.ts b/lib/modules/platform/bitbucket-server/index.ts index bd9a0be19c34c0..aa6bcc2e7a4828 100644 --- a/lib/modules/platform/bitbucket-server/index.ts +++ b/lib/modules/platform/bitbucket-server/index.ts @@ -324,8 +324,34 @@ export async function findPr({ prTitle, state = 'all', refreshCache, + includeOtherAuthors, }: FindPRConfig): Promise { logger.debug(`findPr(${branchName}, "${prTitle!}", "${state}")`); + + if (includeOtherAuthors) { + // PR might have been created by anyone, so don't use the cached Renovate PR list + const searchParams: Record = { + state: 'OPEN', + }; + searchParams['direction'] = 'outgoing'; + searchParams['at'] = `refs/heads/${branchName}`; + + const query = getQueryString(searchParams); + const prs = await utils.accumulateValues( + `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${query}`, + 'get', + {}, + 1, // only fetch the latest pr + ); + + if (!prs.length) { + logger.debug(`No PR found for branch ${branchName}`); + return null; + } + + return utils.prInfo(prs[0]); + } + const prList = await getPrList(refreshCache); const pr = prList.find(isRelevantPr(branchName, prTitle, state)); if (pr) { 
diff --git a/lib/modules/platform/bitbucket/index.spec.ts b/lib/modules/platform/bitbucket/index.spec.ts index 8a6695384d53ef..ab13c73de1277d 100644 --- a/lib/modules/platform/bitbucket/index.spec.ts +++ b/lib/modules/platform/bitbucket/index.spec.ts @@ -1,5 +1,6 @@ import * as httpMock from '../../../../test/http-mock'; import type { logger as _logger } from '../../../logger'; +import { reset as memCacheReset } from '../../../util/cache/memory'; import type * as _git from '../../../util/git'; import { setBaseUrl } from '../../../util/http/bitbucket'; import type { Platform, PlatformResult, RepoParams } from '../types'; @@ -42,6 +43,7 @@ describe('modules/platform/bitbucket/index', () => { }); setBaseUrl(baseUrl); + memCacheReset(); }); async function initRepoMock( @@ -234,9 +236,8 @@ describe('modules/platform/bitbucket/index', () => { it('bitbucket finds PR for branch', async () => { const scope = await initRepoMock(); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [pr] }) .get('/2.0/repositories/some/repo/pullrequests/5') .reply(200, pr); @@ -247,9 +248,8 @@ describe('modules/platform/bitbucket/index', () => { it('returns null if no PR for branch', async () => { const scope = await initRepoMock(); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [pr] }); const res = await bitbucket.getBranchPr('branch_without_pr'); @@ -752,9 +752,8 @@ describe('modules/platform/bitbucket/index', () => { await bitbucket.initPlatform({ username: 'renovate', password: 'pass' }); await initRepoMock(undefined, null, scope); scope - .get( - 
'/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&q=author.uuid="12345"&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [ { @@ -778,9 +777,8 @@ describe('modules/platform/bitbucket/index', () => { it('finds pr', async () => { const scope = await initRepoMock(); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [pr] }); expect( await bitbucket.findPr({ @@ -804,9 +802,8 @@ describe('modules/platform/bitbucket/index', () => { const scope = await initRepoMock(); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [ { @@ -842,9 +839,8 @@ describe('modules/platform/bitbucket/index', () => { const scope = await initRepoMock({}, { is_private: true }); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [ { @@ -882,9 +878,8 @@ describe('modules/platform/bitbucket/index', () => { const scope = await initRepoMock({}, { is_private: false }); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) .reply(200, { values: [ { @@ -926,9 +921,8 @@ describe('modules/platform/bitbucket/index', () => { const scope = await initRepoMock({}, { is_private: false }); scope - .get( - '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50', - ) + .get(`/2.0/repositories/some/repo/pullrequests`) + 
.query(true) .reply(200, { values: [ { @@ -953,6 +947,46 @@ describe('modules/platform/bitbucket/index', () => { }); expect(pr?.number).toBe(5); }); + + it('finds pr from other authors', async () => { + const scope = await initRepoMock(); + scope + .get( + '/2.0/repositories/some/repo/pullrequests?q=source.branch.name="branch"&state=open', + ) + .reply(200, { values: [pr] }); + expect( + await bitbucket.findPr({ + branchName: 'branch', + state: 'open', + includeOtherAuthors: true, + }), + ).toMatchObject({ + number: 5, + sourceBranch: 'branch', + targetBranch: 'master', + title: 'title', + state: 'open', + }); + }); + + it('returns null if no open pr exists - (includeOtherAuthors)', async () => { + const scope = await initRepoMock(); + scope + .get( + '/2.0/repositories/some/repo/pullrequests?q=source.branch.name="branch"&state=open', + ) + .reply(200, { + values: [], + }); + + const pr = await bitbucket.findPr({ + branchName: 'branch', + state: 'open', + includeOtherAuthors: true, + }); + expect(pr).toBeNull(); + }); }); describe('createPr()', () => { @@ -984,7 +1018,12 @@ describe('modules/platform/bitbucket/index', () => { values: [projectReviewer, repoReviewer], }) .post('/2.0/repositories/some/repo/pullrequests') - .reply(200, { id: 5 }); + .reply(200, { id: 5 }) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) + .reply(200, { + values: [{ id: 5 }], + }); const pr = await bitbucket.createPr({ sourceBranch: 'branch', targetBranch: 'master', @@ -1062,7 +1101,12 @@ describe('modules/platform/bitbucket/index', () => { account_status: 'inactive', }) .post('/2.0/repositories/some/repo/pullrequests') - .reply(200, { id: 5 }); + .reply(200, { id: 5 }) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) + .reply(200, { + values: [{ id: 5 }], + }); const pr = await bitbucket.createPr({ sourceBranch: 'branch', targetBranch: 'master', @@ -1120,7 +1164,12 @@ describe('modules/platform/bitbucket/index', () => { ) .reply(200) 
.post('/2.0/repositories/some/repo/pullrequests') - .reply(200, { id: 5 }); + .reply(200, { id: 5 }) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) + .reply(200, { + values: [{ id: 5 }], + }); const pr = await bitbucket.createPr({ sourceBranch: 'branch', targetBranch: 'master', @@ -1216,7 +1265,12 @@ describe('modules/platform/bitbucket/index', () => { }, }) .post('/2.0/repositories/some/repo/pullrequests') - .reply(200, { id: 5 }); + .reply(200, { id: 5 }) + .get(`/2.0/repositories/some/repo/pullrequests`) + .query(true) + .reply(200, { + values: [{ id: 5 }], + }); const pr = await bitbucket.createPr({ sourceBranch: 'branch', targetBranch: 'master', diff --git a/lib/modules/platform/bitbucket/index.ts b/lib/modules/platform/bitbucket/index.ts index 9f30b6e2aef8ff..165a09d446c9d8 100644 --- a/lib/modules/platform/bitbucket/index.ts +++ b/lib/modules/platform/bitbucket/index.ts @@ -31,6 +31,7 @@ import { repoFingerprint } from '../util'; import { smartTruncate } from '../utils/pr-body'; import { readOnlyIssueBody } from '../utils/read-only-issue-body'; import * as comments from './comments'; +import { BitbucketPrCache } from './pr-cache'; import type { Account, BitbucketStatus, @@ -273,31 +274,37 @@ function matchesState(state: string, desiredState: string): boolean { export async function getPrList(): Promise { logger.debug('getPrList()'); - if (!config.prList) { - logger.debug('Retrieving PR list'); - let url = `/2.0/repositories/${config.repository}/pullrequests?`; - url += utils.prStates.all.map((state) => 'state=' + state).join('&'); - if (renovateUserUuid && !config.ignorePrAuthor) { - url += `&q=author.uuid="${renovateUserUuid}"`; - } - const prs = ( - await bitbucketHttp.getJson>(url, { - paginate: true, - pagelen: 50, - }) - ).body.values; - config.prList = prs.map(utils.prInfo); - logger.debug(`Retrieved Pull Requests, count: ${config.prList.length}`); - } - return config.prList; + return await BitbucketPrCache.getPrs( + bitbucketHttp, + 
config.repository, + renovateUserUuid, + ); } export async function findPr({ branchName, prTitle, state = 'all', + includeOtherAuthors, }: FindPRConfig): Promise { logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`); + + if (includeOtherAuthors) { + // PR might have been created by anyone, so don't use the cached Renovate PR list + const prs = ( + await bitbucketHttp.getJson>( + `/2.0/repositories/${config.repository}/pullrequests?q=source.branch.name="${branchName}"&state=open`, + ) + ).body.values; + + if (prs.length === 0) { + logger.debug(`No PR found for branch ${branchName}`); + return null; + } + + return utils.prInfo(prs[0]); + } + const prList = await getPrList(); const pr = prList.find( (p) => @@ -305,15 +312,17 @@ export async function findPr({ (!prTitle || p.title.toUpperCase() === prTitle.toUpperCase()) && matchesState(p.state, state), ); - if (pr) { - logger.debug(`Found PR #${pr.number}`); + + if (!pr) { + return null; } + logger.debug(`Found PR #${pr.number}`); /** * Bitbucket doesn't support renaming or reopening declined PRs. * Instead, we have to use comment-driven signals. */ - if (pr?.state === 'closed') { + if (pr.state === 'closed') { const reopenComments = await comments.reopenComments(config, pr.number); if (is.nonEmptyArray(reopenComments)) { @@ -336,7 +345,7 @@ export async function findPr({ } } - return pr ?? 
null; + return pr; } // Gets details for a PR @@ -890,10 +899,12 @@ export async function createPr({ ) ).body; const pr = utils.prInfo(prRes); - // istanbul ignore if - if (config.prList) { - config.prList.push(pr); - } + await BitbucketPrCache.addPr( + bitbucketHttp, + config.repository, + renovateUserUuid, + pr, + ); return pr; } catch (err) /* istanbul ignore next */ { // Try sanitizing reviewers @@ -915,10 +926,12 @@ export async function createPr({ ) ).body; const pr = utils.prInfo(prRes); - // istanbul ignore if - if (config.prList) { - config.prList.push(pr); - } + await BitbucketPrCache.addPr( + bitbucketHttp, + config.repository, + renovateUserUuid, + pr, + ); return pr; } } diff --git a/lib/modules/platform/bitbucket/pr-cache.spec.ts b/lib/modules/platform/bitbucket/pr-cache.spec.ts new file mode 100644 index 00000000000000..2dea9fe977b49a --- /dev/null +++ b/lib/modules/platform/bitbucket/pr-cache.spec.ts @@ -0,0 +1,137 @@ +import * as httpMock from '../../../../test/http-mock'; +import { reset as memCacheReset } from '../../../util/cache/memory'; +import { + getCache, + resetCache as repoCacheReset, +} from '../../../util/cache/repository'; +import { BitbucketHttp } from '../../../util/http/bitbucket'; +import { BitbucketPrCache } from './pr-cache'; +import type { PrResponse } from './types'; +import { prInfo } from './utils'; + +const http = new BitbucketHttp(); + +const pr1: PrResponse = { + id: 1, + title: 'title', + state: 'OPEN', + links: { + commits: { + href: 'https://api.bitbucket.org/2.0/repositories/some/repo/pullrequests/1/commits', + }, + }, + source: { branch: { name: 'branch' } }, + destination: { branch: { name: 'master' } }, + reviewers: [], + created_on: '2020-01-01T00:00:00.000Z', + updated_on: '2020-01-01T00:00:00.000Z', +}; + +const pr2: PrResponse = { + id: 2, + title: 'title', + state: 'OPEN', + links: { + commits: { + href: 'https://api.bitbucket.org/2.0/repositories/some/repo/pullrequests/2/commits', + }, + }, + source: { branch: 
{ name: 'branch' } }, + destination: { branch: { name: 'master' } }, + reviewers: [], + created_on: '2023-01-01T00:00:00.000Z', + updated_on: '2023-01-01T00:00:00.000Z', +}; + +describe('modules/platform/bitbucket/pr-cache', () => { + let cache = getCache(); + + beforeEach(() => { + memCacheReset(); + repoCacheReset(); + cache = getCache(); + }); + + it('fetches cache', async () => { + httpMock + .scope('https://api.bitbucket.org') + .get(`/2.0/repositories/some-workspace/some-repo/pullrequests`) + .query(true) + .reply(200, { + values: [pr1], + }); + + const res = await BitbucketPrCache.getPrs( + http, + 'some-workspace/some-repo', + 'some-author', + ); + + expect(res).toMatchObject([ + { + number: 1, + title: 'title', + }, + ]); + expect(cache).toEqual({ + platform: { + bitbucket: { + pullRequestsCache: { + author: 'some-author', + items: { + '1': prInfo(pr1), + }, + updated_on: '2020-01-01T00:00:00.000Z', + }, + }, + }, + }); + }); + + it('syncs cache', async () => { + cache.platform = { + bitbucket: { + pullRequestsCache: { + items: { + '1': prInfo(pr1), + }, + author: 'some-author', + updated_on: '2020-01-01T00:00:00.000Z', + }, + }, + }; + + httpMock + .scope('https://api.bitbucket.org') + .get(`/2.0/repositories/some-workspace/some-repo/pullrequests`) + .query(true) + .reply(200, { + values: [pr2], + }); + + const res = await BitbucketPrCache.getPrs( + http, + 'some-workspace/some-repo', + 'some-author', + ); + + expect(res).toMatchObject([ + { number: 1, title: 'title' }, + { number: 2, title: 'title' }, + ]); + expect(cache).toEqual({ + platform: { + bitbucket: { + pullRequestsCache: { + items: { + '1': prInfo(pr1), + '2': prInfo(pr2), + }, + author: 'some-author', + updated_on: '2023-01-01T00:00:00.000Z', + }, + }, + }, + }); + }); +}); diff --git a/lib/modules/platform/bitbucket/pr-cache.ts b/lib/modules/platform/bitbucket/pr-cache.ts new file mode 100644 index 00000000000000..95fb4b947a7af0 --- /dev/null +++ b/lib/modules/platform/bitbucket/pr-cache.ts 
@@ -0,0 +1,135 @@ +import { dequal } from 'dequal'; +import { DateTime } from 'luxon'; +import { logger } from '../../../logger'; +import * as memCache from '../../../util/cache/memory'; +import { getCache } from '../../../util/cache/repository'; +import type { BitbucketHttp } from '../../../util/http/bitbucket'; +import type { Pr } from '../types'; +import type { BitbucketPrCacheData, PagedResult, PrResponse } from './types'; +import { prFieldsFilter, prInfo, prStates } from './utils'; + +export class BitbucketPrCache { + private cache: BitbucketPrCacheData; + + private constructor( + private repo: string, + private author: string | null, + ) { + const repoCache = getCache(); + repoCache.platform ??= {}; + repoCache.platform.bitbucket ??= {}; + + let pullRequestCache: BitbucketPrCacheData | undefined = + repoCache.platform.bitbucket.pullRequestsCache; + if (!pullRequestCache || pullRequestCache.author !== author) { + pullRequestCache = { + items: {}, + updated_on: null, + author, + }; + } + repoCache.platform.bitbucket.pullRequestsCache = pullRequestCache; + this.cache = pullRequestCache; + } + + private static async init( + http: BitbucketHttp, + repo: string, + author: string | null, + ): Promise { + const res = new BitbucketPrCache(repo, author); + const isSynced = memCache.get( + 'bitbucket-pr-cache-synced', + ); + + if (!isSynced) { + await res.sync(http); + memCache.set('bitbucket-pr-cache-synced', true); + } + + return res; + } + + private getPrs(): Pr[] { + return Object.values(this.cache.items); + } + + static async getPrs( + http: BitbucketHttp, + repo: string, + author: string | null, + ): Promise { + const prCache = await BitbucketPrCache.init(http, repo, author); + return prCache.getPrs(); + } + + private addPr(pr: Pr): void { + this.cache.items[pr.number] = pr; + } + + static async addPr( + http: BitbucketHttp, + repo: string, + author: string | null, + item: Pr, + ): Promise { + const prCache = await BitbucketPrCache.init(http, repo, author); + 
prCache.addPr(item); + } + + private reconcile(rawItems: PrResponse[]): void { + const { items: oldItems } = this.cache; + let { updated_on } = this.cache; + + for (const rawItem of rawItems) { + const id = rawItem.id; + + const oldItem = oldItems[id]; + const newItem = prInfo(rawItem); + + const itemNewTime = DateTime.fromISO(rawItem.updated_on); + + if (!dequal(oldItem, newItem)) { + oldItems[id] = newItem; + } + + const cacheOldTime = updated_on ? DateTime.fromISO(updated_on) : null; + if (!cacheOldTime || itemNewTime > cacheOldTime) { + updated_on = rawItem.updated_on; + } + } + + this.cache.updated_on = updated_on; + } + + private getUrl(): string { + const params = new URLSearchParams(); + + for (const state of prStates.all) { + params.append('state', state); + } + + params.append('fields', prFieldsFilter); + + const q: string[] = []; + if (this.author) { + q.push(`author.uuid = "${this.author}"`); + } + if (this.cache.updated_on) { + q.push(`updated_on > "${this.cache.updated_on}"`); + } + params.append('q', q.join(' AND ')); + + const query = params.toString(); + return `/2.0/repositories/${this.repo}/pullrequests?${query}`; + } + + private async sync(http: BitbucketHttp): Promise { + logger.debug('Syncing PR list'); + const url = this.getUrl(); + const opts = { paginate: true, pagelen: 50 }; + const res = await http.getJson>(url, opts); + this.reconcile(res.body.values); + return this; + } +} diff --git a/lib/modules/platform/bitbucket/types.ts b/lib/modules/platform/bitbucket/types.ts index 2bd692ca6b2680..35fba3d1638a5e 100644 --- a/lib/modules/platform/bitbucket/types.ts +++ b/lib/modules/platform/bitbucket/types.ts @@ -13,7 +13,6 @@ export interface Config { has_issues: boolean; mergeMethod: string; owner: string; - prList: Pr[]; repository: string; ignorePrAuthor: boolean; is_private: boolean; @@ -89,8 +88,9 @@ export interface PrResponse { name: string; }; }; - reviewers: Array; + reviewers: Account[]; created_on: string; + updated_on: string; } 
export interface Account { @@ -105,3 +105,9 @@ export interface EffectiveReviewer { reviewer_type: string; user: Account; } + +export interface BitbucketPrCacheData { + items: Record; + updated_on: string | null; + author: string | null; +} diff --git a/lib/modules/platform/bitbucket/utils.ts b/lib/modules/platform/bitbucket/utils.ts index 06d72dacd31b23..852f61611164f5 100644 --- a/lib/modules/platform/bitbucket/utils.ts +++ b/lib/modules/platform/bitbucket/utils.ts @@ -72,3 +72,19 @@ export function prInfo(pr: PrResponse): Pr { createdAt: pr.created_on, }; } + +export const prFieldsFilter = [ + 'values.id', + 'values.title', + 'values.state', + 'values.links.commits.href', + 'values.summary.raw', + 'values.source.branch.name', + 'values.destination.branch.name', + 'values.reviewers.display_name', + 'values.reviewers.uuid', + 'values.reviewers.nickname', + 'values.reviewers.account_status', + 'values.created_on', + 'values.updated_on', +].join(','); diff --git a/lib/modules/platform/gerrit/client.spec.ts b/lib/modules/platform/gerrit/client.spec.ts new file mode 100644 index 00000000000000..620ec9d5baef57 --- /dev/null +++ b/lib/modules/platform/gerrit/client.spec.ts @@ -0,0 +1,494 @@ +import * as httpMock from '../../../../test/http-mock'; +import { partial } from '../../../../test/util'; +import { REPOSITORY_ARCHIVED } from '../../../constants/error-messages'; +import { setBaseUrl } from '../../../util/http/gerrit'; +import type { FindPRConfig } from '../types'; +import { client } from './client'; +import type { + GerritChange, + GerritChangeMessageInfo, + GerritFindPRConfig, + GerritMergeableInfo, +} from './types'; + +const gerritEndpointUrl = 'https://dev.gerrit.com/renovate/'; +const jsonResultHeader = { 'content-type': 'application/json;charset=utf-8' }; + +describe('modules/platform/gerrit/client', () => { + beforeAll(() => { + setBaseUrl(gerritEndpointUrl); + }); + + describe('getRepos()', () => { + it('returns repos', async () => { + httpMock + 
.scope(gerritEndpointUrl) + .get('/a/projects/?type=CODE&state=ACTIVE') + .reply( + 200, + gerritRestResponse({ + repo1: { id: 'repo1', state: 'ACTIVE' }, + repo2: { id: 'repo2', state: 'ACTIVE' }, + }), + jsonResultHeader, + ); + expect(await client.getRepos()).toEqual(['repo1', 'repo2']); + }); + }); + + describe('getProjectInfo()', () => { + it('inactive', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/projects/test%2Frepo') + .reply( + 200, + gerritRestResponse({ + id: 'repo1', + name: 'test-repo', + state: 'READ_ONLY', + }), + jsonResultHeader, + ); + await expect(client.getProjectInfo('test/repo')).rejects.toThrow( + REPOSITORY_ARCHIVED, + ); + }); + + it('active', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/projects/test%2Frepo') + .reply( + 200, + gerritRestResponse({ + id: 'repo1', + name: 'test-repo', + state: 'ACTIVE', + }), + jsonResultHeader, + ); + await expect(client.getProjectInfo('test/repo')).resolves.toEqual({ + id: 'repo1', + name: 'test-repo', + state: 'ACTIVE', + }); + }); + }); + + describe('getBranchInfo()', () => { + it('info', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/projects/test%2Frepo/branches/HEAD') + .reply( + 200, + gerritRestResponse({ ref: 'sha-hash....', revision: 'main' }), + jsonResultHeader, + ); + await expect(client.getBranchInfo('test/repo')).resolves.toEqual({ + ref: 'sha-hash....', + revision: 'main', + }); + }); + }); + + describe('findChanges()', () => { + it.each([ + ['owner:self', { branchName: 'dependency-xyz' }], + ['project:repo', { branchName: 'dependency-xyz' }], + ['-is:wip', { branchName: 'dependency-xyz' }], + ['hashtag:sourceBranch-dependency-xyz', { branchName: 'dependency-xyz' }], + ['label:Code-Review=-2', { branchName: 'dependency-xyz', label: '-2' }], + [ + 'branch:otherTarget', + { branchName: 'dependency-xyz', targetBranch: 'otherTarget' }, + ], + [ + 'status:closed', + { + branchName: 'dependency-xyz', + state: 'closed' as 
FindPRConfig['state'], + }, + ], + ])( + 'query contains %p', + async (expectedQueryPart: string, config: GerritFindPRConfig) => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/') + .query((query) => query?.q?.includes(expectedQueryPart) ?? false) + .reply( + 200, + gerritRestResponse([{ _number: 1 }, { _number: 2 }]), + jsonResultHeader, + ); + await expect(client.findChanges('repo', config)).resolves.toEqual([ + { _number: 1 }, + { _number: 2 }, + ]); + }, + ); + }); + + describe('getChange()', () => { + it('get', async () => { + const change = partial({}); + httpMock + .scope(gerritEndpointUrl) + .get( + '/a/changes/123456?o=SUBMITTABLE&o=CHECK&o=MESSAGES&o=DETAILED_ACCOUNTS&o=LABELS&o=CURRENT_ACTIONS&o=CURRENT_REVISION', + ) + .reply(200, gerritRestResponse(change), jsonResultHeader); + await expect(client.getChange(123456)).resolves.toEqual(change); + }); + }); + + describe('getMergeableInfo()', () => { + it('get', async () => { + const mergeInfo: GerritMergeableInfo = { + mergeable: true, + submit_type: 'MERGE_IF_NECESSARY', + }; + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/revisions/current/mergeable') + .reply(200, gerritRestResponse(mergeInfo), jsonResultHeader); + await expect( + client.getMergeableInfo(partial({ _number: 123456 })), + ).resolves.toEqual(mergeInfo); + }); + }); + + describe('abandonChange()', () => { + it('abandon', async () => { + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/abandon') + .reply(200, gerritRestResponse({}), jsonResultHeader); + await expect(client.abandonChange(123456)).toResolve(); + }); + }); + + describe('submitChange()', () => { + it('submit', async () => { + const change = partial({}); + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/submit') + .reply(200, gerritRestResponse(change), jsonResultHeader); + await expect(client.submitChange(123456)).resolves.toEqual(change); + }); + }); + + describe('setCommitMessage()', () => { + it('setCommitMessage', 
async () => { + const change = partial({}); + httpMock + .scope(gerritEndpointUrl) + .put('/a/changes/123456/message', { message: 'new message' }) + .reply(200, gerritRestResponse(change), jsonResultHeader); + await expect(client.setCommitMessage(123456, 'new message')).toResolve(); + }); + }); + + describe('updateCommitMessage', () => { + it('updateCommitMessage - success', async () => { + const change = partial({}); + httpMock + .scope(gerritEndpointUrl) + .put('/a/changes/123456/message', { + message: `new message\n\nChange-Id: changeID\n`, + }) + .reply(200, gerritRestResponse(change), jsonResultHeader); + await expect( + client.updateCommitMessage(123456, 'changeID', 'new message'), + ).toResolve(); + }); + }); + + describe('getMessages()', () => { + it('no messages', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/messages') + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.getMessages(123456)).resolves.toEqual([]); + }); + + it('with messages', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/messages') + .reply( + 200, + gerritRestResponse([ + partial({ message: 'msg1' }), + partial({ message: 'msg2' }), + ]), + jsonResultHeader, + ); + await expect(client.getMessages(123456)).resolves.toEqual([ + { message: 'msg1' }, + { message: 'msg2' }, + ]); + }); + }); + + describe('addMessage()', () => { + it('add with tag', async () => { + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/revisions/current/review', { + message: 'message', + tag: 'tag', + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.addMessage(123456, 'message', 'tag')).toResolve(); + }); + + it('add without tag', async () => { + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/revisions/current/review', { + message: 'message', + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.addMessage(123456, 
'message')).toResolve(); + }); + + it('add too big message', async () => { + const okMessage = 'a'.repeat(0x4000); + const tooBigMessage = okMessage + 'b'; + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/revisions/current/review', { + message: okMessage, + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.addMessage(123456, tooBigMessage)).toResolve(); + }); + }); + + describe('checkForExistingMessage()', () => { + it('msg not found', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/messages') + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect( + client.checkForExistingMessage(123456, ' the message '), + ).resolves.toBeFalse(); + }); + + it('msg found', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/messages') + .reply( + 200, + gerritRestResponse([ + partial({ message: 'msg1' }), + partial({ message: 'the message' }), + ]), + jsonResultHeader, + ); + await expect( + client.checkForExistingMessage(123456, 'the message'), + ).resolves.toBeTrue(); + }); + }); + + describe('addMessageIfNotAlreadyExists()', () => { + it('msg not found', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/messages') + .reply(200, gerritRestResponse([]), jsonResultHeader); + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/revisions/current/review', { + message: 'new trimmed message', + tag: 'TAG', + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + + await expect( + client.addMessageIfNotAlreadyExists( + 123456, + ' new trimmed message\n', + 'TAG', + ), + ).toResolve(); + }); + + it('msg already exists', async () => { + httpMock + .scope(gerritEndpointUrl) + .get('/a/changes/123456/messages') + .reply( + 200, + gerritRestResponse([ + partial({ message: 'msg1', tag: 'TAG' }), + ]), + jsonResultHeader, + ); + + await expect( + client.addMessageIfNotAlreadyExists(123456, 'msg1\n', 'TAG'), + ).toResolve(); 
+ }); + }); + + describe('setLabel()', () => { + it('setLabel', async () => { + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/revisions/current/review', { + labels: { 'Code-Review': 2 }, + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.setLabel(123456, 'Code-Review', +2)).toResolve(); + }); + }); + + describe('addReviewer()', () => { + it('add', async () => { + httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/reviewers', { + reviewer: 'username', + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.addReviewer(123456, 'username')).toResolve(); + }); + }); + + describe('addAssignee()', () => { + it('add', async () => { + httpMock + .scope(gerritEndpointUrl) + .put('/a/changes/123456/assignee', { + assignee: 'username', + }) + .reply(200, gerritRestResponse([]), jsonResultHeader); + await expect(client.addAssignee(123456, 'username')).toResolve(); + }); + }); + + describe('getFile()', () => { + it('getFile() - repo and branch', async () => { + httpMock + .scope(gerritEndpointUrl) + .get( + '/a/projects/test%2Frepo/branches/main/files/renovate.json/content', + ) + .reply(200, gerritFileResponse('{}')); + await expect( + client.getFile('test/repo', 'main', 'renovate.json'), + ).resolves.toBe('{}'); + }); + }); + + describe('approveChange()', () => { + it('already approved - do nothing', async () => { + const change = partial({}); + httpMock + .scope(gerritEndpointUrl) + .get((url) => url.includes('/a/changes/123456?o=')) + .reply(200, gerritRestResponse(change), jsonResultHeader); + await expect(client.approveChange(123456)).toResolve(); + }); + + it('label not available - do nothing', async () => { + const change = partial({ labels: {} }); + httpMock + .scope(gerritEndpointUrl) + .get((url) => url.includes('/a/changes/123456?o=')) + .reply(200, gerritRestResponse(change), jsonResultHeader); + + await expect(client.approveChange(123456)).toResolve(); + }); + + it('not 
already approved - approve now', async () => { + const change = partial({ labels: { 'Code-Review': {} } }); + httpMock + .scope(gerritEndpointUrl) + .get((url) => url.includes('/a/changes/123456?o=')) + .reply(200, gerritRestResponse(change), jsonResultHeader); + const approveMock = httpMock + .scope(gerritEndpointUrl) + .post('/a/changes/123456/revisions/current/review', { + labels: { 'Code-Review': +2 }, + }) + .reply(200, gerritRestResponse(''), jsonResultHeader); + await expect(client.approveChange(123456)).toResolve(); + expect(approveMock.isDone()).toBeTrue(); + }); + }); + + describe('wasApprovedBy()', () => { + it('label not exists', () => { + expect( + client.wasApprovedBy(partial({}), 'user'), + ).toBeUndefined(); + }); + + it('not approved by anyone', () => { + expect( + client.wasApprovedBy( + partial({ + labels: { + 'Code-Review': {}, + }, + }), + 'user', + ), + ).toBeUndefined(); + }); + + it('approved by given user', () => { + expect( + client.wasApprovedBy( + partial({ + labels: { + 'Code-Review': { + approved: { + _account_id: 1, + username: 'user', + }, + }, + }, + }), + 'user', + ), + ).toBeTrue(); + }); + + it('approved by given other', () => { + expect( + client.wasApprovedBy( + partial({ + labels: { + 'Code-Review': { + approved: { + _account_id: 1, + username: 'other', + }, + }, + }, + }), + 'user', + ), + ).toBeFalse(); + }); + }); +}); + +function gerritRestResponse(body: any): any { + return `)]}'\n${JSON.stringify(body)}`; +} + +function gerritFileResponse(content: string): any { + return Buffer.from(content).toString('base64'); +} diff --git a/lib/modules/platform/gerrit/client.ts b/lib/modules/platform/gerrit/client.ts new file mode 100644 index 00000000000000..543328189ba7da --- /dev/null +++ b/lib/modules/platform/gerrit/client.ts @@ -0,0 +1,241 @@ +import { REPOSITORY_ARCHIVED } from '../../../constants/error-messages'; +import { logger } from '../../../logger'; +import { GerritHttp } from '../../../util/http/gerrit'; +import type { 
+ GerritAccountInfo, + GerritBranchInfo, + GerritChange, + GerritChangeMessageInfo, + GerritFindPRConfig, + GerritMergeableInfo, + GerritProjectInfo, +} from './types'; +import { mapPrStateToGerritFilter } from './utils'; + +class GerritClient { + private requestDetails = [ + 'SUBMITTABLE', //include the submittable field in ChangeInfo, which can be used to tell if the change is reviewed and ready for submit. + 'CHECK', // include potential problems with the change. + 'MESSAGES', + 'DETAILED_ACCOUNTS', + 'LABELS', + 'CURRENT_ACTIONS', //to check if current_revision can be "rebased" + 'CURRENT_REVISION', //get RevisionInfo::ref to fetch + ] as const; + + private gerritHttp = new GerritHttp(); + + async getRepos(): Promise { + const res = await this.gerritHttp.getJson( + 'a/projects/?type=CODE&state=ACTIVE', + {}, + ); + return Object.keys(res.body); + } + + async getProjectInfo(repository: string): Promise { + const projectInfo = await this.gerritHttp.getJson( + `a/projects/${encodeURIComponent(repository)}`, + ); + if (projectInfo.body.state !== 'ACTIVE') { + throw new Error(REPOSITORY_ARCHIVED); + } + return projectInfo.body; + } + + async getBranchInfo(repository: string): Promise { + const branchInfo = await this.gerritHttp.getJson( + `a/projects/${encodeURIComponent(repository)}/branches/HEAD`, + ); + return branchInfo.body; + } + + async findChanges( + repository: string, + findPRConfig: GerritFindPRConfig, + refreshCache?: boolean, + ): Promise { + const filters = GerritClient.buildSearchFilters(repository, findPRConfig); + const changes = await this.gerritHttp.getJson( + `a/changes/?q=` + + filters.join('+') + + this.requestDetails.map((det) => `&o=${det}`).join(''), + { memCache: !refreshCache }, + ); + logger.trace( + `findChanges(${filters.join(', ')}) => ${changes.body.length}`, + ); + return changes.body; + } + + async getChange(changeNumber: number): Promise { + const changes = await this.gerritHttp.getJson( + `a/changes/${changeNumber}?` + + 
this.requestDetails.map((det) => `o=${det}`).join('&'), + ); + return changes.body; + } + + async getMergeableInfo(change: GerritChange): Promise { + const mergeable = await this.gerritHttp.getJson( + `a/changes/${change._number}/revisions/current/mergeable`, + ); + return mergeable.body; + } + + async abandonChange(changeNumber: number): Promise { + await this.gerritHttp.postJson(`a/changes/${changeNumber}/abandon`); + } + + async submitChange(changeNumber: number): Promise { + const change = await this.gerritHttp.postJson( + `a/changes/${changeNumber}/submit`, + ); + return change.body; + } + + async setCommitMessage(changeNumber: number, message: string): Promise { + await this.gerritHttp.putJson(`a/changes/${changeNumber}/message`, { + body: { message }, + }); + } + + async updateCommitMessage( + number: number, + gerritChangeID: string, + prTitle: string, + ): Promise { + await this.setCommitMessage( + number, + `${prTitle}\n\nChange-Id: ${gerritChangeID}\n`, + ); + } + + async getMessages(changeNumber: number): Promise { + const messages = await this.gerritHttp.getJson( + `a/changes/${changeNumber}/messages`, + { memCache: false }, + ); + return messages.body; + } + + async addMessage( + changeNumber: number, + fullMessage: string, + tag?: string, + ): Promise { + const message = this.normalizeMessage(fullMessage); + await this.gerritHttp.postJson( + `a/changes/${changeNumber}/revisions/current/review`, + { body: { message, tag } }, + ); + } + + async checkForExistingMessage( + changeNumber: number, + newMessage: string, + msgType?: string, + ): Promise { + const messages = await this.getMessages(changeNumber); + return messages.some( + (existingMsg) => + (msgType === undefined || msgType === existingMsg.tag) && + existingMsg.message.includes(newMessage), + ); + } + + async addMessageIfNotAlreadyExists( + changeNumber: number, + message: string, + tag?: string, + ): Promise { + const newMsg = this.normalizeMessage(message); + if (!(await 
this.checkForExistingMessage(changeNumber, newMsg, tag))) { + await this.addMessage(changeNumber, newMsg, tag); + } + } + + async setLabel( + changeNumber: number, + label: string, + value: number, + ): Promise { + await this.gerritHttp.postJson( + `a/changes/${changeNumber}/revisions/current/review`, + { body: { labels: { [label]: value } } }, + ); + } + + async addReviewer(changeNumber: number, reviewer: string): Promise { + await this.gerritHttp.postJson(`a/changes/${changeNumber}/reviewers`, { + body: { reviewer }, + }); + } + + async addAssignee(changeNumber: number, assignee: string): Promise { + await this.gerritHttp.putJson( + `a/changes/${changeNumber}/assignee`, + { + body: { assignee }, + }, + ); + } + + async getFile( + repo: string, + branch: string, + fileName: string, + ): Promise { + const base64Content = await this.gerritHttp.get( + `a/projects/${encodeURIComponent( + repo, + )}/branches/${branch}/files/${encodeURIComponent(fileName)}/content`, + ); + return Buffer.from(base64Content.body, 'base64').toString(); + } + + async approveChange(changeId: number): Promise { + const isApproved = await this.checkIfApproved(changeId); + if (!isApproved) { + await this.setLabel(changeId, 'Code-Review', +2); + } + } + + async checkIfApproved(changeId: number): Promise { + const change = await this.getChange(changeId); + const reviewLabels = change?.labels?.['Code-Review']; + return reviewLabels === undefined || reviewLabels.approved !== undefined; + } + + wasApprovedBy(change: GerritChange, username: string): boolean | undefined { + return ( + change.labels?.['Code-Review']?.approved && + change.labels['Code-Review'].approved.username === username + ); + } + + normalizeMessage(message: string): string { + //the last \n was removed from gerrit after the comment was added... 
+ return message.substring(0, 0x4000).trim(); + } + + private static buildSearchFilters( + repository: string, + searchConfig: GerritFindPRConfig, + ): string[] { + const filterState = mapPrStateToGerritFilter(searchConfig.state); + const filters = ['owner:self', 'project:' + repository, filterState]; + if (searchConfig.branchName !== '') { + filters.push(`hashtag:sourceBranch-${searchConfig.branchName}`); + } + if (searchConfig.targetBranch) { + filters.push(`branch:${searchConfig.targetBranch}`); + } + if (searchConfig.label) { + filters.push(`label:Code-Review=${searchConfig.label}`); + } + return filters; + } +} + +export const client = new GerritClient(); diff --git a/lib/modules/platform/gerrit/index.spec.ts b/lib/modules/platform/gerrit/index.spec.ts new file mode 100644 index 00000000000000..7b5fa75db6b5ea --- /dev/null +++ b/lib/modules/platform/gerrit/index.spec.ts @@ -0,0 +1,764 @@ +import { git, mocked, partial } from '../../../../test/util'; +import { REPOSITORY_ARCHIVED } from '../../../constants/error-messages'; +import type { BranchStatus } from '../../../types'; +import * as _hostRules from '../../../util/host-rules'; +import { repoFingerprint } from '../util'; +import { client as _client } from './client'; +import type { + GerritAccountInfo, + GerritChange, + GerritChangeMessageInfo, + GerritLabelInfo, + GerritLabelTypeInfo, + GerritProjectInfo, +} from './types'; +import { TAG_PULL_REQUEST_BODY, mapGerritChangeToPr } from './utils'; +import { writeToConfig } from '.'; +import * as gerrit from '.'; + +const gerritEndpointUrl = 'https://dev.gerrit.com/renovate'; + +const codeReviewLabel: GerritLabelTypeInfo = { + values: { + '-2': 'bad', + '-1': 'unlikely', + 0: 'neutral', + 1: 'ok', + 2: 'good', + }, + default_value: 0, +}; + +jest.mock('../../../util/host-rules'); +jest.mock('../../../util/git'); +jest.mock('./client'); +const clientMock = mocked(_client); +const hostRules = mocked(_hostRules); + +describe('modules/platform/gerrit/index', () => { 
+ beforeEach(async () => { + hostRules.find.mockReturnValue({ + username: 'user', + password: 'pass', + }); + writeToConfig({ + repository: 'test/repo', + labels: {}, + }); + await gerrit.initPlatform({ + endpoint: gerritEndpointUrl, + username: 'user', + password: 'pass', + }); + }); + + describe('initPlatform()', () => { + it('should throw if no endpoint', () => { + expect.assertions(1); + expect(() => gerrit.initPlatform({})).toThrow(); + }); + + it('should throw if no username/password', () => { + expect.assertions(1); + expect(() => gerrit.initPlatform({ endpoint: 'endpoint' })).toThrow(); + }); + + it('should init', async () => { + expect( + await gerrit.initPlatform({ + endpoint: gerritEndpointUrl, + username: 'abc', + password: '123', + }), + ).toEqual({ endpoint: 'https://dev.gerrit.com/renovate/' }); + }); + }); + + describe('getRepos()', () => { + it('returns repos', async () => { + clientMock.getRepos.mockResolvedValueOnce(['repo1', 'repo2']); + expect(await gerrit.getRepos()).toEqual(['repo1', 'repo2']); + }); + }); + + it('initRepo() - inactive', async () => { + clientMock.getProjectInfo.mockRejectedValueOnce( + new Error(REPOSITORY_ARCHIVED), + ); + await expect(gerrit.initRepo({ repository: 'test/repo' })).rejects.toThrow( + REPOSITORY_ARCHIVED, + ); + }); + + describe('initRepo()', () => { + const projectInfo: GerritProjectInfo = { + id: 'repo1', + name: 'test-repo2', + }; + + beforeEach(() => { + clientMock.getBranchInfo.mockResolvedValueOnce({ + ref: 'sha-hash....', + revision: 'main', + }); + }); + + it('initRepo() - active', async () => { + clientMock.getProjectInfo.mockResolvedValueOnce(projectInfo); + clientMock.findChanges.mockResolvedValueOnce([]); + expect(await gerrit.initRepo({ repository: 'test/repo' })).toEqual({ + defaultBranch: 'main', + isFork: false, + repoFingerprint: repoFingerprint('test/repo', `${gerritEndpointUrl}/`), + }); + expect(git.initRepo).toHaveBeenCalledWith({ + url: 
'https://user:pass@dev.gerrit.com/renovate/a/test%2Frepo', + }); + }); + + it('initRepo() - abandon rejected changes', async () => { + clientMock.getProjectInfo.mockResolvedValueOnce({ + ...projectInfo, + labels: { 'Code-Review': codeReviewLabel }, + }); + clientMock.findChanges.mockResolvedValueOnce([ + partial({ _number: 1 }), + partial({ _number: 2 }), + ]); + + await gerrit.initRepo({ repository: 'test/repo' }); + + expect(clientMock.findChanges.mock.calls[0]).toEqual([ + 'test/repo', + { branchName: '', label: '-2', state: 'open' }, + ]); + expect(clientMock.abandonChange.mock.calls).toEqual([[1], [2]]); + }); + }); + + describe('findPr()', () => { + it('findPr() - no results', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerrit.findPr({ branchName: 'branch', state: 'open' }), + ).resolves.toBeNull(); + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { branchName: 'branch', state: 'open' }, + undefined, + ); + }); + + it('findPr() - return the last change from search results', async () => { + clientMock.findChanges.mockResolvedValueOnce([ + partial({ _number: 1 }), + partial({ _number: 2 }), + ]); + await expect( + gerrit.findPr({ branchName: 'branch', state: 'open' }), + ).resolves.toHaveProperty('number', 2); + }); + }); + + describe('getPr()', () => { + it('getPr() - found', async () => { + const change = partial({}); + clientMock.getChange.mockResolvedValueOnce(change); + await expect(gerrit.getPr(123456)).resolves.toEqual( + mapGerritChangeToPr(change), + ); + expect(clientMock.getChange).toHaveBeenCalledWith(123456); + }); + + it('getPr() - not found', async () => { + clientMock.getChange.mockRejectedValueOnce({ statusCode: 404 }); + await expect(gerrit.getPr(123456)).resolves.toBeNull(); + }); + + it('getPr() - other error', async () => { + clientMock.getChange.mockRejectedValueOnce(new Error('other error')); + await expect(gerrit.getPr(123456)).rejects.toThrow(); + }); + }); + + 
describe('updatePr()', () => { + beforeAll(() => { + gerrit.writeToConfig({ labels: {} }); + }); + + it('updatePr() - new prTitle => copy to commit msg', async () => { + const change = partial({ + change_id: '...', + subject: 'old title', + }); + clientMock.getChange.mockResolvedValueOnce(change); + await gerrit.updatePr({ number: 123456, prTitle: 'new title' }); + expect(clientMock.updateCommitMessage).toHaveBeenCalledWith( + 123456, + '...', + 'new title', + ); + }); + + it('updatePr() - auto approve enabled', async () => { + const change = partial({}); + clientMock.getChange.mockResolvedValueOnce(change); + await gerrit.updatePr({ + number: 123456, + prTitle: 'subject', + platformOptions: { + autoApprove: true, + }, + }); + expect(clientMock.approveChange).toHaveBeenCalledWith(123456); + }); + + it('updatePr() - closed => abandon the change', async () => { + const change = partial({}); + clientMock.getChange.mockResolvedValueOnce(change); + await gerrit.updatePr({ + number: 123456, + prTitle: change.subject, + state: 'closed', + }); + expect(clientMock.abandonChange).toHaveBeenCalledWith(123456); + }); + + it('updatePr() - existing prBody found in change.messages => nothing todo...', async () => { + const change = partial({}); + clientMock.getChange.mockResolvedValueOnce(change); + clientMock.getMessages.mockResolvedValueOnce([ + partial({ + tag: TAG_PULL_REQUEST_BODY, + message: 'Last PR-Body', + }), + ]); + await gerrit.updatePr({ + number: 123456, + prTitle: 'title', + prBody: 'Last PR-Body', + }); + expect(clientMock.addMessage).not.toHaveBeenCalled(); + }); + + it('updatePr() - new prBody found in change.messages => add as message', async () => { + const change = partial({}); + clientMock.getChange.mockResolvedValueOnce(change); + clientMock.getMessages.mockResolvedValueOnce([]); + await gerrit.updatePr({ + number: 123456, + prTitle: change.subject, + prBody: 'NEW PR-Body', + }); + expect(clientMock.addMessageIfNotAlreadyExists).toHaveBeenCalledWith( + 
123456, + 'NEW PR-Body', + TAG_PULL_REQUEST_BODY, + ); + }); + }); + + describe('createPr() - error ', () => { + it('createPr() - no existing found => rejects', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerrit.createPr({ + sourceBranch: 'source', + targetBranch: 'target', + prTitle: 'title', + prBody: 'body', + }), + ).rejects.toThrow( + `the change should be created automatically from previous push to refs/for/source`, + ); + }); + }); + + describe('createPr() - success', () => { + beforeAll(() => { + gerrit.writeToConfig({ labels: {} }); + }); + + const change = partial({ + _number: 123456, + change_id: '...', + }); + + beforeEach(() => { + clientMock.findChanges.mockResolvedValueOnce([change]); + clientMock.getChange.mockResolvedValueOnce(change); + clientMock.getMessages.mockResolvedValueOnce([ + partial({ + tag: TAG_PULL_REQUEST_BODY, + message: 'Last PR-Body', + }), + ]); + }); + + it('createPr() - update body/title WITHOUT approve', async () => { + const pr = await gerrit.createPr({ + sourceBranch: 'source', + targetBranch: 'target', + prTitle: 'title', + prBody: 'body', + platformOptions: { + autoApprove: false, + }, + }); + expect(pr).toHaveProperty('number', 123456); + expect(clientMock.addMessageIfNotAlreadyExists).toHaveBeenCalledWith( + 123456, + 'body', + TAG_PULL_REQUEST_BODY, + ); + expect(clientMock.approveChange).not.toHaveBeenCalled(); + expect(clientMock.updateCommitMessage).toHaveBeenCalledWith( + 123456, + '...', + 'title', + ); + }); + + it('createPr() - update body and approve', async () => { + const pr = await gerrit.createPr({ + sourceBranch: 'source', + targetBranch: 'target', + prTitle: change.subject, + prBody: 'body', + platformOptions: { + autoApprove: true, + }, + }); + expect(pr).toHaveProperty('number', 123456); + expect(clientMock.addMessageIfNotAlreadyExists).toHaveBeenCalledWith( + 123456, + 'body', + TAG_PULL_REQUEST_BODY, + ); + 
expect(clientMock.approveChange).toHaveBeenCalledWith(123456); + expect(clientMock.setCommitMessage).not.toHaveBeenCalled(); + }); + }); + + describe('getBranchPr()', () => { + it('getBranchPr() - no result', async () => { + clientMock.findChanges.mockResolvedValue([]); + await expect( + gerrit.getBranchPr('renovate/dependency-1.x'), + ).resolves.toBeNull(); + expect(clientMock.findChanges).toHaveBeenCalledWith('test/repo', { + branchName: 'renovate/dependency-1.x', + state: 'open', + }); + }); + + it('getBranchPr() - found', async () => { + const change = partial({ + _number: 123456, + }); + clientMock.findChanges.mockResolvedValue([change]); + await expect( + gerrit.getBranchPr('renovate/dependency-1.x'), + ).resolves.toHaveProperty('number', 123456); + expect(clientMock.findChanges.mock.lastCall).toEqual([ + 'test/repo', + { state: 'open', branchName: 'renovate/dependency-1.x' }, + ]); + }); + }); + + describe('getPrList()', () => { + it('getPrList() - empty list', async () => { + clientMock.findChanges.mockResolvedValue([]); + await expect(gerrit.getPrList()).resolves.toEqual([]); + expect(clientMock.findChanges).toHaveBeenCalledWith('test/repo', { + branchName: '', + }); + }); + + it('getPrList() - multiple results', async () => { + const change = partial({}); + clientMock.findChanges.mockResolvedValue([change, change, change]); + await expect(gerrit.getPrList()).resolves.toHaveLength(3); + }); + }); + + describe('mergePr()', () => { + it('mergePr() - blocker by Verified', async () => { + clientMock.submitChange.mockRejectedValueOnce({ + statusCode: 409, + message: 'blocked by Verified', + }); + await expect(gerrit.mergePr({ id: 123456 })).resolves.toBeFalse(); + expect(clientMock.submitChange).toHaveBeenCalledWith(123456); + }); + + it('mergePr() - success', async () => { + clientMock.submitChange.mockResolvedValueOnce( + partial({ status: 'MERGED' }), + ); + await expect(gerrit.mergePr({ id: 123456 })).resolves.toBeTrue(); + }); + + it('mergePr() - other 
errors', async () => { + clientMock.submitChange.mockRejectedValueOnce( + new Error('any other error'), + ); + await expect(gerrit.mergePr({ id: 123456 })).rejects.toThrow(); + }); + }); + + describe('getBranchStatus()', () => { + it('getBranchStatus() - branchname/change not found => yellow', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerrit.getBranchStatus('renovate/dependency-1.x'), + ).resolves.toBe('yellow'); + }); + + it('getBranchStatus() - branchname/changes found, submittable and not hasProblems => green', async () => { + const change = partial({ + submittable: true, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect( + gerrit.getBranchStatus('renovate/dependency-1.x'), + ).resolves.toBe('green'); + }); + + it('getBranchStatus() - branchname/changes found and hasProblems => red', async () => { + const submittableChange = partial({ + submittable: true, + problems: [], + }); + const changeWithProblems = { ...submittableChange }; + changeWithProblems.submittable = false; + changeWithProblems.problems = [ + { message: 'error1' }, + { message: 'error2' }, + ]; + clientMock.findChanges.mockResolvedValueOnce([ + changeWithProblems, + submittableChange, + ]); + await expect( + gerrit.getBranchStatus('renovate/dependency-1.x'), + ).resolves.toBe('red'); + }); + }); + + describe('getBranchStatusCheck()', () => { + describe('GerritLabel is not available', () => { + beforeAll(() => { + writeToConfig({ labels: {} }); + }); + + it.each([ + 'unknownCtx', + 'renovate/stability-days', + 'renovate/merge-confidence', + ])('getBranchStatusCheck() - %s ', async (ctx) => { + await expect( + gerrit.getBranchStatusCheck('renovate/dependency-1.x', ctx), + ).resolves.toBe('yellow'); + expect(clientMock.findChanges).not.toHaveBeenCalled(); + }); + }); + + describe('GerritLabel is available', () => { + beforeEach(() => { + writeToConfig({ + labels: { + 'Renovate-Merge-Confidence': { + values: { '0': 'default', '-1': 
'Unsatisfied', '1': 'Satisfied' }, + default_value: 0, + }, + }, + }); + }); + + it.each([ + { + label: 'Renovate-Merge-Confidence', + labelValue: { rejected: partial({}) }, + expectedState: 'red' as BranchStatus, + }, + { + label: 'Renovate-Merge-Confidence', + labelValue: { approved: partial({}) }, + expectedState: 'green' as BranchStatus, + }, + ])('$ctx/$labels', async ({ label, labelValue, expectedState }) => { + const change = partial({ + labels: { + [label]: partial({ ...labelValue }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect( + gerrit.getBranchStatusCheck('renovate/dependency-1.x', label), + ).resolves.toBe(expectedState); + }); + }); + }); + + describe('setBranchStatus()', () => { + describe('GerritLabel is not available', () => { + beforeEach(() => { + writeToConfig({ labels: {} }); + }); + + it('setBranchStatus(renovate/stability-days)', async () => { + await expect( + gerrit.setBranchStatus({ + branchName: 'branch', + context: 'renovate/stability-days', + state: 'red', + description: 'desc', + }), + ).resolves.toBeUndefined(); + expect(clientMock.setLabel).not.toHaveBeenCalled(); + }); + + it('setBranchStatus(renovate/merge-confidence)', async () => { + await expect( + gerrit.setBranchStatus({ + branchName: 'branch', + context: 'renovate/merge-confidence', + state: 'red', + description: 'desc', + }), + ).resolves.toBeUndefined(); + }); + }); + + describe('GerritLabel is available', () => { + beforeEach(() => { + writeToConfig({ + labels: { + 'Renovate-Merge-Confidence': { + values: { '0': 'default', '-1': 'Unsatisfied', '1': 'Satisfied' }, + default_value: 0, + }, + }, + }); + }); + + it.each([ + { + ctx: 'Renovate-Merge-Confidence', + branchState: 'red' as BranchStatus, + expectedVote: -1, + expectedLabel: 'Renovate-Merge-Confidence', + }, + { + ctx: 'Renovate-Merge-Confidence', + branchState: 'yellow' as BranchStatus, + expectedVote: -1, + expectedLabel: 'Renovate-Merge-Confidence', + }, + { + ctx: 
'Renovate-Merge-Confidence', + branchState: 'green' as BranchStatus, + expectedVote: 1, + expectedLabel: 'Renovate-Merge-Confidence', + }, + ])( + '$ctx/$branchState', + async ({ ctx, branchState, expectedVote, expectedLabel }) => { + const change = partial({ _number: 123456 }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await gerrit.setBranchStatus({ + branchName: 'renovate/dependency-1.x', + context: ctx, + state: branchState, + description: 'desc', + }); + expect(clientMock.setLabel).toHaveBeenCalledWith( + 123456, + expectedLabel, + expectedVote, + ); + }, + ); + + it('no change found', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerrit.setBranchStatus({ + branchName: 'renovate/dependency-1.x', + context: 'Renovate-Merge-Confidence', + state: 'red', + description: 'desc', + }), + ).resolves.toBeUndefined(); + expect(clientMock.setLabel).not.toHaveBeenCalled(); + }); + }); + }); + + describe('addReviewers()', () => { + it('addReviewers() - add reviewers', async () => { + await expect( + gerrit.addReviewers(123456, ['user1', 'user2']), + ).resolves.toBeUndefined(); + expect(clientMock.addReviewer).toHaveBeenCalledTimes(2); + expect(clientMock.addReviewer).toHaveBeenNthCalledWith( + 1, + 123456, + 'user1', + ); + expect(clientMock.addReviewer).toHaveBeenNthCalledWith( + 2, + 123456, + 'user2', + ); + }); + }); + + describe('addAssignees()', () => { + it('addAssignees() - set assignee', async () => { + await expect( + gerrit.addAssignees(123456, ['user1', 'user2']), + ).resolves.toBeUndefined(); + expect(clientMock.addAssignee).toHaveBeenCalledTimes(1); + expect(clientMock.addAssignee).toHaveBeenCalledWith(123456, 'user1'); + }); + }); + + describe('ensureComment()', () => { + it('ensureComment() - without tag', async () => { + await expect( + gerrit.ensureComment({ + number: 123456, + topic: null, + content: 'My-Comment-Msg', + }), + ).resolves.toBeTrue(); + 
expect(clientMock.addMessageIfNotAlreadyExists).toHaveBeenCalledWith( + 123456, + 'My-Comment-Msg', + undefined, + ); + }); + + it('ensureComment() - with tag', async () => { + await expect( + gerrit.ensureComment({ + number: 123456, + topic: 'myTopic', + content: 'My-Comment-Msg', + }), + ).resolves.toBeTrue(); + expect(clientMock.addMessageIfNotAlreadyExists).toHaveBeenCalledWith( + 123456, + 'My-Comment-Msg', + 'myTopic', + ); + }); + }); + + describe('getRawFile()', () => { + beforeEach(() => { + clientMock.getFile.mockResolvedValueOnce('{}'); + }); + + it('getRawFile() - repo and branch', async () => { + await expect( + gerrit.getRawFile('renovate.json', 'test/repo', 'main'), + ).resolves.toBe('{}'); + expect(clientMock.getFile).toHaveBeenCalledWith( + 'test/repo', + 'main', + 'renovate.json', + ); + }); + + it('getRawFile() - repo/branch from config', async () => { + writeToConfig({ + repository: 'repo', + head: 'master', + labels: {}, + }); + await expect(gerrit.getRawFile('renovate.json')).resolves.toBe('{}'); + expect(clientMock.getFile).toHaveBeenCalledWith( + 'repo', + 'master', + 'renovate.json', + ); + }); + + it('getRawFile() - repo/branch defaults', async () => { + writeToConfig({ + repository: undefined, + head: undefined, + labels: {}, + }); + await expect(gerrit.getRawFile('renovate.json')).resolves.toBe('{}'); + expect(clientMock.getFile).toHaveBeenCalledWith( + 'All-Projects', + 'HEAD', + 'renovate.json', + ); + }); + }); + + describe('getJsonFile()', () => { + //TODO: the wanted semantic is not clear + it('getJsonFile()', async () => { + clientMock.getFile.mockResolvedValueOnce('{}'); + await expect( + gerrit.getJsonFile('renovate.json', 'test/repo', 'main'), + ).resolves.toEqual({}); + }); + }); + + describe('getRepoForceRebase()', () => { + it('getRepoForceRebase()', async () => { + await expect(gerrit.getRepoForceRebase()).resolves.toBeFalse(); + }); + }); + + describe('massageMarkdown()', () => { + it('massageMarkdown()', () => { + 
expect(gerrit.massageMarkdown('Pull Requests')).toBe('Change-Requests'); + }); + //TODO: add some tests for Gerrit-specific replacements.. + }); + + describe('currently unused/not-implemented functions', () => { + it('deleteLabel()', async () => { + await expect( + gerrit.deleteLabel(123456, 'label'), + ).resolves.toBeUndefined(); + }); + + it('ensureCommentRemoval()', async () => { + await expect( + gerrit.ensureCommentRemoval({ + type: 'by-topic', + number: 123456, + topic: 'topic', + }), + ).resolves.toBeUndefined(); + }); + + it('ensureIssueClosing()', async () => { + await expect(gerrit.ensureIssueClosing('title')).resolves.toBeUndefined(); + }); + + it('ensureIssue()', async () => { + await expect( + gerrit.ensureIssue({ body: 'body', title: 'title' }), + ).resolves.toBeNull(); + }); + + it('findIssue()', async () => { + await expect(gerrit.findIssue('title')).resolves.toBeNull(); + }); + + it('getIssueList()', async () => { + await expect(gerrit.getIssueList()).resolves.toStrictEqual([]); + }); + }); +}); diff --git a/lib/modules/platform/gerrit/index.ts b/lib/modules/platform/gerrit/index.ts new file mode 100644 index 00000000000000..8b7c8abd44c15d --- /dev/null +++ b/lib/modules/platform/gerrit/index.ts @@ -0,0 +1,454 @@ +import { logger } from '../../../logger'; +import type { BranchStatus } from '../../../types'; +import { parseJson } from '../../../util/common'; +import * as git from '../../../util/git'; +import { setBaseUrl } from '../../../util/http/gerrit'; +import { regEx } from '../../../util/regex'; +import { ensureTrailingSlash } from '../../../util/url'; +import type { + BranchStatusConfig, + CreatePRConfig, + EnsureCommentConfig, + EnsureCommentRemovalConfigByContent, + EnsureCommentRemovalConfigByTopic, + EnsureIssueConfig, + EnsureIssueResult, + FindPRConfig, + Issue, + MergePRConfig, + PlatformParams, + PlatformResult, + Pr, + RepoParams, + RepoResult, + UpdatePrConfig, +} from '../types'; +import { repoFingerprint } from '../util'; + 
+import { smartTruncate } from '../utils/pr-body'; +import { readOnlyIssueBody } from '../utils/read-only-issue-body'; +import { client } from './client'; +import { configureScm } from './scm'; +import type { GerritLabelTypeInfo, GerritProjectInfo } from './types'; +import { + TAG_PULL_REQUEST_BODY, + getGerritRepoUrl, + mapBranchStatusToLabel, + mapGerritChangeToPr, +} from './utils'; + +export const id = 'gerrit'; + +const defaults: { + endpoint?: string; +} = {}; + +let config: { + repository?: string; + head?: string; + config?: GerritProjectInfo; + labels: Record; + gerritUsername?: string; +} = { + labels: {}, +}; + +export function writeToConfig(newConfig: typeof config): void { + config = { ...config, ...newConfig }; +} + +export function initPlatform({ + endpoint, + username, + password, +}: PlatformParams): Promise { + logger.debug(`initPlatform(${endpoint!}, ${username!})`); + if (!endpoint) { + throw new Error('Init: You must configure a Gerrit Server endpoint'); + } + if (!(username && password)) { + throw new Error( + 'Init: You must configure a Gerrit Server username/password', + ); + } + config.gerritUsername = username; + defaults.endpoint = ensureTrailingSlash(endpoint); + setBaseUrl(defaults.endpoint); + const platformConfig: PlatformResult = { + endpoint: defaults.endpoint, + }; + return Promise.resolve(platformConfig); +} + +/** + * Get all state="ACTIVE" and type="CODE" repositories from gerrit + */ +export async function getRepos(): Promise { + logger.debug(`getRepos()`); + return await client.getRepos(); +} + +/** + * Clone repository to local directory + * @param config + */ +export async function initRepo({ + repository, + gitUrl, +}: RepoParams): Promise { + logger.debug(`initRepo(${repository}, ${gitUrl!})`); + const projectInfo = await client.getProjectInfo(repository); + const branchInfo = await client.getBranchInfo(repository); + + config = { + ...config, + repository, + head: branchInfo.revision, + config: projectInfo, + labels: 
projectInfo.labels ?? {}, + }; + const baseUrl = defaults.endpoint!; + const url = getGerritRepoUrl(repository, baseUrl); + configureScm(repository, config.gerritUsername!); + await git.initRepo({ url }); + + //abandon "open" and "rejected" changes at startup + const rejectedChanges = await client.findChanges(config.repository!, { + branchName: '', + state: 'open', + label: '-2', + }); + for (const change of rejectedChanges) { + await client.abandonChange(change._number); + } + const repoConfig: RepoResult = { + defaultBranch: config.head!, + isFork: false, + repoFingerprint: repoFingerprint(repository, baseUrl), + }; + return repoConfig; +} + +export async function findPr( + findPRConfig: FindPRConfig, + refreshCache?: boolean, +): Promise { + const change = ( + await client.findChanges(config.repository!, findPRConfig, refreshCache) + ).pop(); + return change ? mapGerritChangeToPr(change) : null; +} + +export async function getPr(number: number): Promise { + try { + const change = await client.getChange(number); + return mapGerritChangeToPr(change); + } catch (err) { + if (err.statusCode === 404) { + return null; + } + throw err; + } +} + +export async function updatePr(prConfig: UpdatePrConfig): Promise { + logger.debug(`updatePr(${prConfig.number}, ${prConfig.prTitle})`); + const change = await client.getChange(prConfig.number); + if (change.subject !== prConfig.prTitle) { + await client.updateCommitMessage( + prConfig.number, + change.change_id, + prConfig.prTitle, + ); + } + if (prConfig.prBody) { + await client.addMessageIfNotAlreadyExists( + prConfig.number, + prConfig.prBody, + TAG_PULL_REQUEST_BODY, + ); + } + if (prConfig.platformOptions?.autoApprove) { + await client.approveChange(prConfig.number); + } + if (prConfig.state && prConfig.state === 'closed') { + await client.abandonChange(prConfig.number); + } +} + +export async function createPr(prConfig: CreatePRConfig): Promise { + logger.debug( + `createPr(${prConfig.sourceBranch}, ${prConfig.prTitle}, 
${ + prConfig.labels?.toString() ?? '' + })`, + ); + const pr = ( + await client.findChanges( + config.repository!, + { + branchName: prConfig.sourceBranch, + targetBranch: prConfig.targetBranch, + state: 'open', + }, + true, + ) + ).pop(); + if (pr === undefined) { + throw new Error( + `the change should be created automatically from previous push to refs/for/${prConfig.sourceBranch}`, + ); + } + //Workaround for "Known Problems.1" + if (pr.subject !== prConfig.prTitle) { + await client.updateCommitMessage( + pr._number, + pr.change_id, + prConfig.prTitle, + ); + } + await client.addMessageIfNotAlreadyExists( + pr._number, + prConfig.prBody, + TAG_PULL_REQUEST_BODY, + ); + if (prConfig.platformOptions?.autoApprove) { + await client.approveChange(pr._number); + } + return getPr(pr._number); +} + +export async function getBranchPr(branchName: string): Promise { + const change = ( + await client.findChanges(config.repository!, { branchName, state: 'open' }) + ).pop(); + return change ? mapGerritChangeToPr(change) : null; +} + +export function getPrList(): Promise { + return client + .findChanges(config.repository!, { branchName: '' }) + .then((res) => res.map((change) => mapGerritChangeToPr(change))); +} + +export async function mergePr(config: MergePRConfig): Promise { + logger.debug( + `mergePr(${config.id}, ${config.branchName!}, ${config.strategy!})`, + ); + try { + const change = await client.submitChange(config.id); + return change.status === 'MERGED'; + } catch (err) { + if (err.statusCode === 409) { + logger.warn( + { err }, + "Can't submit the change, because the submit rule doesn't allow it.", + ); + return false; + } + throw err; + } +} + +/** + * BranchStatus for Gerrit assumes that the branchName refers to a change. 
+ * @param branchName + */ +export async function getBranchStatus( + branchName: string, +): Promise { + logger.debug(`getBranchStatus(${branchName})`); + const changes = await client.findChanges( + config.repository!, + { state: 'open', branchName }, + true, + ); + if (changes.length > 0) { + const allSubmittable = + changes.filter((change) => change.submittable === true).length === + changes.length; + if (allSubmittable) { + return 'green'; + } + const hasProblems = + changes.filter((change) => change.problems.length > 0).length > 0; + if (hasProblems) { + return 'red'; + } + } + return 'yellow'; +} + +/** + * check the gerrit-change for the presence of the corresponding "$context" Gerrit label if configured, + * return 'yellow' if not configured or not set + * @param branchName + * @param context renovate/stability-days || ... + */ +export async function getBranchStatusCheck( + branchName: string, + context: string, +): Promise { + const label = config.labels[context]; + if (label) { + const change = ( + await client.findChanges( + config.repository!, + { branchName, state: 'open' }, + true, + ) + ).pop(); + if (change) { + const labelRes = change.labels?.[context]; + if (labelRes) { + if (labelRes.approved) { + return 'green'; + } + if (labelRes.rejected) { + return 'red'; + } + } + } + } + return 'yellow'; +} + +/** + * Apply the branch state $context to the corresponding gerrit label (if available) + * context === "renovate/stability-days" / "renovate/merge-confidence" and state === "green"/... 
+ * @param branchStatusConfig + */ +export async function setBranchStatus( + branchStatusConfig: BranchStatusConfig, +): Promise { + const label = config.labels[branchStatusConfig.context]; + const labelValue = + label && mapBranchStatusToLabel(branchStatusConfig.state, label); + if (branchStatusConfig.context && labelValue) { + const pr = await getBranchPr(branchStatusConfig.branchName); + if (pr === null) { + return; + } + await client.setLabel(pr.number, branchStatusConfig.context, labelValue); + } +} + +export function getRawFile( + fileName: string, + repoName?: string, + branchOrTag?: string, +): Promise { + const repo = repoName ?? config.repository ?? 'All-Projects'; + const branch = + branchOrTag ?? (repo === config.repository ? config.head! : 'HEAD'); + return client.getFile(repo, branch, fileName); +} + +export async function getJsonFile( + fileName: string, + repoName?: string, + branchOrTag?: string, +): Promise { + const raw = await getRawFile(fileName, repoName, branchOrTag); + return parseJson(raw, fileName); +} + +export function getRepoForceRebase(): Promise { + return Promise.resolve(false); +} + +export async function addReviewers( + number: number, + reviewers: string[], +): Promise { + for (const reviewer of reviewers) { + await client.addReviewer(number, reviewer); + } +} + +/** + * add "CC" (only one possible) + */ +export async function addAssignees( + number: number, + assignees: string[], +): Promise { + if (assignees.length) { + if (assignees.length > 1) { + logger.debug( + `addAssignees(${number}, ${assignees.toString()}) called with more then one assignee! Gerrit only supports one assignee! 
Using the first from list.`, + ); + } + await client.addAssignee(number, assignees[0]); + } +} + +export async function ensureComment( + ensureComment: EnsureCommentConfig, +): Promise { + logger.debug( + `ensureComment(${ensureComment.number}, ${ensureComment.topic!}, ${ + ensureComment.content + })`, + ); + await client.addMessageIfNotAlreadyExists( + ensureComment.number, + ensureComment.content, + ensureComment.topic ?? undefined, + ); + return true; +} + +export function massageMarkdown(prBody: string): string { + //TODO: do more Gerrit specific replacements? + return smartTruncate(readOnlyIssueBody(prBody), 16384) //TODO: check the real gerrit limit (max. chars) + .replace(regEx(/Pull Request(s)?/g), 'Change-Request$1') + .replace(regEx(/\bPR(s)?\b/g), 'Change-Request$1') + .replace(regEx(/<\/?summary>/g), '**') + .replace(regEx(/<\/?details>/g), '') + .replace(regEx(/​/g), '') //remove zero-width-space not supported in gerrit-markdown + .replace( + 'close this Change-Request unmerged.', + 'abandon or down vote this Change-Request with -2.', + ) + .replace('Branch creation', 'Change creation') + .replace( + 'Close this Change-Request', + 'Down-vote this Change-Request with -2', + ) + .replace( + 'you tick the rebase/retry checkbox', + 'add "rebase!" 
at the beginning of the commit message.', + ) + .replace(regEx(`\n---\n\n.*?.*?\n`), '') + .replace(regEx(//g), ''); +} + +export function deleteLabel(number: number, label: string): Promise { + return Promise.resolve(); +} + +export function ensureCommentRemoval( + ensureCommentRemoval: + | EnsureCommentRemovalConfigByTopic + | EnsureCommentRemovalConfigByContent, +): Promise { + return Promise.resolve(); +} + +export function ensureIssueClosing(title: string): Promise { + return Promise.resolve(); +} + +export function ensureIssue( + issueConfig: EnsureIssueConfig, +): Promise { + return Promise.resolve(null); +} + +export function findIssue(title: string): Promise { + return Promise.resolve(null); +} + +export function getIssueList(): Promise { + return Promise.resolve([]); +} diff --git a/lib/modules/platform/gerrit/readme.md b/lib/modules/platform/gerrit/readme.md new file mode 100644 index 00000000000000..ec753c7ab66e1a --- /dev/null +++ b/lib/modules/platform/gerrit/readme.md @@ -0,0 +1,70 @@ +# Gerrit + +## Supported Gerrit versions + +Renovate supports all Gerrit 3.x versions. +Support for Gerrit is currently _experimental_, meaning that it _might_ still have some undiscovered bugs or design limitations, and that we _might_ need to change functionality in a non-backwards compatible manner in a non-major release. + +The current implementation uses Gerrit's "hashtags" feature. +Therefore you must use a Gerrit version that uses the [NoteDB](https://gerrit-review.googlesource.com/Documentation/note-db.html) backend. +We did not test Gerrit `2.x` with NoteDB (only in `2.15` and `2.16`), but could work. + +## Authentication + +
+ ![Gerrit HTTP access token](../../../assets/images/gerrit-http-password.png){ loading=lazy } +
First, create an HTTP access token for the Renovate account.
+
+ +Let Renovate use your HTTP access token by doing _one_ of the following: + +- Set your HTTP access token as a `password` in your `config.js` file, or +- Set your HTTP access token as an environment variable `RENOVATE_PASSWORD`, or +- Set your HTTP access token when you run Renovate in the CLI with `--password=` + +The Gerrit user account must be allowed to assign the Code-Review label with "+2" to their own changes for "automerge" to work. + +You must set `platform=gerrit` in your Renovate config file. + +## Renovate PR/Branch-Model with Gerrit and needed permissions + +If you use the "Code-Review" label and want to get `automerge` working then you must set `autoApprove=true` in your Renovate config. +Renovate will now add the _Code-Review_ label with the value "+2" to each of its "pull requests" (Gerrit-Change). + + +!!! note + The bot's user account must have permission to give +2 for the Code-Review label. + +The Renovate option `automergeType: "branch"` makes no sense for Gerrit, because there are no branches used to create pull requests. +It works similar to the default option `"pr"`. + +## Optional features + +You can use the `statusCheckNames` configuration to map any of the available branch checks (like `minimumReleaseAge`, `mergeConfidence`, and so on) to a Gerrit label. + +For example, if you want to use the [Merge Confidence](https://docs.renovatebot.com/merge-confidence/) feature and map the result of the Merge Confidence check to your Gerrit label "Renovate-Merge-Confidence" you can configure: + +```json +{ + "statusCheckNames": { + "mergeConfidence": "Renovate-Merge-Confidence" + } +} +``` + +## Unsupported platform features/concepts + +- Creating issues (not a Gerrit concept) +- Dependency Dashboard (needs issues first) + +## Known problems + +### PR title is different from first commit message + +Sometimes the PR title passed to the Gerrit platform code is different from the first line of the commit message. 
+For example: + +Commit-Message=`Update keycloak.version to v21` \ +Pull-Request-Title=`Update keycloak.version to v21 (major)` + +In this case the Gerrit-Platform implementation tries to detect this and change the commit-message in a second patch-set. diff --git a/lib/modules/platform/gerrit/scm.spec.ts b/lib/modules/platform/gerrit/scm.spec.ts new file mode 100644 index 00000000000000..18667a04c4d4c5 --- /dev/null +++ b/lib/modules/platform/gerrit/scm.spec.ts @@ -0,0 +1,396 @@ +import { git, mocked, partial } from '../../../../test/util'; +import type { LongCommitSha } from '../../../util/git/types'; +import { client as _client } from './client'; +import { GerritScm, configureScm } from './scm'; +import type { + GerritAccountInfo, + GerritChange, + GerritRevisionInfo, +} from './types'; + +jest.mock('../../../util/git'); +jest.mock('./client'); +const clientMock = mocked(_client); + +describe('modules/platform/gerrit/scm', () => { + const gerritScm = new GerritScm(); + + beforeEach(() => { + configureScm('test/repo', 'user'); + }); + + describe('isBranchBehindBase()', () => { + it('no open change for with branchname found -> isBehind == true', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerritScm.isBranchBehindBase('myBranchName', 'baseBranch'), + ).resolves.toBeTrue(); + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { + branchName: 'myBranchName', + state: 'open', + targetBranch: 'baseBranch', + }, + true, + ); + }); + + it('open change found for branchname, rebase action is available -> isBehind == true ', async () => { + const change = partial({ + current_revision: 'currentRevSha', + revisions: { + currentRevSha: partial({ + actions: { + rebase: { + enabled: true, + }, + }, + }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect( + gerritScm.isBranchBehindBase('myBranchName', 'baseBranch'), + ).resolves.toBeTrue(); + }); + + it('open change found for branch name, 
but rebase action is not available -> isBehind == false ', async () => { + const change = partial({ + current_revision: 'currentRevSha', + revisions: { + currentRevSha: partial({ + actions: { + rebase: {}, + }, + }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect( + gerritScm.isBranchBehindBase('myBranchName', 'baseBranch'), + ).resolves.toBeFalse(); + }); + }); + + describe('isBranchModified()', () => { + it('no open change for with branchname found -> not modified', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerritScm.isBranchModified('myBranchName'), + ).resolves.toBeFalse(); + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { branchName: 'myBranchName', state: 'open' }, + true, + ); + }); + + it('open change found for branchname, but not modified', async () => { + const change = partial({ + current_revision: 'currentRevSha', + revisions: { + currentRevSha: partial({ + uploader: partial({ username: 'user' }), + }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect( + gerritScm.isBranchModified('myBranchName'), + ).resolves.toBeFalse(); + }); + + it('open change found for branchname, but modified from other user', async () => { + const change = partial({ + current_revision: 'currentRevSha', + revisions: { + currentRevSha: partial({ + uploader: partial({ username: 'other_user' }), //!== gerritLogin + }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect( + gerritScm.isBranchModified('myBranchName'), + ).resolves.toBeTrue(); + }); + }); + + describe('isBranchConflicted()', () => { + it('no open change with branch name found -> return true', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + await expect( + gerritScm.isBranchConflicted('target', 'myBranchName'), + ).resolves.toBe(true); + expect(clientMock.findChanges).toHaveBeenCalledWith('test/repo', { + branchName: 'myBranchName', 
+ state: 'open', + targetBranch: 'target', + }); + }); + + it('open change found for branch name/baseBranch and its mergeable', async () => { + const change = partial({}); + clientMock.findChanges.mockResolvedValueOnce([change]); + clientMock.getMergeableInfo.mockResolvedValueOnce({ + submit_type: 'MERGE_IF_NECESSARY', + mergeable: true, + }); + await expect( + gerritScm.isBranchConflicted('target', 'myBranchName'), + ).resolves.toBeFalse(); + expect(clientMock.getMergeableInfo).toHaveBeenCalledWith(change); + }); + + it('open change found for branch name/baseBranch and its NOT mergeable', async () => { + const change = partial({}); + clientMock.findChanges.mockResolvedValueOnce([change]); + clientMock.getMergeableInfo.mockResolvedValueOnce({ + submit_type: 'MERGE_IF_NECESSARY', + mergeable: false, + }); + await expect( + gerritScm.isBranchConflicted('target', 'myBranchName'), + ).resolves.toBeTrue(); + expect(clientMock.getMergeableInfo).toHaveBeenCalledWith(change); + }); + }); + + describe('branchExists()', () => { + it('no change found for branch name -> return result from git.branchExists', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + git.branchExists.mockReturnValueOnce(true); + await expect(gerritScm.branchExists('myBranchName')).resolves.toBeTrue(); + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { + branchName: 'myBranchName', + state: 'open', + }, + true, + ); + expect(git.branchExists).toHaveBeenCalledWith('myBranchName'); + }); + + it('open change found for branch name -> return true', async () => { + const change = partial({}); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect(gerritScm.branchExists('myBranchName')).resolves.toBeTrue(); + expect(git.branchExists).not.toHaveBeenCalledWith('myBranchName'); + }); + }); + + describe('getBranchCommit()', () => { + it('no change found for branch name -> return result from git.getBranchCommit', async () => { + 
git.getBranchCommit.mockReturnValueOnce('shaHashValue' as LongCommitSha); + clientMock.findChanges.mockResolvedValueOnce([]); + await expect(gerritScm.getBranchCommit('myBranchName')).resolves.toBe( + 'shaHashValue', + ); + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { + branchName: 'myBranchName', + state: 'open', + }, + true, + ); + }); + + it('open change found for branchname -> return true', async () => { + const change = partial({ current_revision: 'curSha' }); + clientMock.findChanges.mockResolvedValueOnce([change]); + await expect(gerritScm.getBranchCommit('myBranchName')).resolves.toBe( + 'curSha', + ); + }); + }); + + it('deleteBranch()', async () => { + await expect(gerritScm.deleteBranch('branchName')).toResolve(); + }); + + describe('mergeToLocal', () => { + it('no change exists', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + git.mergeToLocal.mockResolvedValueOnce(); + + await expect(gerritScm.mergeToLocal('nonExistingChange')).toResolve(); + + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { + branchName: 'nonExistingChange', + state: 'open', + }, + true, + ); + expect(git.mergeToLocal).toHaveBeenCalledWith('nonExistingChange'); + }); + + it('change exists', async () => { + const change = partial({ + current_revision: 'curSha', + revisions: { + curSha: partial({ + ref: 'refs/changes/34/1234/1', + }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([change]); + git.mergeToLocal.mockResolvedValueOnce(); + + await expect(gerritScm.mergeToLocal('existingChange')).toResolve(); + + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { + branchName: 'existingChange', + state: 'open', + }, + true, + ); + expect(git.mergeToLocal).toHaveBeenCalledWith('refs/changes/34/1234/1'); + }); + }); + + describe('commitFiles()', () => { + it('commitFiles() - empty commit', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + 
git.prepareCommit.mockResolvedValueOnce(null); //empty commit + + await expect( + gerritScm.commitAndPush({ + branchName: 'renovate/dependency-1.x', + baseBranch: 'main', + message: 'commit msg', + files: [], + }), + ).resolves.toBeNull(); + expect(clientMock.findChanges).toHaveBeenCalledWith( + 'test/repo', + { + branchName: 'renovate/dependency-1.x', + state: 'open', + targetBranch: 'main', + }, + true, + ); + }); + + it('commitFiles() - create first Patch', async () => { + clientMock.findChanges.mockResolvedValueOnce([]); + git.prepareCommit.mockResolvedValueOnce({ + commitSha: 'commitSha' as LongCommitSha, + parentCommitSha: 'parentSha' as LongCommitSha, + files: [], + }); + git.pushCommit.mockResolvedValueOnce(true); + + expect( + await gerritScm.commitAndPush({ + branchName: 'renovate/dependency-1.x', + baseBranch: 'main', + message: 'commit msg', + files: [], + }), + ).toBe('commitSha'); + expect(git.prepareCommit).toHaveBeenCalledWith({ + baseBranch: 'main', + branchName: 'renovate/dependency-1.x', + files: [], + message: ['commit msg', expect.stringMatching(/Change-Id: I.{32}/)], + force: true, + }); + expect(git.pushCommit).toHaveBeenCalledWith({ + files: [], + sourceRef: 'renovate/dependency-1.x', + targetRef: 'refs/for/main%t=sourceBranch-renovate/dependency-1.x', + }); + }); + + it('commitFiles() - existing change-set without new changes', async () => { + const existingChange = partial({ + change_id: '...', + current_revision: 'commitSha', + revisions: { + commitSha: partial({ ref: 'refs/changes/1/2' }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([existingChange]); + git.prepareCommit.mockResolvedValueOnce({ + commitSha: 'commitSha' as LongCommitSha, + parentCommitSha: 'parentSha' as LongCommitSha, + files: [], + }); + git.pushCommit.mockResolvedValueOnce(true); + git.hasDiff.mockResolvedValueOnce(false); //no changes + + expect( + await gerritScm.commitAndPush({ + branchName: 'renovate/dependency-1.x', + baseBranch: 'main', + message: 
['commit msg'], + files: [], + }), + ).toBeNull(); + expect(git.prepareCommit).toHaveBeenCalledWith({ + baseBranch: 'main', + branchName: 'renovate/dependency-1.x', + files: [], + message: ['commit msg', 'Change-Id: ...'], + force: true, + }); + expect(git.fetchRevSpec).toHaveBeenCalledWith('refs/changes/1/2'); + expect(git.pushCommit).toHaveBeenCalledTimes(0); + }); + + it('commitFiles() - existing change-set with new changes - auto-approve again', async () => { + const existingChange = partial({ + _number: 123456, + change_id: '...', + current_revision: 'commitSha', + revisions: { + commitSha: partial({ ref: 'refs/changes/1/2' }), + }, + }); + clientMock.findChanges.mockResolvedValueOnce([existingChange]); + clientMock.wasApprovedBy.mockReturnValueOnce(true); + git.prepareCommit.mockResolvedValueOnce({ + commitSha: 'commitSha' as LongCommitSha, + parentCommitSha: 'parentSha' as LongCommitSha, + files: [], + }); + git.pushCommit.mockResolvedValueOnce(true); + git.hasDiff.mockResolvedValueOnce(true); + + expect( + await gerritScm.commitAndPush({ + branchName: 'renovate/dependency-1.x', + baseBranch: 'main', + message: 'commit msg', + files: [], + }), + ).toBe('commitSha'); + expect(git.prepareCommit).toHaveBeenCalledWith({ + baseBranch: 'main', + branchName: 'renovate/dependency-1.x', + files: [], + message: ['commit msg', 'Change-Id: ...'], + force: true, + }); + expect(git.fetchRevSpec).toHaveBeenCalledWith('refs/changes/1/2'); + expect(git.pushCommit).toHaveBeenCalledWith({ + files: [], + sourceRef: 'renovate/dependency-1.x', + targetRef: 'refs/for/main%t=sourceBranch-renovate/dependency-1.x', + }); + expect(clientMock.wasApprovedBy).toHaveBeenCalledWith( + existingChange, + 'user', + ); + expect(clientMock.approveChange).toHaveBeenCalledWith(123456); + }); + }); +}); diff --git a/lib/modules/platform/gerrit/scm.ts b/lib/modules/platform/gerrit/scm.ts new file mode 100644 index 00000000000000..40fc56c88aab2e --- /dev/null +++ b/lib/modules/platform/gerrit/scm.ts 
@@ -0,0 +1,171 @@ +import { randomUUID } from 'crypto'; +import { logger } from '../../../logger'; +import * as git from '../../../util/git'; +import type { CommitFilesConfig, LongCommitSha } from '../../../util/git/types'; +import { hash } from '../../../util/hash'; +import { DefaultGitScm } from '../default-scm'; +import { client } from './client'; +import type { GerritFindPRConfig } from './types'; + +let repository: string; +let username: string; +export function configureScm(repo: string, login: string): void { + repository = repo; + username = login; +} + +export class GerritScm extends DefaultGitScm { + override async branchExists(branchName: string): Promise { + const searchConfig: GerritFindPRConfig = { state: 'open', branchName }; + const change = await client + .findChanges(repository, searchConfig, true) + .then((res) => res.pop()); + if (change) { + return true; + } + return git.branchExists(branchName); + } + + override async getBranchCommit( + branchName: string, + ): Promise { + const searchConfig: GerritFindPRConfig = { state: 'open', branchName }; + const change = await client + .findChanges(repository, searchConfig, true) + .then((res) => res.pop()); + if (change) { + return change.current_revision! 
as LongCommitSha; + } + return git.getBranchCommit(branchName); + } + + override async isBranchBehindBase( + branchName: string, + baseBranch: string, + ): Promise { + const searchConfig: GerritFindPRConfig = { + state: 'open', + branchName, + targetBranch: baseBranch, + }; + const change = await client + .findChanges(repository, searchConfig, true) + .then((res) => res.pop()); + if (change) { + const currentGerritPatchset = change.revisions![change.current_revision!]; + return currentGerritPatchset.actions?.['rebase'].enabled === true; + } + return true; + } + + override async isBranchConflicted( + baseBranch: string, + branch: string, + ): Promise { + const searchConfig: GerritFindPRConfig = { + state: 'open', + branchName: branch, + targetBranch: baseBranch, + }; + const change = (await client.findChanges(repository, searchConfig)).pop(); + if (change) { + const mergeInfo = await client.getMergeableInfo(change); + return !mergeInfo.mergeable; + } else { + logger.warn( + `There is no open change with branch=${branch} and baseBranch=${baseBranch}`, + ); + return true; + } + } + + override async isBranchModified(branchName: string): Promise { + const searchConfig: GerritFindPRConfig = { state: 'open', branchName }; + const change = await client + .findChanges(repository, searchConfig, true) + .then((res) => res.pop()); + if (change) { + const currentGerritPatchset = change.revisions![change.current_revision!]; + return currentGerritPatchset.uploader.username !== username; + } + return false; + } + + override async commitAndPush( + commit: CommitFilesConfig, + ): Promise { + logger.debug(`commitAndPush(${commit.branchName})`); + const searchConfig: GerritFindPRConfig = { + state: 'open', + branchName: commit.branchName, + targetBranch: commit.baseBranch, + }; + const existingChange = await client + .findChanges(repository, searchConfig, true) + .then((res) => res.pop()); + + let hasChanges = true; + const origMsg = + typeof commit.message === 'string' ? 
[commit.message] : commit.message; + commit.message = [ + ...origMsg, + `Change-Id: ${existingChange?.change_id ?? generateChangeId()}`, + ]; + const commitResult = await git.prepareCommit({ ...commit, force: true }); + if (commitResult) { + const { commitSha } = commitResult; + if (existingChange?.revisions && existingChange.current_revision) { + const fetchRefSpec = + existingChange.revisions[existingChange.current_revision].ref; + await git.fetchRevSpec(fetchRefSpec); //fetch current ChangeSet for git diff + hasChanges = await git.hasDiff('HEAD', 'FETCH_HEAD'); //avoid empty patchsets + } + if (hasChanges || commit.force) { + const pushResult = await git.pushCommit({ + sourceRef: commit.branchName, + targetRef: `refs/for/${commit.baseBranch!}%t=sourceBranch-${ + commit.branchName + }`, + files: commit.files, + }); + if (pushResult) { + //existingChange was the old change before commit/push. we need to approve again, if it was previously approved from renovate + if ( + existingChange && + client.wasApprovedBy(existingChange, username) + ) { + await client.approveChange(existingChange._number); + } + return commitSha; + } + } + } + return null; //empty commit, no changes in this Gerrit-Change + } + + override deleteBranch(branchName: string): Promise { + return Promise.resolve(); + } + + override async mergeToLocal(branchName: string): Promise { + const searchConfig: GerritFindPRConfig = { state: 'open', branchName }; + const change = await client + .findChanges(repository, searchConfig, true) + .then((res) => res.pop()); + if (change) { + return super.mergeToLocal( + change.revisions![change.current_revision!].ref, + ); + } + return super.mergeToLocal(branchName); + } +} + +/** + * This function should generate a Gerrit Change-ID analogous to the commit hook. We avoid the commit hook cause of security concerns. + * random=$( (whoami ; hostname ; date; cat $1 ; echo $RANDOM) | git hash-object --stdin) prefixed with an 'I'. 
+ * TODO: Gerrit doesn't accept longer Change-IDs (sha256), but what happens with this https://git-scm.com/docs/hash-function-transition/ ? + */ +function generateChangeId(): string { + return 'I' + hash(randomUUID(), 'sha1'); +} diff --git a/lib/modules/platform/gerrit/types.ts b/lib/modules/platform/gerrit/types.ts new file mode 100644 index 00000000000000..7ec71999f4f1e2 --- /dev/null +++ b/lib/modules/platform/gerrit/types.ts @@ -0,0 +1,93 @@ +import type { FindPRConfig } from '../types'; + +export interface GerritFindPRConfig extends FindPRConfig { + label?: string; +} + +/** + * The Interfaces for the Gerrit API Responses ({@link https://gerrit-review.googlesource.com/Documentation/rest-api.html | REST-API}) + * minimized to only needed properties. + * + * @packageDocumentation + */ + +export interface GerritProjectInfo { + id: string; + name: string; + state?: 'ACTIVE' | 'READ_ONLY' | 'HIDDEN'; + labels?: Record; +} + +export interface GerritLabelTypeInfo { + values: Record; + default_value: number; +} + +export interface GerritBranchInfo { + ref: string; + revision: string; +} + +export type GerritChangeStatus = 'NEW' | 'MERGED' | 'ABANDONED'; + +export type GerritReviewersType = 'REVIEWER' | 'CC' | 'REMOVED'; + +export interface GerritChange { + branch: string; + hashtags?: string[]; + change_id: string; + subject: string; + status: GerritChangeStatus; + submittable?: boolean; + _number: number; + labels?: Record; + reviewers?: Record; + messages?: GerritChangeMessageInfo[]; + current_revision?: string; + /** + * All patch sets of this change as a map that maps the commit ID of the patch set to a RevisionInfo entity. + */ + revisions?: Record; + problems: unknown[]; +} + +export interface GerritRevisionInfo { + uploader: GerritAccountInfo; + /** + * The Git reference for the patch set. 
+ */ + ref: string; + actions?: Record; +} + +export interface GerritChangeMessageInfo { + id: string; + message: string; + tag?: string; +} + +export interface GerritLabelInfo { + approved?: GerritAccountInfo; + rejected?: GerritAccountInfo; +} + +export interface GerritActionInfo { + method?: string; + enabled?: boolean; +} + +export interface GerritAccountInfo { + _account_id: number; + username?: string; +} + +export interface GerritMergeableInfo { + submit_type: + | 'MERGE_IF_NECESSARY' + | 'FAST_FORWARD_ONLY' + | 'REBASE_IF_NECESSARY' + | 'REBASE_ALWAYS' + | 'MERGE_ALWAYS' + | 'CHERRY_PICK'; + mergeable: boolean; +} diff --git a/lib/modules/platform/gerrit/utils.spec.ts b/lib/modules/platform/gerrit/utils.spec.ts new file mode 100644 index 00000000000000..f5159804473bea --- /dev/null +++ b/lib/modules/platform/gerrit/utils.spec.ts @@ -0,0 +1,251 @@ +import { mocked, partial } from '../../../../test/util'; +import { CONFIG_GIT_URL_UNAVAILABLE } from '../../../constants/error-messages'; +import type { BranchStatus } from '../../../types'; +import * as _hostRules from '../../../util/host-rules'; +import { setBaseUrl } from '../../../util/http/gerrit'; +import { hashBody } from '../pr-body'; +import type { + GerritAccountInfo, + GerritChange, + GerritChangeMessageInfo, + GerritChangeStatus, + GerritLabelTypeInfo, +} from './types'; +import * as utils from './utils'; +import { mapBranchStatusToLabel } from './utils'; + +jest.mock('../../../util/host-rules'); + +const baseUrl = 'https://gerrit.example.com'; +const hostRules = mocked(_hostRules); + +describe('modules/platform/gerrit/utils', () => { + beforeEach(() => { + setBaseUrl(baseUrl); + }); + + describe('getGerritRepoUrl()', () => { + it('create a git url with username/password', () => { + hostRules.find.mockReturnValue({ + username: 'abc', + password: '123', + }); + const repoUrl = utils.getGerritRepoUrl('web/apps', baseUrl); + expect(repoUrl).toBe('https://abc:123@gerrit.example.com/a/web%2Fapps'); + }); + 
+ it('create a git url without username/password', () => { + hostRules.find.mockReturnValue({}); + expect(() => utils.getGerritRepoUrl('web/apps', baseUrl)).toThrow( + 'Init: You must configure a Gerrit Server username/password', + ); + }); + + it('throws on invalid endpoint', () => { + expect(() => utils.getGerritRepoUrl('web/apps', '...')).toThrow( + Error(CONFIG_GIT_URL_UNAVAILABLE), + ); + }); + }); + + describe('mapPrStateToGerritFilter()', () => { + it.each([ + ['closed', 'status:closed'], + ['merged', 'status:merged'], + ['!open', '-status:open'], + ['open', 'status:open'], + ['all', '-is:wip'], + [undefined, '-is:wip'], + ])( + 'maps pr state %p to gerrit filter %p', + (prState: any, filter: string) => { + expect(utils.mapPrStateToGerritFilter(prState)).toEqual(filter); + }, + ); + }); + + describe('mapGerritChangeStateToPrState()', () => { + it.each([ + ['NEW' as GerritChangeStatus, 'open'], + ['MERGED' as GerritChangeStatus, 'merged'], + ['ABANDONED' as GerritChangeStatus, 'closed'], + ['unknown' as GerritChangeStatus, 'all'], + ])( + 'maps gerrit change state %p to PrState %p', + (state: GerritChangeStatus, prState: any) => { + expect(utils.mapGerritChangeStateToPrState(state)).toEqual(prState); + }, + ); + }); + + describe('mapGerritChangeToPr()', () => { + it('map a gerrit change to to Pr', () => { + const change = partial({ + _number: 123456, + status: 'NEW', + hashtags: ['other', 'sourceBranch-renovate/dependency-1.x'], + branch: 'main', + subject: 'Fix for', + reviewers: { + REVIEWER: [partial({ username: 'username' })], + REMOVED: [], + CC: [], + }, + messages: [ + partial({ + id: '9d78ac236714cee8c2d86e95d638358925cf6853', + tag: 'pull-request', + message: 'Patch Set 1:\n\nOld PR-Body', + }), + partial({ + id: '1d17c930381e88e177bbc59595c3ec941bd21028', + tag: 'pull-request', + message: 'Patch Set 12:\n\nLast PR-Body', + }), + partial({ + id: '9d78ac236714cee8c2d86e95d638358925cf6853', + message: 'other message...', + }), + ], + }); + + 
expect(utils.mapGerritChangeToPr(change)).toEqual({ + number: 123456, + state: 'open', + title: 'Fix for', + sourceBranch: 'renovate/dependency-1.x', + targetBranch: 'main', + reviewers: ['username'], + bodyStruct: { + hash: hashBody('Last PR-Body'), + }, + }); + }); + + it('map a gerrit change without sourceBranch-tag and reviewers to Pr', () => { + const change = partial({ + _number: 123456, + status: 'NEW', + hashtags: ['other'], + branch: 'main', + subject: 'Fix for', + }); + expect(utils.mapGerritChangeToPr(change)).toEqual({ + number: 123456, + state: 'open', + title: 'Fix for', + sourceBranch: 'main', + targetBranch: 'main', + reviewers: [], + bodyStruct: { + hash: hashBody(''), + }, + }); + }); + }); + + describe('extractSourceBranch()', () => { + it('without hashtags', () => { + const change = partial({ + hashtags: undefined, + }); + expect(utils.extractSourceBranch(change)).toBeUndefined(); + }); + + it('no hashtag with "sourceBranch-" prefix', () => { + const change = partial({ + hashtags: ['other', 'another'], + }); + expect(utils.extractSourceBranch(change)).toBeUndefined(); + }); + + it('hashtag with "sourceBranch-" prefix', () => { + const change = partial({ + hashtags: ['other', 'sourceBranch-renovate/dependency-1.x', 'another'], + }); + expect(utils.extractSourceBranch(change)).toBe('renovate/dependency-1.x'); + }); + }); + + describe('findPullRequestBody()', () => { + it('find pull-request-body', () => { + const change = partial({ + messages: [ + partial({ + id: '9d78ac236714cee8c2d86e95d638358925cf6853', + tag: 'pull-request', + message: 'Patch Set 1:\n\nOld PR-Body', + }), + partial({ + id: '1d17c930381e88e177bbc59595c3ec941bd21028', + tag: 'pull-request', + message: 'Patch Set 12:\n\nLast PR-Body', + }), + partial({ + id: '9d78ac236714cee8c2d86e95d638358925cf6853', + message: 'other message...', + }), + ], + }); + expect(utils.findPullRequestBody(change)).toBe('Last PR-Body'); + }); + + it('no pull-request-body message found', () => { + const 
change = partial({}); + expect(utils.findPullRequestBody(change)).toBeUndefined(); + change.messages = []; + expect(utils.findPullRequestBody(change)).toBeUndefined(); + change.messages = [ + partial({ + tag: 'other-tag', + message: 'message', + }), + ]; + expect(utils.findPullRequestBody(change)).toBeUndefined(); + }); + }); + + describe('mapBranchStatusToLabel()', () => { + const labelWithOne: GerritLabelTypeInfo = { + values: { '-1': 'rejected', '0': 'default', '1': 'accepted' }, + default_value: 0, + }; + + it.each([ + ['red' as BranchStatus, -1], + ['yellow' as BranchStatus, -1], + ['green' as BranchStatus, 1], + ])( + 'Label with +1/-1 map branchState=%p to %p', + (branchState, expectedValue) => { + expect(mapBranchStatusToLabel(branchState, labelWithOne)).toEqual( + expectedValue, + ); + }, + ); + + const labelWithTwo: GerritLabelTypeInfo = { + values: { + '-2': 'rejected', + '-1': 'disliked', + '0': 'default', + '1': 'looksOkay', + '2': 'approved', + }, + default_value: 0, + }; + + it.each([ + ['red' as BranchStatus, -2], + ['yellow' as BranchStatus, -2], + ['green' as BranchStatus, 2], + ])( + 'Label with +2/-2, map branchState=%p to %p', + (branchState, expectedValue) => { + expect(mapBranchStatusToLabel(branchState, labelWithTwo)).toEqual( + expectedValue, + ); + }, + ); + }); +}); diff --git a/lib/modules/platform/gerrit/utils.ts b/lib/modules/platform/gerrit/utils.ts new file mode 100644 index 00000000000000..d42ec4a463b2e1 --- /dev/null +++ b/lib/modules/platform/gerrit/utils.ts @@ -0,0 +1,122 @@ +import { CONFIG_GIT_URL_UNAVAILABLE } from '../../../constants/error-messages'; +import { logger } from '../../../logger'; +import type { BranchStatus, PrState } from '../../../types'; +import * as hostRules from '../../../util/host-rules'; +import { joinUrlParts, parseUrl } from '../../../util/url'; +import { hashBody } from '../pr-body'; +import type { Pr } from '../types'; +import type { + GerritChange, + GerritChangeStatus, + GerritLabelTypeInfo, +} from 
'./types'; + +export const TAG_PULL_REQUEST_BODY = 'pull-request'; + +export function getGerritRepoUrl(repository: string, endpoint: string): string { + // Find options for current host and determine Git endpoint + const opts = hostRules.find({ + hostType: 'gerrit', + url: endpoint, + }); + + const url = parseUrl(endpoint); + if (!url) { + throw new Error(CONFIG_GIT_URL_UNAVAILABLE); + } + if (!(opts.username && opts.password)) { + throw new Error( + 'Init: You must configure a Gerrit Server username/password', + ); + } + url.username = opts.username; + url.password = opts.password; + url.pathname = joinUrlParts( + url.pathname, + 'a', + encodeURIComponent(repository), + ); + logger.trace( + { url: url.toString() }, + 'using URL based on configured endpoint', + ); + return url.toString(); +} + +export function mapPrStateToGerritFilter(state?: PrState): string { + switch (state) { + case 'closed': + return 'status:closed'; + case 'merged': + return 'status:merged'; + case '!open': + return '-status:open'; + case 'open': + return 'status:open'; + case 'all': + default: + return '-is:wip'; + } +} + +export function mapGerritChangeToPr(change: GerritChange): Pr { + return { + number: change._number, + state: mapGerritChangeStateToPrState(change.status), + sourceBranch: extractSourceBranch(change) ?? change.branch, + targetBranch: change.branch, + title: change.subject, + reviewers: + change.reviewers?.REVIEWER?.filter( + (reviewer) => typeof reviewer.username === 'string', + ).map((reviewer) => reviewer.username!) ?? 
[], + bodyStruct: { + hash: hashBody(findPullRequestBody(change)), + }, + }; +} + +export function mapGerritChangeStateToPrState( + state: GerritChangeStatus, +): PrState { + switch (state) { + case 'NEW': + return 'open'; + case 'MERGED': + return 'merged'; + case 'ABANDONED': + return 'closed'; + } + return 'all'; +} +export function extractSourceBranch(change: GerritChange): string | undefined { + return change.hashtags + ?.find((tag) => tag.startsWith('sourceBranch-')) + ?.replace('sourceBranch-', ''); +} + +export function findPullRequestBody(change: GerritChange): string | undefined { + const msg = Array.from(change.messages ?? []) + .reverse() + .find((msg) => msg.tag === TAG_PULL_REQUEST_BODY); + if (msg) { + return msg.message.replace(/^Patch Set \d+:\n\n/, ''); //TODO: check how to get rid of the auto-added prefix? + } + return undefined; +} + +export function mapBranchStatusToLabel( + state: BranchStatus, + label: GerritLabelTypeInfo, +): number { + const numbers = Object.keys(label.values).map((x) => parseInt(x, 10)); + switch (state) { + case 'green': + return Math.max(...numbers); + case 'yellow': + case 'red': + return Math.min(...numbers); + } + // istanbul ignore next + return label.default_value; +} diff --git a/lib/modules/platform/gitea/__snapshots__/index.spec.ts.snap b/lib/modules/platform/gitea/__snapshots__/index.spec.ts.snap deleted file mode 100644 index c08c86e20b0000..00000000000000 --- a/lib/modules/platform/gitea/__snapshots__/index.spec.ts.snap +++ /dev/null @@ -1,235 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`modules/platform/gitea/index createPr should use base branch by default 1`] = ` -{ - "bodyStruct": { - "hash": "9d586a6aedc4e7cb205276933c9e474cd3c2b341d3340458c31eb750795f197d", - }, - "cannotMergeReason": undefined, - "createdAt": "2014-04-01T05:14:20Z", - "hasAssignees": false, - "isDraft": false, - "number": 42, - "sha": "0d9c7726c3d628b7e28af234595cfd20febdbf8e", - "sourceBranch": "pr-branch", - 
"sourceRepo": "some/repo", - "state": "open", - "targetBranch": "devel", - "title": "pr-title", -} -`; - -exports[`modules/platform/gitea/index createPr should use default branch if requested 1`] = ` -{ - "bodyStruct": { - "hash": "9d586a6aedc4e7cb205276933c9e474cd3c2b341d3340458c31eb750795f197d", - }, - "cannotMergeReason": undefined, - "createdAt": "2014-04-01T05:14:20Z", - "hasAssignees": false, - "isDraft": false, - "number": 42, - "sha": "0d9c7726c3d628b7e28af234595cfd20febdbf8e", - "sourceBranch": "pr-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "master", - "title": "pr-title", -} -`; - -exports[`modules/platform/gitea/index getPr should fallback to direct fetching if cache fails 1`] = ` -{ - "bodyStruct": { - "hash": "f41557d6153a316ee747e13de8952c4068de931585c1a18d095d6703254de6af", - }, - "cannotMergeReason": "pr.mergeable="false"", - "createdAt": "2015-03-22T20:36:16Z", - "hasAssignees": false, - "isDraft": false, - "number": 1, - "sha": "some-head-sha", - "sourceBranch": "some-head-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "some-base-branch", - "title": "Some PR", -} -`; - -exports[`modules/platform/gitea/index getPr should return enriched pull request which exists if open 1`] = ` -{ - "bodyStruct": { - "hash": "f41557d6153a316ee747e13de8952c4068de931585c1a18d095d6703254de6af", - }, - "cannotMergeReason": undefined, - "createdAt": "2015-03-22T20:36:16Z", - "hasAssignees": false, - "isDraft": false, - "number": 1, - "sha": "some-head-sha", - "sourceBranch": "some-head-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "some-base-branch", - "title": "Some PR", -} -`; - -exports[`modules/platform/gitea/index getPrList should filter list by creator 1`] = ` -{ - "endpoint": "https://gitea.com/", - "gitAuthor": "Renovate Bot ", -} -`; - -exports[`modules/platform/gitea/index getPrList should filter list by creator 2`] = ` -[ - { - "bodyStruct": { - "hash": 
"f41557d6153a316ee747e13de8952c4068de931585c1a18d095d6703254de6af", - }, - "cannotMergeReason": undefined, - "createdAt": "2015-03-22T20:36:16Z", - "hasAssignees": false, - "isDraft": false, - "number": 1, - "sha": "some-head-sha", - "sourceBranch": "some-head-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "some-base-branch", - "title": "Some PR", - }, - { - "bodyStruct": { - "hash": "916e5965a20785df1883ff5dc219508a1070ae1f37ccb64e954526f3ca1d22f4", - }, - "cannotMergeReason": undefined, - "createdAt": "2011-08-18T22:30:38Z", - "hasAssignees": false, - "isDraft": false, - "number": 2, - "sha": "other-head-sha", - "sourceBranch": "other-head-branch", - "sourceRepo": "some/repo", - "state": "closed", - "targetBranch": "other-base-branch", - "title": "Other PR", - }, - { - "bodyStruct": { - "hash": "916e5965a20785df1883ff5dc219508a1070ae1f37ccb64e954526f3ca1d22f4", - }, - "cannotMergeReason": undefined, - "createdAt": "2011-08-18T22:30:39Z", - "hasAssignees": false, - "isDraft": true, - "number": 3, - "sha": "draft-head-sha", - "sourceBranch": "draft-head-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "draft-base-branch", - "title": "Draft PR", - }, -] -`; - -exports[`modules/platform/gitea/index getPrList should return list of pull requests 1`] = ` -[ - { - "bodyStruct": { - "hash": "f41557d6153a316ee747e13de8952c4068de931585c1a18d095d6703254de6af", - }, - "cannotMergeReason": undefined, - "createdAt": "2015-03-22T20:36:16Z", - "hasAssignees": false, - "isDraft": false, - "number": 1, - "sha": "some-head-sha", - "sourceBranch": "some-head-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "some-base-branch", - "title": "Some PR", - }, - { - "bodyStruct": { - "hash": "916e5965a20785df1883ff5dc219508a1070ae1f37ccb64e954526f3ca1d22f4", - }, - "cannotMergeReason": undefined, - "createdAt": "2011-08-18T22:30:38Z", - "hasAssignees": false, - "isDraft": false, - "number": 2, - "sha": 
"other-head-sha", - "sourceBranch": "other-head-branch", - "sourceRepo": "some/repo", - "state": "closed", - "targetBranch": "other-base-branch", - "title": "Other PR", - }, - { - "bodyStruct": { - "hash": "916e5965a20785df1883ff5dc219508a1070ae1f37ccb64e954526f3ca1d22f4", - }, - "cannotMergeReason": undefined, - "createdAt": "2011-08-18T22:30:39Z", - "hasAssignees": false, - "isDraft": true, - "number": 3, - "sha": "draft-head-sha", - "sourceBranch": "draft-head-branch", - "sourceRepo": "some/repo", - "state": "open", - "targetBranch": "draft-base-branch", - "title": "Draft PR", - }, -] -`; - -exports[`modules/platform/gitea/index initPlatform() should support custom endpoint 1`] = ` -{ - "endpoint": "https://gitea.renovatebot.com/", - "gitAuthor": "Renovate Bot ", -} -`; - -exports[`modules/platform/gitea/index initPlatform() should support default endpoint 1`] = ` -{ - "endpoint": "https://gitea.com/", - "gitAuthor": "Renovate Bot ", -} -`; - -exports[`modules/platform/gitea/index initPlatform() should use username as author name if full name is missing 1`] = ` -{ - "endpoint": "https://gitea.com/", - "gitAuthor": "renovate ", -} -`; - -exports[`modules/platform/gitea/index initRepo should fall back to merge method "merge" 1`] = ` -{ - "defaultBranch": "master", - "isFork": false, - "repoFingerprint": "c48ad9428365701f1a7f4798a410db2401b13267c205e345beb5b469a4a1480b163e1ce663ce483cfe579b2748a807cbeeba2035dc55eca5fe46d60d182510ec", -} -`; - -exports[`modules/platform/gitea/index initRepo should fall back to merge method "rebase-merge" 1`] = ` -{ - "defaultBranch": "master", - "isFork": false, - "repoFingerprint": "c48ad9428365701f1a7f4798a410db2401b13267c205e345beb5b469a4a1480b163e1ce663ce483cfe579b2748a807cbeeba2035dc55eca5fe46d60d182510ec", -} -`; - -exports[`modules/platform/gitea/index initRepo should fall back to merge method "squash" 1`] = ` -{ - "defaultBranch": "master", - "isFork": false, - "repoFingerprint": 
"c48ad9428365701f1a7f4798a410db2401b13267c205e345beb5b469a4a1480b163e1ce663ce483cfe579b2748a807cbeeba2035dc55eca5fe46d60d182510ec", -} -`; diff --git a/lib/modules/platform/gitea/index.spec.ts b/lib/modules/platform/gitea/index.spec.ts index 1c1fc2b77e61f4..a7e8280aba1009 100644 --- a/lib/modules/platform/gitea/index.spec.ts +++ b/lib/modules/platform/gitea/index.spec.ts @@ -1,10 +1,5 @@ -import type { - BranchStatusConfig, - EnsureIssueConfig, - Platform, - RepoParams, - RepoResult, -} from '..'; +import type { EnsureIssueConfig, Platform, RepoParams } from '..'; +import * as httpMock from '../../../../test/http-mock'; import { mocked, partial } from '../../../../test/util'; import { CONFIG_GIT_URL_UNAVAILABLE, @@ -16,27 +11,20 @@ import { REPOSITORY_MIRRORED, } from '../../../constants/error-messages'; import type { logger as _logger } from '../../../logger'; -import type { BranchStatus, PrState } from '../../../types'; import type * as _git from '../../../util/git'; import type { LongCommitSha } from '../../../util/git/types'; import { setBaseUrl } from '../../../util/http/gitea'; -import type { PlatformResult } from '../types'; import type { - Branch, - CombinedCommitStatus, Comment, CommitStatus, CommitStatusType, - CommitUser, Issue, Label, PR, Repo, - RepoContents, User, } from './types'; -jest.mock('./gitea-helper'); jest.mock('../../../util/git'); /** @@ -46,9 +34,8 @@ const GITEA_VERSION = '1.14.0+dev-754-g5d2b7ba63'; describe('modules/platform/gitea/index', () => { let gitea: Platform; - let helper: jest.Mocked; let logger: jest.Mocked; - let gitvcs: jest.Mocked; + let git: jest.Mocked; let hostRules: typeof import('../../../util/host-rules'); const mockCommitHash = @@ -123,7 +110,7 @@ describe('modules/platform/gitea/index', () => { diff_url: 'https://gitea.renovatebot.com/some/repo/pulls/3.diff', created_at: '2011-08-18T22:30:39Z', closed_at: '2016-01-09T10:03:22Z', - mergeable: true, + mergeable: false, base: { ref: 'draft-base-branch' }, head: { 
label: 'draft-head-branch', @@ -206,11 +193,10 @@ describe('modules/platform/gitea/index', () => { jest.resetModules(); gitea = await import('.'); - helper = jest.requireMock('./gitea-helper'); logger = mocked(await import('../../../logger')).logger; - gitvcs = jest.requireMock('../../../util/git'); - gitvcs.isBranchBehindBase.mockResolvedValue(false); - gitvcs.getBranchCommit.mockReturnValue(mockCommitHash); + git = jest.requireMock('../../../util/git'); + git.isBranchBehindBase.mockResolvedValue(false); + git.getBranchCommit.mockReturnValue(mockCommitHash); hostRules = await import('../../../util/host-rules'); hostRules.clear(); @@ -220,22 +206,27 @@ describe('modules/platform/gitea/index', () => { delete process.env.RENOVATE_X_AUTODISCOVER_REPO_ORDER; }); - function initFakePlatform(version = GITEA_VERSION): Promise { - helper.getCurrentUser.mockResolvedValueOnce(mockUser); - helper.getVersion.mockResolvedValueOnce(version); - return gitea.initPlatform({ token: 'abc' }); + async function initFakePlatform( + scope: httpMock.Scope, + version = GITEA_VERSION, + ): Promise { + scope + .get('/user') + .reply(200, mockUser) + .get('/version') + .reply(200, { version }); + await gitea.initPlatform({ token: 'abc' }); } - function initFakeRepo( + async function initFakeRepo( + scope: httpMock.Scope, repo?: Partial, config?: Partial, - ): Promise { - helper.getRepo.mockResolvedValueOnce({ ...mockRepo, ...repo }); - - return gitea.initRepo({ - repository: mockRepo.full_name, - ...config, - }); + ): Promise { + const repoResult = { ...mockRepo, ...repo }; + const repository = repoResult.full_name; + scope.get(`/repos/${repository}`).reply(200, repoResult); + await gitea.initRepo({ repository, ...config }); } describe('initPlatform()', () => { @@ -244,7 +235,8 @@ describe('modules/platform/gitea/index', () => { }); it('should throw if auth fails', async () => { - helper.getCurrentUser.mockRejectedValueOnce(new Error()); + const scope = 
httpMock.scope('https://gitea.com/api/v1'); + scope.get('/user').reply(500); await expect( gitea.initPlatform({ token: 'some-token' }), @@ -252,81 +244,128 @@ describe('modules/platform/gitea/index', () => { }); it('should support default endpoint', async () => { - helper.getCurrentUser.mockResolvedValueOnce(mockUser); + const scope = httpMock.scope('https://gitea.com/api/v1'); + scope + .get('/user') + .reply(200, mockUser) + .get('/version') + .reply(200, { version: GITEA_VERSION }); - expect( - await gitea.initPlatform({ token: 'some-token' }), - ).toMatchSnapshot(); + expect(await gitea.initPlatform({ token: 'some-token' })).toEqual({ + endpoint: 'https://gitea.com/', + gitAuthor: 'Renovate Bot ', + }); }); it('should support custom endpoint', async () => { - helper.getCurrentUser.mockResolvedValueOnce(mockUser); + const scope = httpMock.scope('https://gitea.renovatebot.com/api/v1'); + scope + .get('/user') + .reply(200, mockUser) + .get('/version') + .reply(200, { version: GITEA_VERSION }); expect( await gitea.initPlatform({ token: 'some-token', endpoint: 'https://gitea.renovatebot.com', }), - ).toMatchSnapshot(); + ).toEqual({ + endpoint: 'https://gitea.renovatebot.com/', + gitAuthor: 'Renovate Bot ', + }); }); it('should support custom endpoint including api path', async () => { - helper.getCurrentUser.mockResolvedValueOnce(mockUser); + const scope = httpMock.scope('https://gitea.renovatebot.com/api/v1'); + scope + .get('/user') + .reply(200, mockUser) + .get('/version') + .reply(200, { version: GITEA_VERSION }); expect( await gitea.initPlatform({ token: 'some-token', - endpoint: 'https://gitea.renovatebot.com/api/v1', + endpoint: 'https://gitea.renovatebot.com', }), - ).toMatchObject({ + ).toEqual({ endpoint: 'https://gitea.renovatebot.com/', + gitAuthor: 'Renovate Bot ', }); }); it('should use username as author name if full name is missing', async () => { - helper.getCurrentUser.mockResolvedValueOnce({ - ...mockUser, - full_name: undefined, - }); + const 
scope = httpMock.scope('https://gitea.com/api/v1'); + scope + .get('/user') + .reply(200, { + ...mockUser, + full_name: undefined, + }) + .get('/version') + .reply(200, { version: GITEA_VERSION }); - expect( - await gitea.initPlatform({ token: 'some-token' }), - ).toMatchSnapshot(); + expect(await gitea.initPlatform({ token: 'some-token' })).toEqual({ + endpoint: 'https://gitea.com/', + gitAuthor: 'renovate ', + }); }); }); describe('getRepos', () => { it('should propagate any other errors', async () => { - helper.searchRepos.mockRejectedValueOnce(new Error('searchRepos()')); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/search') + .query({ + uid: 1, + archived: false, + }) + .replyWithError(new Error('searchRepos()')); + await initFakePlatform(scope); await expect(gitea.getRepos()).rejects.toThrow('searchRepos()'); }); it('should return an array of repos', async () => { - helper.searchRepos.mockResolvedValueOnce(mockRepos); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/search') + .query({ + uid: 1, + archived: false, + }) + .reply(200, { + ok: true, + data: mockRepos, + }); + await initFakePlatform(scope); const repos = await gitea.getRepos(); expect(repos).toEqual(['a/b', 'c/d']); - expect(helper.searchRepos).toHaveBeenCalledWith({ - uid: undefined, - archived: false, - }); }); it('Sorts repos', async () => { process.env.RENOVATE_X_AUTODISCOVER_REPO_SORT = 'updated'; process.env.RENOVATE_X_AUTODISCOVER_REPO_ORDER = 'desc'; - helper.searchRepos.mockResolvedValueOnce(mockRepos); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/search') + .query({ + uid: 1, + archived: false, + sort: 'updated', + order: 'desc', + }) + .reply(200, { + ok: true, + data: mockRepos, + }); + await initFakePlatform(scope); const repos = await gitea.getRepos(); expect(repos).toEqual(['a/b', 'c/d']); - - expect(helper.searchRepos).toHaveBeenCalledWith({ - uid: undefined, - archived: false, - sort: 
'updated', - order: 'desc', - }); }); }); @@ -336,119 +375,208 @@ describe('modules/platform/gitea/index', () => { }; it('should propagate API errors', async () => { - helper.getRepo.mockRejectedValueOnce(new Error('getRepo()')); - + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .replyWithError(new Error('getRepo()')); + await initFakePlatform(scope); await expect(gitea.initRepo(initRepoCfg)).rejects.toThrow('getRepo()'); }); it('should abort when repo is archived', async () => { - await expect(initFakeRepo({ archived: true })).rejects.toThrow( + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + archived: true, + }); + await initFakePlatform(scope); + await expect(gitea.initRepo(initRepoCfg)).rejects.toThrow( REPOSITORY_ARCHIVED, ); }); it('should abort when repo is mirrored', async () => { - await expect(initFakeRepo({ mirror: true })).rejects.toThrow( + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + mirror: true, + }); + await initFakePlatform(scope); + await expect(gitea.initRepo(initRepoCfg)).rejects.toThrow( REPOSITORY_MIRRORED, ); }); it('should abort when repo is empty', async () => { - await expect(initFakeRepo({ empty: true })).rejects.toThrow( + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + empty: true, + }); + await initFakePlatform(scope); + await expect(gitea.initRepo(initRepoCfg)).rejects.toThrow( REPOSITORY_EMPTY, ); }); it('should abort when repo has insufficient permissions', async () => { - await expect( - initFakeRepo({ + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, permissions: { pull: false, push: false, admin: false, }, - }), - 
).rejects.toThrow(REPOSITORY_ACCESS_FORBIDDEN); + }); + await initFakePlatform(scope); + await expect(gitea.initRepo(initRepoCfg)).rejects.toThrow( + REPOSITORY_ACCESS_FORBIDDEN, + ); }); it('should abort when repo has no available merge methods', async () => { - await expect(initFakeRepo({ allow_rebase: false })).rejects.toThrow( + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + allow_rebase: false, + }); + await initFakePlatform(scope); + await expect(gitea.initRepo(initRepoCfg)).rejects.toThrow( REPOSITORY_BLOCKED, ); }); it('should fall back to merge method "rebase-merge"', async () => { - expect( - await initFakeRepo({ + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, allow_rebase: false, allow_rebase_explicit: true, + }); + await initFakePlatform(scope); + + await gitea.initRepo(initRepoCfg); + + expect(git.initRepo).toHaveBeenCalledExactlyOnceWith( + expect.objectContaining({ + mergeMethod: 'rebase-merge', }), - ).toMatchSnapshot(); + ); }); it('should fall back to merge method "squash"', async () => { - expect( - await initFakeRepo({ allow_rebase: false, allow_squash_merge: true }), - ).toMatchSnapshot(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + allow_rebase: false, + allow_squash_merge: true, + }); + await initFakePlatform(scope); + + await gitea.initRepo(initRepoCfg); + + expect(git.initRepo).toHaveBeenCalledExactlyOnceWith( + expect.objectContaining({ + mergeMethod: 'squash', + }), + ); }); it('should fall back to merge method "merge"', async () => { - expect( - await initFakeRepo({ + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, allow_rebase: false, allow_merge_commits: true, + }); + await 
initFakePlatform(scope); + + await gitea.initRepo(initRepoCfg); + + expect(git.initRepo).toHaveBeenCalledExactlyOnceWith( + expect.objectContaining({ + mergeMethod: 'merge', }), - ).toMatchSnapshot(); + ); }); it('should use clone_url of repo if gitUrl is not specified', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, mockRepo); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce(mockRepo); const repoCfg: RepoParams = { repository: mockRepo.full_name, }; await gitea.initRepo(repoCfg); - expect(gitvcs.initRepo).toHaveBeenCalledWith( + expect(git.initRepo).toHaveBeenCalledWith( expect.objectContaining({ url: mockRepo.clone_url }), ); }); it('should use clone_url of repo if gitUrl has value default', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, mockRepo); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce(mockRepo); const repoCfg: RepoParams = { repository: mockRepo.full_name, gitUrl: 'default', }; await gitea.initRepo(repoCfg); - expect(gitvcs.initRepo).toHaveBeenCalledWith( + expect(git.initRepo).toHaveBeenCalledWith( expect.objectContaining({ url: mockRepo.clone_url }), ); }); it('should use ssh_url of repo if gitUrl has value ssh', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, mockRepo); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce(mockRepo); const repoCfg: RepoParams = { repository: mockRepo.full_name, gitUrl: 'ssh', }; await gitea.initRepo(repoCfg); - expect(gitvcs.initRepo).toHaveBeenCalledWith( + expect(git.initRepo).toHaveBeenCalledWith( expect.objectContaining({ url: mockRepo.ssh_url }), ); }); it('should abort when gitUrl has value ssh but ssh_url is empty', async 
() => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { ...mockRepo, ssh_url: undefined }); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce({ ...mockRepo, ssh_url: undefined }); const repoCfg: RepoParams = { repository: mockRepo.full_name, gitUrl: 'ssh', @@ -460,16 +588,19 @@ describe('modules/platform/gitea/index', () => { }); it('should use generated url of repo if gitUrl has value endpoint', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, mockRepo); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce(mockRepo); const repoCfg: RepoParams = { repository: mockRepo.full_name, gitUrl: 'endpoint', }; await gitea.initRepo(repoCfg); - expect(gitvcs.initRepo).toHaveBeenCalledWith( + expect(git.initRepo).toHaveBeenCalledWith( expect.objectContaining({ url: `https://gitea.com/${mockRepo.full_name}.git`, }), @@ -477,12 +608,15 @@ describe('modules/platform/gitea/index', () => { }); it('should abort when clone_url is empty', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + clone_url: undefined, + }); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce({ - ...mockRepo, - clone_url: undefined, - }); const repoCfg: RepoParams = { repository: mockRepo.full_name, }; @@ -493,7 +627,11 @@ describe('modules/platform/gitea/index', () => { }); it('should use given access token if gitUrl has value endpoint', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, mockRepo); + await initFakePlatform(scope); const token = 'abc'; hostRules.add({ @@ -502,7 +640,6 @@ 
describe('modules/platform/gitea/index', () => { token, }); - helper.getRepo.mockResolvedValueOnce(mockRepo); const repoCfg: RepoParams = { repository: mockRepo.full_name, gitUrl: 'endpoint', @@ -511,7 +648,7 @@ describe('modules/platform/gitea/index', () => { const url = new URL(`${mockRepo.clone_url}`); url.username = token; - expect(gitvcs.initRepo).toHaveBeenCalledWith( + expect(git.initRepo).toHaveBeenCalledWith( expect.objectContaining({ url: `https://${token}@gitea.com/${mockRepo.full_name}.git`, }), @@ -519,7 +656,11 @@ describe('modules/platform/gitea/index', () => { }); it('should use given access token if gitUrl is not specified', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, mockRepo); + await initFakePlatform(scope); const token = 'abc'; hostRules.add({ @@ -528,7 +669,6 @@ describe('modules/platform/gitea/index', () => { token, }); - helper.getRepo.mockResolvedValueOnce(mockRepo); const repoCfg: RepoParams = { repository: mockRepo.full_name, }; @@ -536,18 +676,21 @@ describe('modules/platform/gitea/index', () => { const url = new URL(`${mockRepo.clone_url}`); url.username = token; - expect(gitvcs.initRepo).toHaveBeenCalledWith( + expect(git.initRepo).toHaveBeenCalledWith( expect.objectContaining({ url: url.toString() }), ); }); it('should abort when clone_url is not valid', async () => { - expect.assertions(1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get(`/repos/${initRepoCfg.repository}`) + .reply(200, { + ...mockRepo, + clone_url: 'abc', + }); + await initFakePlatform(scope); - helper.getRepo.mockResolvedValueOnce({ - ...mockRepo, - clone_url: 'abc', - }); const repoCfg: RepoParams = { repository: mockRepo.full_name, }; @@ -559,101 +702,190 @@ describe('modules/platform/gitea/index', () => { }); describe('setBranchStatus', () => { - const setBranchStatus = async (bsc?: Partial) => { - await initFakeRepo(); - await 
gitea.setBranchStatus({ - branchName: 'some-branch', - state: 'green', - context: 'some-context', - description: 'some-description', - ...bsc, - }); - }; - it('should create a new commit status', async () => { - await setBranchStatus(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post( + '/repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e', + { + state: 'success', + context: 'some-context', + description: 'some-description', + }, + ) + .reply(200) + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, []); - expect(helper.createCommitStatus).toHaveBeenCalledTimes(1); - expect(helper.createCommitStatus).toHaveBeenCalledWith( - mockRepo.full_name, - mockCommitHash, - { - state: 'success', + await initFakePlatform(scope); + await initFakeRepo(scope); + + await expect( + gitea.setBranchStatus({ + branchName: 'some-branch', + state: 'green', context: 'some-context', description: 'some-description', - }, - ); + }), + ).toResolve(); }); it('should default to pending state', async () => { - await setBranchStatus({ state: undefined }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post( + '/repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e', + { + state: 'pending', + context: 'some-context', + description: 'some-description', + }, + ) + .reply(200) + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, []); - expect(helper.createCommitStatus).toHaveBeenCalledTimes(1); - expect(helper.createCommitStatus).toHaveBeenCalledWith( - mockRepo.full_name, - mockCommitHash, - { - state: 'pending', + await initFakePlatform(scope); + await initFakeRepo(scope); + + await expect( + gitea.setBranchStatus({ + branchName: 'some-branch', context: 'some-context', description: 'some-description', - }, - ); + state: undefined as never, + }), + ).toResolve(); }); it('should include url if specified', async () => { - await setBranchStatus({ url: 'some-url' }); + const scope = httpMock + 
.scope('https://gitea.com/api/v1') + .post( + '/repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e', + { + state: 'success', + context: 'some-context', + description: 'some-description', + target_url: 'some-url', + }, + ) + .reply(200) + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, []); - expect(helper.createCommitStatus).toHaveBeenCalledTimes(1); - expect(helper.createCommitStatus).toHaveBeenCalledWith( - mockRepo.full_name, - mockCommitHash, - { - state: 'success', + await initFakePlatform(scope); + await initFakeRepo(scope); + + await expect( + gitea.setBranchStatus({ + branchName: 'some-branch', + state: 'green', context: 'some-context', description: 'some-description', - target_url: 'some-url', - }, - ); + url: 'some-url', + }), + ).toResolve(); }); it('should gracefully fail with warning', async () => { - helper.createCommitStatus.mockRejectedValueOnce(new Error()); - await setBranchStatus(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post( + '/repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e', + ) + .replyWithError('unknown error'); - expect(logger.warn).toHaveBeenCalledTimes(1); - }); - }); + await initFakePlatform(scope); + await initFakeRepo(scope); - describe('getBranchStatus', () => { - const getBranchStatus = async (state: string): Promise => { - await initFakeRepo(); - helper.getCombinedCommitStatus.mockResolvedValueOnce( - partial({ - worstStatus: state as CommitStatusType, + await expect( + gitea.setBranchStatus({ + branchName: 'some-branch', + state: 'green', + context: 'some-context', + description: 'some-description', }), + ).toResolve(); + + expect(logger.warn).toHaveBeenCalledWith( + { + err: expect.any(Error), + }, + 'Failed to set branch status', ); + }); + }); - return gitea.getBranchStatus('some-branch', true); - }; + describe('getBranchStatus', () => { + const commitStatus = (status: CommitStatusType): CommitStatus => ({ + id: 1, + status, + context: '', + 
description: '', + target_url: '', + created_at: '', + }); it('should return yellow for unknown result', async () => { - expect(await getBranchStatus('unknown')).toBe('yellow'); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [commitStatus('unknown')]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatus('some-branch', true); + + expect(res).toBe('yellow'); }); it('should return pending state for pending result', async () => { - expect(await getBranchStatus('pending')).toBe('yellow'); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [commitStatus('pending')]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatus('some-branch', true); + + expect(res).toBe('yellow'); }); - it('should return success state for success result', async () => { - expect(await getBranchStatus('success')).toBe('green'); + it('should return green state for success result', async () => { + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [commitStatus('success')]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatus('some-branch', true); + + expect(res).toBe('green'); }); - it('should return null for all other results', async () => { - expect(await getBranchStatus('invalid')).toBe('yellow'); + it('should return yellow for all other results', async () => { + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [commitStatus('invalid' as never)]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatus('some-branch', true); + + expect(res).toBe('yellow'); }); it('should 
abort when branch status returns 404', async () => { - helper.getCombinedCommitStatus.mockRejectedValueOnce({ statusCode: 404 }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(404); + await initFakePlatform(scope); + await initFakeRepo(scope); await expect(gitea.getBranchStatus('some-branch', true)).rejects.toThrow( REPOSITORY_CHANGED, @@ -661,19 +893,23 @@ describe('modules/platform/gitea/index', () => { }); it('should propagate any other errors', async () => { - helper.getCombinedCommitStatus.mockRejectedValueOnce( - new Error('getCombinedCommitStatus()'), - ); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .replyWithError('unknown error'); + await initFakePlatform(scope); + await initFakeRepo(scope); await expect(gitea.getBranchStatus('some-branch', true)).rejects.toThrow( - 'getCombinedCommitStatus()', + 'unknown error', ); }); it('should treat internal checks as success', async () => { - helper.getCombinedCommitStatus.mockResolvedValueOnce({ - worstStatus: 'success', - statuses: [ + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [ { id: 1, status: 'success', @@ -682,39 +918,46 @@ describe('modules/platform/gitea/index', () => { target_url: '', created_at: '', }, - ], - }); - expect(await gitea.getBranchStatus('some-branch', true)).toBe('green'); + ]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatus('some-branch', true); + + expect(res).toBe('green'); }); it('should not treat internal checks as success', async () => { - await initFakeRepo(); - helper.getCombinedCommitStatus.mockResolvedValueOnce( - partial({ - worstStatus: 'success', - statuses: [ - { - id: 1, - status: 'success', - context: 'renovate/stability-days', - description: 'internal check', - target_url: '', - 
created_at: '', - }, - ], - }), - ); - expect(await gitea.getBranchStatus('some-branch', false)).toBe('yellow'); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [ + { + id: 1, + status: 'success', + context: 'renovate/stability-days', + description: 'internal check', + target_url: '', + created_at: '', + }, + ]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatus('some-branch', false); + + expect(res).toBe('yellow'); }); }); describe('getBranchStatusCheck', () => { it('should return null with no results', async () => { - helper.getCombinedCommitStatus.mockResolvedValueOnce( - partial({ - statuses: [], - }), - ); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, []); + await initFakePlatform(scope); + await initFakeRepo(scope); expect( await gitea.getBranchStatusCheck('some-branch', 'some-context'), @@ -722,241 +965,341 @@ describe('modules/platform/gitea/index', () => { }); it('should return null with no matching results', async () => { - helper.getCombinedCommitStatus.mockResolvedValueOnce( - partial({ - statuses: [partial({ context: 'other-context' })], - }), + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [ + { + id: 1, + status: 'success', + context: 'other-context', + description: 'internal check', + target_url: '', + created_at: '', + }, + ]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatusCheck( + 'some-branch', + 'some-context', ); - expect( - await gitea.getBranchStatusCheck('some-branch', 'some-context'), - ).toBeNull(); + expect(res).toBeNull(); }); it('should return yellow with unknown status', async () => { - helper.getCombinedCommitStatus.mockResolvedValueOnce( - partial({ - statuses: [ - 
partial({ - context: 'some-context', - }), - ], - }), + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [ + { + id: 1, + status: 'xyz', + context: 'some-context', + description: '', + target_url: '', + created_at: '', + }, + ]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatusCheck( + 'some-branch', + 'some-context', ); - expect( - await gitea.getBranchStatusCheck('some-branch', 'some-context'), - ).toBe('yellow'); + expect(res).toBe('yellow'); }); it('should return green of matching result', async () => { - helper.getCombinedCommitStatus.mockResolvedValueOnce( - partial({ - statuses: [ - partial({ - status: 'success', - context: 'some-context', - }), - ], - }), + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/commits/some-branch/statuses') + .reply(200, [ + { + id: 1, + status: 'success', + context: 'some-context', + description: '', + target_url: '', + created_at: '', + }, + ]); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getBranchStatusCheck( + 'some-branch', + 'some-context', ); - expect( - await gitea.getBranchStatusCheck('some-branch', 'some-context'), - ).toBe('green'); + expect(res).toBe('green'); }); }); describe('getPrList', () => { it('should return list of pull requests', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); const res = await gitea.getPrList(); - expect(res).toHaveLength(mockPRs.length); - expect(res).toMatchSnapshot(); + expect(res).toMatchObject([ + { number: 1, title: 'Some PR' }, + { number: 2, title: 'Other PR' }, + { number: 3, title: 'Draft PR' }, + ]); }); it('should 
filter list by creator', async () => { - helper.getCurrentUser.mockResolvedValueOnce(mockUser); - - expect( - await gitea.initPlatform({ token: 'some-token' }), - ).toMatchSnapshot(); + const thirdPartyPr = partial({ + number: 42, + title: 'Third-party PR', + body: 'other random pull request', + state: 'open', + diff_url: 'https://gitea.renovatebot.com/some/repo/pulls/3.diff', + created_at: '2011-08-18T22:30:38Z', + closed_at: '2016-01-09T10:03:21Z', + mergeable: true, + base: { ref: 'third-party-base-branch' }, + head: { + label: 'other-head-branch', + sha: 'other-head-sha' as LongCommitSha, + repo: partial({ full_name: mockRepo.full_name }), + }, + user: { username: 'not-renovate' }, + }); + + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, [ + thirdPartyPr, + ...mockPRs.map((pr) => ({ + ...pr, + user: { username: 'renovate' }, + })), + ]); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); + const res = await gitea.getPrList(); - helper.searchPRs.mockResolvedValueOnce([ - partial({ - number: 3, - title: 'Third-party PR', - body: 'other random pull request', - state: 'open', - diff_url: 'https://gitea.renovatebot.com/some/repo/pulls/3.diff', - created_at: '2011-08-18T22:30:38Z', - closed_at: '2016-01-09T10:03:21Z', - mergeable: true, - base: { ref: 'third-party-base-branch' }, - head: { - label: 'other-head-branch', - sha: 'other-head-sha' as LongCommitSha, - repo: partial({ full_name: mockRepo.full_name }), - }, - user: { username: 'not-renovate' }, - }), - ...mockPRs.map((pr) => ({ ...pr, user: { username: 'renovate' } })), + expect(res).toMatchObject([ + { number: 1, title: 'Some PR' }, + { number: 2, title: 'Other PR' }, + { number: 3, title: 'Draft PR' }, ]); - - const res = await gitea.getPrList(); - expect(res).toHaveLength(mockPRs.length); - expect(res).toMatchSnapshot(); }); it('should cache results after first query', async () => { - 
helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); const res1 = await gitea.getPrList(); const res2 = await gitea.getPrList(); + expect(res1).toEqual(res2); - expect(helper.searchPRs).toHaveBeenCalledTimes(1); }); }); describe('getPr', () => { it('should return enriched pull request which exists if open', async () => { - const mockPR = mockPRs[0]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - helper.getBranch.mockResolvedValueOnce( - partial({ - commit: { - id: mockCommitHash, - author: partial({ - email: 'renovate@whitesourcesoftware.com', - }), - }, - }), - ); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); - const res = await gitea.getPr(mockPR.number); - expect(res).toHaveProperty('number', mockPR.number); - expect(res).toMatchSnapshot(); + const res = await gitea.getPr(1); + + expect(res).toMatchObject({ number: 1, title: 'Some PR' }); }); it('should fallback to direct fetching if cache fails', async () => { - const mockPR = mockPRs[0]; - helper.searchPRs.mockResolvedValueOnce([]); - helper.getPR.mockResolvedValueOnce({ ...mockPR, mergeable: false }); - await initFakeRepo(); - - const res = await gitea.getPr(mockPR.number); - expect(res).toHaveProperty('number', mockPR.number); - expect(res).toMatchSnapshot(); - expect(helper.getPR).toHaveBeenCalledTimes(1); - }); + const pr = mockPRs.find((pr) => pr.number === 1); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, []) + .get('/repos/some/repo/pulls/1') + .reply(200, pr); + await initFakePlatform(scope); + await 
initFakeRepo(scope); - it('should return null for missing pull request', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const res = await gitea.getPr(1); - expect(await gitea.getPr(42)).toBeNull(); + expect(res).toMatchObject({ number: 1, title: 'Some PR' }); }); - it('should block modified pull request for rebasing', async () => { - const mockPR = mockPRs[0]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + it('should return null for missing pull request', async () => { + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, []) + .get('/repos/some/repo/pulls/42') + .reply(200); // TODO: 404 should be handled + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.getPr(42); - const res = await gitea.getPr(mockPR.number); - expect(res).toHaveProperty('number', mockPR.number); + expect(res).toBeNull(); }); }); describe('findPr', () => { it('should find pull request without title or state', async () => { - const mockPR = mockPRs[0]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); - const res = await gitea.findPr({ branchName: mockPR.head.label }); - expect(res).toHaveProperty('sourceBranch', mockPR.head.label); + const res = await gitea.findPr({ branchName: 'some-head-branch' }); + + expect(res).toMatchObject({ + number: 1, + sourceBranch: 'some-head-branch', + }); }); it('should find pull request with title', async () => { - const mockPR = mockPRs[0]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + 
.reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); const res = await gitea.findPr({ - branchName: mockPR.head.label, - prTitle: mockPR.title, + branchName: 'some-head-branch', + prTitle: 'Some PR', + }); + + expect(res).toMatchObject({ + number: 1, + title: 'Some PR', }); - expect(res).toHaveProperty('sourceBranch', mockPR.head.label); - expect(res).toHaveProperty('title', mockPR.title); }); it('should find pull request with state', async () => { - const mockPR = mockPRs[1]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); const res = await gitea.findPr({ - branchName: mockPR.head.label, - state: mockPR.state, + branchName: 'some-head-branch', + state: 'open', + }); + + expect(res).toMatchObject({ + number: 1, + state: 'open', }); - expect(res).toHaveProperty('sourceBranch', mockPR.head.label); - expect(res).toHaveProperty('state', mockPR.state); }); it('should not find pull request with inverted state', async () => { - const mockPR = mockPRs[1]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect( - await gitea.findPr({ - branchName: mockPR.head.label, - state: `!${mockPR.state as PrState}` as never, // wrong argument being passed intentionally - }), - ).toBeNull(); + const res = await gitea.findPr({ + branchName: 'other-head-branch', + state: `!open`, + }); + + expect(res).toMatchObject({ + number: 2, + state: 'closed', + title: 'Other PR', + }); }); it('should find pull request with title and state', async () => { - const mockPR = mockPRs[1]; - 
helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); const res = await gitea.findPr({ - branchName: mockPR.head.label, - prTitle: mockPR.title, - state: mockPR.state, + branchName: 'other-head-branch', + prTitle: 'Other PR', + state: 'closed', + }); + + expect(res).toMatchObject({ + number: 2, + state: 'closed', + title: 'Other PR', }); - expect(res).toHaveProperty('sourceBranch', mockPR.head.label); - expect(res).toHaveProperty('title', mockPR.title); - expect(res).toHaveProperty('state', mockPR.state); }); it('should find pull request with draft', async () => { - const mockPR = mockPRs[2]; - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); const res = await gitea.findPr({ - branchName: mockPR.head.label, + branchName: 'draft-head-branch', prTitle: 'Draft PR', - state: mockPR.state, + state: 'open', + }); + + expect(res).toMatchObject({ + number: 3, + title: 'Draft PR', + isDraft: true, }); - expect(res).toHaveProperty('sourceBranch', mockPR.head.label); - expect(res).toHaveProperty('title', 'Draft PR'); - expect(res).toHaveProperty('state', mockPR.state); }); it('should return null for missing pull request', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.findPr({ branchName: 'missing' }); - expect(await gitea.findPr({ branchName: 'missing' 
})).toBeNull(); + expect(res).toBeNull(); }); }); @@ -981,12 +1324,16 @@ describe('modules/platform/gitea/index', () => { }; it('should use base branch by default', async () => { - helper.createPR.mockResolvedValueOnce({ - ...mockNewPR, - base: { ref: 'devel' }, - }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, { + ...mockNewPR, + base: { ref: 'devel' }, + }); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'devel', @@ -994,23 +1341,20 @@ describe('modules/platform/gitea/index', () => { prBody: mockNewPR.body, }); - expect(res).toHaveProperty('number', mockNewPR.number); - expect(res).toHaveProperty('targetBranch', 'devel'); - expect(res).toMatchSnapshot(); - expect(helper.createPR).toHaveBeenCalledTimes(1); - expect(helper.createPR).toHaveBeenCalledWith(mockRepo.full_name, { - base: 'devel', - head: mockNewPR.head.label, - title: mockNewPR.title, - body: mockNewPR.body, - labels: [], + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', }); }); it('should use default branch if requested', async () => { - helper.createPR.mockResolvedValueOnce(mockNewPR); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', @@ -1019,50 +1363,49 @@ describe('modules/platform/gitea/index', () => { draftPR: true, }); - expect(res).toHaveProperty('number', mockNewPR.number); - expect(res).toHaveProperty('targetBranch', mockNewPR.base.ref); - expect(res).toMatchSnapshot(); - expect(helper.createPR).toHaveBeenCalledTimes(1); - expect(helper.createPR).toHaveBeenCalledWith(mockRepo.full_name, { - base: mockNewPR.base.ref, - head: 
mockNewPR.head.label, - title: `WIP: ${mockNewPR.title}`, - body: mockNewPR.body, - labels: [], + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', }); }); it('should resolve and apply optional labels to pull request', async () => { - helper.createPR.mockResolvedValueOnce(mockNewPR); - helper.getRepoLabels.mockResolvedValueOnce(mockRepoLabels); - helper.getOrgLabels.mockResolvedValueOnce(mockOrgLabels); - - const mockLabels = mockRepoLabels.concat(mockOrgLabels); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR) + .get('/repos/some/repo/labels') + .reply(200, mockRepoLabels) + .get('/orgs/some/labels') + .reply(200, mockOrgLabels); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); - await gitea.createPr({ + const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', prTitle: mockNewPR.title, prBody: mockNewPR.body, - labels: mockLabels.map((l) => l.name), + labels: [...mockRepoLabels, ...mockOrgLabels].map(({ name }) => name), }); - expect(helper.createPR).toHaveBeenCalledTimes(1); - expect(helper.createPR).toHaveBeenCalledWith(mockRepo.full_name, { - base: mockNewPR.base.ref, - head: mockNewPR.head.label, - title: mockNewPR.title, - body: mockNewPR.body, - labels: mockLabels.map((l) => l.id), + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', }); }); it('should ensure new pull request gets added to cached pull requests', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - helper.createPR.mockResolvedValueOnce(mockNewPR); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs) + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); await gitea.getPrList(); await gitea.createPr({ sourceBranch: 
mockNewPR.head.label, @@ -1070,17 +1413,25 @@ describe('modules/platform/gitea/index', () => { prTitle: mockNewPR.title, prBody: mockNewPR.body, }); - const res = gitea.getPr(mockNewPR.number); + const res = await gitea.getPr(mockNewPR.number); - expect(res).not.toBeNull(); - expect(helper.searchPRs).toHaveBeenCalledTimes(1); + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', + }); }); it('should attempt to resolve 409 conflict error (w/o update)', async () => { - helper.createPR.mockRejectedValueOnce({ statusCode: 409 }); - helper.searchPRs.mockResolvedValueOnce([mockNewPR]); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(409) + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, [mockNewPR]); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', @@ -1088,14 +1439,25 @@ describe('modules/platform/gitea/index', () => { prBody: mockNewPR.body, }); - expect(res).toHaveProperty('number', mockNewPR.number); + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', + }); }); it('should attempt to resolve 409 conflict error (w/ update)', async () => { - helper.createPR.mockRejectedValueOnce({ statusCode: 409 }); - helper.searchPRs.mockResolvedValueOnce([mockNewPR]); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(409) + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, [mockNewPR]) + .patch('/repos/some/repo/pulls/42') + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', @@ -1103,19 +1465,20 @@ describe('modules/platform/gitea/index', () => { prBody: 'new-body', }); - expect(res).toHaveProperty('number', mockNewPR.number); - 
expect(helper.updatePR).toHaveBeenCalledTimes(1); - expect(helper.updatePR).toHaveBeenCalledWith( - mockRepo.full_name, - mockNewPR.number, - { title: 'new-title', body: 'new-body' }, - ); + expect(res).toMatchObject({ + number: 42, + title: 'new-title', + }); }); it('should abort when response for created pull request is invalid', async () => { - helper.createPR.mockResolvedValueOnce(partial()); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, {}); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); await expect( gitea.createPr({ sourceBranch: mockNewPR.head.label, @@ -1127,9 +1490,15 @@ describe('modules/platform/gitea/index', () => { }); it('should use platform automerge', async () => { - helper.createPR.mockResolvedValueOnce(mockNewPR); - await initFakePlatform('1.17.0'); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR) + .post('/repos/some/repo/pulls/42/merge') + .reply(200); + await initFakePlatform(scope, '1.17.0'); + await initFakeRepo(scope); + const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', @@ -1138,32 +1507,22 @@ describe('modules/platform/gitea/index', () => { platformOptions: { usePlatformAutomerge: true }, }); - expect(res).toHaveProperty('number', mockNewPR.number); - expect(res).toHaveProperty('targetBranch', mockNewPR.base.ref); - - expect(helper.createPR).toHaveBeenCalledTimes(1); - expect(helper.createPR).toHaveBeenCalledWith(mockRepo.full_name, { - base: mockNewPR.base.ref, - head: mockNewPR.head.label, - title: mockNewPR.title, - body: mockNewPR.body, - labels: [], + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', }); - expect(helper.mergePR).toHaveBeenCalledWith( - mockRepo.full_name, - mockNewPR.number, - { - Do: 'rebase', - merge_when_checks_succeed: true, - }, - ); }); it('continues on 
platform automerge error', async () => { - helper.createPR.mockResolvedValueOnce(mockNewPR); - await initFakePlatform('1.17.0'); - await initFakeRepo(); - helper.mergePR.mockRejectedValueOnce(new Error('fake')); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR) + .post('/repos/some/repo/pulls/42/merge') + .replyWithError('unknown error'); + await initFakePlatform(scope, '1.17.0'); + await initFakeRepo(scope); + const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', @@ -1172,30 +1531,24 @@ describe('modules/platform/gitea/index', () => { platformOptions: { usePlatformAutomerge: true }, }); - expect(res).toHaveProperty('number', mockNewPR.number); - expect(res).toHaveProperty('targetBranch', mockNewPR.base.ref); - - expect(helper.createPR).toHaveBeenCalledTimes(1); - expect(helper.createPR).toHaveBeenCalledWith(mockRepo.full_name, { - base: mockNewPR.base.ref, - head: mockNewPR.head.label, - title: mockNewPR.title, - body: mockNewPR.body, - labels: [], + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', }); - expect(helper.mergePR).toHaveBeenCalledWith( - mockRepo.full_name, - mockNewPR.number, - { - Do: 'rebase', - merge_when_checks_succeed: true, - }, + expect(logger.warn).toHaveBeenCalledWith( + expect.objectContaining({ prNumber: 42 }), + 'Gitea-native automerge: fail', ); }); it('continues if platform automerge is not supported', async () => { - helper.createPR.mockResolvedValueOnce(mockNewPR); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR); + await initFakePlatform(scope, '1.10.0'); + await initFakeRepo(scope); + const res = await gitea.createPr({ sourceBranch: mockNewPR.head.label, targetBranch: 'master', @@ -1204,180 +1557,325 @@ describe('modules/platform/gitea/index', () => { platformOptions: { usePlatformAutomerge: true }, }); - 
expect(res).toHaveProperty('number', mockNewPR.number); - expect(res).toHaveProperty('targetBranch', mockNewPR.base.ref); - - expect(helper.createPR).toHaveBeenCalledTimes(1); - expect(helper.createPR).toHaveBeenCalledWith(mockRepo.full_name, { - base: mockNewPR.base.ref, - head: mockNewPR.head.label, - title: mockNewPR.title, - body: mockNewPR.body, - labels: [], + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', }); - expect(helper.mergePR).not.toHaveBeenCalled(); + expect(logger.debug).toHaveBeenCalledWith( + expect.objectContaining({ prNumber: 42 }), + 'Gitea-native automerge: not supported on this version of Gitea. Use 1.17.0 or newer.', + ); }); + + it('should create PR with repository merge method when automergeStrategy is auto', async () => { + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR) + .post('/repos/some/repo/pulls/42/merge') + .reply(200); + await initFakePlatform(scope, '1.17.0'); + await initFakeRepo(scope); + + const res = await gitea.createPr({ + sourceBranch: mockNewPR.head.label, + targetBranch: 'master', + prTitle: mockNewPR.title, + prBody: mockNewPR.body, + platformOptions: { + automergeStrategy: 'auto', + usePlatformAutomerge: true, + }, + }); + + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', + }); + }); + + it.each` + automergeStrategy | prMergeStrategy + ${'fast-forward'} | ${'rebase'} + ${'merge-commit'} | ${'merge'} + ${'rebase'} | ${'rebase-merge'} + ${'squash'} | ${'squash'} + `( + 'should create PR with mergeStrategy $prMergeStrategy', + async ({ automergeStrategy, prMergeStrategy }) => { + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls') + .reply(200, mockNewPR) + .post('/repos/some/repo/pulls/42/merge') + .reply(200, { + Do: prMergeStrategy, + merge_when_checks_succeed: true, + }); + await initFakePlatform(scope, '1.17.0'); + await initFakeRepo(scope); + + const res = await 
gitea.createPr({ + sourceBranch: mockNewPR.head.label, + targetBranch: 'master', + prTitle: mockNewPR.title, + prBody: mockNewPR.body, + platformOptions: { + automergeStrategy, + usePlatformAutomerge: true, + }, + }); + + expect(res).toMatchObject({ + number: 42, + title: 'pr-title', + }); + }, + ); }); describe('updatePr', () => { it('should update pull request with title', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); - await gitea.updatePr({ number: 1, prTitle: 'New Title' }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs) + .patch('/repos/some/repo/pulls/1', { title: 'New Title' }) + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(helper.updatePR).toHaveBeenCalledTimes(1); - expect(helper.updatePR).toHaveBeenCalledWith(mockRepo.full_name, 1, { - title: 'New Title', - }); + await expect( + gitea.updatePr({ number: 1, prTitle: 'New Title' }), + ).toResolve(); }); it('should update pull target branch', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); - await gitea.updatePr({ - number: 1, - prTitle: 'New Title', - targetBranch: 'New Base', - }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs) + .patch('/repos/some/repo/pulls/1', { + title: 'New Title', + base: 'New Base', + }) + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(helper.updatePR).toHaveBeenCalledTimes(1); - expect(helper.updatePR).toHaveBeenCalledWith(mockRepo.full_name, 1, { - title: 'New Title', - base: 'New Base', - }); + await expect( + gitea.updatePr({ + number: 1, + prTitle: 'New Title', + targetBranch: 'New Base', + }), + ).toResolve(); }); it('should update pull request with title and body', async () => { - 
helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); - await gitea.updatePr({ - number: 1, - prTitle: 'New Title', - prBody: 'New Body', - }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs) + .patch('/repos/some/repo/pulls/1', { + title: 'New Title', + body: 'New Body', + }) + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(helper.updatePR).toHaveBeenCalledTimes(1); - expect(helper.updatePR).toHaveBeenCalledWith(mockRepo.full_name, 1, { - title: 'New Title', - body: 'New Body', - }); + await expect( + gitea.updatePr({ + number: 1, + prTitle: 'New Title', + prBody: 'New Body', + }), + ).toResolve(); }); it('should update pull request with draft', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); - await gitea.updatePr({ - number: 3, - prTitle: 'New Title', - prBody: 'New Body', - }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs) + .patch('/repos/some/repo/pulls/3', { + title: 'WIP: New Title', + body: 'New Body', + }) + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(helper.updatePR).toHaveBeenCalledTimes(1); - expect(helper.updatePR).toHaveBeenCalledWith(mockRepo.full_name, 3, { - title: 'WIP: New Title', - body: 'New Body', - }); + await expect( + gitea.updatePr({ + number: 3, + prTitle: 'New Title', + prBody: 'New Body', + }), + ).toResolve(); }); it('should close pull request', async () => { - helper.searchPRs.mockResolvedValueOnce(mockPRs); - await initFakeRepo(); - await gitea.updatePr({ - number: 1, - prTitle: 'New Title', - prBody: 'New Body', - state: 'closed', - }); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/pulls') + .query({ state: 'all' }) + .reply(200, mockPRs) + 
.patch('/repos/some/repo/pulls/1', { + title: 'New Title', + body: 'New Body', + state: 'closed', + }) + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(helper.updatePR).toHaveBeenCalledWith(mockRepo.full_name, 1, { - title: 'New Title', - body: 'New Body', - state: 'closed', - }); + await expect( + gitea.updatePr({ + number: 1, + prTitle: 'New Title', + prBody: 'New Body', + state: 'closed', + }), + ).toResolve(); }); }); describe('mergePr', () => { it('should return true when merging succeeds', async () => { - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls/1/merge', { + Do: 'rebase', + }) + .reply(200); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect( - await gitea.mergePr({ - branchName: 'some-branch', - id: 1, - }), - ).toBe(true); - expect(helper.mergePR).toHaveBeenCalledTimes(1); - expect(helper.mergePR).toHaveBeenCalledWith(mockRepo.full_name, 1, { - Do: 'rebase', + const res = await gitea.mergePr({ + branchName: 'some-branch', + id: 1, }); + + expect(res).toBe(true); }); it('should return false when merging fails', async () => { - helper.mergePR.mockRejectedValueOnce(new Error()); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .post('/repos/some/repo/pulls/1/merge', { + Do: 'squash', + }) + .replyWithError('unknown'); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.mergePr({ + branchName: 'some-branch', + id: 1, + strategy: 'squash', + }); - expect( - await gitea.mergePr({ - branchName: 'some-branch', - id: 1, - strategy: 'squash', - }), - ).toBe(false); + expect(res).toBe(false); }); }); describe('getIssueList', () => { it('should return empty for disabled issues', async () => { - await initFakeRepo({ has_issues: false }); - expect(await gitea.getIssueList()).toBeEmptyArray(); + const scope = httpMock.scope('https://gitea.com/api/v1'); + await 
initFakePlatform(scope); + await initFakeRepo(scope, { has_issues: false }); + + const res = await gitea.getIssueList(); + + expect(res).toBeEmptyArray(); }); }); describe('getIssue', () => { it('should return the issue', async () => { const mockIssue = mockIssues.find((i) => i.number === 1)!; - helper.getIssue.mockResolvedValueOnce(mockIssue); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/issues/1') + .reply(200, mockIssue); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(await gitea.getIssue?.(mockIssue.number)).toHaveProperty( - 'number', - mockIssue.number, - ); + const res = await gitea.getIssue?.(mockIssue.number); + + expect(res).toEqual({ + body: 'some-content', + number: 1, + }); }); it('should return null for disabled issues', async () => { - await initFakeRepo({ has_issues: false }); - expect(await gitea.getIssue!(1)).toBeNull(); + const scope = httpMock.scope('https://gitea.com/api/v1'); + await initFakePlatform(scope); + await initFakeRepo(scope, { has_issues: false }); + + const res = await gitea.getIssue!(1); + + expect(res).toBeNull(); }); }); describe('findIssue', () => { it('should return existing open issue', async () => { - const mockIssue = mockIssues.find((i) => i.title === 'open-issue')!; - helper.searchIssues.mockResolvedValueOnce(mockIssues); - helper.getIssue.mockResolvedValueOnce(mockIssue); - await initFakeRepo(); - - expect(await gitea.findIssue(mockIssue.title)).toHaveProperty( - 'number', - mockIssue.number, - ); + const mockIssue = mockIssues.find(({ title }) => title === 'open-issue')!; + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/issues') + .query({ state: 'all', type: 'issues' }) + .reply(200, mockIssues) + .get('/repos/some/repo/issues/1') + .reply(200, mockIssue); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.findIssue(mockIssue.title); + + 
expect(res).toMatchObject({ + body: 'some-content', + number: 1, + }); }); it('should not return existing closed issue', async () => { - const mockIssue = mockIssues.find((i) => i.title === 'closed-issue')!; - helper.searchIssues.mockResolvedValueOnce(mockIssues); - await initFakeRepo(); + const mockIssue = mockIssues.find( + ({ title }) => title === 'closed-issue', + )!; + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/issues') + .query({ state: 'all', type: 'issues' }) + .reply(200, mockIssues); + await initFakePlatform(scope); + await initFakeRepo(scope); + + const res = await gitea.findIssue(mockIssue.title); - expect(await gitea.findIssue(mockIssue.title)).toBeNull(); + expect(res).toBeNull(); }); it('should return null for missing issue', async () => { - helper.searchIssues.mockResolvedValueOnce(mockIssues); - await initFakeRepo(); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/issues') + .query({ state: 'all', type: 'issues' }) + .reply(200, mockIssues); + await initFakePlatform(scope); + await initFakeRepo(scope); - expect(await gitea.findIssue('missing')).toBeNull(); + const res = await gitea.findIssue('missing'); + + expect(res).toBeNull(); }); }); @@ -1390,18 +1888,22 @@ describe('modules/platform/gitea/index', () => { once: false, }; - helper.searchIssues.mockResolvedValueOnce(mockIssues); - helper.createIssue.mockResolvedValueOnce(partial({ number: 42 })); + const scope = httpMock + .scope('https://gitea.com/api/v1') + .get('/repos/some/repo/issues') + .query({ state: 'all', type: 'issues' }) + .reply(200, mockIssues) + .post('/repos/some/repo/issues', { + body: mockIssue.body, + title: mockIssue.title, + }) + .reply(200, { number: 42 }); + await initFakePlatform(scope); + await initFakeRepo(scope); - await initFakeRepo(); const res = await gitea.ensureIssue(mockIssue); expect(res).toBe('created'); - expect(helper.createIssue).toHaveBeenCalledTimes(1); - 
expect(helper.createIssue).toHaveBeenCalledWith(mockRepo.full_name, { - body: mockIssue.body, - title: mockIssue.title, - }); }); it('should create issue with the correct labels', async () => { @@ -1412,35 +1914,49 @@ describe('modules/platform/gitea/index', () => { once: false, labels: ['Renovate', 'Maintenance'], }; - const mockLabels: Label[] = [ - partial