diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index 9d99d69427f41e..748b5c9903a038 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -1 +1 @@
-FROM ghcr.io/containerbase/devcontainer:10.1.4
+FROM ghcr.io/containerbase/devcontainer:10.6.14
diff --git a/.devcontainer/post-create.sh b/.devcontainer/post-create.sh
index cdca067449391a..eae5da872d27d6 100755
--- a/.devcontainer/post-create.sh
+++ b/.devcontainer/post-create.sh
@@ -7,4 +7,4 @@ if [[ "${CODESPACES}" == true ]]; then
sudo chmod 1777 /tmp
fi
-pnpm install
+COREPACK_ENABLE_DOWNLOAD_PROMPT=0 pnpm install
diff --git a/.eslintrc.js b/.eslintrc.js
index fa3c4fd191fd45..87f08fb0cfb5c2 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -16,7 +16,6 @@ module.exports = {
'plugin:@typescript-eslint/recommended-requiring-type-checking',
'plugin:promise/recommended',
'plugin:jest-formatting/recommended',
- 'prettier',
],
parserOptions: {
ecmaVersion: 9,
@@ -225,5 +224,14 @@ module.exports = {
'import/extensions': 0,
},
},
+ {
+ files: ['tools/docs/test/**/*.mjs'],
+ env: {
+ jest: false,
+ },
+ rules: {
+ '@typescript-eslint/no-floating-promises': 0,
+ },
+ },
],
};
diff --git a/.github/DISCUSSION_TEMPLATE/report-a-problem.yml b/.github/DISCUSSION_TEMPLATE/report-a-problem.yml
deleted file mode 100644
index d526271bdd349d..00000000000000
--- a/.github/DISCUSSION_TEMPLATE/report-a-problem.yml
+++ /dev/null
@@ -1,85 +0,0 @@
-body:
- - type: dropdown
- id: how-are-you-running-renovate
- attributes:
- label: How are you running Renovate?
- options:
- - 'Mend Renovate hosted app on github.com'
- - 'Self-hosted Renovate'
-
- - type: input
- id: self-hosted-veresion
- attributes:
- label: If you're self-hosting Renovate, tell us what version of Renovate you run.
- validations:
- required: false
-
- - type: dropdown
- id: self-hosted-platform
- attributes:
- label: If you're self-hosting Renovate, select which platform you are using.
- options:
- - 'AWS CodeCommit'
- - 'Azure DevOps (dev.azure.com)'
- - 'Azure DevOps Server'
- - 'Bitbucket Cloud (bitbucket.org)'
- - 'Bitbucket Server'
- - 'Gitea or Forgejo'
- - 'github.com'
- - 'GitHub Enterprise Server'
- - 'gitlab.com'
- - 'GitLab self-hosted'
- validations:
- required: false
-
- - type: dropdown
- id: regression-error
- attributes:
- label: Was this something which used to work for you, and then stopped?
- options:
- - 'It used to work, and then stopped'
- - 'I have not seen this working'
-
- - type: textarea
- id: describe-problem
- attributes:
- label: Describe the problem
- description: 'Do not report any security concerns here. Email [renovate-disclosure@mend.io](mailto:renovate-disclosure@mend.io) instead.'
- validations:
- required: true
-
- - type: textarea
- id: debug-logs
- attributes:
- label: Relevant debug logs
- description: |
- Try not to report a problem unless you've looked at the logs first.
- If you're running self-hosted, run with `LOG_LEVEL=debug` in your environment variables and search for whatever dependency/branch/PR that is causing the problem.
- If you are using the Renovate App, log into [Renovate's app dashboard](https://developer.mend.io) and locate the correct job log for when the problem occurred (e.g. when the PR was created).
- Try to paste the *relevant* logs here, not the entire thing and not just a link to the dashboard (others don't have permissions to view them).
- If you're not sure about the relevant parts of the log, then feel free to post the full log to a [Github Gist](https://gist.github.com/) and link to it.
- Try to highlight the important logs into the Discussion itself.
- value: |
- Logs
-
- ```
-
- Copy/paste the relevant log(s) here, between the starting and ending backticks
-
- ```
-
-
- validations:
- required: false
-
- - type: dropdown
- id: minimal-reproduction-repository
- attributes:
- label: Have you created a minimal reproduction repository?
- description: Please read the [minimal reproductions documentation](https://github.com/renovatebot/renovate/blob/main/docs/development/minimal-reproductions.md) to learn how to make a good minimal reproduction repository.
- options:
- - 'Placeholder value, please select the correct response from the dropdown'
- - 'I have linked to a minimal reproduction in the description above'
- - 'I have explained in the description why a minimal reproduction is impossible'
- validations:
- required: true
diff --git a/.github/DISCUSSION_TEMPLATE/ask-a-question.yml b/.github/DISCUSSION_TEMPLATE/request-help.yml
similarity index 68%
rename from .github/DISCUSSION_TEMPLATE/ask-a-question.yml
rename to .github/DISCUSSION_TEMPLATE/request-help.yml
index 7c995c9eadff62..3dcb62a21aa156 100644
--- a/.github/DISCUSSION_TEMPLATE/ask-a-question.yml
+++ b/.github/DISCUSSION_TEMPLATE/request-help.yml
@@ -1,4 +1,13 @@
body:
+ - type: dropdown
+ id: question-type
+ attributes:
+ label: What would you like help with?
+ options:
+ - 'I would like help with my configuration'
+ - 'I think I found a bug'
+ - 'Other'
+
- type: dropdown
id: how-are-you-running-renovate
attributes:
@@ -10,32 +19,14 @@ body:
- type: input
id: self-hosted-version
attributes:
- label: If you're self-hosting Renovate, tell us what version of Renovate you run.
- validations:
- required: false
-
- - type: dropdown
- id: self-hosted-platform
- attributes:
- label: If you're self-hosting Renovate, select which platform you are using.
- options:
- - 'AWS CodeCommit'
- - 'Azure DevOps (dev.azure.com)'
- - 'Azure DevOps Server'
- - 'Bitbucket Cloud (bitbucket.org)'
- - 'Bitbucket Server'
- - 'Gitea or Forgejo'
- - 'github.com'
- - 'GitHub Enterprise Server'
- - 'gitlab.com'
- - 'GitLab self-hosted'
+ label: If you're self-hosting Renovate, tell us which platform (GitHub, GitLab, etc) and which version of Renovate.
validations:
required: false
- type: textarea
id: the-question
attributes:
- label: What is your question?
+ label: Please tell us more about your question or problem
validations:
required: true
@@ -54,7 +45,7 @@ body:
```
- Copy/paste the relevant log(s) here, between the starting and ending backticks
+ Replace this text with your logs, between the starting and ending triple backticks
```
diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml
index 4fc4b36e860ad1..1aa7347f696af0 100644
--- a/.github/ISSUE_TEMPLATE/config.yml
+++ b/.github/ISSUE_TEMPLATE/config.yml
@@ -1,5 +1,5 @@
blank_issues_enabled: false
contact_links:
- name: Start a discussion
- url: https://github.com/renovatebot/renovate/discussions/new
+ url: https://github.com/renovatebot/renovate/discussions/new/choose
about: Our preferred starting point if you have any questions or suggestions about bot configuration, features or behavior.
diff --git a/.github/actions/calculate-prefetch-matrix/action.yml b/.github/actions/calculate-prefetch-matrix/action.yml
index b58c6353d6fd8b..c895efeaef0cce 100644
--- a/.github/actions/calculate-prefetch-matrix/action.yml
+++ b/.github/actions/calculate-prefetch-matrix/action.yml
@@ -34,7 +34,7 @@ runs:
- name: Check cache miss for MacOS
id: macos-cache
- uses: actions/cache/restore@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: node_modules
key: ${{ env.MACOS_KEY }}
@@ -43,7 +43,7 @@ runs:
- name: Check cache miss for Windows
id: windows-cache
- uses: actions/cache/restore@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: node_modules
key: ${{ env.WINDOWS_KEY }}
diff --git a/.github/actions/setup-node/action.yml b/.github/actions/setup-node/action.yml
index 2c24caa22ca310..b0b6e2bc360ac6 100644
--- a/.github/actions/setup-node/action.yml
+++ b/.github/actions/setup-node/action.yml
@@ -34,7 +34,7 @@ runs:
- name: Restore `node_modules`
id: node-modules-restore
- uses: actions/cache/restore@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: node_modules
key: ${{ env.CACHE_KEY }}
@@ -47,9 +47,10 @@ runs:
(steps.node-modules-restore.outputs.cache-hit == 'true') && 'true' || ''
}}' >> "$GITHUB_ENV"
- - name: Enable corepack
- shell: bash
- run: corepack enable
+ - name: Setup pnpm
+ uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0
+ with:
+ standalone: true
- name: Setup Node
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
@@ -67,7 +68,7 @@ runs:
- name: Write `node_modules` cache
if: inputs.save-cache == 'true' && env.CACHE_HIT != 'true'
- uses: actions/cache/save@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/save@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: node_modules
key: ${{ env.CACHE_KEY }}
diff --git a/.github/contributing.md b/.github/contributing.md
index d041337e30d2fc..20bdc16f0416ae 100644
--- a/.github/contributing.md
+++ b/.github/contributing.md
@@ -21,15 +21,6 @@ If you want help with your Renovate configuration, go to the [discussions tab in
For **feature requests**: first search for related requests in the issues and discussions, if you don't find anything: create a _discussion_.
-## Rate Limiting of Support Requests through Temporary Blocking
-
-To ensure that the Renovate maintainers don't burn out from dealing with unfriendly behavior, those who display a bad attitude when asking for or receiving support in the repo will be rate limited from further requests through the use of temporary blocking.
-The duration of the temporary block depends on how rude or inconsiderate the behavior is perceived to be, and can be from 1-30 days.
-
-If you have been blocked temporarily and believe that it is due to a misunderstanding, or you regret your comments and wish to make amends, please reach out to the lead maintainer Rhys Arkins by email with any request for early unblocking.
-If/once you are unblocked, you should edit or delete whatever comment lead to the blocking, even if you did not intend it to be rude or inconsiderate.
-Long emails or apologies are undesirable - the maintainers are busy and want to be able to help as many users as possible with the time they have available.
-
## Code
If you would like to fix a bug or work on a feature, please fork the repository and create a Pull Request.
diff --git a/.github/label-actions.yml b/.github/label-actions.yml
index b46ba40aebc2f3..6f8463a1df654f 100644
--- a/.github/label-actions.yml
+++ b/.github/label-actions.yml
@@ -9,15 +9,12 @@
Before we start working on your issue we need to know exactly what's causing the current behavior.
A minimal reproduction helps us with this.
+ Discussions without reproductions are less likely to be converted to Issues.
To get started, please read our guide on creating a [minimal reproduction](https://github.com/renovatebot/renovate/blob/main/docs/development/minimal-reproductions.md).
- We may close the discussion if you, or someone else, haven't created a minimal reproduction within two weeks.
- If you need more time, or are stuck, please ask for help or more time in a comment.
-
-
Good luck,
@@ -42,15 +39,15 @@
Select me to read instructions
- If you use the Renovate app (GitHub):
+ If you use the Mend Renovate app (GitHub):
- 1. Go to the affected PR, and search for "View repository job log here"
+ 1. Log in to [the Mend Developer Portal](https://developer.mend.io/)
- 1. Select the link to go to the "Mend Renovate Dashboard" and log in
+ 1. Navigate to the correct organization and repository
- 1. You are now in the correct repository log overview screen
+ 1. Locate the appropriate log (it may not always be the latest one)
- 1. Copy/paste the correct log
+ 1. Copy/paste the log contents
1. Follow the steps in the **formatting your logs** section
@@ -62,7 +59,7 @@
Select me to read instructions
- If you're running self-hosted, run with `LOG_LEVEL=debug` in your environment variables and search for whatever dependency/branch/PR that is causing the problem.
+ Read the [Renovate docs, troubleshooting, self-hosted](https://docs.renovatebot.com/troubleshooting/#self-hosted) to learn how to find the logs.
@@ -96,6 +93,9 @@
+ If you feel the logs are too large to paste here, please use a service like [GitHub Gist](https://gist.github.com/) and paste the link here.
+
+
Good luck,
@@ -128,6 +128,9 @@
Please try the latest version and tell us if that fixes your problem.
+ Be sure to provide updated logs once you have run with a newer version.
+
+
Good luck,
@@ -168,7 +171,7 @@
Hi there,
- You are using `done` comments which cause a lot of noise.
+ You are using `done` comments which cause a lot of noise/notifications.
Instead, please use GitHub's web interface to request another review.
Please read our [contributing guidelines](https://github.com/renovatebot/renovate/blob/main/.github/contributing.md#resolve-review-comments-instead-of-commenting) to reduce noise.
@@ -184,7 +187,13 @@
Thank you for your PR, but we need to discuss the requirements and implementation first.
- This PR will be closed, but you can reopen it after the discussion has been resolved.
+
+
+ The maintainers believe that there is a lack of - or misalignment of - requirements about this PR.
+ We need to discuss the requirements and implementation first so that you don't write code that won't be merged.
+
+
+ This PR will be closed for now to avoid confusion, but you can reopen it after the discussion has been resolved.
Thanks, the Renovate team
@@ -198,21 +207,39 @@
This discussion is missing some details, making it difficult or impossible to help you.
Please try again to provide more details.
+
+ For example, you may have left out information about your platform (e.g. GitHub Enterprise Server, etc), your version of Renovate (npm, Docker, GitHub Action, etc), or how you're running Renovate.
+
+
+ If you can't think of what possible information might be required, please reply to this message and ask for help.
+
'needs-discussion':
- unlabel:
- - 'type:bug'
- - 'type:feature'
- - 'status:requirements'
comment: >
**Please create a GitHub Discussion instead of this issue.**
- We only want Renovate maintainers to create new Issues. If needed, a Renovate maintainer will create an Issue after your Discussion been triaged and confirmed. As a Renovate user, please create a GitHub Discussion in this repo instead.
+ Issues in this repository are for creation by Maintainers only - please create a GitHub Discussion instead.
+ If needed, a Renovate maintainer will create an Issue after your Discussion been triaged and confirmed.
This Issue will now be closed and locked. We may later batch-delete this issue. This way we keep Issues actionable, and free of duplicates or wrong bug reports.
+ Thanks, the Renovate team
+ close: true
+ close-reason: 'not planned'
+
+'auto:inactivity-pr-close':
+ comment: >
+ **We're closing this PR due to inactivity, but we are happy for you, or others, to finish the PR.**
+
+
+ We limit the number of open PRs, so we close stale PRs, or PRs that are not getting ready to merge.
+
+
+ If you, or someone else, want to continue working on this PR, then please reopen this PR and let us know.
+
+
Thanks, the Renovate team
close: true
close-reason: 'not planned'
@@ -271,9 +298,6 @@
- Stop giving off more bad vibes
- If you're unhappy with this, we suggest you stop using the repository discussions or the product altogether.
-
-
Thanks, the Renovate team
'auto:one-topic':
@@ -299,32 +323,13 @@
Hi there,
- Please do not unnecessarily `@` mention maintainers like `@rarkins` or `@viceice`. Doing so causes annoying notifications and makes it harder to maintain this repository.
-
-
- For example, never `@` mention a maintainer when you are creating a discussion if your desire is to get attention. This is rude behavior, just like shouting out your coffee order in a Starbucks before it's your turn.
-
-
- It's OK to comment in an issue or discussion after multiple days or weeks. But please, still don't `@` mention people. The maintainers try to answer most discussions, but they can't answer all discussions. If you're still not getting an answer, take a look at the information you've given us and see if you can improve it.
-
-
- Thanks, the Renovate team
-
-'auto:misclassified-problem':
- comment: >
- Hi there,
-
+ This is intended as a polite, automated _request_ that users avoid `@` mentioning repository maintainers like `@rarkins` or `@viceice`. Doing so causes annoying mobile notifications and makes it harder to maintain this repository.
- A maintainer has flagged that this discussion is _misclassified_ as a bug when it is not.
+ We know it might be common elsewhere but we participate in hundreds of discussions a week and would need to turn off GitHub mobile notifications if we were mentioned in every one.
- Incorrectly classified discussions waste maintainer time, worsen search result accuracy and make it harder to train AI on this dataset.
+ As a general rule, we will read and respond to all discussions in this repository, so there is no need to mention us.
- The next time you create a discussion, please keep in mind:
- - If you are new to Renovate, try to stick to questions instead of problem reports
- - Just because Renovate does something you don't expect, doesn't automatically mean it's a bug
- - Unsupported features should be raised as ideas, not problems
- - Those who appear to be _twisting_ questions into sounding like a bug for attention will be given the least support
Thanks, the Renovate team
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index 13269db28c7b70..071b8a739b6b02 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -32,13 +32,13 @@ env:
DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
NODE_VERSION: 18
DRY_RUN: true
+ TEST_LEGACY_DECRYPTION: true
SPARSE_CHECKOUT: |-
.github/actions/
data/
tools/
package.json
pnpm-lock.yaml
- codecov.yml
jobs:
setup:
@@ -93,10 +93,11 @@ jobs:
run: gh api ${{ env.PR_URL }} | jq -rc '${{ env.JQ_FILTER }}' >> "$GITHUB_OUTPUT"
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
- sparse-checkout: ${{ env.SPARSE_CHECKOUT }}
filter: blob:none # we don't need all blobs
+ sparse-checkout: ${{ env.SPARSE_CHECKOUT }}
+ show-progress: false
- name: Calculate matrix for `node_modules` prefetch
uses: ./.github/actions/calculate-prefetch-matrix
@@ -150,9 +151,11 @@ jobs:
steps:
- name: Checkout code
if: needs.setup.outputs.os-matrix-is-full && runner.os != 'Linux'
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
+ filter: blob:none # we don't need all blobs
sparse-checkout: ${{ env.SPARSE_CHECKOUT }}
+ show-progress: false
- name: Setup Node.js
if: needs.setup.outputs.os-matrix-is-full && runner.os != 'Linux'
@@ -172,7 +175,9 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -181,7 +186,7 @@ jobs:
os: ${{ runner.os }}
- name: Restore eslint cache
- uses: actions/cache/restore@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: .cache/eslint
key: eslint-main-cache
@@ -200,7 +205,7 @@ jobs:
- name: Save eslint cache
if: github.event_name == 'push'
- uses: actions/cache/save@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/save@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: .cache/eslint
key: eslint-main-cache
@@ -215,7 +220,9 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -224,7 +231,7 @@ jobs:
os: ${{ runner.os }}
- name: Restore prettier cache
- uses: actions/cache/restore@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/restore@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: .cache/prettier
key: prettier-main-cache
@@ -243,7 +250,7 @@ jobs:
- name: Save prettier cache
if: github.event_name == 'push'
- uses: actions/cache/save@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache/save@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: .cache/prettier
key: prettier-main-cache
@@ -255,7 +262,9 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -264,7 +273,7 @@ jobs:
os: ${{ runner.os }}
- name: Lint markdown
- uses: DavidAnson/markdownlint-cli2-action@510b996878fc0d1a46c8a04ec86b06dbfba09de7 # v15.0.0
+ uses: DavidAnson/markdownlint-cli2-action@b4c9feab76d8025d1e83c653fa3990936df0e6c8 # v16.0.0
- name: Lint fenced code blocks
run: pnpm doc-fence-check
@@ -282,7 +291,9 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -319,7 +330,9 @@ jobs:
include: ${{ fromJSON(needs.setup.outputs.test-shard-matrix) }}
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -328,7 +341,7 @@ jobs:
os: ${{ runner.os }}
- name: Cache jest
- uses: actions/cache@ab5e6d0c87105b4c9c2047343972218f562e4319 # v4.0.1
+ uses: actions/cache@0c45773b623bea8c8e75f6c82b208c3cf94ea4f9 # v4.0.2
with:
path: .cache/jest
key: |
@@ -366,7 +379,7 @@ jobs:
- name: Save coverage artifacts
if: (success() || failure()) && github.event.pull_request.draft != true && matrix.coverage
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+ uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
with:
name: ${{ matrix.upload-artifact-name }}
path: |
@@ -377,20 +390,27 @@ jobs:
needs: [test]
runs-on: ubuntu-latest
timeout-minutes: 3
- if: (success() || failure()) && github.event.pull_request.draft != true
+ if: (success() || failure()) && github.event_name != 'merge_group' && github.event.pull_request.draft != true
steps:
+ - name: Checkout code
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ filter: blob:none # we don't need all blobs
+ show-progress: false
+
- name: Download coverage reports
- uses: actions/download-artifact@87c55149d96e628cc2ef7e6fc2aab372015aec85 # v4.1.3
+ uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
pattern: coverage-*
path: coverage
merge-multiple: true
- name: Codecov
- uses: codecov/codecov-action@ab904c41d6ece82784817410c45d8b8c02684457 # v3.1.6
+ uses: codecov/codecov-action@125fc84a9a348dbcf27191600683ec096ec9021c # v4.4.1
with:
+ token: ${{ secrets.CODECOV_TOKEN }}
directory: coverage/lcov
- fail_ci_if_error: true
+ fail_ci_if_error: github.event_name != 'pull_request'
verbose: true
coverage-threshold:
@@ -401,10 +421,11 @@ jobs:
if: (success() || failure()) && github.event.pull_request.draft != true
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
- sparse-checkout: ${{ env.SPARSE_CHECKOUT }}
filter: blob:none # we don't need all blobs
+ sparse-checkout: ${{ env.SPARSE_CHECKOUT }}
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -413,7 +434,7 @@ jobs:
os: ${{ runner.os }}
- name: Download coverage reports
- uses: actions/download-artifact@87c55149d96e628cc2ef7e6fc2aab372015aec85 # v4.1.3
+ uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
pattern: coverage-*
path: coverage
@@ -490,7 +511,9 @@ jobs:
if: github.event.pull_request.draft != true
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -510,7 +533,7 @@ jobs:
run: pnpm test-e2e:pack
- name: Upload
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+ uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
with:
name: renovate-package
path: renovate-0.0.0-semantic-release.tgz
@@ -522,7 +545,9 @@ jobs:
if: github.event.pull_request.draft != true
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Setup Node.js
uses: ./.github/actions/setup-node
@@ -533,8 +558,11 @@ jobs:
- name: Build
run: pnpm build:docs
+ - name: Test docs
+ run: pnpm test:docs
+
- name: Upload
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+ uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
with:
name: docs
path: tmp/docs/
@@ -548,19 +576,22 @@ jobs:
steps:
- name: Checkout code
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
+
+ - name: Setup pnpm
+ uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0
+ with:
+ standalone: true
- name: Setup Node.js
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
with:
node-version: ${{ env.NODE_VERSION }}
- - name: Enable corepack
- shell: bash
- run: corepack enable
-
- name: Download package
- uses: actions/download-artifact@87c55149d96e628cc2ef7e6fc2aab372015aec85 # v4.1.3
+ uses: actions/download-artifact@65a9edc5881444af0b9093a5e628f2fe47ea3b2e # v4.1.7
with:
name: renovate-package
@@ -594,14 +625,14 @@ jobs:
packages: write
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
fetch-depth: 0 # zero stands for full checkout, which is required for semantic-release
- show-progress: false
filter: blob:none # we don't need all blobs, only the full tree
+ show-progress: false
- name: docker-config
- uses: containerbase/internal-tools@dc264f478d5abd1fb9e28e29dc3becb0ad57b5a2 # v3.0.61
+ uses: containerbase/internal-tools@a0551836e0d8c9de0562e344da3c3832a03b9742 # v3.0.88
with:
command: docker-config
@@ -611,7 +642,7 @@ jobs:
node-version: ${{ env.NODE_VERSION }}
os: ${{ runner.os }}
- - uses: sigstore/cosign-installer@e1523de7571e31dbe865fd2e80c5c7c23ae71eb4 # v3.4.0
+ - uses: sigstore/cosign-installer@59acb6260d9c0ba8f4a2f9d9b48431a222b68e20 # v3.5.0
- name: Docker registry login
run: |
diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml
index 2e4e14b462a60b..4dcc77c1d9b02d 100644
--- a/.github/workflows/codeql-analysis.yml
+++ b/.github/workflows/codeql-analysis.yml
@@ -31,7 +31,9 @@ jobs:
security-events: write
steps:
- name: Checkout repository
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Delete fixtures to suppress false positives
run: |
@@ -39,7 +41,7 @@ jobs:
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
- uses: github/codeql-action/init@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
+ uses: github/codeql-action/init@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6
with:
languages: javascript
@@ -49,7 +51,7 @@ jobs:
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
- uses: github/codeql-action/autobuild@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
+ uses: github/codeql-action/autobuild@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6
# ℹ️ Command-line programs to run using the OS shell.
# 📚 https://git.io/JvXDl
@@ -63,4 +65,4 @@ jobs:
# make release
- name: Perform CodeQL Analysis
- uses: github/codeql-action/analyze@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
+ uses: github/codeql-action/analyze@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6
diff --git a/.github/workflows/dependency-review.yml b/.github/workflows/dependency-review.yml
index cb3735c36617f6..a2dce1d14fee81 100644
--- a/.github/workflows/dependency-review.yml
+++ b/.github/workflows/dependency-review.yml
@@ -9,7 +9,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: 'Checkout Repository'
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: 'Dependency Review'
- uses: actions/dependency-review-action@9129d7d40b8c12c1ed0f60400d00c92d437adcce # v4.1.3
+ uses: actions/dependency-review-action@0c155c5e8556a497adf53f2c18edabf945ed8e70 # v4.3.2
diff --git a/.github/workflows/devcontainer.yml b/.github/workflows/devcontainer.yml
index e71f5b0cdb0a6f..6a7b6bccd8ff37 100644
--- a/.github/workflows/devcontainer.yml
+++ b/.github/workflows/devcontainer.yml
@@ -18,9 +18,11 @@ jobs:
if: github.event.pull_request.draft != true
steps:
- name: Checkout
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Build and run dev container task
- uses: devcontainers/ci@3d462823359c481c587cb7426f39775f24257115 # v0.3.1900000339
+ uses: devcontainers/ci@a56d055efecd725e8cfe370543b6071b79989cc8 # v0.3.1900000349
with:
runCmd: pnpm build
diff --git a/.github/workflows/mend-slack.yml b/.github/workflows/mend-slack.yml
index 2f224e3f8d04ae..0392ebdfbc324d 100644
--- a/.github/workflows/mend-slack.yml
+++ b/.github/workflows/mend-slack.yml
@@ -14,7 +14,7 @@ jobs:
steps:
- name: Post to Slack
id: slack
- uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
+ uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
with:
channel-id: 'C05NLTMGCJC'
# For posting a simple plain text message
diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml
index 37c22d1545e5c0..30c892887ac311 100644
--- a/.github/workflows/scorecard.yml
+++ b/.github/workflows/scorecard.yml
@@ -20,12 +20,13 @@ jobs:
steps:
- name: 'Checkout code'
- uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
persist-credentials: false
+ show-progress: false
- name: 'Run analysis'
- uses: ossf/scorecard-action@0864cf19026789058feabb7e87baa5f140aac736 # v2.3.1
+ uses: ossf/scorecard-action@dc50aa9510b46c811795eb24b2f1ba02a914e534 # v2.3.3
with:
results_file: results.sarif
results_format: sarif
@@ -42,7 +43,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: 'Upload artifact'
- uses: actions/upload-artifact@5d5d22a31266ced268874388b861e4b58bb5c2f3 # v4.3.1
+ uses: actions/upload-artifact@65462800fd760344b1a7b4382951275a0abb4808 # v4.3.3
with:
name: SARIF file
path: results.sarif
@@ -50,6 +51,6 @@ jobs:
# Upload the results to GitHub's code scanning dashboard.
- name: 'Upload to code-scanning'
- uses: github/codeql-action/upload-sarif@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
+ uses: github/codeql-action/upload-sarif@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6
with:
sarif_file: results.sarif
diff --git a/.github/workflows/trivy.yml b/.github/workflows/trivy.yml
index 814dad582d5ca3..90336eab0b437a 100644
--- a/.github/workflows/trivy.yml
+++ b/.github/workflows/trivy.yml
@@ -21,7 +21,7 @@ jobs:
- full
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
with:
show-progress: false
@@ -31,7 +31,7 @@ jobs:
format: 'sarif'
output: 'trivy-results.sarif'
- - uses: github/codeql-action/upload-sarif@8a470fddafa5cbb6266ee11b37ef4d8aae19c571 # v3.24.6
+ - uses: github/codeql-action/upload-sarif@9fdb3e49720b44c48891d036bb502feb25684276 # v3.25.6
with:
sarif_file: trivy-results.sarif
category: 'docker-image-${{ matrix.tag }}'
diff --git a/.github/workflows/update-data.yml b/.github/workflows/update-data.yml
index 2c962655f340d8..e0b7062f098b24 100644
--- a/.github/workflows/update-data.yml
+++ b/.github/workflows/update-data.yml
@@ -17,11 +17,14 @@ jobs:
contents: write
pull-requests: write
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- - name: Enable corepack
- shell: bash
- run: corepack enable
+ - name: Setup pnpm
+ uses: pnpm/action-setup@a3252b78c470c02df07e9d59298aecedc3ccdd6d # v3.0.0
+ with:
+ standalone: true
- name: Set up Node.js ${{ env.NODE_VERSION }}
uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4.0.2
@@ -39,7 +42,7 @@ jobs:
run: pnpm prettier-fix
- name: Create pull request
- uses: peter-evans/create-pull-request@a4f52f8033a6168103c2538976c07b467e8163bc # v6.0.1
+ uses: peter-evans/create-pull-request@6d6857d36972b65feb161a90e484f2984215f83e # v6.0.5
with:
author: 'Renovate Bot '
branch: 'chore/update-static-data'
diff --git a/.github/workflows/ws_scan.yaml b/.github/workflows/ws_scan.yaml
index 8f6f1052db257e..ca0cc9d2148c7e 100644
--- a/.github/workflows/ws_scan.yaml
+++ b/.github/workflows/ws_scan.yaml
@@ -11,7 +11,9 @@ jobs:
WS_SCAN:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4.1.1
+ - uses: actions/checkout@a5ac7e51b41094c92402da3b24376905380afc29 # v4.1.6
+ with:
+ show-progress: false
- name: Download UA
run: curl -LJO https://github.com/whitesource/unified-agent-distribution/releases/latest/download/wss-unified-agent.jar
diff --git a/.ls-lint.yml b/.ls-lint.yml
index bfa1b8661035a2..f6771f74f0e55f 100644
--- a/.ls-lint.yml
+++ b/.ls-lint.yml
@@ -12,6 +12,7 @@ ignore:
- .git
- .github/ISSUE_TEMPLATE
- .github/pull_request_template.md
+ - CODE_OF_CONDUCT.md
- dist
- jest.config.ts
- node_modules
diff --git a/.npmrc b/.npmrc
index 58d12ff40c5146..4cda5a4d01298e 100644
--- a/.npmrc
+++ b/.npmrc
@@ -5,3 +5,4 @@ provenance = true
# https://pnpm.io/cli/run
shell-emulator = true
enable-pre-post-scripts = true
+strict-peer-dependencies = true
diff --git a/.nvmrc b/.nvmrc
index 3c5535cf60a0e8..87834047a6fa65 100644
--- a/.nvmrc
+++ b/.nvmrc
@@ -1 +1 @@
-18.19.1
+20.12.2
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 0e883005baee3b..998732a9141ace 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -31,7 +31,7 @@
"request": "launch",
"name": "Jest Current File",
"runtimeExecutable": "pnpm",
- "program": "jest:vscode",
+ "program": "jest",
"args": [
"--runInBand",
"--collectCoverage=false",
@@ -52,7 +52,7 @@
"request": "launch",
"name": "Jest All",
"runtimeExecutable": "pnpm",
- "program": "jest:vscode",
+ "program": "jest",
"args": [
"--runInBand",
"--collectCoverage=false",
@@ -71,7 +71,7 @@
"request": "launch",
"name": "Jest Current Folder",
"runtimeExecutable": "pnpm",
- "program": "jest:vscode",
+ "program": "jest",
"args": [
"--runInBand",
"--collectCoverage=false",
@@ -84,14 +84,29 @@
},
{
"type": "node",
- "name": "vscode-jest-tests",
+ "name": "vscode-jest-tests.v2",
"request": "launch",
"console": "integratedTerminal",
- "internalConsoleOptions": "neverOpen",
+ "internalConsoleOptions": "openOnSessionStart",
"cwd": "${workspaceFolder}",
- "runtimeExecutable": "pnpm",
- "program": "jest:vscode",
- "args": ["--runInBand", "--watchAll=false", "--testTimeout=100000000"]
+ "runtimeExecutable": "node",
+ "runtimeArgs": ["--experimental-vm-modules"],
+ "program": "node_modules/jest/bin/jest.js",
+ "args": [
+ "--runInBand",
+ "--watchAll=false",
+ "--testTimeout=100000000",
+ "--coverage=false",
+ "--runTestsByPath",
+ "${jest.testFile}",
+ "--testNamePattern",
+ "${jest.testNamePattern}"
+ ],
+ "env": {
+ "NODE_ENV": "test",
+ "LOG_LEVEL": "trace",
+ "GIT_ALLOW_PROTOCOL": "file"
+ }
}
]
}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index 9e9e9e57616939..9a5cba35c8c111 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -16,8 +16,13 @@
".releaserc": "json"
},
"omnisharp.autoStart": false,
- "jest.autoRun": "off",
- "jest.jestCommandLine": "pnpm jest",
+ "jest.runMode": "on-demand",
+ "jest.jestCommandLine": "node --experimental-vm-modules node_modules/jest/bin/jest.js",
+ "jest.nodeEnv": {
+ "NODE_ENV": "test",
+ "LOG_LEVEL": "trace",
+ "GIT_ALLOW_PROTOCOL": "file"
+ },
"npm.packageManager": "pnpm",
"editor.formatOnSave": true,
"editor.codeActionsOnSave": {
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 00000000000000..4be7f647231a35
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,105 @@
+# Code of Conduct
+
+To help us deliver great features and support the Renovate Open Source project, we ask that you:
+
+- are polite
+- pay attention to details
+- keep in mind that most maintainers are volunteers
+- are respectful of the time and effort of the maintainers
+
+## Our priorities
+
+We want to keep this project sustainable.
+This means we support our maintainers and contributors, who spend their free time to help others.
+
+Maintainers getting stressed is a big threat to Open Source projects, like ours.
+Stressed maintainers quit, or reduce their time spent on the project.
+Often a few users behave badly, while most users are nice.
+
+We want to avoid maintainers getting stressed out by bad behavior from contributors.
+That's why we have these rules.
+
+## Politeness
+
+Sadly, it's common in Open Source projects for a few users to behave in an aggressive and rude way.
+A user might say something like: "You should have fixed this bug already!", or "Why am I still waiting for this feature?".
+We do not allow this kind of behavior.
+
+We expect basic politeness; do not act rudely.
+For example: it is okay if you ask a question and do not thank us afterwards.
+But avoid writing mean comments like: "Pity the documentation didn’t say that." or "Thanks for nothing."
+
+## Respect the time of those who help you
+
+Respect goes both ways, but time is limited.
+When you ask for help, please remember that the maintainer's time is valuable.
+We get many questions each week and do our best to answer each one.
+To get the help you need, please be prepared to give detailed logs or descriptions of your issues.
+If you do not want to spend the effort giving us enough information, it's likely you will not get the help you need.
+
+Remember, most of the support provided by our team, including the Mend.io staff, is _unpaid_.
+
+## Blocking and unblocking
+
+We quickly deal with rudeness in the community with:
+
+- automated comments
+- temporary blocks
+- permanent bans
+
+If you keep breaking the rules or challenge our guidelines openly, you will be blocked.
+For example: if you keep spamming the maintainers with `@mentions` or challenge our rules openly, you will be blocked.
+
+We generally do not argue about these decisions, but we are willing to reverse a block if you show that you understand and respect the rules, or if there was a misunderstanding.
+To reverse a block, or to clear up a misunderstanding, write a _short_ email to Renovate's lead maintainer Rhys Arkins.
+
+Simply put: we block and unblock swiftly; what matters is how you follow the rules going forward.
+
+## How we prioritize work
+
+Renovate's core contributors and maintainers focus on work that:
+
+- Helps a lot of users, or
+- Fixes regressions (errors introduced by recent changes), or
+- Is required by a customer of Mend.io, or
+- Is sponsored by third parties after independent validation, or
+- We personally need or want to implement
+
+You may be disappointed when we focus on other work ahead of your feature or bug, but you should understand and accept this.
+
+## Maintaining Issue and Code quality
+
+We use GitHub Discussions to start and sort issues.
+Only maintainers are allowed to create new issues.
+If we confirm a bug or agree with a feature idea, and if it's well-documented, we will turn it into an official issue.
+This way most issues are ready to work on, either by us or the community.
+
+We may reject ideas that are too specialized, or that would make the project too hard to maintain.
+
+We have strict coding standards and reviews to keep our code in good shape.
+A feature or fix must of course work, but it must also be well designed to stay maintainable.
+We may ask you to improve your code several times in a row, which can be difficult for you.
+We only do this to keep the project sustainable.
+
+## If you have urgent work
+
+People working for big companies might push too hard in Open Source projects.
+It’s often hard for them to understand that our maintainers cannot spend much time to solve their issues quickly.
+Frequent requests for updates like "@rarkins how can we move this forward?" are _not_ helpful.
+
+Please remember, unless you are a Mend.io customer, this project does not owe you the level of response or support you might expect.
+Mend.io customers should use their designated support channels for urgent needs.
+
+## Getting more help
+
+If you need more assistance than what this project offers, you have two options:
+
+1. Become a Mend.io customer, such as by buying Renovate Enterprise, or
+1. Hire an experienced Renovate contributor privately for consulting. Mend.io staff do _not_ offer this service, but one of our volunteer maintainers, [`@secustor`](https://github.com/secustor), does
+
+## Feedback
+
+We welcome respectful discussions about these rules and accept suggestions that improve this text.
+We avoid debates on social media or going off-topic in GitHub Discussions.
+
+Because we enforce all these rules, we can deliver new features and give excellent support to the community.
diff --git a/data/kubernetes-api.json5 b/data/kubernetes-api.json5
index 19aa2e192e6307..81a6df5a0b27d3 100644
--- a/data/kubernetes-api.json5
+++ b/data/kubernetes-api.json5
@@ -130,17 +130,25 @@
HelmChart: [
'source.toolkit.fluxcd.io/v1alpha1',
'source.toolkit.fluxcd.io/v1beta1',
+ 'source.toolkit.fluxcd.io/v1',
],
HelmRelease: [
'helm.toolkit.fluxcd.io/v2beta1',
'helm.toolkit.fluxcd.io/v2beta2',
+ 'helm.toolkit.fluxcd.io/v2',
],
HelmRepository: [
'source.toolkit.fluxcd.io/v1alpha1',
'source.toolkit.fluxcd.io/v1beta1',
'source.toolkit.fluxcd.io/v1beta2',
+ 'source.toolkit.fluxcd.io/v1',
],
+ ImagePolicy: ['image.toolkit.fluxcd.io/v1beta2'],
ImageRepository: ['image.toolkit.fluxcd.io/v1beta2'],
+ ImageUpdateAutomation: [
+ 'image.toolkit.fluxcd.io/v1beta1',
+ 'image.toolkit.fluxcd.io/v1beta2'
+ ],
OCIRepository: ['source.toolkit.fluxcd.io/v1beta2'],
Provider: [
'notification.toolkit.fluxcd.io/v1beta2',
diff --git a/data/node-js-schedule.json b/data/node-js-schedule.json
index c1cf00ead26428..e2e1b481c8891d 100644
--- a/data/node-js-schedule.json
+++ b/data/node-js-schedule.json
@@ -120,7 +120,7 @@
"end": "2024-06-01"
},
"v22": {
- "start": "2024-04-23",
+ "start": "2024-04-24",
"lts": "2024-10-29",
"maintenance": "2025-10-21",
"end": "2027-04-30",
diff --git a/data/ubuntu-distro-info.json b/data/ubuntu-distro-info.json
index c718afdffe4b71..4337250c53a3fa 100644
--- a/data/ubuntu-distro-info.json
+++ b/data/ubuntu-distro-info.json
@@ -295,5 +295,12 @@
"eol": "2029-05-31",
"eol_server": "2029-05-31",
"eol_esm": "2034-04-25"
+ },
+ "v24.10": {
+ "codename": "Oracular Oriole",
+ "series": "oracular",
+ "created": "2024-04-25",
+ "release": "2024-10-10",
+ "eol": "2025-07-10"
}
}
diff --git a/docs/development/adding-a-package-manager.md b/docs/development/adding-a-package-manager.md
index 715bb8b517e810..5b280da7303b8c 100644
--- a/docs/development/adding-a-package-manager.md
+++ b/docs/development/adding-a-package-manager.md
@@ -71,6 +71,8 @@ As another example, in order for Gradle to extract dependencies Renovate must fi
The `extractAllPackageFiles` function takes an array of filenames as input.
It returns an array of filenames and dependencies.
+If you implement `extractAllPackageFiles`, the manager must also export either `updateDependency` or `extractPackageFile`.
+
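As an illustration, here is a minimal sketch of a manager module that implements `extractAllPackageFiles` while still exporting `extractPackageFile`; the type shapes below are simplified stand-ins for illustration, not Renovate's real manager interfaces:

```ts
// Simplified stand-ins for illustration only; Renovate's real types differ.
interface PackageDependency {
  depName: string;
  currentValue: string;
  datasource: string;
}

interface PackageFile {
  packageFile: string;
  deps: PackageDependency[];
}

// Look at all matched files together and return the dependencies per file.
export async function extractAllPackageFiles(
  _config: Record<string, unknown>,
  packageFiles: string[],
): Promise<PackageFile[] | null> {
  const results: PackageFile[] = [];
  for (const packageFile of packageFiles) {
    // ...read and parse the file here, collecting its dependencies...
    results.push({ packageFile, deps: [] });
  }
  return results.length ? results : null;
}

// Also exported, so single-file extraction remains available.
export function extractPackageFile(
  _content: string,
): { deps: PackageDependency[] } | null {
  // ...parse a single file's content and return its dependencies...
  return null;
}
```
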
### `getRangeStrategy(config)` (optional)
Write this optional function if you want the manager to support "auto" range strategies.
diff --git a/docs/development/best-practices.md b/docs/development/best-practices.md
index d2877339dbb299..efd62bf0018735 100644
--- a/docs/development/best-practices.md
+++ b/docs/development/best-practices.md
@@ -154,16 +154,14 @@ Avoid refactoring the code and tests at the same time, this can mask regression
## Logging
-For `WARN`, `ERROR` and `FATAL log messages use logger metadata.
+For `WARN`, `ERROR` and `FATAL` log messages use logger metadata.
Also use logger metadata if the result is a complex metadata object needing a multiple-line pretty stringification.
-For `INFO` log messages inline the metadata into the log message.
-Also, inline the metadata if the metadata object is complex.
+For `INFO` and `DEBUG` log messages inline the metadata into the log message where feasible.
+It is OK to not inline metadata if it's complex, but in that case first think whether that much information really needs to be logged.
`WARN`, `ERROR` and `FATAL` messages are often used in metrics or error catching services.
-These log messages should have a consistent `msg` component, so they can be automatically grouped or associated.
-Metadata that is separate from its message is hard for humans to read.
-Try to combine the metadata into the message, unless it is too complex to do so.
+These log messages should have a static `msg` component, so they can be automatically grouped or associated.
Good:
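
For instance, a minimal sketch of the two patterns, assuming Renovate's bunyan-style `logger` where a metadata object can be passed as the first argument (the import path here is illustrative):

```ts
import { logger } from '../logger'; // illustrative import path

export function parseConfigFile(packageFile: string, content: string): void {
  try {
    JSON.parse(content);
    // INFO/DEBUG: inline simple values into the message itself.
    logger.debug(`Parsed ${packageFile}`);
  } catch (err) {
    // WARN/ERROR/FATAL: keep `msg` static and move details into metadata,
    // so error-tracking services can group occurrences automatically.
    logger.warn({ err, packageFile }, 'Failed to parse package file');
  }
}
```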
diff --git a/docs/development/local-development.md b/docs/development/local-development.md
index 0fe428f3d3e4e3..9bf2b48981f3ee 100644
--- a/docs/development/local-development.md
+++ b/docs/development/local-development.md
@@ -15,14 +15,14 @@ You need the following dependencies for local development:
- pnpm `^8.6.11` (use corepack)
- C++ compiler
-We support Node.js versions according to the [Node.js release schedule](https://github.com/nodejs/Release#release-schedule).
+We recommend you use the version of Node.js defined in the repository's `.nvmrc`.
#### Linux
You can use the following commands on Ubuntu.
```sh
-curl -sL https://deb.nodesource.com/setup_18.x | sudo -E bash -
+curl -sL https://deb.nodesource.com/setup_20.x | sudo -E bash -
sudo apt-get update
sudo apt-get install -y git build-essential nodejs
corepack enable
diff --git a/docs/development/minimal-reproductions.md b/docs/development/minimal-reproductions.md
index ee17d2316650d4..e43e238cf9a6ed 100644
--- a/docs/development/minimal-reproductions.md
+++ b/docs/development/minimal-reproductions.md
@@ -2,7 +2,11 @@
We may ask you to create a "minimal reproduction" repository to help us fix bugs or work on a feature.
-This document explains why we need a minimal reproduction, why we will not use your production repository to debug, and how to create a good minimal reproduction.
+This document explains:
+
+- why we need a minimal reproduction
+- why we will not use your production repository to debug
+- how to create a good minimal reproduction
## Help yourself by creating a minimal reproduction
@@ -16,37 +20,39 @@ It's fastest if you - as the bug reporter or feature requester - create the repr
## How the Renovate developers use your minimal reproduction
-The first benefit of a public reproduction is to prove that the problem is not caused by your environment or by a setting you left out of your description, thinking it was not relevant.
-If there were any doubts about whether you had found a genuine problem before, they are usually removed once a reproduction is made.
+A reproduction confirms the problem is with Renovate, and _not_ with your environment, or your configuration settings.
+A reproduction also helps us see where the bug or missing feature is, and to verify that the new code meets the requirements.
-Next, when a reproduction has minimal config, it can often let us narrow down or even identify the root cause, suggest workarounds, etc.
-This means we can often help you from code inspection alone.
+When a reproduction has minimal config, we can often narrow down or identify the root cause.
+This helps us suggest workarounds.
+Often we can help you from code inspection alone.
-Finally, by making the code/dependencies minimal, it usually makes the problem feasible to step through using a debugging if code inspection was not sufficient.
-Production repositories or non-minimal reproductions are often very difficult to debug because break points get triggered dozens or hundreds or times.
+Finally, with minimal code and dependencies, we can step through with a debugger.
+This helps when looking at the code is not enough to find the problem.
+Production repositories, or non-minimal reproductions, are hard to debug as break points get triggered often.
## What is a minimal reproduction?
-The basic idea of a minimal reproduction is to use the _least_ amount of both code and config to trigger missing or wrong behavior.
-A minimal reproduction helps the developers see where the bug or missing feature is, and allows us to verify that the new code meets the requirements.
+A minimal reproduction should have the _least_ amount of both code and config to trigger missing or wrong behavior.
## Where to host the minimal reproduction
-If you can, use GitHub to host your reproduction.
-If the reproduction needs to be on GitLab or Bitbucket, that is also okay.
+Please put your reproduction in a public repository on GitHub.com, if possible.
+
+You may put the reproduction on another platform like GitLab or Bitbucket, _if_ the reproduction needs features/behavior of that platform.
## Creating a minimal reproduction
There are two ways to create a minimal reproduction:
-- Start with an empty repository and copy files from your production repository
-- Start with a fork of your production repository and remove files and config
+- Start with an empty repository and _copy_ files from your production repository
+- Start with a fork of your production repository and _remove_ files and config
General steps:
-1. Create your minimal reproduction repository on GitHub, only use GitLab or Bitbucket if really needed
+1. Put your minimal reproduction repository on GitHub; only use GitLab or Bitbucket if needed
1. Use the fewest number of repository files and dependencies
-1. Reduce the Renovate config to a minimum
+1. Reduce your Renovate config to a minimum
1. Remove private or secret information
1. Create a `readme.md` file that explains the current behavior and expected behavior
1. Set the repository visibility to `public`
@@ -66,31 +72,27 @@ A production repository usually has:
- many custom rules in the Renovate configuration file
- many files that are not relevant
-Having lots of "moving parts" makes debugging tricky, because debug break points can be triggered hundreds of times.
+Having lots of "moving parts" makes it hard to debug, as debug break points can trigger hundreds of times.
-When you have lots of custom config for Renovate, it's hard to find the root cause of the behavior.
+When you have lots of custom config for Renovate, it's hard for us to find the root cause of the behavior.
Bugs are often caused by multiple features interacting.
-Reducing the config to a minimum helps us find out exactly which config elements are required to trigger the bug.
+Reducing the config to a minimum helps us find exactly which config elements are needed to trigger the bug.
### "It's too much work to create a minimal reproduction"
-We would love to get down to zero reported bugs or feature requests remaining, but we have a lot to do and must set our priorities.
-This means we prefer working on issues with a minimal reproduction, as they allow us to spend our time efficiently.
-
-If you do not create a minimal reproduction, we will not prioritize working on your issue.
-
-Issues without a reproduction will probably stay open until you, or somebody else, creates a minimal reproduction.
-After a while, issues without a reproduction may be closed unfixed.
+If you do not create a minimal reproduction, the Renovate maintainers will not prioritize working on your issue.
+Discussions without a reproduction will probably go stale unless you, or somebody else, creates a minimal reproduction.
### "I already described what you need in the issue"
Thank you for describing your issue in detail.
-But we still need a minimal reproduction in a repository, and we would like you to be the one to make sure it matches both your description and expected behavior.
+But we still need a minimal reproduction in a repository.
+We'd like you to make sure it matches both your description and the expected behavior.
### Forcing Renovate to create a lot of pending updates
Put an old version of a frequently updated dependency in your repository.
-Set a high `minimumReleaseAge` for that dependency, for example:
+Then set a high `minimumReleaseAge` for that dependency, for example:
```json
{
diff --git a/docs/usage/.pages b/docs/usage/.pages
index b2e6c95ed28b32..809fc075758dc7 100644
--- a/docs/usage/.pages
+++ b/docs/usage/.pages
@@ -4,6 +4,7 @@ nav:
- ... | getting-started
- Troubleshooting: 'troubleshooting.md'
- Configuration:
+ - 'Overview': 'config-overview.md'
- 'Repository': 'configuration-options.md'
- 'Self-hosted': 'self-hosted-configuration.md'
- 'Presets': 'config-presets.md'
@@ -31,6 +32,7 @@ nav:
- 'Noise Reduction': 'noise-reduction.md'
- 'Upgrade best practices': 'upgrade-best-practices.md'
- Included Presets:
+ - 'Custom Manager Presets': 'presets-customManagers.md'
- 'Default Presets': 'presets-default.md'
- 'Docker Presets': 'presets-docker.md'
- 'Full Config Presets': 'presets-config.md'
@@ -40,7 +42,6 @@ nav:
- 'npm Presets': 'presets-npm.md'
- 'Package Presets': 'presets-packages.md'
- 'Preview Presets': 'presets-preview.md'
- - 'Regex Manager Presets': 'presets-regexManagers.md'
- 'Replacement Presets': 'presets-replacements.md'
- 'Schedule Presets': 'presets-schedule.md'
- 'Security Presets': 'presets-security.md'
@@ -49,11 +50,13 @@ nav:
- ... | user-stories
- 'Security and Permissions': 'security-and-permissions.md'
- 'Merge Confidence': 'merge-confidence.md'
+ - 'Language constraints and upgrading': 'language-constraints-and-upgrading.md'
- 'Templates': 'templates.md'
- 'String Pattern Matching': 'string-pattern-matching.md'
- 'Frequently Asked Questions': 'faq.md'
- 'Known Limitations': 'known-limitations.md'
- 'Release notes for major versions': 'release-notes-for-major-versions.md'
- Bot comparison: 'bot-comparison.md'
+ - 'Logo and brand guidelines': 'logo-brand-guidelines.md'
- About Us: 'about-us.md'
- Contributing to Renovate: 'contributing-to-renovate.md'
diff --git a/docs/usage/about-us.md b/docs/usage/about-us.md
index 252c94308a58f9..7f8838c33521c3 100644
--- a/docs/usage/about-us.md
+++ b/docs/usage/about-us.md
@@ -42,6 +42,9 @@ Some features made a lot of people happy, and efficient!
- [@cgrindel](https://github.com/cgrindel) created the `bazel-module` manager
- [@RahulGautamSingh](https://github.com/RahulGautamSingh) refactored a lot of code and worked on performance improvements like reduced cloning during updates and onboarding
- [@Gabriel-Ladzaretti](https://github.com/Gabriel-Ladzaretti) S3 repo cache, child process refactoring, others
+- [@not7cd](https://github.com/not7cd) improved the `pip-compile` manager
+- [@squidfunk](https://github.com/squidfunk) for creating and maintaining the [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) framework, which we use to build our docs, and for helping us with problems with the framework or our docs
+- [The MkDocs project](https://www.mkdocs.org/) for making the static site generator that we use to build our docs
## Renovate development
@@ -52,3 +55,5 @@ This is where we do most of the development.
The Renovate docs are built from Markdown files in our [`renovatebot/renovate` repository](https://github.com/renovatebot/renovate).
Most of the source files can be found in the [`docs/usage/` directory](https://github.com/renovatebot/renovate/tree/main/docs/usage).
+
+We use [MkDocs](https://www.mkdocs.org/) and [Material for MkDocs](https://squidfunk.github.io/mkdocs-material/) to build our docs.
diff --git a/docs/usage/assets/images/matrix.png b/docs/usage/assets/images/matrix.png
new file mode 100644
index 00000000000000..c305c6626d82d1
Binary files /dev/null and b/docs/usage/assets/images/matrix.png differ
diff --git a/docs/usage/assets/images/swissquote_stats_collection.png b/docs/usage/assets/images/swissquote_stats_collection.png
new file mode 100644
index 00000000000000..666ea1770487f9
Binary files /dev/null and b/docs/usage/assets/images/swissquote_stats_collection.png differ
diff --git a/docs/usage/bot-comparison.md b/docs/usage/bot-comparison.md
index 0e68188184f49b..d9ddc01fe66859 100644
--- a/docs/usage/bot-comparison.md
+++ b/docs/usage/bot-comparison.md
@@ -18,7 +18,7 @@ If you see anything wrong on this page, please let us know by creating a [Discus
| Compatibility score badges | Four badges showing: Age, Adoption, Passing, Confidence | One badge with overall compatibility score |
| Built-in to GitHub | No, requires app or self-hosting | Yes |
| Scheduling | By default, Renovate runs as often as it is allowed to, read [Renovate scheduling](./key-concepts/scheduling.md) to learn more | Yes: `daily`, `weekly`, `monthly` |
-| License | [GNU Affero General Public License](https://github.com/renovatebot/renovate/blob/main/license) | [The Prosperity Public License 2.0.0](https://github.com/dependabot/dependabot-core/blob/main/LICENSE) |
+| License | [GNU Affero General Public License](https://github.com/renovatebot/renovate/blob/main/license) | [MIT License](https://github.com/dependabot/dependabot-core/blob/main/LICENSE) |
| Programming language of project | TypeScript | Ruby |
| Project pulse | [`renovatebot/renovate` monthly pulse](https://github.com/renovatebot/renovate/pulse/monthly) | [`dependabot-core` monthly pulse](https://github.com/dependabot/dependabot-core/pulse/monthly) |
| Contributor graph | [`renovatebot/renovate` contributor graph](https://github.com/renovatebot/renovate/graphs/contributors) | [`dependabot-core` contributor graph](https://github.com/dependabot/dependabot-core/graphs/contributors) |
@@ -114,7 +114,7 @@ Dependabot has four options that apply at a language level:
Renovate uses the [GNU Affero General Public License](https://github.com/renovatebot/renovate/blob/main/license).
-Dependabot uses [The Prosperity Public License 2.0.0](https://github.com/dependabot/dependabot-core/blob/main/LICENSE).
+Dependabot uses the [MIT License](https://github.com/dependabot/dependabot-core/blob/main/LICENSE).
Neither license is relevant to the end user though if consuming through an App/SaaS.
@@ -148,6 +148,14 @@ Available [Renovate distributions](./getting-started/running.md#available-distri
You can self-host Dependabot on other platforms than GitHub but none are officially supported.
+#### As a GitHub Actions workflow on GitHub
+
+You can run Dependabot as a GitHub Actions workflow on hosted and self-hosted runners.
+Learn more by reading the:
+
+- [GitHub Blog, Dependabot on GitHub Actions and self-hosted runners is now generally available](https://github.blog/2024-05-02-dependabot-on-github-actions-and-self-hosted-runners-is-now-generally-available/)
+- [GitHub Docs, About Dependabot on GitHub Actions runners](https://docs.github.com/en/code-security/dependabot/working-with-dependabot/about-dependabot-on-github-actions-runners)
+
#### `dependabot-core`
If you want to customize Dependabot, or self-host on another platform, you can use [`dependabot-core`](https://github.com/dependabot/dependabot-core).
diff --git a/docs/usage/config-overview.md b/docs/usage/config-overview.md
new file mode 100644
index 00000000000000..547baa7c485c42
--- /dev/null
+++ b/docs/usage/config-overview.md
@@ -0,0 +1,314 @@
+# Renovate configuration overview
+
+When Renovate runs on a repository, the final config used is derived from the:
+
+- Default config
+- Global config
+- Inherited config
+- Repository config
+- Resolved presets referenced in config
+
+## Types of config
+
+### Default config
+
+Every Renovate config option has a default value/setting.
+That default value/setting may even be `null`.
+You can find the default values on the Renovate docs website.
+
+For example:
+
+- The default value for `onboarding` is `true`
+- The option `labels` lacks a default value, which means that no labels will be added to Renovate's PRs
+
+The default config is loaded first, and may be superseded/overridden by the configuration types listed below.
+
+### Global config
+
+Global config means: the config defined by the person or team responsible for running the bot.
+This is also referred to as "bot config", because it's the config passed to the bot by the person running it.
+Global config can contain config which is "global only" as well as any configuration options which are valid in Inherited config or Repository config.
+
+If you are an end user of Renovate, for example if you're using the Mend Renovate App, then you don't need to care as much about any global config.
+As an end user you cannot change some settings, because those settings are global-only.
+If you are an end user, you can skip the rest of this "Global config" section and proceed to "Inherited config".
+
+Global config can be read from a file, environment variables, or CLI parameters.
+You must configure at least one of these for Renovate to have the information it needs to run.
+For example: you may need to give Renovate the correct credentials.
+
+#### File config
+
+Renovate first tries to read the global config from a file.
+By default Renovate checks for a `config.js` file in the current working directory.
+But you can override this by defining `RENOVATE_CONFIG_FILE` in env, for example: `RENOVATE_CONFIG_FILE=/tmp/my-renovate-config.js`.
+
+By default Renovate allows the config file to be _missing_ and does not error if it cannot find it.
+But if you have configured `RENOVATE_CONFIG_FILE` and the path you specified is not found then Renovate will error and exit, because it assumes you have a configuration problem.
+If the file is found but cannot be parsed then Renovate will also error and exit.
+
+Global config files can be `.js` or `.json` files.
+You may use synchronous or asynchronous methods inside a `.js` file, even fetching config information from remote hosts.
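+
+For example, a global config file in JSON format might look like this minimal sketch (the endpoint, token and repository name are placeholders, not real values):
+
+```json
+{
+  "platform": "gitlab",
+  "endpoint": "https://gitlab.example.com/api/v4/",
+  "token": "glpat-redacted",
+  "repositories": ["some-org/some-repo"]
+}
+```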
+
+#### Environment config
+
+Global config can be defined using environment variables.
+The config options that you can use in environment variables all have the prefix `RENOVATE_`.
+For example, `RENOVATE_PLATFORM=gitlab` is the same as setting `"platform": "gitlab"` in File config.
+
+Usually there's a clear mapping from configuration option name to the corresponding Environment config name.
+But we recommend you still check the documentation for the field `env` for each option to make sure.
+If the configuration option lacks an `env` field, then the config option also lacks an Environment config variable name.
+
+A special case for Environment config is the `RENOVATE_CONFIG` "meta" config option.
+The `RENOVATE_CONFIG` option accepts a stringified full config, for example: `RENOVATE_CONFIG={"platform":"gitlab","onboarding":false}`.
+Any additional Environment config variables take precedence over values in `RENOVATE_CONFIG`.
+
+##### Environment variable examples
+
+
+!!! warning
+ Make sure to escape any punctuation.
+ Be extra careful if you're passing stringified values.
+
+Boolean:
+
+- `RENOVATE_ONBOARDING=true`
+
+String:
+
+- `RENOVATE_BASE_DIR=/tmp/something`
+- `RENOVATE_BASE_DIR="/tmp/some thing"`
+
+Number:
+
+- `RENOVATE_PR_HOURLY_LIMIT=1`
+
+List with numbers or strings:
+
+- `RENOVATE_LABELS="abc,def,label with space"`
+
+Objects, or lists with objects:
+
+- RENOVATE_CONFIG="{\"platform\":\"gitlab\",\"onboarding\":false}"
+- `RENOVATE_PACKAGE_RULES="[{matchHost:\"gitlab\",token:\"$SOME_TOKEN\"}]"`
+
+
+!!! tip
+ Use "stringify" ([Example online service](https://jsonformatter.org/json-stringify-online)) for strings and objects.
+
+##### Experimental variables
+
+Renovate has "experimental" environment variables, which start with `RENOVATE_X_`.
+These variables are experimental, can be changed at any time, and are not parsed as part of regular configuration.
+Read the [Self-hosted experimental environment variables](./self-hosted-experimental.md) docs to learn more.
+
+##### Logging variables
+
+Finally, there are some special environment variables that are loaded _before_ configuration parsing because they are used during logging initialization:
+
+- `LOG_CONTEXT`: a unique identifier used in each log message to track context
+- `LOG_FORMAT`: defaults to a "pretty" human-readable output, but can be changed to "json"
+- `LOG_LEVEL`: most commonly used to change from the default `info` to `debug` logging
+
+#### CLI config
+
+The final way to configure Global config is through CLI parameters.
+For example, the CLI parameter `--platform=gitlab` is the same as setting `"platform": "gitlab"` in File config or `RENOVATE_PLATFORM=gitlab` in Environment config.
+
+CLI config is read last and takes precedence over Environment and File config.
+For example, if you configure conflicting values in Environment config, File config and CLI config, then the CLI config is merged last and "wins".
+
+It is important that you:
+
+- Always provide a value, even if the field is boolean (e.g. `--onboarding=true` and _not_ `--onboarding`), and
+- Prefer `=` notation over spaces, e.g. `--onboarding=true` instead of `--onboarding true`
+
+### Inherited config
+
+#### Use cases
+
+The primary purpose of Inherited config is to allow for default settings of an organization/group.
+Two main use cases for Inherited config are:
+
+- Controlling onboarding settings within an org (e.g. disabling onboarding, making config optional)
+- Defining default config settings for repos within an org
+
+We recommend that organizations use shared presets instead of Inherited config, if possible.
+But default settings through Inherited config are useful if:
+
+- You want to avoid setting Repository config in each repo, or
+- You onboarded many repos prior to having a shared org config, and don't want to retrospectively edit each repo's config
+
+#### How it's found
+
+If `inheritConfig` is `true` in Global config then Renovate will look for Inherited config before processing each repository.
+The repository and file name which Renovate looks for can be configured using the other `inheritConfig*` settings documented in Global config.
+Default values are `{{parentOrg}}/renovate-config` for repository name and `org-inherited-config.json` for file name.
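+
+For example, a sketch of a Global config that enables Inherited config (assuming the `inheritConfigRepoName` and `inheritConfigFileName` option names from the Global config docs; the values shown are the defaults mentioned above):
+
+```json
+{
+  "inheritConfig": true,
+  "inheritConfigRepoName": "{{parentOrg}}/renovate-config",
+  "inheritConfigFileName": "org-inherited-config.json"
+}
+```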
+
+If found, Inherited config will be merged on top of (i.e. override) Global config.
+Avoid putting any global-only setting in an Inherited config, as doing so will result in an error.
+
+Inherited config may use all Repository config settings, and any Global config options which have the "supportsInheritConfig" property in the docs.
+
+For information on how the Mend Renovate App supports Inherited config, see the dedicated "Mend Renovate App Config" section toward the end of this page.
+
+### Repository config
+
+Repository config is the config loaded from a config file in the repository.
+Alternative file names are supported, but the default is `renovate.json`.
+If Renovate finds more than one configuration file in the same repository, then Renovate will use the _first_ configuration file it finds and ignores the other(s).
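+
+For example, a minimal `renovate.json` Repository config might look like this sketch:
+
+```json
+{
+  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+  "extends": ["config:recommended"]
+}
+```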
+
+### Config precedence
+
+Once Repository config is loaded, it is merged over the top of the previously loaded Global and Inherited config, meaning it takes precedence over them.
+Presets referenced with an "extends" config are resolved first and take lower precedence than regular/raw config in the same file or config object.
+
+## Onboarding
+
+When Renovate processes a repository, one of the first decisions it makes is "Does this repository need to be onboarded?".
+By default, Renovate will create an "Onboarding PR" with a default config if a repository does not have a Repository config file committed to the default branch.
+
+### Onboarding Config
+
+When Renovate creates an Onboarding PR it will propose a Repository config file to be merged.
+By default, it is essentially an empty config with only the Renovate JSON schema referenced, but you can change this behavior if desired.
+
+If you configure `onboardingConfig` in either Global config or Inherited config then Renovate will use that config directly instead of the default.
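+
+For example, a sketch of a Global config with a custom Onboarding config (the `my-org/renovate-config` preset reference is a placeholder):
+
+```json
+{
+  "onboardingConfig": {
+    "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+    "extends": ["github>my-org/renovate-config"]
+  }
+}
+```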
+
+Alternatively, if you follow Renovate's naming convention for shared presets, then Renovate can automatically detect those instead.
+If the repository `{{parentOrg}}/renovate-config` has a `default.json` file then this will be treated as the organization's default preset and included in the Onboarding config.
+Additionally, for platforms which support nested Organization/Group hierarchies, Renovate will "hunt" up such hierarchies for a `renovate-config` repository with default config, and stop when it finds the first.
+
+
+!!! note
+ Renovate will also check for a `renovate.json` file if it cannot find a `default.json` file in a preset, however this option is deprecated and not recommended.
+
+If a default config is not found in a `renovate-config` repository within the Organization, Renovate will also check for the presence of a `renovate-config.json` file within a `.{{platform}}` repository parallel to the current repository.
+For example if the repository being onboarded is `abc/def` on a GitHub platform then Renovate will look for the existence of an `abc/.github` repository containing a `renovate-config.json` file.
+
+### Changing default behavior
+
+Default onboarding behavior for an Organization can be changed either in Global or Inherited config.
+
+For example, if you set `onboarding=false` then Renovate will not onboard repositories, and skip any repositories without a Repository config.
+In other words, users need to manually push a valid Repository config file to activate Renovate on the repository.
+
+If you set `onboarding=false` plus `requireConfig=optional` then Renovate will skip onboarding and proceed to run on a repository, even if it does not find any Repository config.
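+
+For example, a sketch of that combination in Global config:
+
+```json
+{
+  "onboarding": false,
+  "requireConfig": "optional"
+}
+```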
+
+## Shared Presets
+
+### Overview
+
+The concept of shared configuration is covered in detail on the [Presets](./key-concepts/presets.md) page, so please read that first.
+
+### Use of Presets in Global config
+
+Presets should be used cautiously in Global config as they often lead to misunderstandings.
+
+#### globalExtends
+
+Sometimes you may not wish to put all settings within the Global config itself, and instead commit them to a repository which is then referenced from the Global config.
+In such cases, use `globalExtends` instead of `extends` so that it is resolved immediately and used as part of Global config.
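+
+For example, a sketch of a Global config that resolves a shared preset immediately (the preset reference is a placeholder):
+
+```json
+{
+  "globalExtends": ["github>my-org/renovate-global-config"]
+}
+```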
+
+#### extends
+
+If you use `extends` within Global config then it's important to note that these are _not_ resolved/expanded during Global config processing and instead are passed through unresolved to be part of Repository config.
+Passing `extends` through to be part of Repository config has two major consequences:
+
+- It allows repository users to be able to use `ignorePresets` to ignore all or part of the `extends` presets, and
+- Presets defined within `extends` in Global config will take _higher_ precedence than "regular" Global config, because they are resolved later
+
+### Using a centralized config
+
+Using "centralized" configs through Renovate presets is important in order to be able to:
+
+- Save time by not repeating yourself in every repo with the same config, and
+- Being able to change settings across an entire Organization or groups of repositories in one place
+
+Once you've created a centralized preset config, there are multiple ways you can pass it through to repositories:
+
+- Defining it in Global config (either `globalExtends` or `extends`)
+- Using it as your Inherited config, or referencing it from Inherited config using `extends`
+- Ensuring it's referenced in Onboarding config so that it's committed as part of the Repository config
+
+The above possibilities go from least to most transparent when it comes to end users.
+
+Global config may be invisible to developers without log access, meaning they could be confused by any settings you apply - via presets or directly - within Global config.
+For example, developers may wonder why Renovate is behaving differently from its documented default behavior, and may even think it's a bug.
+
+Inherited config is visible to developers (it's within a repository they can see), although it is applied _implicitly_.
+Without log access, and without knowing to look for an Inherited config repository, they may again be a little confused about why default behavior has changed.
+
+The recommended approach for using a centralized preset is to explicitly "extend" it from every repository, which can be achieved easily if it's part of your `onboardingConfig`.
+Having your centralized preset in each Repository config `extends` array has two benefits:
+
+- You still have the ability to change shared settings in a single location
+- Any user viewing the repo can see the preset being extended and trace it back to understand which config is applied
+
+## Mend Renovate App Config
+
+The [Mend Renovate App](https://github.com/apps/renovate) is a popular way to use Renovate on GitHub.com so it's important that any of its non-default behavior is documented here.
+
+Importantly, logs for all Renovate jobs by the Mend Renovate App are available through the [Mend Developer Portal](https://developer.mend.io) so end users can view the logs to see which settings are applied.
+
+### Onboarding behavior
+
+#### Installing Renovate into all repositories leads to silent mode
+
+If an Organization installs Renovate with "All repositories" (instead of "Selected repositories"), then Renovate will default to "Silent" mode (`dryRun=lookup`).
+We chose this behavior because:
+
+- Too often an account or org administrator selects the "All repositories" option and accidentally onboards hundreds of repositories, and
+- By offering this option, it means that org administrators _can_ install Renovate into "All repositories" without worrying about the noise, and let individual repository admins decide if/when to start onboarding
+
+##### Why we call this silent mode
+
+- It's not just no PRs, it's also no Issues
+- It's a common term across other Mend capabilities, such as OSS security and SAST security, where status checks also use silent/non-silent
+
+#### Get onboarding PRs from Renovate by getting out of silent mode
+
+If Renovate is installed, _and_ you can see a job log, but Renovate is _not_ onboarding your repository: look for `dryRun` in the logs to confirm you are in Silent mode.
+To get an onboarding PR from Renovate, change to Interactive mode either at the Repository level or Organization level.
+
+#### Installing Renovate into selected repositories always leads to onboarding PRs
+
+Additionally, if an Organization is installed with "Selected repositories" then the app will change `onboardingNoDeps` to `true` so that an Onboarding PR is created even if no dependencies are detected.
+
+### Fork Processing
+
+If an Organization installs Renovate with the "All repositories" option, then `forkProcessing` will remain as the default value `false`.
+This means forked repositories are _not_ onboarded; Renovate essentially ignores them.
+To change this behavior you need to manually push a `renovate.json` to the repository with `"forkProcessing": true`.
+
+If an Organization installs Renovate with "Selected repositories" then we assume the organization wants all of the selected repositories onboarded (even forked repositories), so `forkProcessing` is set to `true`.
+
+### Default presets
+
+The Mend Renovate app automatically adds the `mergeConfidence:all-badges` preset to the `extends` array.
+If you don't want the Merge Confidence badges, then add the `mergeConfidence:all-badges` preset to the `ignorePresets` array.
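+
+For example, a sketch of a Repository config that opts out of the badges:
+
+```json
+{
+  "ignorePresets": ["mergeConfidence:all-badges"]
+}
+```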
+
+Additionally, the preset `config:recommended` is added to `onboardingConfig`.
+
+### Allowed Post-upgrade commands
+
+A limited set of approved `postUpgradeTasks` commands are allowed in the app.
+They are not documented here as they may change over time - please consult the logs to see them.
+
+## Other
+
+The sections below cover edge cases which you should avoid if possible, and likely don't need to use.
+They are included here because they can cause "exceptions" to some of the previously mentioned rules of config.
+
+### Optimize for Disabled
+
+The `optimizeForDisabled` option was designed for an edge case where a large percentage of repos are disabled by config.
+If this option is set to `true`, Renovate will use a platform API call to see if a `renovate.json` exists and if it contains `"enabled": false`.
+If so, the repository will be skipped without needing a clone.
+If the file is not present or does not disable Renovate, then Renovate continues as before (having "wasted" that extra API call).
+
+### Force config
+
+We recommend you avoid the `force` config option, if possible.
+
+It can be used to "force" config over the top of other config or rules which might be merged later, so at times can cause confusion - especially if it's defined in Global config and overriding settings in Repository config.
diff --git a/docs/usage/config-presets.md b/docs/usage/config-presets.md
index 84135298e0bc6d..a29253b9438258 100644
--- a/docs/usage/config-presets.md
+++ b/docs/usage/config-presets.md
@@ -10,15 +10,6 @@ Read the [Key concepts, presets](./key-concepts/presets.md) page to learn more a
Shareable config presets must use the JSON or JSON5 formats, other formats are not supported.
-
-!!! warning
- Only use `default.json` for your presets.
-
-
-!!! warning
- We have deprecated using a `renovate.json` file for presets, as this causes issues if the repository configuration _also_ uses a `renovate.json` file.
- If you are using a `renovate.json` file to share your presets, rename it to `default.json`.
-
!!! tip
Describe what your preset does in the `"description"` field.
@@ -42,6 +33,17 @@ Alternatively, Renovate can fetch preset files from an HTTP server.
You can set a Git tag (like a SemVer) to use a specific release of your shared config.
+### Preset File Naming
+
+Presets are repo-hosted, and you can have one or more presets hosted per repository.
+If you omit a file name from your preset (e.g. `github>abc/foo`) then Renovate will look for a `default.json` file in the repo.
+If you wish to have an alternative file name, you need to specify it (e.g. `github>abc/foo//alternative-name.json5`).
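+
+For example, a sketch of a Repository config extending a preset with a non-default file name (the repository and file names follow the example above):
+
+```json
+{
+  "extends": ["github>abc/foo//alternative-name.json5"]
+}
+```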
+
+
+!!! warning
+ We've deprecated using a `renovate.json` file for the default _preset_ file name in a repository.
+ If you're using a `renovate.json` file to share your presets, rename it to `default.json`.
+
### GitHub
| name | example use | preset | resolves as | filename | Git tag |
@@ -165,7 +167,7 @@ Here is how you would use these in your Renovate config:
In short, the number of `{{argx}}` parameters in the definition is how many parameters you need to provide.
Parameters must be strings, non-quoted, and separated by commas if there are more than one.
-If you find that you are repeating config a lot, you might consider publishing one of these types of parameterised presets yourself.
+If you find that you are repeating config a lot, you might consider publishing one of these types of parameterized presets yourself.
Or if you think your preset would be valuable for others, please contribute a PR to the Renovate repository, see [Contributing to presets](#contributing-to-presets).
## GitHub-hosted Presets
@@ -232,6 +234,40 @@ Parameters are supported similar to other methods:
}
```
+## Templating presets
+
+You can use [Handlebars](https://handlebarsjs.com/) templates to be flexible with your presets.
+This can be handy when you want to include presets conditionally.
+
+
+!!! note
+ The template only supports a small subset of options, but you can extend them via `customEnvVariables`.
+
+Read the [templates](./templates.md) section to learn more.
+
+### Example use-case
+
+The following example shows a self-hosted Renovate preset located in a GitLab repository called `renovate/presets`.
+
+```json
+{
+ "extends": ["local>renovate/presets"]
+}
+```
+
+Usually you want to validate the preset before you put it in your Renovate configuration.
+Here is an example of how you can use templating to validate and load the preset on a branch level:
+
+```javascript
+// config.js
+module.exports = {
+ customEnvVariables: {
+ GITLAB_REF: process.env.CI_COMMIT_REF_NAME || 'main',
+ },
+ extends: ['local>renovate/presets#{{ env.GITLAB_REF }}'],
+};
+```
+
## Contributing to presets
Have you configured a rule that could help others?
diff --git a/docs/usage/configuration-options.md b/docs/usage/configuration-options.md
index 7dc419a3b9185a..063df084f9de9e 100644
--- a/docs/usage/configuration-options.md
+++ b/docs/usage/configuration-options.md
@@ -368,11 +368,6 @@ Solutions:
## branchName
-
-!!! warning
- We strongly recommended that you avoid configuring this field directly.
- Use at your own risk.
-
If you truly need to configure this then it probably means either:
- You are hopefully mistaken, and there's a better approach you should use, so open a new "config help" discussion at the [Renovate discussions tab](https://github.com/renovatebot/renovate/discussions) or
@@ -459,7 +454,7 @@ For example, To add `[skip ci]` to every commit you could configure:
}
```
-Another example would be if you want to configure a DCO sign-off to each commit.
+Another example would be if you want to configure a DCO sign off to each commit.
If you want Renovate to sign off its commits, add the [`:gitSignOff` preset](./presets-default.md#gitsignoff) to your `extends` array:
@@ -473,11 +468,6 @@ If you want Renovate to sign off its commits, add the [`:gitSignOff` preset](./p
## commitMessage
-
-!!! warning
- We deprecated editing the `commitMessage` directly, and we recommend you stop using this config option.
- Instead, use config options like `commitMessageAction`, `commitMessageExtra`, and so on, to create the commit message you want.
-
## commitMessageAction
This is used to alter `commitMessage` and `prTitle` without needing to copy/paste the whole string.
@@ -531,12 +521,12 @@ Composer `2.2` and up will be run with `--ignore-platform-req='ext-*' --ignore-p
Older Composer versions will be run with `--ignore-platform-reqs`, which means that all platform constraints (including the PHP version) will be ignored by default.
This can result in updated dependencies that are not compatible with your platform.
-To customize this behaviour, you can explicitly ignore platform requirements (for example `ext-zip`) by setting them separately in this array.
+To customize this behavior, you can explicitly ignore platform requirements (for example `ext-zip`) by setting them separately in this array.
Each item will be added to the Composer command with `--ignore-platform-req`, resulting in it being ignored during its invocation.
Note that this requires your project to use Composer V2, as V1 does not support excluding single platform requirements.
The used PHP version will be guessed automatically from your `composer.json` definition, so `php` should not be added as explicit dependency.
-If an empty array is configured, Renovate uses its default behaviour.
+If an empty array is configured, Renovate uses its default behavior.
Set to `null` (not recommended) to fully omit `--ignore-platform-reqs/--ignore-platform-req` during Composer invocation.
This requires the Renovate image to be fully compatible with your Composer platform requirements in order for the Composer invocation to succeed, otherwise Renovate will fail to create the updated lock file.
@@ -638,12 +628,18 @@ Renovate supports two options:
More advanced filtering options may come in the future.
There must be a `constraints` object in your Renovate config, or constraints detected from package files, for this to work.
+Additionally, the "datasource" within Renovate must be capable of returning `constraints` values about each package's release.
+
This feature is limited to the following datasources:
+- `crate`
- `jenkins-plugins`
- `npm`
- `packagist`
- `pypi`
+- `rubygems`
+
+Sometimes private registries may omit constraints information, which is another reason why such filtering may not work, even if the datasource and its corresponding default public registry support it.
!!! warning
@@ -693,7 +689,7 @@ The `regex` manager which is based on using Regular Expression named capture gro
You must have a named capture group matching (e.g. `(?.*)`) _or_ configure its corresponding template (e.g. `depNameTemplate`) for these fields:
- `datasource`
-- `depName`
+- `depName` and/or `packageName`
- `currentValue`
Use named capture group matching _or_ set a corresponding template.
@@ -707,7 +703,7 @@ For template fields, use the triple brace `{{{ }}}` notation to avoid Handlebars
!!! tip
- Look at our [Regex Manager Presets](./presets-regexManagers.md), they may have what you need.
+ Look at our [Custom Manager Presets](./presets-customManagers.md), they may have what you need.
### customType
@@ -774,9 +770,9 @@ As example the following configuration will update all three lines in the Docker
```
```dockerfile title="Dockerfile"
-FROM amd64/ubuntu:18.04
-ENV GRADLE_VERSION=6.2 # gradle-version/gradle&versioning=maven
-ENV NODE_VERSION=10.19.0 # github-tags/nodejs/node&versioning=node
+FROM amd64/ubuntu:24.04
+ENV GRADLE_VERSION=6.2 # gradle-version/gradle&versioning=maven
+ENV NODE_VERSION=10.19.0 # github-tags/nodejs/node&versioning=node
```
#### recursive
@@ -1255,7 +1251,7 @@ It is valid only as a top-level configuration option and not, for example, withi
!!! warning
- The bot administrator must configure a list of allowed environment names in the [`allowedEnv`](./self-hosted-configuration.md#allowedEnv) config option, before users can use those allowed names in the `env` option.
+ The bot administrator must configure a list of allowed environment names in the [`allowedEnv`](./self-hosted-configuration.md#allowedenv) config option, before users can use those allowed names in the `env` option.
Behavior:
@@ -1406,7 +1402,7 @@ For now, you can only use this option on the GitLab platform.
For `followTag` to work, the datasource must support distribution streams or tags, like for example npm does.
-The main usecase is to follow a pre-release tag of a dependency, say TypeScript's `"insiders"` build:
+The main use case is to follow a pre-release tag of a dependency, say TypeScript's `"insiders"` build:
```json
{
@@ -1446,7 +1442,7 @@ If this option is enabled, reviewers will need to create a new PR if more change
By default, Renovate skips any forked repositories when in `autodiscover` mode.
It even skips a forked repository that has a Renovate configuration file, because Renovate does not know if that file was added by the forked repository.
-**Process a fork in `autodiscover` mode`**
+**Process a fork in `autodiscover` mode**
If you want Renovate to run on a forked repository when in `autodiscover` mode then:
@@ -1852,7 +1848,7 @@ Enable got [http2](https://github.com/sindresorhus/got/blob/v11.5.2/readme.md#ht
You can provide a `headers` object that includes fields to be forwarded to the HTTP request headers.
By default, all headers starting with "X-" are allowed.
-A bot administrator may configure an override for [`allowedHeaders`](./self-hosted-configuration.md#allowedHeaders) to configure more permitted headers.
+A bot administrator may configure an override for [`allowedHeaders`](./self-hosted-configuration.md#allowedheaders) to configure more permitted headers.
`headers` value(s) configured in the bot admin `hostRules` (for example in a `config.js` file) are _not_ validated, so it may contain any header regardless of `allowedHeaders`.
@@ -1954,6 +1950,27 @@ registry=https://gitlab.myorg.com/api/v4/packages/npm/
!!! note
Values containing a URL path but missing a scheme will be prepended with 'https://' (e.g. `domain.com/path` -> `https://domain.com/path`)
+### readOnly
+
+If the `readOnly` field is set to `true` inside the host rule, it will match only against requests that are known to be read operations.
+Examples are `GET` or `HEAD` requests, but it could also be certain types of GraphQL queries.
+
+This option could be used to avoid rate limits for certain platforms like GitHub or Bitbucket, by offloading the read operations to a different user.
+
+```json
+{
+ "hostRules": [
+ {
+ "matchHost": "api.github.com",
+ "readOnly": true,
+ "token": "********"
+ }
+ ]
+}
+```
+
+If more than one token matches for a read-only request then the `readOnly` token will be given preference.
+
### timeout
Use this figure to adjust the timeout for queries.
@@ -2042,10 +2059,13 @@ Applicable for Composer only for now.
## ignorePrAuthor
-This is usually needed if someone needs to migrate bot accounts, including from the Mend Renovate App to the self-hosted variant.
+This is usually needed if someone needs to migrate bot accounts, including from the Mend Renovate App to self-hosted.
+An additional use case is for GitLab users of project or group access tokens who need to rotate them.
+
If `ignorePrAuthor` is configured to true, it means Renovate will fetch the entire list of repository PRs instead of optimizing to fetch only those PRs which it created itself.
You should only want to enable this if you are changing the bot account (e.g. from `@old-bot` to `@new-bot`) and want `@new-bot` to find and update any existing PRs created by `@old-bot`.
-It's recommended to revert this setting once that transition period is over and all old PRs are resolved.
+
+Setting this field to `true` in GitLab will also mean that all Issues will be fetched instead of only those by the bot itself.
## ignorePresets
@@ -2055,11 +2075,11 @@ For example, consider this config:
```json
{
"extends": ["config:recommended"],
- "ignorePresets": [":prHourlyLimit2"]
+ "ignorePresets": ["group:monorepos"]
}
```
-It would take the entire `"config:recommended"` preset - which has a lot of sub-presets - but ignore the `":prHourlyLimit2"` rule.
+It would take the entire `"config:recommended"` preset - which has a lot of sub-presets - but ignore the `"group:monorepos"` rule.
## ignoreReviewers
@@ -2123,6 +2143,19 @@ Currently, this applies to the `minimumReleaseAge` check only.
The `flexible` mode can result in "flapping" of Pull Requests, for example: a pending PR with version `1.0.3` is first released but then downgraded to `1.0.2` once it passes `minimumReleaseAge`.
We recommend that you use the `strict` mode, and enable the `dependencyDashboard` so that you can see suppressed PRs.
+## keepUpdatedLabel
+
+On supported platforms you may add a label to a PR so that Renovate recreates/rebases the PR when the branch falls behind the base branch.
+Adding the `keepUpdatedLabel` label to a PR makes Renovate behave as if `rebaseWhen` were set to `behind-base-branch`, but only for the given PR.
+Renovate does _not_ remove the label from the PR after it finishes rebasing.
+This is different from the `rebaseLabel` option, where Renovate _removes_ the label from the PR after rebasing.
+
+`keepUpdatedLabel` can be useful when you have approved certain PRs and want Renovate to keep the PRs up-to-date until you're ready to merge them.
+The setting `keepUpdatedLabel` is best used in this scenario:
+
+- By default, you configure `rebaseWhen` to `never` or `conflicted` to reduce rebasing
+- Sometimes, you want Renovate to keep specific PRs up-to-date with their base branch (equivalent to `rebaseWhen=behind-base-branch`)
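+
+A sketch of the scenario above (the label name `keep updated` is only an example):
+
+```json
+{
+  "rebaseWhen": "conflicted",
+  "keepUpdatedLabel": "keep updated"
+}
+```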
+
## labels
By default, Renovate does not add any labels to PRs.
@@ -2146,10 +2179,10 @@ Consider this example:
With the above config, every PR raised by Renovate will have the label `dependencies` while PRs containing `eslint`-related packages will instead have the label `linting`.
-Renovate only adds labels when it creates the PR, which means:
+Behavior details:
-- If you remove labels which Renovate added, it does not re-apply them
-- If you change your config, the new/changed labels are not applied to any open PRs
+- On GitHub, GitLab and Gitea: Renovate will keep PR labels in sync with configured labels, provided that no other user or bot has made changes to the labels after PR creation. If labels are changed by any other account, Renovate will stop making further changes.
+- For other platforms, Renovate will add labels only at time of PR creation and not update them after that.
The `labels` array is non-mergeable, meaning if multiple `packageRules` match then Renovate uses the last value for `labels`.
If you want to add/combine labels, use the `addLabels` config option, which is mergeable.
@@ -2293,6 +2326,24 @@ This works because Renovate will add a "renovate/stability-days" pending status
Add to this object if you wish to define rules that apply only to minor updates.
+## mode
+
+This configuration option was created primarily for use with Mend's hosted app, but can also be useful for some self-hosted use cases.
+
+It enables a new `silent` mode to allow repos to be scanned for updates _and_ for users to be able to request such updates be opened in PRs _on demand_ through the Mend UI, without needing the Dependency Dashboard issue in the repo.
+
+Although similar, the options `mode=silent` and `dryRun` can be used together.
+When both are configured, `dryRun` takes precedence, so for example PRs won't be created.
+
+Configuring `silent` mode is quite similar to `dryRun=lookup` except:
+
+- It will bypass onboarding checks (unlike when performing a dry run on a non-onboarded repo) similar to `requireConfig=optional`
+- It can create branches/PRs if `checkedBranches` is set
+- It will keep any existing branches up-to-date (e.g. ones created previously using `checkedBranches`)
+
+When in `silent` mode, Renovate does not create issues (such as Dependency Dashboard, or due to config errors) or Config Migration PRs, even if enabled.
+It also does not prune/close any which already exist.
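+
+A sketch of a Global config that combines silent mode with `checkedBranches` (the branch name is a placeholder):
+
+```json
+{
+  "mode": "silent",
+  "checkedBranches": ["renovate/lodash-4.x"]
+}
+```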
+
## npmToken
See [Private npm module support](./getting-started/private-packages.md) for details on how this is used.
@@ -2422,8 +2473,9 @@ For example, you have multiple `package.json` and want to use `dependencyDashboa
### allowedVersions
-Use this - usually within a packageRule - to limit how far to upgrade a dependency.
-For example, if you wish to upgrade to Angular v1.5 but not to `angular` v1.6 or higher, you could define this to be `<= 1.5` or `< 1.6.0`:
+You can use `allowedVersions` - usually within a `packageRules` entry - to limit how far to upgrade a dependency.
+
+For example, if you want to upgrade to Angular v1.5 but _not_ to `angular` v1.6 or higher, you could set `allowedVersions` to `<= 1.5` or `< 1.6.0`:
```json
{
@@ -2436,10 +2488,14 @@ For example, if you wish to upgrade to Angular v1.5 but not to `angular` v1.6 or
}
```
-The valid syntax for this will be calculated at runtime because it depends on the versioning scheme, which is itself dynamic.
+Renovate calculates the valid syntax for this at runtime, because it depends on the dynamic versioning scheme.
+
+#### Using regular expressions
+
+You can use Regular Expressions in the `allowedVersions` config.
+You must _begin_ and _end_ your Regular Expression with the `/` character!
-This field also supports Regular Expressions if they begin and end with `/`.
-For example, the following will enforce that only 3 or 4-part versions are supported, without any prefixes:
+For example, this config only allows 3 or 4-part versions, without any prefixes in the version:
```json
{
@@ -2452,8 +2508,12 @@ For example, the following will enforce that only 3 or 4-part versions are suppo
}
```
-This field also supports a special negated regex syntax for ignoring certain versions.
-Use the syntax `!/ /` like the following:
+Again: note how the Regular Expression _begins_ and _ends_ with the `/` character.
+
+#### Ignore versions with negated regex syntax
+
+You can use a special negated regex syntax to ignore certain versions.
+You must use the `!/ /` syntax, like this:
```json
{
@@ -2501,6 +2561,8 @@ Invalid if used outside a `packageRule`.
### excludeDepPatterns
+### excludeDepPrefixes
+
### excludePackageNames
**Important**: Do not mix this up with the option `ignoreDeps`.
@@ -2687,8 +2749,21 @@ Use this field to restrict rules to a particular datasource. e.g.
This option is matched against the `currentValue` field of a dependency.
-`matchCurrentValue` supports Regular Expressions which must begin and end with `/`.
-For example, the following enforces that only `1.*` versions will be used:
+`matchCurrentValue` supports Regular Expressions and glob patterns.
+For example, the following enforces that updates from `1.*` versions will be merged automatically:
+
+```json
+{
+ "packageRules": [
+ {
+ "matchPackagePatterns": ["io.github.resilience4j"],
+ "matchCurrentValue": "1.*",
+ "automerge": true
+ }
+ ]
+}
+```
+
+Regular Expressions must begin and end with `/`.
```json
{
@@ -2738,7 +2813,7 @@ Consider using instead `matchCurrentValue` if you wish to match against the raw
}
```
-The syntax of the version range must follow the [versioning scheme](modules/versioning.md#supported-versioning) used by the matched package(s).
+The syntax of the version range must follow the [versioning scheme](modules/versioning/index.md#supported-versioning) used by the matched package(s).
This is usually defined by the [manager](modules/manager/index.md#supported-managers) which discovered them or by the default versioning for the package's [datasource](modules/datasource/index.md).
For example, a Gradle package would typically need Gradle constraint syntax (e.g. `[,7.0)`) and not SemVer syntax (e.g. `<7.0`).
@@ -2819,14 +2894,31 @@ It is recommended that you avoid using "negative" globs, like `**/!(package.json
### matchDepNames
+This field behaves the same as `matchPackageNames` except it matches against `depName` instead of `packageName`.
+
### matchDepPatterns
+### matchDepPrefixes
+
### matchNewValue
This option is matched against the `newValue` field of a dependency.
-`matchNewValue` supports Regular Expressions which must begin and end with `/`.
-For example, the following enforces that only `1.*` versions will be used:
+`matchNewValue` supports Regular Expressions and glob patterns.
+For example, the following enforces that updates to `1.*` versions will be merged automatically:
+
+```json
+{
+ "packageRules": [
+ {
+ "matchPackagePatterns": ["io.github.resilience4j"],
+ "matchNewValue": "1.*",
+ "automerge": true
+ }
+ ]
+}
+```
+
+Regular Expressions must begin and end with `/`.
```json
{
@@ -2873,6 +2965,12 @@ See also `excludePackageNames`.
The above will configure `rangeStrategy` to `pin` only for the package `angular`.
+
+!!! note
+ `matchPackageNames` will try matching `packageName` first and then fall back to matching `depName`.
+ If the fallback is used, Renovate will log a warning, because the fallback will be removed in a future release.
+ Use `matchDepNames` instead.
+
### matchPackagePatterns
Use this field if you want to have one or more package names patterns in your package rule.
@@ -2891,6 +2989,12 @@ See also `excludePackagePatterns`.
The above will configure `rangeStrategy` to `replace` for any package starting with `angular`.
+
+!!! note
+ `matchPackagePatterns` will try matching `packageName` first and then fall back to matching `depName`.
+ If the fallback is used, Renovate will log a warning, because the fallback will be removed in a future release.
+ Use `matchDepPatterns` instead.
+
### matchPackagePrefixes
Use this field to match a package prefix without needing to write a regex expression.
@@ -2909,8 +3013,11 @@ See also `excludePackagePrefixes`.
Like the earlier `matchPackagePatterns` example, the above will configure `rangeStrategy` to `replace` for any package starting with `angular`.
-`matchPackagePrefixes` will match against `packageName` first, and then `depName`, however `depName` matching is deprecated and will be removed in a future major release.
-If matching against `depName`, use `matchDepPatterns` instead.
+
+!!! note
+ `matchPackagePrefixes` will try matching `packageName` first and then fall back to matching `depName`.
+ If the fallback is used, Renovate will log a warning, because the fallback will be removed in a future release.
+ Use `matchDepPatterns` instead.
### matchSourceUrlPrefixes
@@ -3108,6 +3215,39 @@ For example to replace the npm package `jade` with version `2.0.0` of the packag
}
```
+### prPriority
+
+Sometimes Renovate needs to rate limit its creation of PRs, e.g. hourly or concurrent PR limits.
+By default, Renovate sorts/prioritizes based on the update type, going from smallest update to biggest update.
+Renovate creates update PRs in this order:
+
+1. `pinDigest`
+1. `pin`
+1. `digest`
+1. `patch`
+1. `minor`
+1. `major`
+
+If you have dependencies that are more or less important than others then you can use the `prPriority` field for PR sorting.
+The default value is 0, so setting a negative value will make dependencies sort last, while higher values sort first.
+
+Here's an example of how you would define PR priority so that `devDependencies` are raised last and `react` is raised first:
+
+```json
+{
+ "packageRules": [
+ {
+ "matchDepTypes": ["devDependencies"],
+ "prPriority": -1
+ },
+ {
+ "matchPackageNames": ["react"],
+ "prPriority": 5
+ }
+ ]
+}
+```
+
## patch
Add to this object if you wish to define rules that apply only to patch updates.
@@ -3137,8 +3277,7 @@ If enabled Renovate will pin Docker images or GitHub Actions by means of their S
If you have enabled `automerge` and set `automergeType=pr` in the Renovate config, then leaving `platformAutomerge` as `true` speeds up merging via the platform's native automerge functionality.
-Renovate tries platform-native automerge only when it initially creates the PR.
-Any PR that is being updated will be automerged with the Renovate-based automerge.
+On GitHub and GitLab, Renovate re-enables the PR for platform-native automerge whenever it's rebased.
`platformAutomerge` will configure PRs to be merged after all (if any) branch policies have been met.
This option is available for Azure, Gitea, GitHub and GitLab.
@@ -3393,39 +3532,6 @@ This is why we configured an upper limit for how long we wait until creating a P
!!! note
If the option `minimumReleaseAge` is non-zero then Renovate disables the `prNotPendingHours` functionality.
-## prPriority
-
-Sometimes Renovate needs to rate limit its creation of PRs, e.g. hourly or concurrent PR limits.
-By default, Renovate sorts/prioritizes based on the update type, going from the smallest update to the biggest update.
-Renovate creates update PRs in this order:
-
-1. `pinDigest`
-1. `pin`
-1. `digest`
-1. `patch`
-1. `minor`
-1. `major`
-
-If you have dependencies that are more or less important than others then you can use the `prPriority` field for PR sorting.
-The default value is 0, so setting a negative value will make dependencies sort last, while higher values sort first.
-
-Here's an example of how you would define PR priority so that `devDependencies` are raised last and `react` is raised first:
-
-```json
-{
- "packageRules": [
- {
- "matchDepTypes": ["devDependencies"],
- "prPriority": -1
- },
- {
- "matchPackageNames": ["react"],
- "prPriority": 5
- }
- ]
-}
-```
-
## prTitle
The PR title is important for some of Renovate's matching algorithms (e.g. determining whether to recreate a PR or not) so ideally do not modify it much.
@@ -3541,8 +3647,10 @@ This feature works with the following managers:
- [`docker-compose`](modules/manager/docker-compose/index.md)
- [`dockerfile`](modules/manager/dockerfile/index.md)
- [`droneci`](modules/manager/droneci/index.md)
+- [`flux`](modules/manager/flux/index.md)
- [`gitlabci`](modules/manager/gitlabci/index.md)
- [`helm-requirements`](modules/manager/helm-requirements/index.md)
+- [`helm-values`](modules/manager/helm-values/index.md)
- [`helmfile`](modules/manager/helmfile/index.md)
- [`helmv3`](modules/manager/helmv3/index.md)
- [`kubernetes`](modules/manager/kubernetes/index.md)
@@ -3656,6 +3764,12 @@ every 3 months on the first day of the month
* 0 2 * *
```
+
+!!! warning
+ You _must_ keep the number and the `am`/`pm` part _together_!
+ Correct: `before 5am`, or `before 5:00am`.
+ Wrong: `before 5 am`, or `before 5:00 am`.
+
!!! warning
For Cron schedules, you _must_ use the `*` wildcard for the minutes value, as Renovate does not support minute granularity.
@@ -3745,9 +3859,16 @@ If you want to enforce grouped package updates, you need to set this option to `
## separateMinorPatch
-By default, Renovate does not distinguish between "patch" (e.g. 1.0.x) and "minor" (e.g. 1.x.0) releases - it groups them together.
-E.g., if you are running version 1.0.0 of a package and both versions 1.0.1 and 1.1.0 are available then Renovate will raise a single PR for version 1.1.0.
-If you wish to distinguish between patch and minor upgrades, for example if you wish to automerge patch but not minor, then you can configure this option to `true`.
+By default, Renovate groups `patch` (`1.0.x`) and `minor` (`1.x.0`) releases into a single PR.
+For example: you are running version `1.0.0` of a package, which has two updates:
+
+- `1.0.1`, a `patch` type update
+- `1.1.0`, a `minor` type update
+
+By default, Renovate creates a single PR for the `1.1.0` version.
+
+If you want Renovate to create _separate_ PRs for `patch` and `minor` upgrades, set `separateMinorPatch` to `true`.
+Getting separate updates from Renovate can be handy when you want to, for example, automerge `patch` updates but manually merge `minor` updates.
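+
+For example, a sketch that separates `patch` from `minor` updates and automerges only the `patch` PRs:
+
+```json
+{
+  "separateMinorPatch": true,
+  "patch": {
+    "automerge": true
+  }
+}
+```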
## separateMultipleMajor
@@ -3755,6 +3876,21 @@ Configure this to `true` if you wish to get one PR for every separate major vers
For example, if you are on webpack@v1 currently then default behavior is a PR for upgrading to webpack@v3 and not for webpack@v2.
If this setting is true then you would get one PR for webpack@v2 and one for webpack@v3.
+## separateMultipleMinor
+
+Enable this for dependencies when it is important to split updates into separate PRs per minor release stream (e.g. `python`).
+
+For example, if you are on `python@v3.9.0` currently, then by default Renovate creates a PR to upgrade you to the latest version such as `python@v3.12.x`.
+By default, Renovate skips versions in between, like `python@v3.10.x`.
+
+But if you set `separateMultipleMinor=true` then you get separate PRs for each minor stream, like `python@v3.9.x`, `python@v3.10.x` and `python@v3.11.x`, etc.
+
+## skipInstalls
+
+By default, Renovate will use the most efficient approach to updating package files and lock files, which in most cases skips the need to perform a full module install by the bot.
+If this is set to false, then a full install of modules will be done.
+This is currently applicable to `npm` only, and only used in cases where bugs in `npm` result in incorrect lock files being updated.
+
## statusCheckNames
You can customize the name/context of status checks that Renovate adds to commits/branches/PRs.
@@ -3981,3 +4117,7 @@ To disable the vulnerability alerts feature, set `enabled=false` in a `vulnerabi
}
}
```
+
+
+!!! note
+ If you want to raise only vulnerability fix PRs, you may use the `security:only-security-updates` preset.
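+
+A sketch of a Repository config that uses that preset:
+
+```json
+{
+  "extends": ["security:only-security-updates"]
+}
+```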
diff --git a/docs/usage/dependency-pinning.md b/docs/usage/dependency-pinning.md
index 3d7f4605bfe41b..694b0e54c05d87 100644
--- a/docs/usage/dependency-pinning.md
+++ b/docs/usage/dependency-pinning.md
@@ -6,7 +6,7 @@ description: The pros and cons of dependency pinning for JavaScript/npm
# Should you Pin your JavaScript Dependencies?
Once you start using a tool/service like Renovate, probably the biggest decision you need to make is whether to "pin" your dependencies instead of using SemVer ranges.
-The answer is "It's your choice", but we can certainly make some generalisations/recommendations to help you.
+The answer is "It's your choice", but we can certainly make some generalizations/recommendations to help you.
If you do not want to read the in-depth discussion, you can skip ahead to our recommendations in the ["Recommendations" section](#recommendations).
@@ -72,7 +72,7 @@ By pinning dependencies you know exactly what you are running, and you know exac
Now consider a similar theoretical scenario where `foobar@1.2.0` is faulty, but it is _not_ caught by any of your automated tests.
This is more common and more dangerous.
-If you were using SemVer ranges then this new version of `foobar` will likely be deployed to production automatically one day, sometime after which you notice errors and realise you need to fix it.
+If you were using SemVer ranges then this new version of `foobar` will likely be deployed to production automatically one day, sometime after which you notice errors and realize you need to fix it.
Like before, you need to manually work out which dependency caused it - assuming you guess correctly that it was a new dependency version at fault - and pin it manually by editing `package.json` one dependency at a time.
Alternatively, if you were instead pinning `foobar` then you would get a PR for `foobar@1.2.0` which awaits your approval.
@@ -186,8 +186,8 @@ You could even be running `yarn upgrade` regularly to be getting _indirect_ pack
So the lock file does not solve the same SemVer problems that pinning solves - but it compliments it.
For this reason our usual recommendation is using a lock file regardless of whether you pin dependencies or not, and pinning even if you have a lock file.
-Do not forget though that our motto is "Flexible, so you do not need to be", so go ahead and configure however you want.
-Also, we are open to ideas for how to make lock file updates more "visible" too.
+But you may also go ahead and configure however you want.
+Also, we're open to ideas for how to make lock file updates more "visible" too.
e.g. are you interested in a Renovate feature where you get a lockfile-only PR any time a direct dependency gets an in-range update?
## What about indirect/sub-dependencies?
diff --git a/docs/usage/docker.md b/docs/usage/docker.md
index bb68a0b41855e9..3eea02c099cbfc 100644
--- a/docs/usage/docker.md
+++ b/docs/usage/docker.md
@@ -9,6 +9,7 @@ Renovate supports upgrading dependencies in various types of Docker definition f
- Docker's `Dockerfile` files
- Docker Compose `docker-compose.yml`, `compose.yml` files
+- Visual Studio Code dev containers and GitHub Codespaces images and features
- CircleCI config files
- Kubernetes manifest files
- Ansible configuration files
@@ -120,13 +121,13 @@ For example:
Renovate understands [Ubuntu release code names](https://wiki.ubuntu.com/Releases) and will offer upgrades to the latest LTS release.
You must only use the _first_ term of the code name in _lowercase_.
-So use `jammy` for the Jammy Jellyfish release.
+So use `noble` for the Noble Numbat release.
For example, Renovate will offer to upgrade the following `Dockerfile` layer:
```diff
-- FROM ubuntu:focal
-+ FROM ubuntu:jammy
+- FROM ubuntu:jammy
++ FROM ubuntu:noble
```
### Debian codenames
@@ -383,7 +384,7 @@ To get access to the token a custom Renovate Docker image is needed that include
The Dockerfile to create such an image can look like this:
```Dockerfile
-FROM renovate/renovate:37.214.0
+FROM renovate/renovate:37.356.1
# Include the "Docker tip" which you can find here https://cloud.google.com/sdk/docs/install
# under "Installation" for "Debian/Ubuntu"
RUN ...
diff --git a/docs/usage/examples/self-hosting.md b/docs/usage/examples/self-hosting.md
index 5e302b9846cd90..643c3fa2baf231 100644
--- a/docs/usage/examples/self-hosting.md
+++ b/docs/usage/examples/self-hosting.md
@@ -384,7 +384,7 @@ spec:
## Logging
If you are ingesting/parsing logs into another system then we recommend you set `LOG_LEVEL=debug` and `LOG_FORMAT=json` in your environment variables.
-Debug logging is usually needed for any debugging, while JSON format will mean that the output is parseable.
+Debug logging is usually needed for any debugging, while JSON format will mean that the output is parsable.
### About the log level numbers
@@ -416,7 +416,10 @@ This means Renovate can safely connect to systems using that certificate or cert
Helper programs like Git and npm use the system trust store.
For those programs to trust a self-signed certificate you must add it to the systems trust store.
-On Ubuntu/Debian and many Linux-based systems, this can be done by copying the self-signed certificate (e.g. `self-signed-certificate.crt`) to `/usr/local/share/ca-certificates/` and running [`update-ca-certificates`](https://manpages.ubuntu.com/manpages/xenial/man8/update-ca-certificates.8.html) to update the system trust store afterwards.
+On Ubuntu/Debian and many Linux-based systems, this can be done by:
+
+1. copying the self-signed certificate (e.g. `self-signed-certificate.crt`) to `/usr/local/share/ca-certificates/`
+1. running [`update-ca-certificates`](https://manpages.ubuntu.com/manpages/noble/man8/update-ca-certificates.8.html) to update the system trust store
### Renovate Docker image
diff --git a/docs/usage/faq.md b/docs/usage/faq.md
index d1a136a2c9ac4a..4faf08372df96f 100644
--- a/docs/usage/faq.md
+++ b/docs/usage/faq.md
@@ -36,11 +36,34 @@ The maintainers do not follow any release schedule or release cadence.
This means the Mend Renovate App can lag a few hours to a week behind the open source version.
Major releases of Renovate are held back until the maintainers are reasonably certain it works for most users.
+## How can I see which version the Mend Renovate app is using?
+
+Follow these steps to see which version the Mend Renovate app is on:
+
+1. Go to the [Mend Developer Portal](https://developer.mend.io/)
+1. Sign in to the Renovate app with your GitHub or Bitbucket account
+1. Select your organization
+1. Select an installed repository
+1. Select a job from the _Recent jobs_ overview
+1. Select the _Info_ Log Level from the dropdown menu
+1. You should see something like this:
+
+ ```
+ INFO: Repository started
+ {
+ "renovateVersion": "37.356.1"
+ }
+ ```
+
+
+!!! tip
+ The PRs that Renovate creates have a link to the "repository job log" in the footer of the PR body text.
+
## Renovate core features not supported on all platforms
| Feature | Platforms which lack feature | See Renovate issue(s) |
| --------------------- | ---------------------------------------------------------- | ------------------------------------------------------------ |
-| Dependency Dashboard | Azure, Bitbucket, Bitbucket Server | [#9592](https://github.com/renovatebot/renovate/issues/9592) |
+| Dependency Dashboard | Azure, Bitbucket, Bitbucket Server, Gerrit | [#9592](https://github.com/renovatebot/renovate/issues/9592) |
| The Mend Renovate App | Azure, Bitbucket, Bitbucket Server, Forgejo, Gitea, GitLab | |
## Major platform features not supported by Renovate
diff --git a/docs/usage/getting-started/private-packages.md b/docs/usage/getting-started/private-packages.md
index 13cb543b0ada8f..80383833c71dc8 100644
--- a/docs/usage/getting-started/private-packages.md
+++ b/docs/usage/getting-started/private-packages.md
@@ -616,4 +616,4 @@ For instructions on this, see the above section on encrypting secrets for the Me
### hostRules configuration using environment variables
-Self-hosted users can enable the option [`detectHostRulesFromEnv`](../self-hosted-configuration.md#detectHostRulesFromEnv) to configure the most common types of `hostRules` via environment variables.
+Self-hosted users can enable the option [`detectHostRulesFromEnv`](../self-hosted-configuration.md#detecthostrulesfromenv) to configure the most common types of `hostRules` via environment variables.
diff --git a/docs/usage/index.md b/docs/usage/index.md
new file mode 100644
index 00000000000000..bcf0dd8856e421
--- /dev/null
+++ b/docs/usage/index.md
@@ -0,0 +1,91 @@
+![Renovate banner](https://app.renovatebot.com/images/whitesource_renovate_660_220.jpg){ loading=lazy }
+
+# Renovate documentation
+
+Automated dependency updates.
+Multi-platform and multi-language.
+
+## Why use Renovate?
+
+- :octicons-git-pull-request-24:{ .lg .middle } __Automatic updates__
+
+ ---
+
+ Get pull requests to update your dependencies and lock files.
+
+- :octicons-calendar-24:{ .lg .middle } __On your schedule__
+
+ ---
+
+ Reduce noise by scheduling when Renovate creates PRs.
+
+- :octicons-package-24:{ .lg .middle } __Works out of the box__
+
+ ---
+
+ Renovate finds relevant package files automatically, including in monorepos.
+
+- :octicons-goal-24:{ .lg .middle } __How you like it__
+
+ ---
+
+ You can customize the bot's behavior with configuration files.
+
+- :octicons-share-24:{ .lg .middle } __Share your configuration__
+
+ ---
+
+ Share your configuration with ESLint-like config presets.
+
+- :octicons-sync-24:{ .lg .middle } __Out with the old, in with the new__
+
+ ---
+
+    Get replacement PRs to migrate from a deprecated dependency to the community-suggested replacement. This works with _most_ managers; see [issue 14149](https://github.com/renovatebot/renovate/issues/14149) for exceptions.
+
+- :octicons-tools-24:{ .lg .middle } __Open source__
+
+ ---
+
+ Renovate is licensed under the [GNU Affero General Public License](https://github.com/renovatebot/renovate/blob/main/license).
+
+## Supported Platforms
+
+Renovate works on these platforms:
+
+- [GitHub (.com and Enterprise Server)](./modules/platform/github/index.md)
+- [GitLab (.com and CE/EE)](./modules/platform/gitlab/index.md)
+- [Bitbucket Cloud](./modules/platform/bitbucket/index.md)
+- [Bitbucket Server](./modules/platform/bitbucket-server/index.md)
+- [Azure DevOps](./modules/platform/azure/index.md)
+- [AWS CodeCommit](./modules/platform/codecommit/index.md)
+- [Gitea and Forgejo](./modules/platform/gitea/index.md)
+- [Gerrit (experimental)](./modules/platform/gerrit/index.md)
+
+## Who Uses Renovate?
+
+Renovate is used by:
+
+![Renovate Matrix](./assets/images/matrix.png){ loading=lazy }
+
+## Ways to run Renovate
+
+You can run Renovate as:
+
+- an [Open Source npm package](https://www.npmjs.com/package/renovate)
+- a [pre-built Open Source image on Docker Hub](https://hub.docker.com/r/renovate/renovate)
+
+Or you can use [the Mend Renovate App](https://github.com/marketplace/renovate) which is hosted by [Mend](https://www.mend.io/).
+
+[Install the Mend Renovate app for GitHub](https://github.com/marketplace/renovate){ .md-button .md-button--primary }
+[Check out our tutorial](https://github.com/renovatebot/tutorial){ .md-button }
diff --git a/docs/usage/javascript.md b/docs/usage/javascript.md
index 56496d2cc9c37f..f499e2dc84f53d 100644
--- a/docs/usage/javascript.md
+++ b/docs/usage/javascript.md
@@ -1,10 +1,10 @@
---
title: JavaScript
-description: JavaScript (npm/Yarn) Package Manager Support in Renovate
+description: JavaScript (npm/Yarn/pnpm/Bun) Package Manager Support in Renovate
---
# JavaScript
Renovate supports upgrading JavaScript dependencies specified in `package.json` files.
-`npm`, `yarn`, and `pnpm` are all supported.
+`npm`, `yarn`, `pnpm`, and `bun` are all supported.
diff --git a/docs/usage/key-concepts/changelogs.md b/docs/usage/key-concepts/changelogs.md
index 435e923f779817..b49a64dc548ce8 100644
--- a/docs/usage/key-concepts/changelogs.md
+++ b/docs/usage/key-concepts/changelogs.md
@@ -98,11 +98,11 @@ If your repository uses the monorepo pattern make sure _each_ `package.json` fil
### maven package maintainers
-Read [`maven` datasource, making your changelogs fetchable](https://docs.renovatebot.com/modules/datasource/maven/#making-your-changelogs-fetchable).
+Read [`maven` datasource, making your changelogs fetchable](../modules/datasource/maven/index.md#making-your-changelogs-fetchable).
### Docker image maintainers
-Read the [Docker datasource](https://docs.renovatebot.com/modules/datasource/docker/) docs.
+Read the [Docker datasource](../modules/datasource/docker/index.md) docs.
### Nuget package maintainers
diff --git a/docs/usage/key-concepts/how-renovate-works.md b/docs/usage/key-concepts/how-renovate-works.md
index 3869144fe7c30f..20d3e3eef3d653 100644
--- a/docs/usage/key-concepts/how-renovate-works.md
+++ b/docs/usage/key-concepts/how-renovate-works.md
@@ -23,7 +23,7 @@ Renovate's modules are:
- [datasource](../modules/datasource/index.md)
- [manager](../modules/manager/index.md)
- [platform](../modules/platform/index.md)
-- [versioning](../modules/versioning.md)
+- [versioning](../modules/versioning/index.md)
Renovate uses these modules in order:
diff --git a/docs/usage/language-constraints-and-upgrading.md b/docs/usage/language-constraints-and-upgrading.md
new file mode 100644
index 00000000000000..3f1e4501451904
--- /dev/null
+++ b/docs/usage/language-constraints-and-upgrading.md
@@ -0,0 +1,106 @@
+# Language constraints and upgrading
+
+## Package releases have language constraints
+
+Many ecosystems have the concept where each release of a package has its own language "constraint".
+For example, an npm package may support Node.js 18 and 20 in its `v1` releases and Node.js 20 and 22 from `v2.0.0` onwards.
+
+In an ideal scenario:
+
+- Package files allow a project to show its supported language constraints, and
+- Package registries allow packages to show the supported language constraints per release
+
+## Restricting upgrades to compatible releases
+
+By default Renovate _does not_ apply language constraints to upgrades.
+This means Renovate will propose "any" stable upgrade.
+Renovate will _not_ check if the language version you're using actually supports that upgrade.
+In certain ecosystems, changes to language constraints are made with a major release, and are documented in the release notes.
+So Renovate's default behavior may be okay in those ecosystems.
+For other ecosystems Renovate's default behavior may seem _wrong_.
+
+As a Renovate user, you can opt into strict compatibility filtering by setting `constraintsFiltering=strict`.
+Before you set `constraintsFiltering=strict`, you should:
+
+- understand the limitations of this setting
+- understand why `constraintsFiltering=strict` is _not_ the default behavior
+
+Please keep reading to learn more.
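+
+In config terms, opting in is just this one setting (a minimal sketch showing only the option discussed above):
+
+```json title="Opting into strict constraints filtering"
+{
+  "constraintsFiltering": "strict"
+}
+```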
+
+## Language constraint updating
+
+The first challenge is that Renovate may not yet support the ability to update your language constraints in an automated manner, and even when it does, users may not realize how many updates depend on it.
+
+For example: a Node.js project has set its `engines` field to `"node": "^18.0.0 || ^20.0.0"`.
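+
+In a `package.json`, that `engines` declaration would look roughly like this (a minimal sketch, not a complete manifest):
+
+```json title="Example engines field"
+{
+  "engines": {
+    "node": "^18.0.0 || ^20.0.0"
+  }
+}
+```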
+
+Should Renovate _skip_ Node.js `v21` because it is a non-LTS release?
+When Node.js `v22` releases, should Renovate add it to your `engines`, or wait until `v22` becomes the LTS version?
+When Node.js `v18` is EOL, should Renovate drop it from the `engines` field?
+
+Renovate can not guess what users want.
+Users have strong and different opinions on what Renovate should do for each example listed above.
+
+Also, even _if_ Renovate guesses right or adds advanced capabilities to make this configurable, users might still wait on any of these "major" upgrades for months.
+If a project waits to create or merge the update to drop Node.js `v18` from `engines`, then they can _not_ upgrade to any new versions of library dependencies.
+Those library dependencies may have dropped support for Node.js `v18` already.
+
+## Strict filtering limitations
+
+Let's go back to the Node.js project which has its `engines` field set to `"node": "^18.0.0 || ^20.0.0"`.
+
+Now also consider a library which sets its `engines` field to `"node": "^18.12.0 || ^20.9.0"` because the library only supports "LTS releases" of Node.js.
+Strictly speaking, this library is _not_ compatible with the project above, because the project has _wider requirements_ for its Node versions.
+This means Renovate holds back any upgrades for it.
+Should Renovate somehow "think" and _assume_ that this narrower `engines` support is actually OK?
+What if the project _already_ used a current version of this library "in a way that's not officially supported"?
+
+A second problem is that if:
+
+- Renovate can _not_ update the language constraints, or
+- a user _ignores_ or does not see the language upgrade
+
+Then the user may not know that many dependencies are out of date, because Renovate is not creating PRs.
+For example: a project may have 10 dependencies, and 8 of those have updates.
+But all 8 dependencies need the project to update its language constraints _first_.
+The project administrator thinks they are up to date, because Renovate is not creating PRs, but 80% of their dependencies are outdated.
+
+In short, users who set `constraintsFiltering=strict` often do not understand how _strict_ that setting is and how many releases it will _filter out_.
+
+## Transitive constraint limitations
+
+Often a library sets language constraints (like the `engines` examples above), and then depends on libraries with _narrower_ constraints, like `"node": "^20.0.0"`.
+In cases like these, Renovate "trusts" the declaration of the library and may create an update, even _with_ strict constraints filtering.
+
+For some package managers, like `npm`, this incompatibility will _not_ be detected or warned about (even during lock file generation), but this may not be a problem for your application.
+Other package managers, like Poetry, may detect and warn about incompatible language constraints during lock file generation, which Renovate reports as an "Artifacts update error".
+
+## Applying constraints through config
+
+You can set language constraints in the Renovate config.
+For example:
+
+```json title="Renovate config with Node.js constraints"
+{
+ "constraints": {
+ "node": "^18.0.0 || >=20.0.0"
+ }
+}
+```
+
+You may need to set constraints in the Renovate config when:
+
+- The package manager of the project does not support constraints declarations, or
+- The project has not declared any constraints, or
+- You want Renovate to use _different_ constraints to what's declared in the _project_
+
+Renovate will _not_ create "update" PRs to update any of these versions once they become outdated, so you must update those by hand.
+For this reason, setting constraints manually in the Renovate config is _undesirable_.
+We prefer to fix problems in Renovate itself, instead of you setting constraints.
+
+## Future Work
+
+Please start, or join, a GitHub Discussion if you are interested in this topic.
+Subtopics include:
+
+- Improving language constraints update automation in package files
+- Improving versioning calculations of "subset" (is range A a subset of range B)
diff --git a/docs/usage/logo-brand-guidelines.md b/docs/usage/logo-brand-guidelines.md
new file mode 100644
index 00000000000000..7acab6425cd537
--- /dev/null
+++ b/docs/usage/logo-brand-guidelines.md
@@ -0,0 +1,36 @@
+# Logo and brand guidelines
+
+This page explains how you may use the Renovate name, logo and branding.
+
+## Do not pretend to be the real Renovate app
+
+Avoid using our name, logo, or branding in a way that causes people to think you are the real Renovate app on a public platform.
+For example: do _not_ call your self-hosted version something like @realrenovatebot on GitHub.
+
+## Do not pretend to be a Renovate developer
+
+Avoid using our name, logo, or branding in a way that causes people to think you are a developer of Renovate.
+
+## Allowed uses of the Renovate name
+
+You are allowed to use the Renovate name:
+
+- to refer to the official Renovate app
+- as a nickname/shorthand, in contexts where it is clear you are referring to your self-hosted version
+
+## Allowed uses of the Renovate logo
+
+You are allowed to use our logo as:
+
+- an icon in your repository readme, that says you are using Renovate
+- part of a badge in your repository readme, that says you are using Renovate
+- an avatar image for your self-hosted version of Renovate, but give your bot a _different_ name
+
+## Allowed uses of the Renovate branding
+
+Do not use our banner images.
+
+## We keep the rights to our logo, name, and branding
+
+You may only use our logo, name and branding as described in this guideline.
+We keep the rights to our logo, name and branding.
diff --git a/docs/usage/modules/.pages b/docs/usage/modules/.pages
index 74686a6979df56..2c965ed561bdd3 100644
--- a/docs/usage/modules/.pages
+++ b/docs/usage/modules/.pages
@@ -1,3 +1,6 @@
title: Renovate Modules
+nav:
+ - Introduction: index.md
+ - ...
order: asc
sort_type: natural
diff --git a/docs/usage/modules/index.md b/docs/usage/modules/index.md
new file mode 100644
index 00000000000000..50f7fe12b8805c
--- /dev/null
+++ b/docs/usage/modules/index.md
@@ -0,0 +1,14 @@
+---
+title: Modules introduction
+---
+
+# Modules introduction
+
+This section covers Renovate's modules. Please select a subsection to learn more.
+
+## Supported modules
+
+- [Datasources](./datasource/index.md)
+- [Managers](./manager/index.md)
+- [Platform](./platform/index.md)
+- [Versioning](./versioning/index.md)
diff --git a/docs/usage/modules/versioning/.pages b/docs/usage/modules/versioning/.pages
new file mode 100644
index 00000000000000..4a59c0f2edb4ed
--- /dev/null
+++ b/docs/usage/modules/versioning/.pages
@@ -0,0 +1,7 @@
+title: Versionings
+nav:
+ - Introduction: index.md
+ - ...
+collapse_single_pages: true
+order: asc
+sort_type: natural
diff --git a/docs/usage/modules/versioning.md b/docs/usage/modules/versioning/index.md
similarity index 100%
rename from docs/usage/modules/versioning.md
rename to docs/usage/modules/versioning/index.md
diff --git a/docs/usage/nuget.md b/docs/usage/nuget.md
index 48d424d2d43037..3547b0ed024e54 100644
--- a/docs/usage/nuget.md
+++ b/docs/usage/nuget.md
@@ -78,15 +78,26 @@ So Renovate behaves like the official NuGet client.
#### v3 feed URL not ending with index.json
-If a `v3` feed URL does not end with `index.json`, you must append `#protocolVersion=3` to the registry URL:
+If a `v3` feed URL does not end with `index.json`, you must specify the version explicitly.
-```json
-{
- "nuget": {
- "registryUrls": ["http://myV3feed#protocolVersion=3"]
+- If the feed is defined in a `NuGet.config` file, set the `protocolVersion` attribute to `3`:
+
+  ```xml
+  <packageSources>
+    <clear />
+    <add key="myV3feed" value="http://myV3feed" protocolVersion="3" />
+  </packageSources>
+  ```
+
+- If the feed is defined via Renovate configuration, append `#protocolVersion=3` to the registry URL:
+
+ ```json
+ {
+ "nuget": {
+ "registryUrls": ["http://myV3feed#protocolVersion=3"]
+ }
}
-}
-```
+ ```
You may need this workaround when you use the JFrog Artifactory.
diff --git a/docs/usage/python.md b/docs/usage/python.md
index 52e8d6a8378d89..01bbe3f723ede0 100644
--- a/docs/usage/python.md
+++ b/docs/usage/python.md
@@ -23,24 +23,6 @@ Legacy versions with the `===` prefix are ignored.
1. Renovate searches for the latest version on [PyPI](https://pypi.org/) to decide if there are upgrades
1. If the source package includes a GitHub URL as its source, and has a "changelog" file _or_ uses GitHub releases, a Release Note will be embedded in the generated PR
-## Alternative file names
-
-For the `pip_requirements` manager, the default file matching regex for `requirements.txt` follows common file name conventions.
-
-It will match `requirements.txt` and `requirements.pip`, and any file in the format `requirements-*.txt` or `requirements-*.pip`, to allow for common filename patterns such as `requirements-dev.txt`.
-
-But Renovate may not find all your files.
-
-You can tell Renovate where to find your file(s) by setting your own `fileMatch` regex:
-
-```json title="Setting a custom fileMatch regex"
-{
- "pip_requirements": {
- "fileMatch": ["my/specifically-named.file", "\\.requirements$"]
- }
-}
-```
-
## Alternate registries
By default, Renovate checks for upgrades on the `pypi.org` registry.
diff --git a/docs/usage/release-notes-for-major-versions.md b/docs/usage/release-notes-for-major-versions.md
new file mode 100644
index 00000000000000..d42962fe917bc0
--- /dev/null
+++ b/docs/usage/release-notes-for-major-versions.md
@@ -0,0 +1,157 @@
+# Release notes for major versions of Renovate
+
+It can be hard to keep track of the changes between major versions of Renovate.
+To help you, we've listed the breaking changes, plus the developer commentary for the latest major releases.
+
+The most recent versions are always at the top of the page.
+This is because recent versions may revert changes made in an older version.
+You also don't have to scroll to the bottom of the page to find the latest release notes.
+
+## Version 37
+
+### Breaking changes for 37
+
+- **npm:** drop explicit lerna support
+
+### Commentary for 37
+
+We switched from the "merge" strategy to the "hunt" strategy to match how Maven works.
+
+Lerna v7 does not need our explicit support anymore, so we dropped it.
+If you're on a version of Lerna before v7, you should prioritize upgrading to v7.
+
+### Link to release notes for 37
+
+[Release notes for `v37` on GitHub](https://github.com/renovatebot/renovate/releases/tag/37.0.0).
+
+## Version 36
+
+### Breaking changes for 36
+
+- `postUpgradeTasks.fileFilters` is now optional and defaults to all files
+- `languages` are now called `categories` instead. Use `matchCategories` in `packageRules`
+- Node v19 is no longer supported
+- **datasource:** `semver-coerced` is now the default versioning
+- **presets:** Preset `config:base` is now called `config:recommended` (will be migrated automatically)
+- remove `BUILDPACK` env support
+- **package-rules:** `matchPackageNames` now matches both `depName` (existing) and `packageName` (new) and warns if only `depName` matches
+- **release-notes:** Release notes won't be fetched early for `commitBody` insertion unless explicitly configured with `fetchReleaseNotes=branch`
+- `dockerImagePrefix` is now replaced by `dockerSidecarImage`
+- `matchPaths` and `matchFiles` are now combined into `matchFileNames`, supporting exact match and glob-only. The "any string match" functionality of `matchPaths` is now removed
+- **presets:** v25 compatibility for language-based branch prefixes is removed
+- **npm:** Rollback PRs will no longer be enabled by default for npm (they are now disabled by default for all managers)
+- **post-upgrade-tasks:** dot files will now be included by default for all minimatch results
+- **platform/gitlab:** GitLab `gitAuthor` will change from the account's "email" to "commit_email" if they are different
+- **automerge:** Platform automerge will now be chosen by default whenever automerge is enabled
+- Post upgrade templating is now allowed by default, as long as the post upgrade task command is itself already allowed
+- Official Renovate Docker images now use the "slim" approach with `binarySource=install` by default. e.g. `renovate/renovate:latest` is the slim image, not full
+- The "full" image is now available via the tag `full`, e.g. `renovate/renovate:37-full`, and defaults to `binarySource=global` (no dynamic installs)
+- Third party tools in the full image have been updated to latest/LTS major version
+
+### Commentary for 36
+
+If you're self-hosting Renovate, pay particular attention to:
+
+- Do you want to run the full, or slim versions of the image? We have switched the defaults (latest is now slim, not full)
+- Have you configured `dockerImagePrefix`? If so then you need to use `dockerSidecarImage` instead
+- If you're using `config:base` in your `onboardingConfig` then switch to `config:recommended`
+- `gitAuthor` may change if you're on GitLab and have a different commit email for your bot account. If so then configure `gitIgnoredAuthors` with the old email
+
+### Link to release notes for 36
+
+[Release notes for `v36` on GitHub](https://github.com/renovatebot/renovate/releases/tag/36.0.0).
+
+## Version 35
+
+### Breaking changes for 35
+
+- require NodeJS v18.12+ ([#20838](https://github.com/renovatebot/renovate/pull/20838))
+- **config:** Forked repos will now be processed automatically if `autodiscover=false`. `includeForks` is removed and replaced by new option `forkProcessing`
+- Internal checks such as `renovate/stability-days` will no longer count as passing/green, meaning that actions such as `automerge` won't occur if the only checks are Renovate internal ones. Set `internalChecksAsSuccess=true` to restore existing behavior
+- **versioning:** default versioning is now `semver-coerced`, instead of `semver`
+- **datasource/github-releases:** Regex Manager configurations relying on the github-release data-source with digests will have different digest semantics. The digest will now always correspond to the underlying Git SHA of the release/version. The old behavior can be preserved by switching to the github-release-attachments datasource
+- **versioning:** bump short ranges to version ([#20494](https://github.com/renovatebot/renovate/pull/20494))
+- **config:** `containerbase/` account used for sidecar containers instead of `renovate/`
+- **go:** Renovate will now use go's default `GOPROXY` settings. To avoid using the public proxy, configure `GOPROXY=direct`
+- **datasource/npm:** Package cache will include entries for up to 24 hours after the last lookup. Set `cacheHardTtlMinutes=0` to revert to existing behavior
+- **config:** Renovate now defaults to applying hourly and concurrent PR limits. To revert to unlimited, configure them back to `0`
+- **config:** Renovate will now default to updating locked dependency versions. To revert to previous behavior, configure `rangeStrategy=replace`
+- **config:** PyPI releases will no longer be filtered by default based on `constraints.python` compatibility. To retain existing functionality, set `constraintsFiltering=strict`
+
+### Commentary for 35
+
+Most of these changes will be invisible to the majority of users.
+They may be "breaking" (a change of behavior) but are good changes of defaults to make.
+
+The biggest change is defaulting `rangeStrategy=auto` to use `update-lockfile` instead of `replace`, which impacts anyone using the recommended `config:base`.
+This will mean that you start seeing some "lockfile-only" PRs for in-range updates, such as updating `package-lock.json` when a range exists in `package.json`.
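+
+If you want the previous behavior back, the breaking-changes list above notes that you can configure `rangeStrategy=replace`, for example:
+
+```json title="Reverting to the previous rangeStrategy behavior"
+{
+  "rangeStrategy": "replace"
+}
+```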
+
+### Link to release notes for 35
+
+[Release notes for `v35` on GitHub](https://github.com/renovatebot/renovate/releases/tag/35.0.0).
+
+## Version 34
+
+### Breaking changes for 34
+
+- Revert `branchNameStrict` to `false`
+
+### Commentary for 34
+
+Here comes v34 hot on the heels of v33.
+We decided to issue another breaking change to revert one of the breaking changes in v33.
+
+If you are upgrading from v32 to v34 then it means that the setting for `branchNameStrict` remains as `false` and you don't need to worry about that.
+
+If you already upgraded from v32 to v33 then you have a decision to make first:
+
+- set `branchNameStrict` to `true` (like in v33),
+- or let it be set back to `false` (like in v32).
+
+Strict branch naming meant that all special characters other than letters, numbers and hyphens were converted to hyphens and then deduplicated, e.g. a branch which in v32 was like `renovate/abc.def-2.x` would become `renovate/abc-def-2-x` in v33.
+If you prefer to revert to the old way, then that will happen automatically in v34.
+If you prefer to keep the way in v33 because you already had a bunch of PRs closed and reopened due to branch names, and don't want to do that again, then add `branchNameStrict: false` to your bot config or your shared config before updating to v34.
+
+Apologies to anyone negatively affected by this v33 change.
+
+### Link to release notes for 34
+
+[Release notes for `v34` on GitHub](https://github.com/renovatebot/renovate/releases/tag/34.0.0).
+
+## Version 33
+
+### Breaking changes for 33
+
+- Node 16 is the required runtime for Renovate
+- [NOTE: This was reverted in `v34`] **config:** `branchNameStrict` default value is now `true`
+- **config:** `internalChecksFilter` default value is now `"strict"`
+- **config:** `ignoreScripts` default value is now `true`. If `allowScripts=true` in global config, `ignoreScripts` must be set to `false` in repo config if you want all repos to run scripts
+- **config:** `autodiscover` filters can no longer include commas
+- **config:** boolean variables must be `true` or `false` when configured in environment variables, and errors will be thrown for invalid values. Previously, invalid values were ignored and treated as `false`
+- **datasource/go:** `git-tags` datasource will be used as the fallback instead of `github-tags` if a go package's host type is unknown
+- **jsonnet-bundler:** `depName` now uses the "absolute import" format (e.g. `bar`-> `github.com/foo/bar/baz-wow`)
+- **azure-pipelines:** azure-pipelines manager is now disabled by default
+- **github:** No longer necessary to configure forkMode. Forking mode is now experimental
+- Users of `containerbase` images (such as official Renovate images) will now have dynamic package manager installs enabled by default
+- Dependencies are no longer automatically pinned if `rangeStrategy=auto`, pinning must be opted into using `rangeStrategy=pin`
+
+### Commentary for 33
+
+This release contains some changes of default values/behavior:
+
+- `internalChecksFilter` will now default to `strict`, meaning that updates will be withheld by default when internal status checks are pending. This should reduce the number of "non-actionable" Pull Requests you get
+- `azure-pipelines` manager is disabled by default, because its primary datasource can unfortunately suggest updates which aren't yet installable. Users should opt into this manager once they know the risks
+- `binarySource=install` will now be used instead of `global` whenever Renovate is run within a "containerbase" image. This means dynamic installation of most package managers and languages
+- Dependencies will no longer be pinned by default if `rangeStrategy=auto`. While we recommend pinning dependencies, we decided users should opt into this more explicitly
+
+And two major features!
+
+- AWS CodeCommit platform support
+- OpenTelemetry support
+
+Both the above are considered "experimental".
+Please test them out and let us know your feedback - both positive and negative - so that we can progress them to general availability.
+
+### Link to release notes for 33
+
+[Release notes for `v33` on GitHub](https://github.com/renovatebot/renovate/releases/tag/33.0.0).
diff --git a/docs/usage/security-and-permissions.md b/docs/usage/security-and-permissions.md
index 87eab0e83f2040..4759e7710e679a 100644
--- a/docs/usage/security-and-permissions.md
+++ b/docs/usage/security-and-permissions.md
@@ -65,6 +65,107 @@ For example, if you have an `npm` package and do not configure a private registr
You could avoid this by configuring private registries but such registries need to query public registries anyway.
We do not know of any public registries which reverse lookup IP addresses to associate companies with packages.
+#### Security awareness for self-hosted Renovate instances
+
+##### Introduction
+
+Before you start self-hosting Renovate you must understand the security implications associated with monitoring and updating repositories.
+The process that Renovate uses to update dependencies runs under the same user context as the Renovate process itself.
+This also means the process has the same level of access to information and resources as the user context!
+
+##### Trusting Repository Developers
+
+All self-hosted Renovate instances must operate under a trust relationship with the developers of the monitored repositories.
+This has the following implications:
+
+- Access to information
+- Execution of code
+
+Keep reading to learn more.
+
+###### Access to information
+
+Since the update process runs with the _same_ user privileges as the Renovate process, it inherently has access to the same information and resources.
+This includes sensitive data that may be stored within the environment where Renovate is hosted.
+
+###### Execution of code
+
+In certain scenarios, code from the monitored repository is executed as part of the update process.
+This is particularly true during, for example:
+
+- `postUpgradeTasks`, where scripts specified by the repository are run
+- when a wrapper within the repository is called, like `gradlew`
+
+These scripts can contain arbitrary code.
+This may pose a significant security risk if the repository's integrity is compromised, or if the repository maintainers have malicious intentions.
+
+Because such insider attacks are an inherent and unavoidable risk, the Renovate project will not issue CVEs for such attacks or weaknesses other than in exceptional circumstances.
+
+##### Centralized logging and sensitive information management
+
+Centralized logging is key to monitor and troubleshoot self-hosted Renovate environments.
+But logging may inadvertently capture and expose sensitive information.
+Operations that involve `customEnvVariables`, among others, could expose sensitive data when logging is used.
+
+##### Recommendations
+
+The Renovate maintainers recommend you follow these guidelines.
+
+###### Vet and monitor repositories
+
+_Before_ integrating a repository with your self-hosted Renovate instance, thoroughly vet the repository for security and trustworthiness.
+This means that you should review the:
+
+- repository's ownership
+- contribution history
+- open issues
+- open pull requests
+
+###### Limit permissions
+
+Configure the environment running Renovate with the principle of least privilege.
+Ensure that the Renovate process has only the permissions needed to perform its tasks and no more.
+This reduces the impact of any malicious code execution.
+
+###### Regularly review post-upgrade tasks
+
+Regularly review the actions taken by `postUpgradeTasks` to make sure they do not execute unnecessary or risky operations.
+Consider implementing a review process for changes to these tasks within repositories.
+
+###### Use security tools
+
+Employ security tools and practices, like code scanning and vulnerability assessments, on the Renovate configuration _and_ the repositories Renovate manages.
+This helps identify potentially malicious code before it is executed.
+
+###### Securing environment variables
+
+When configuring `customEnvVariables`: _always_ use Renovate's secrets management syntax (`{{ secrets.VAR_NAME }}`) to reference sensitive variables securely.
+This makes sure that sensitive data is not exposed as plain text.
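+
+As a sketch (the variable and secret names here are made up), that looks like:
+
+```json title="Referencing a secret from customEnvVariables"
+{
+  "secrets": {
+    "EXAMPLE_REGISTRY_TOKEN": "the-real-token-value"
+  },
+  "customEnvVariables": {
+    "EXAMPLE_REGISTRY_TOKEN": "{{ secrets.EXAMPLE_REGISTRY_TOKEN }}"
+  }
+}
+```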
+
+###### Logging infrastructure security
+
+Ensure that the logging infrastructure is configured to handle logs as sensitive data.
+This includes measures like:
+
+- log encryption
+- access controls to restrict log viewing to authorized personnel only
+- secure storage and transmission of log data
+
+###### Log review and redaction processes
+
+Implement rigorous log review mechanisms to regularly scan for and redact sensitive information that might be logged inadvertently.
+Automated tools can assist in identifying patterns indicative of sensitive data, such as credentials or personal information, enabling timely redaction or alerting.
+
+###### Stay informed
+
+Keep abreast of updates and security advisories related to Renovate itself.
+Apply updates promptly to ensure that your self-hosted instances get the latest security enhancements and bug fixes.
+
+#### Conclusion
+
+The flexibility and power of self-hosting Renovate also means you must take steps to manage your security.
+By understanding the risks associated with repository management and taking steps to mitigate those risks, organizations can maintain a secure and efficient development workflow.
+
### Hosted/SaaS (the Mend Renovate App)
Users of the Mend Renovate App fall under [Mend's Terms of Service](https://www.mend.io/terms-of-service/) and Privacy Policy.
diff --git a/docs/usage/self-hosted-configuration.md b/docs/usage/self-hosted-configuration.md
index 21d1c8e941c292..9f9a2f0552d4d8 100644
--- a/docs/usage/self-hosted-configuration.md
+++ b/docs/usage/self-hosted-configuration.md
@@ -213,6 +213,38 @@ For example:
}
```
+
+!!! note
+    On Gitea/Forgejo, you can't use `autodiscoverTopics` together with `autodiscoverNamespaces` because neither platform supports this combination.
+    Topics are preferred, and `autodiscoverNamespaces` will be ignored when you configure `autodiscoverTopics` on Gitea/Forgejo.
+
+## autodiscoverProjects
+
+You can use this option to filter the list of autodiscovered repositories by project names.
+This feature is useful for users who want Renovate to only work on repositories within specific projects or exclude certain repositories from being processed.
+
+```json title="Example for Bitbucket"
+{
+ "platform": "bitbucket",
+ "autodiscoverProjects": ["a-group", "!another-group/some-subgroup"]
+}
+```
+
+The `autodiscoverProjects` config option takes an array of minimatch-compatible globs or RE2-compatible regex strings.
+For more details on this syntax see Renovate's [string pattern matching documentation](./string-pattern-matching.md).
+
+## autodiscoverRepoOrder
+
+The order method for the autodiscover server-side repository search.
+
+> If multiple `autodiscoverTopics` are used, the resulting order will be per topic, not global.
+
+## autodiscoverRepoSort
+
+The sort method for the autodiscover server-side repository search.
+
+> If multiple `autodiscoverTopics` are used, the resulting order will be per topic, not global.
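+
+A sketch combining both options (the values shown are examples only; which values your platform accepts depends on its API):
+
+```json title="Example autodiscover sort and order"
+{
+  "autodiscover": true,
+  "autodiscoverRepoSort": "updated",
+  "autodiscoverRepoOrder": "desc"
+}
+```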
+
## autodiscoverTopics
Some platforms allow you to add tags, or topics, to repositories and retrieve repository lists by specifying those topics.
@@ -555,6 +587,12 @@ In practice, it is implemented by converting the `force` configuration into a `p
This is set to `true` by default, meaning that any settings (such as `schedule`) take maximum priority even against custom settings existing inside individual repositories.
It will also override any settings in `packageRules`.
+## forkCreation
+
+This configuration lets you disable the runtime forking of repositories when running in "fork mode".
+
+Usually you will want to keep this as the default `true`, and only set it to `false` if you have some out-of-band process to handle the creation of forks.
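+
+For example, to disable runtime fork creation (a sketch; only do this if forks are created by some other process as described above):
+
+```json title="Disabling fork creation"
+{
+  "forkCreation": false
+}
+```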
+
## forkOrg
This configuration option lets you choose an organization you want repositories forked into when "fork mode" is enabled.
@@ -577,6 +615,10 @@ If this value is configured then Renovate:
Renovate will then create branches on the fork and opens Pull Requests on the parent repository.
+
+!!! note
+ Forked repositories will always be skipped when `forkToken` is set, even if `includeForks` is true.
+
## gitNoVerify
Controls when Renovate passes the `--no-verify` flag to `git`.
@@ -636,12 +678,74 @@ Use the `extends` field instead of this if, for example, you need the ability fo
When Renovate resolves `globalExtends` it does not fully process the configuration.
This means that Renovate does not have the authentication it needs to fetch private things.
+## httpCacheTtlDays
+
+This option sets the number of days that Renovate will cache HTTP responses.
+The default value is 90 days.
+A value of `0` means no caching.
+
+
+!!! warning
+ When you set `httpCacheTtlDays` to `0`, Renovate will remove the cached HTTP data.
+
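+For example, to keep cached HTTP responses for 30 days instead of the default (a sketch; pick a TTL that suits your setup):
+
+```json title="Example httpCacheTtlDays"
+{
+  "httpCacheTtlDays": 30
+}
+```
+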
## includeMirrors
By default, Renovate does not autodiscover repositories that are mirrors.
Change this setting to `true` to include repositories that are mirrors as Renovate targets.
+## inheritConfig
+
+When you enable this option, Renovate will look for the `inheritConfigFileName` file in the `inheritConfigRepoName` repository before processing a repository, and read this in as config.
+
+If the repository is in a nested organization or group on a supported platform such as GitLab, for example `topGroup/nestedGroup/projectName`, then Renovate will look in `topGroup/nestedGroup/renovate-config`.
+
+If `inheritConfig` is `true` but the inherited config file does _not_ exist then Renovate will proceed without warning.
+If the file exists but cannot be parsed, then Renovate will raise a config warning issue and abort the job.
+
+The inherited config may include all valid repository config and these config options:
+
+- `bbUseDevelopmentBranch`
+- `onboarding`
+- `onboardingBranch`
+- `onboardingCommitMessage`
+- `onboardingConfig`
+- `onboardingConfigFileName`
+- `onboardingNoDeps`
+- `onboardingPrTitle`
+- `onboardingRebaseCheckbox`
+- `requireConfig`
+
+
+!!! note
+ The above list is prepared manually and may become out of date.
+ Consult the self-hosted configuration docs and look for `inheritConfigSupport` values there for the definitive list.
+
+This way organizations can change/control the default behavior, like whether configs are required and how repositories are onboarded.
+
+We disabled `inheritConfig` in the Mend Renovate App to avoid wasting millions of API calls per week.
+This is because each `404` response from the GitHub API due to a missing org inherited config counts as a used API call.
+We will add a smart/dynamic approach in the future, so that we can selectively enable `inheritConfig` per organization.
+
+## inheritConfigFileName
+
+Change this setting if you want Renovate to look for a different file name within the `inheritConfigRepoName` repository.
+You may use nested files, for example: `"some-dir/config.json"`.
+
+## inheritConfigRepoName
+
+Change this setting if you want Renovate to look in an alternative repository for the inherited config.
+The repository must be on the same platform and endpoint, and Renovate's token must have `read` permissions to the repository.
+
+## inheritConfigStrict
+
+By default Renovate will silently (debug log message only) ignore cases where `inheritConfig=true` but no inherited config is found.
+When you set `inheritConfigStrict=true` then Renovate will abort the run and raise a config error if Renovate can't find the inherited config.
+
+
+!!! warning
+ Only set this config option to `true` if _every_ organization has an inherited config file _and_ you want to make sure Renovate _always_ uses that inherited config.
+
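+Putting the `inheritConfig*` options together, a global config might look like this sketch (the repository and file names are hypothetical):
+
+```json title="Example inherited config setup"
+{
+  "inheritConfig": true,
+  "inheritConfigRepoName": "my-org/renovate-config",
+  "inheritConfigFileName": "org-inherited-config.json",
+  "inheritConfigStrict": true
+}
+```
+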
## logContext
`logContext` is included with each log entry only if `logFormat="json"` - it is not included in the pretty log output.
@@ -651,6 +755,39 @@ If left as default (null), a random short ID will be selected.
## logFileLevel
+## mergeConfidenceDatasources
+
+This feature is applicable only if you have an access token for Mend's Merge Confidence API.
+
+If set, Renovate will query the merge-confidence JSON API only for datasources that are part of this list.
+Otherwise, it queries all the supported datasources (check default value).
+
+Example:
+
+```js
+module.exports = {
+ mergeConfidenceDatasources: ['npm'],
+};
+```
+
+## mergeConfidenceEndpoint
+
+This feature is applicable only if you have an access token for Mend's Merge Confidence API.
+
+If set, Renovate will retrieve Merge Confidence data by querying this API.
+Otherwise, it will use the default URL.
+
+If you use the Mend Renovate Enterprise Edition (Renovate EE) and:
+
+- have a static merge confidence token that you set via `MEND_RNV_MC_TOKEN`
+- _or_ set `MEND_RNV_MC_TOKEN` to `auto`
+
+Then you must set this variable at the _server_ and the _workers_.
+
+But if you have specified the token as a [`matchConfidence`](configuration-options.md#matchconfidence) `hostRule`, you only need to set this variable at the _workers_.
+
+This feature is in private beta.
+
## migratePresets
Use this if you have repositories that extend from a particular preset, which has now been renamed or removed.
@@ -773,7 +910,7 @@ This private key is used to decrypt config files.
The corresponding public key can be used to create encrypted values for config files.
If you want a UI to encrypt values you can put the public key in a webpage similar to .
-To create the key pair with GPG use the following commands:
+To create the PGP key pair with GPG use the following commands:
- `gpg --full-generate-key` and follow the prompts to generate a key. Name and email are not important to Renovate, and do not configure a passphrase. Use a 4096bit key.
@@ -825,6 +962,32 @@ uid Renovate Bot
sub rsa4096 2021-09-10 [E]
```
+
+!!! note
+ If you use GnuPG `v2.4` (or newer) to generate the key, then you must disable `AEAD` preferences.
+ This is needed to allow Renovate to decrypt the encrypted values.
+
+```bash
+❯ gpg --edit-key renovate@whitesourcesoftware.com
+gpg> showpref
+[ultimate] (1). Renovate Bot
+ Cipher: AES256, AES192, AES, 3DES
+ AEAD: OCB, EAX
+ Digest: SHA512, SHA384, SHA256, SHA224, SHA1
+ Compression: ZLIB, BZIP2, ZIP, Uncompressed
+ Features: MDC, AEAD, Keyserver no-modify
+
+gpg> setpref AES256 AES192 AES 3DES SHA512 SHA384 SHA256 SHA224 SHA1 ZLIB BZIP2 ZIP
+Set preference list to:
+ Cipher: AES256, AES192, AES, 3DES
+ AEAD:
+ Digest: SHA512, SHA384, SHA256, SHA224, SHA1
+ Compression: ZLIB, BZIP2, ZIP, Uncompressed
+ Features: MDC, Keyserver no-modify
+Really update the preferences? (y/N) y
+gpg> save
+```
+
- Copy the key ID from the output (`794B820F34B34A8DF32AADB20649CEXAMPLEONLY` in the above example) or run `gpg --list-secret-keys` if you forgot to take a copy
@@ -834,7 +997,7 @@ sub rsa4096 2021-09-10 [E]
The private key should then be added to your Renovate Bot global config (either using `privateKeyPath` or exporting it to the `RENOVATE_PRIVATE_KEY` environment variable).
The public key can be used to replace the existing key in for your own use.
-Any encrypted secrets using GPG must have a mandatory organization/group scope, and optionally can be scoped for a single repository only.
+Any PGP-encrypted secrets must have a mandatory organization/group scope, and optionally can be scoped for a single repository only.
The reason for this is to avoid "replay" attacks where someone could learn your encrypted secret and then reuse it in their own Renovate repositories.
Instead, with scoped secrets it means that Renovate ensures that the organization and optionally repository values encrypted with the secret match against the running repository.
@@ -849,11 +1012,15 @@ Instead, with scoped secrets it means that Renovate ensures that the organizatio
Use this field if you need to perform a "key rotation" and support more than one keypair at a time.
Decryption with this key will be tried after `privateKey`.
-If you are migrating from the legacy public key encryption approach to use GPG, then move your legacy private key from `privateKey` to `privateKeyOld` and then put your new GPG private key in `privateKey`.
-Doing so will mean that Renovate will first try to decrypt using the GPG key but fall back to the legacy key and try that next.
+If you are migrating from the legacy public key encryption approach to use a PGP key, then move your legacy private key from `privateKey` to `privateKeyOld` and then put your new PGP private key in `privateKey`.
+Doing so will mean that Renovate will first try to decrypt using the PGP key but fall back to the legacy key and try that next.
You can remove the `privateKeyOld` config option once all the old encrypted values have been migrated, or if you no longer want to support the old key and let the processing of repositories fail.
+
+!!! note
+ Renovate now logs a warning whenever repositories use non-PGP encrypted config variables.
+
## privateKeyPath
Used as an alternative to `privateKey`, if you want the key to be read from disk instead.
@@ -884,6 +1051,25 @@ For TLS/SSL-enabled connections, use the rediss prefix
Example URL structure: `rediss://[[username]:[password]]@localhost:6379/0`.
+## reportPath
+
+`reportPath` describes the location where the report is written to.
+
+If [`reportType`](#reporttype) is set to `file`, then set `reportPath` to a filepath.
+For example: `/foo/bar.json`.
+
+If the value `s3` is used in [`reportType`](#reporttype), then use a S3 URI.
+For example: `s3://bucket-name/key-name`.
+
+## reportType
+
+Defines how the report is exposed:
+
+- If unset, no report will be provided, though the debug logs will still contain partial information from the report
+- `logging` The report will be printed as part of the log messages on `INFO` level
+- `file` The report will be written to a path provided by [`reportPath`](#reportpath)
+- `s3` The report is pushed to an S3 bucket defined by [`reportPath`](#reportpath). This option reuses [`RENOVATE_X_S3_ENDPOINT`](./self-hosted-experimental.md#renovate_x_s3_endpoint) and [`RENOVATE_X_S3_PATH_STYLE`](./self-hosted-experimental.md#renovate_x_s3_path_style)
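+
+For example, to write the report to a local file (the path shown is just an illustration):
+
+```json title="Example report configuration"
+{
+  "reportType": "file",
+  "reportPath": "/tmp/renovate-report.json"
+}
+```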
+
## repositories
Elements in the `repositories` array can be an object if you wish to define more settings:
@@ -908,17 +1094,9 @@ JSON files will be stored inside the `cacheDir` beside the existing file-based p
}
```
-
-!!! note
- [IAM is supported](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/loading-node-credentials-iam.html) when running Renovate within an EC2 instance in an ECS cluster. In this case, no extra environment variables are required.
- Otherwise, the following environment variables should be set for the S3 client to work.
-
-```
- AWS_ACCESS_KEY_ID
- AWS_SECRET_ACCESS_KEY
- AWS_SESSION_TOKEN
- AWS_REGION
-```
+Renovate uses the [AWS SDK for JavaScript V3](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/welcome.html) to connect to the S3 instance.
+Therefore, Renovate supports all the authentication methods supported by the AWS SDK.
+Read more about the default credential provider chain for AWS SDK for JavaScript V3 [here](https://docs.aws.amazon.com/AWSJavaScriptSDK/v3/latest/Package/-aws-sdk-credential-providers/#fromnodeproviderchain).
!!! tip
@@ -991,12 +1169,6 @@ It could then be used in a repository config or preset like so:
Secret names must start with an upper or lower case letter and can have only letters, digits, or underscores.
-## skipInstalls
-
-By default, Renovate will use the most efficient approach to updating package files and lock files, which in most cases skips the need to perform a full module install by the bot.
-If this is set to `false`, then a full installation of modules will be done.
-This is currently applicable to `npm` only, and only used in cases where bugs in `npm` result in incorrect lock files being updated.
-
## token
## unicodeEmoji
@@ -1009,6 +1181,11 @@ For example: `:warning:` will be replaced with `⚠️`.
Some cloud providers offer services to receive metadata about the current instance, for example [AWS Instance metadata](https://docs.aws.amazon.com/AWSEC2/latest/WindowsGuide/ec2-instance-metadata.html) or [GCP VM metadata](https://cloud.google.com/compute/docs/metadata/overview).
You can control if Renovate should try to access these services with the `useCloudMetadataServices` config option.
+## userAgent
+
+If set to any string, Renovate will use this as the `user-agent` it sends with HTTP requests.
+Otherwise, it will default to `RenovateBot/${renovateVersion} (https://github.com/renovatebot/renovate)`.
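+
+A sketch with a made-up value:
+
+```json title="Example custom user agent"
+{
+  "userAgent": "my-company-renovate (+https://renovate.example.com/contact)"
+}
+```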
+
## username
You may need to set a `username` if you:
diff --git a/docs/usage/self-hosted-experimental.md b/docs/usage/self-hosted-experimental.md
index d809b8a5c792c1..1018421c9c7964 100644
--- a/docs/usage/self-hosted-experimental.md
+++ b/docs/usage/self-hosted-experimental.md
@@ -32,51 +32,6 @@ Skipping the check will speed things up, but may result in versions being return
If set to any value, Renovate will always paginate requests to GitHub fully, instead of stopping after 10 pages.
-## `RENOVATE_REUSE_PACKAGE_LOCK`
-
-If set to "false" (string), Renovate will remove any existing `package-lock.json` before trying to update it.
-
-## `RENOVATE_USER_AGENT`
-
-If set to any string, Renovate will use this as the `user-agent` it sends with HTTP requests.
-
-## `RENOVATE_X_AUTODISCOVER_REPO_ORDER`
-
-
-!!! note
- For the Forgejo and Gitea platform only.
-
-The order method for autodiscover server side repository search.
-
-> If multiple `autodiscoverTopics` are used resulting order will be per topic not global.
-
-Allowed values:
-
-- `asc`
-- `desc`
-
-Default value: `asc`.
-
-## `RENOVATE_X_AUTODISCOVER_REPO_SORT`
-
-
-!!! note
- For the Forgejo and Gitea platform only.
-
-The sort method for autodiscover server side repository search.
-
-> If multiple `autodiscoverTopics` are used resulting order will be per topic not global.
-
-Allowed values:
-
-- `alpha`
-- `created`
-- `updated`
-- `size`
-- `id`
-
-Default value: `alpha`.
-
## `RENOVATE_X_DELETE_CONFIG_FILE`
If `true` Renovate tries to delete the self-hosted config file after reading it.
@@ -93,6 +48,10 @@ If set to any value, Renovate will use the Docker Hub API (`https://hub.docker.c
If set to an integer, Renovate will use this as max page number for docker tags lookup on docker registries, instead of the default 20 pages.
This is useful for registries which ignores the `n` parameter in the query string and only return 50 tags per page.
+## `RENOVATE_X_EAGER_GLOBAL_EXTENDS`
+
+Resolve and merge `globalExtends` presets before other global config, instead of after.
+
## `RENOVATE_X_EXEC_GPID_HANDLE`
If set, Renovate will terminate the whole process group of a terminated child process spawned by Renovate.
@@ -131,20 +90,14 @@ Suppress the default warning when a deprecated version of Node.js is used to run
Skip initializing `RE2` for regular expressions and instead use Node-native `RegExp` instead.
-## `RENOVATE_X_MERGE_CONFIDENCE_API_BASE_URL`
-
-If set, Renovate will query this API for Merge Confidence data.
-This feature is in private beta.
-
-## `RENOVATE_X_MERGE_CONFIDENCE_SUPPORTED_DATASOURCES`
+## `RENOVATE_X_NUGET_DOWNLOAD_NUPKGS`
-If set, Renovate will query the merge-confidence JSON API only for datasources that are part of this list.
-The expected value for this environment variable is a JSON array of strings.
+If set to any value, Renovate will download `nupkg` files for determining package metadata.
## `RENOVATE_X_PLATFORM_VERSION`
-If set, Renovate will use this string as GitLab server version instead of checking via the GitLab API.
-This can be useful when you use the GitLab `CI_JOB_TOKEN` to authenticate Renovate.
+Specify this string for Renovate to skip API checks and provide the GitLab/Bitbucket server version directly.
+This is particularly useful with GitLab's `CI_JOB_TOKEN` to authenticate Renovate, or to reduce API calls for Bitbucket.
Read [platform details](modules/platform/gitlab/index.md) to learn why we need the server version on GitLab.
@@ -156,6 +109,10 @@ If set, Renovate will rewrite GitHub Enterprise Server's pagination responses to
!!! note
For the GitHub Enterprise Server platform only.
+## `RENOVATE_X_REPO_CACHE_FORCE_LOCAL`
+
+If set, Renovate will persist repository cache locally after uploading to S3.
+
## `RENOVATE_X_S3_ENDPOINT`
If set, Renovate will use this string as the `endpoint` when instantiating the AWS S3 client.
@@ -172,3 +129,15 @@ Source: [AWS S3 documentation - Interface BucketEndpointInputConfig](https://doc
If set, Renovate will use SQLite as the backend for the package cache.
Don't combine with `redisUrl`; Redis would be preferred over SQLite.
+
+## `RENOVATE_X_SUPPRESS_PRE_COMMIT_WARNING`
+
+Suppress the pre-commit support warning in PR bodies.
+
+## `RENOVATE_X_USE_OPENPGP`
+
+Use `openpgp` instead of `kbpgp` for `PGP` decryption.
+
+## `RENOVATE_X_YARN_PROXY`
+
+Configure global Yarn proxy settings if HTTP proxy environment variables are detected.
diff --git a/docs/usage/templates.md b/docs/usage/templates.md
index 7d942f1a00f453..6468bc05873dc3 100644
--- a/docs/usage/templates.md
+++ b/docs/usage/templates.md
@@ -110,6 +110,12 @@ Returns `true` if at least one expression is `true`.
`{{#if (or isPatch isSingleVersion}}Small update, safer to merge and release.{{else}}Check out the changelog for all versions before merging!{{/if}}`
+### includes
+
+Returns `true` if the value is included in the given list.
+
+`{{#if (includes labels 'dependencies')}}Production Dependencies{{else}}Not Production Dependencies{{/if}}`
+
## Environment variables
By default, you can only access a handful of basic environment variables like `HOME` or `PATH`.
diff --git a/docs/usage/updating-rebasing.md b/docs/usage/updating-rebasing.md
index 58d93f1e08fc75..562dfd56ebb742 100644
--- a/docs/usage/updating-rebasing.md
+++ b/docs/usage/updating-rebasing.md
@@ -12,6 +12,7 @@ Here is a list of the most common cases where Renovate must update/rebase the br
- When a pull request has conflicts due to changes on the base branch
- When you have enabled "Require branches to be up-to-date before merging" on GitHub
- When you have manually told Renovate to rebase when behind the base branch with `"rebaseWhen": "behind-base-branch"`
+- When you have set `keepUpdatedLabel` and included the label on a PR
- When a newer version of the dependency is released
- When you request a manual rebase from the Renovate bot
- When you use `"automerge": true` and `"rebaseWhen": "auto"` on a branch / pr
diff --git a/docs/usage/upgrade-best-practices.md b/docs/usage/upgrade-best-practices.md
index 7852653ef9766d..02cda2732d8cad 100644
--- a/docs/usage/upgrade-best-practices.md
+++ b/docs/usage/upgrade-best-practices.md
@@ -80,7 +80,7 @@ The [GitHub Docs, using third-party actions](https://docs.github.com/en/actions/
We recommend pinning _all_ Actions.
That's why the `helpers:pinGitHubActionDigests` preset pins all GitHub Actions.
-For an in-depth explanation why you should pin your GitHub Actions, read the [Palo Alto Networks blogpost about the GitHub Actions worm](https://www.paloaltonetworks.com/blog/prisma-cloud/github-actions-worm-dependencies/).
+For an in-depth explanation why you should pin your GitHub Actions, read the [Palo Alto Networks blog post about the GitHub Actions worm](https://www.paloaltonetworks.com/blog/prisma-cloud/github-actions-worm-dependencies/).
#### Extends `:pinDevDependencies`
diff --git a/docs/usage/user-stories/swissquote.md b/docs/usage/user-stories/swissquote.md
index e7f187f6bd2e8e..8ffa13e58415bc 100644
--- a/docs/usage/user-stories/swissquote.md
+++ b/docs/usage/user-stories/swissquote.md
@@ -10,7 +10,7 @@
> This article was originally published on [Medium](https://medium.com/swissquote-engineering/how-swissquote-is-keeping-software-dependencies-up-to-date-with-renovate-6246e8b20437) by [Stéphane Goetz](https://onigoetz.ch/), Principal Software Engineer at [Swissquote Bank](https://github.com/swissquote/).
Swissquote has more than 1000 distinct applications running in production.
-They come in many different flavors including services, daemons, and webapps, and their age can be counted from days to more than a decade.
+They come in many different flavors including services, daemons, and web apps, and their age can be counted from days to more than a decade.
While there are many topics of interest when talking about software maintenance, today’s topic is software dependencies.
We’ll see in this article why it’s important to keep them up-to-date and why it’s not as simple as one may think.
@@ -182,7 +182,9 @@ Some features and options we enjoy:
There is an [on-premise option](https://www.mend.io/free-developer-tools/renovate/on-premises/), but you can also use [the Mend Renovate App](https://github.com/marketplace/renovate).
On our side, we’re not using the on-premise but rather a custom scheduler using the open source Docker image.
-## Some stats after two years with Renovate
+## Some stats after four years with Renovate
+
+> The figures here have been updated in November 2023
We started using Renovate Bot in 2019, using the (now deprecated) `renovate/pro` Docker image.
We installed it as a GitHub app and some early adopters started to use it.
@@ -197,18 +199,43 @@ Here is the dashboard for our current scheduler:
We don’t force any team to use Renovate, each team can decide to opt-in and do it for each project separately.
Some statistics:
-- 824 repositories enabled out of about 2000 active repositories
-- 8000 PRs were merged since we installed Renovate
+- 857 repositories enabled out of about 2000 active repositories
+- 11000 PRs were merged since we installed Renovate
- 239 PRs were merged last month
- 2 SSDs died on our Renovate machine with the number of projects to clone again and again
+### How does the scheduler work?
+
+The scheduler is a Node.js application that manages an in-memory queue and starts Docker containers that run Renovate.
+Our custom scheduler application regularly sends data points to our InfluxDB database, which we then display in Grafana.
+
+Here is how it works:
+
+*(A diagram of the scheduler workflow appears here in the original article.)*
+
+All the information on the dashboard you saw above is created from three measurements:
+
+1. Queue: Every 5 minutes, we send the current queue size and the number of jobs currently running
+1. Webhook: When receiving a webhook request from GitHub, we send a data point with the processing time for that item
+1. Runs: After each run, we send a data point on the run duration, success, and number of PRs created/updated/merged/closed
+
+The queue is filled by webhooks _or_ by re-queueing all repositories at regular intervals.
+For each repository, we start a container from the Renovate Docker image and pipe its logs to a file.
+This allows us to run ten workers in parallel.
+We could technically run more workers, but decided against it to avoid hammering our GitHub instance.
+
+You can find more details in this [discussion on the Renovate repository, from November 2023](https://github.com/renovatebot/renovate/discussions/23105#discussioncomment-6366621).
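+
+Below is a very rough TypeScript sketch of that queue/worker idea, for illustration only: the names (`runRenovateContainer`, `MAX_WORKERS`, `enqueue`, `drain`) are invented here, and this is not Swissquote's actual scheduler code.
+
+```typescript
+type Job = { repository: string };
+
+const queue: Job[] = [];
+const MAX_WORKERS = 10; // matches the "ten workers in parallel" mentioned above
+let running = 0;
+
+// Hypothetical helper: start a Renovate Docker container for one repository,
+// pipe its logs to a file, and resolve when the container exits.
+async function runRenovateContainer(job: Job): Promise<void> {
+  console.log(`would renovate ${job.repository}`);
+}
+
+// Webhooks and the periodic full re-queue would both call this.
+function enqueue(repository: string): void {
+  if (!queue.some((job) => job.repository === repository)) {
+    queue.push({ repository });
+  }
+  drain();
+}
+
+// Start containers until the worker limit is reached or the queue is empty.
+function drain(): void {
+  while (running < MAX_WORKERS && queue.length > 0) {
+    const job = queue.shift()!;
+    running += 1;
+    runRenovateContainer(job)
+      .catch((err) => console.error(err))
+      .finally(() => {
+        running -= 1;
+        drain();
+      });
+  }
+}
+
+enqueue('swissquote/example-repo');
+```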
+
## The future of Renovate at Swissquote
Not all teams are using Renovate at this stage, as some teams prefer to manually update their dependencies.
diff --git a/jest.config.ts b/jest.config.ts
index cf59e0deb91a89..9aa56f29293e1b 100644
--- a/jest.config.ts
+++ b/jest.config.ts
@@ -1,5 +1,6 @@
import crypto from 'node:crypto';
import os from 'node:os';
+import { env } from 'node:process';
import v8 from 'node:v8';
import { minimatch } from 'minimatch';
import type { JestConfigWithTsJest } from 'ts-jest';
@@ -205,11 +206,7 @@ const config: JestConfig = {
'!lib/**/{__fixtures__,__mocks__,__testutil__,test}/**/*.{js,ts}',
'!lib/**/types.ts',
],
- coveragePathIgnorePatterns: [
- '/node_modules/',
- '/test/',
- '/tools/',
- ],
+ coveragePathIgnorePatterns: getCoverageIgnorePatterns(),
cacheDirectory: '.cache/jest',
collectCoverage: true,
coverageReporters: ci
@@ -450,3 +447,12 @@ process.stderr.write(`Host stats:
Memory: ${(mem / 1024 / 1024 / 1024).toFixed(2)} GB
HeapLimit: ${(stats.heap_size_limit / 1024 / 1024 / 1024).toFixed(2)} GB
`);
+function getCoverageIgnorePatterns(): string[] | undefined {
+ const patterns = ['/node_modules/', '/test/', '/tools/'];
+
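+  // Exclude the legacy decryption module from coverage unless TEST_LEGACY_DECRYPTION=true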
+ if (env.TEST_LEGACY_DECRYPTION !== 'true') {
+ patterns.push('/lib/config/decrypt/legacy.ts');
+ }
+
+ return patterns;
+}
diff --git a/lib/config-validator.ts b/lib/config-validator.ts
index 2a8aff79a338da..3c809bc2ea2e29 100644
--- a/lib/config-validator.ts
+++ b/lib/config-validator.ts
@@ -17,7 +17,7 @@ import {
let returnVal = 0;
async function validate(
- isGlobalConfig: boolean,
+ configType: 'global' | 'repo',
desc: string,
config: RenovateConfig,
strict: boolean,
@@ -37,7 +37,7 @@ async function validate(
}
}
const massagedConfig = massageConfig(migratedConfig);
- const res = await validateConfig(isGlobalConfig, massagedConfig, isPreset);
+ const res = await validateConfig(configType, massagedConfig, isPreset);
if (res.errors.length) {
logger.error(
{ file: desc, errors: res.errors },
@@ -76,7 +76,7 @@ type PackageJson = {
const parsedContent = await getParsedContent(file);
try {
logger.info(`Validating ${file}`);
- await validate(true, file, parsedContent, strict);
+ await validate('global', file, parsedContent, strict);
} catch (err) {
logger.warn({ file, err }, 'File is not valid Renovate config');
returnVal = 1;
@@ -97,7 +97,7 @@ type PackageJson = {
const parsedContent = await getParsedContent(file);
try {
logger.info(`Validating ${file}`);
- await validate(false, file, parsedContent, strict);
+ await validate('repo', file, parsedContent, strict);
} catch (err) {
logger.warn({ file, err }, 'File is not valid Renovate config');
returnVal = 1;
@@ -114,7 +114,7 @@ type PackageJson = {
if (pkgJson.renovate) {
logger.info(`Validating package.json > renovate`);
await validate(
- false,
+ 'repo',
'package.json > renovate',
pkgJson.renovate,
strict,
@@ -124,7 +124,7 @@ type PackageJson = {
logger.info(`Validating package.json > renovate-config`);
for (const presetConfig of Object.values(pkgJson['renovate-config'])) {
await validate(
- false,
+ 'repo',
'package.json > renovate-config',
presetConfig,
strict,
@@ -141,7 +141,7 @@ type PackageJson = {
const file = process.env.RENOVATE_CONFIG_FILE ?? 'config.js';
logger.info(`Validating ${file}`);
try {
- await validate(true, file, fileConfig, strict);
+ await validate('global', file, fileConfig, strict);
} catch (err) {
logger.error({ file, err }, 'File is not valid Renovate config');
returnVal = 1;
diff --git a/lib/config/__snapshots__/decrypt.spec.ts.snap b/lib/config/__snapshots__/decrypt.spec.ts.snap
deleted file mode 100644
index df59cc5018dd7c..00000000000000
--- a/lib/config/__snapshots__/decrypt.spec.ts.snap
+++ /dev/null
@@ -1,7 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`config/decrypt decryptConfig() appends npm token in npmrc 1`] = `
-"foo=bar
-_authToken=abcdef-ghijklm-nopqf-stuvwxyz
-"
-`;
diff --git a/lib/config/decrypt.spec.ts b/lib/config/decrypt.spec.ts
index 30f3a83204b65b..8cde4a0378d6e3 100644
--- a/lib/config/decrypt.spec.ts
+++ b/lib/config/decrypt.spec.ts
@@ -1,11 +1,7 @@
-import { Fixtures } from '../../test/fixtures';
-import { CONFIG_VALIDATION } from '../constants/error-messages';
import { decryptConfig } from './decrypt';
import { GlobalConfig } from './global';
import type { RenovateConfig } from './types';
-const privateKey = Fixtures.get('private.pem');
-const privateKeyPgp = Fixtures.get('private-pgp.pem');
const repository = 'abc/def';
describe('config/decrypt', () => {
@@ -29,184 +25,5 @@ describe('config/decrypt', () => {
expect(res.encrypted).toBeUndefined();
expect(res.a).toBeUndefined();
});
-
- it('handles invalid encrypted type', async () => {
- config.encrypted = 1;
- GlobalConfig.set({ privateKey });
- const res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- });
-
- it('handles invalid encrypted value', async () => {
- config.encrypted = { a: 1 };
- GlobalConfig.set({ privateKey, privateKeyOld: 'invalid-key' });
- await expect(decryptConfig(config, repository)).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- });
-
- it('replaces npm token placeholder in npmrc', async () => {
- GlobalConfig.set({
- privateKey: 'invalid-key',
- privateKeyOld: privateKey,
- }); // test old key failover
- config.npmrc =
- '//registry.npmjs.org/:_authToken=${NPM_TOKEN}\n//registry.npmjs.org/:_authToken=${NPM_TOKEN}\n';
- config.encrypted = {
- npmToken:
- 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
- };
- const res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- expect(res.npmToken).toBeUndefined();
- expect(res.npmrc).toBe(
- '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n',
- );
- });
-
- it('appends npm token in npmrc', async () => {
- GlobalConfig.set({ privateKey });
- config.npmrc = 'foo=bar\n';
- config.encrypted = {
- npmToken:
- 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
- };
- const res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- expect(res.npmToken).toBeUndefined();
- expect(res.npmrc).toMatchSnapshot();
- });
-
- it('decrypts nested', async () => {
- GlobalConfig.set({ privateKey });
- config.packageFiles = [
- {
- packageFile: 'package.json',
- devDependencies: {
- encrypted: {
- branchPrefix:
- 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
- npmToken:
- 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
- },
- },
- },
- 'backend/package.json',
- ];
- // TODO: fix types #22198
- const res = (await decryptConfig(config, repository)) as any;
- expect(res.encrypted).toBeUndefined();
- expect(res.packageFiles[0].devDependencies.encrypted).toBeUndefined();
- expect(res.packageFiles[0].devDependencies.branchPrefix).toBe(
- 'abcdef-ghijklm-nopqf-stuvwxyz',
- );
- expect(res.packageFiles[0].devDependencies.npmToken).toBeUndefined();
- expect(res.packageFiles[0].devDependencies.npmrc).toBe(
- '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n',
- );
- });
-
- it('rejects invalid PGP message', async () => {
- GlobalConfig.set({ privateKey: privateKeyPgp });
- config.encrypted = {
- token:
- 'long-but-wrong-wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
- };
- await expect(decryptConfig(config, repository)).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- config.encrypted = {
- // Missing value
- token:
- 'wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
- };
- await expect(decryptConfig(config, repository)).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- config.encrypted = {
- // Missing org scope
- token:
- 'wcFMAw+4H7SgaqGOAQ//W38A3PmaZnE9XTCHGDQFD52Kz78UYnaiYeAT13cEqYWTwEvQ57B7D7I6i4jCLe7KwkUCS90kyoqd7twD75W/sO70MyIveKnMlqqnpkagQkFgmzMaXXNHaJXEkjzsflTELZu6UsUs/kZYmab7r14YLl9HbH/pqN9exil/9s3ym9URCPOyw/l04KWntdMAy0D+c5M4mE+obv6fz6nDb8tkdeT5Rt2uU+qw3gH1OsB2yu+zTWpI/xTGwDt5nB5txnNTsVrQ/ZK85MSktacGVcYuU9hsEDmSrShmtqlg6Myq+Hjb7cYAp2g4n13C/I3gGGaczl0PZaHD7ALMjI7p6O1q+Ix7vMxipiKMVjS3omJoqBCz3FKc6DVhyX4tfhxgLxFo0DpixNwGbBRbMBO8qZfUk7bicAl/oCRc2Ijmay5DDYuvtkw3G3Ou+sZTe6DNpWUFy6VA4ai7hhcLvcAuiYmLdwPISRR/X4ePa8ZrmSVPyVOvbmmwLhcDYSDlC9Mw4++7ELomlve5kvjVSHvPv9BPVb5sJF7gX4vOT4FrcKalQRPmhNCZrE8tY2lvlrXwV2EEhya8EYv4QTd3JUYEYW5FXiJrORK5KDTnISw+U02nFZjFlnoz9+R6h+aIT1crS3/+YjCHE/EIKvSftOnieYb02Gk7M9nqU19EYL9ApYw4+IjSRgFM3DShIrvuDwDkAwUfaq8mKtr9Vjg/r+yox//GKS3u3r4I3+dfCljA3OwskTPfbSD+huBk4mylIvaL5v8Fngxo979wiLw',
- };
- await expect(decryptConfig(config, repository)).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- config.encrypted = {
- // Impossible to parse
- token:
- 'wcFMAw+4H7SgaqGOAQ//Wa/gHgQdH7tj3LQdW6rWKjzmkYVKZW9EbexJExu4WLaMgEKodlRMilcqCKfQZpjzoiC31J8Ly/x6Soury+lQnLVbtIQ4KWa/uCIz4lXCpPpGNgN2jPfOmdwWBMOcXIT+BgAMxRu3rAmvTtunrkACJ3J92eYNwJhTzp2Azn9LpT7kHnZ64z2SPhbdUgMMhCBwBG5BPArPzF5fdaqa8uUSbKhY0GMiqPXq6Zeq+EBNoPc/RJp2urpYTknO+nRb39avKjihd9MCZ/1d3QYymbRj7SZC3LJhenVF0hil3Uk8TBASnGQiDmBcIXQFhJ0cxavXqKjx+AEALq+kTdwGu5vuE2+2B820/o3lAXR9OnJHr8GodJ2ZBpzOaPrQe5zvxL0gLEeUUPatSOwuLhdo/6+bRCl2wNz23jIjDEFFTmsLqfEHcdVYVTH2QqvLjnUYcCRRuM32vS4rCMOEe0l6p0CV2rk22UZDIPcxqXjKucxse2Sow8ATWiPoIw7zWj7XBLqUKHFnMpPV2dCIKFKBsOKYgLjF4BvKzZJyhmVEPgMcKQLYqeT/2uWDR77NSWH0Cyiwk9M3KbOIMmV3pWh9PiXk6CvumECELbJHYH0Mc+P//BnbDq2Ie9dHdmKhFgRyHU7gWvkPhic9BX36xyldPcnhTgr1XWRoVe0ETGLDPCcqrQ/SUQGrLiujSOgxGu2K/6LDJhi4IKz1/nf7FUSj5eTIDqQiSPP5pXDjlH7oYxXXrHI/aYOCZ5sBx7mOzlEcENIrYblCHO/CYMTWdCJ4Wrftqk7K/A=',
- };
- await expect(decryptConfig(config, repository)).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- config.encrypted = {
- token: 'too-short',
- };
- await expect(decryptConfig(config, repository)).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- });
-
- it('handles PGP org constraint', async () => {
- GlobalConfig.set({ privateKey: privateKeyPgp });
- config.encrypted = {
- token:
- 'wcFMAw+4H7SgaqGOAQ/+Lz6RlbEymbnmMhrktuaGiDPWRNPEQFuMRwwYM6/B/r0JMZa9tskAA5RpyYKxGmJJeuRtlA8GkTw02GoZomlJf/KXJZ95FwSbkXMSRJRD8LJ2402Hw2TaOTaSvfamESnm8zhNo8cok627nkKQkyrpk64heVlU5LIbO2+UgYgbiSQjuXZiW+QuJ1hVRjx011FQgEYc59+22yuKYqd8rrni7TrVqhGRlHCAqvNAGjBI4H7uTFh0sP4auunT/JjxTeTkJoNu8KgS/LdrvISpO67TkQziZo9XD5FOzSN7N3e4f8vO4N4fpjgkIDH/9wyEYe0zYz34xMAFlnhZzqrHycRqzBJuMxGqlFQcKWp9IisLMoVJhLrnvbDLuwwcjeqYkhvODjSs7UDKwTE4X4WmvZr0x4kOclOeAAz/pM6oNVnjgWJd9SnYtoa67bZVkne0k6mYjVhosie8v8icijmJ4OyLZUGWnjZCRd/TPkzQUw+B0yvsop9FYGidhCI+4MVx6W5w7SRtCctxVfCjLpmU4kWaBUUJ5YIQ5xm55yxEYuAsQkxOAYDCMFlV8ntWStYwIG1FsBgJX6VPevXuPPMjWiPNedIpJwBH2PLB4blxMfzDYuCeaIqU4daDaEWxxpuFTTK9fLdJKuipwFG6rwE3OuijeSN+2SLszi834DXtUjQdikHSTQG392+oTmZCFPeffLk/OiV2VpdXF3gGL7sr5M9hOWIZ783q0vW1l6nAElZ7UA//kW+L6QRxbnBVTJK5eCmMY6RJmL76zjqC1jQ0FC10',
- };
- const res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- expect(res.token).toBe('123');
- await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- });
-
- it('handles PGP multi-org constraint', async () => {
- GlobalConfig.set({ privateKey: privateKeyPgp });
- config.encrypted = {
- token:
- 'wcFMAw+4H7SgaqGOAQ//Yk4RTQoLEhO0TKxN2IUBrCi88ts+CG1SXKeL06sJ2qikN/3n2JYAGGKgkHRICfu5dOnsjyFdLJ1XWUrbsM3XgVWikMbrmzD1Xe7N5DsoZXlt4Wa9pZ+IkZuE6XcKKu9whIJ22ciEwCzFwDmk/CBshdCCVVQ3IYuM6uibEHn/AHQ8K15XhraiSzF6DbJpevs5Cy7b5YHFyE936H25CVnouUQnMPsirpQq3pYeMq/oOtV/m4mfRUUQ7MUxvtrwE4lq4hLjFu5n9rwlcqaFPl7I7BEM++1c9LFpYsP5mTS7hHCZ9wXBqER8fa3fKYx0bK1ihCpjP4zUkR7P/uhWDArXamv7gHX2Kj/Qsbegn7KjTdZlggAmaJl/CuSgCbhySy+E55g3Z1QFajiLRpQ5+RsWFDbbI08YEgzyQ0yNCaRvrkgo7kZ1D95rEGRfY96duOQbjzOEqtvYmFChdemZ2+f9Kh/JH1+X9ynxY/zYe/0p/U7WD3QNTYN18loc4aXiB1adXD5Ka2QfNroLudQBmLaJpJB6wASFfuxddsD5yRnO32NSdRaqIWC1x6ti3ZYJZ2RsNwJExPDzjpQTuMOH2jtpu3q7NHmW3snRKy2YAL2UjI0YdeKIlhc/qLCJt9MRcOxWYvujTMD/yGprhG44qf0jjMkJBu7NjuVIMONujabl9b7SUQGfO/t+3rMuC68bQdCGLlO8gf3hvtD99utzXphi6idjC0HKSW/9KzuMkm+syGmIAYq/0L3EFvpZ38uq7z8KzwFFQHI3sBA34bNEr5zpU5OMWg',
- };
- let res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- expect(res.token).toBe('123');
- res = await decryptConfig(config, 'def/ghi');
- expect(res.encrypted).toBeUndefined();
- expect(res.token).toBe('123');
- await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- });
-
- it('handles PGP org/repo constraint', async () => {
- GlobalConfig.set({ privateKey: privateKeyPgp });
- config.encrypted = {
- token:
- 'wcFMAw+4H7SgaqGOAQ//Wp7N0PaDZp0uOdwsc1CuqAq0UPcq+IQdHyKpJs3tHiCecXBHogy4P+rY9nGaUrVneCr4HexuKGuyJf1yl0ZqFffAUac5PjF8eDvjukQGOUq4aBlOogJCEefnuuVxVJx+NRR5iF1P6v57bmI1c+zoqZI/EQB30KU6O1BsdGPLUA/+R3dwCZd5Mbd36s34eYBasqcY9/QbqFcpElXMEPMse3kMCsVXPbZ+UMjtPJiBPUmtJq+ifnu1LzDrfshusSQMwgd/QNk7nEsijiYKllkWhHTP6g7zigvJ46x0h6AYS108YiuK3B9XUhXN9m05Ac6KTEEUdRI3E/dK2dQuRkLjXC8wceQm4A19Gm0uHoMIJYOCbiVoBCH6ayvKbZWZV5lZ4D1JbDNGmKeIj6OX9XWEMKiwTx0Xe89V7BdJzwIGrL0TCLtXuYWZ/R2k+UuBqtgzr44BsBqMpKUA0pcGBoqsEou1M05Ae9fJMF6ADezF5UQZPxT1hrMldiTp3p9iHGfWN2tKHeoW/8CqlIqg9JEkTc+Pl/L9E6ndy5Zjf097PvcmSGhxUQBE7XlrZoIlGhiEU/1HPMen0UUIs0LUu1ywpjCex2yTWnU2YmEwy0MQI1sekSr96QFxDDz9JcynYOYbqR/X9pdxEWyzQ+NJ3n6K97nE1Dj9Sgwu7mFGiUdNkf/SUAF0eZi/eXg71qumpMGBd4eWPtgkeMPLHjvMSYw9vBUfcoKFz6RJ4woG0dw5HOFkPnIjXKWllnl/o01EoBp/o8uswsIS9Nb8i+bp27U6tAHE',
- };
- const res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- expect(res.token).toBe('123');
- await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- });
-
- it('handles PGP multi-org/repo constraint', async () => {
- GlobalConfig.set({ privateKey: privateKeyPgp });
- config.encrypted = {
- token:
- 'wcFMAw+4H7SgaqGOARAAibXL3zr0KZawiND868UGdPpGRo1aVZfn0NUBHpm8mXfgB1rBHaLsP7qa8vxDHpwH9DRD1IyB4vvPUwtu7wmuv1Vtr596tD40CCcCZYB5JjZLWRF0O0xaZFCOi7Z9SqqdaOQoMScyvPO+3/lJkS7zmLllJFH0mQoX5Cr+owUAMSWqbeCQ9r/KAXpnhmpraDjTav48WulcdTMc8iQ/DHimcdzHErLOAjtiQi4OUe1GnDCcN76KQ+c+ZHySnkXrYi/DhOOu9qB4glJ5n68NueFja+8iR39z/wqCI6V6TIUiOyjFN86iVyNPQ4Otem3KuNwrnwSABLDqP491eUNjT8DUDffsyhNC9lnjQLmtViK0EN2yLVpMdHq9cq8lszBChB7gobD9rm8nUHnTuLf6yJvZOj6toD5Yqj8Ibj58wN90Q8CUsBp9/qp0J+hBVUPOx4sT6kM2p6YarlgX3mrIW5c1U+q1eDbCddLjHiU5cW7ja7o+cqlA6mbDRu3HthjBweiXTicXZcRu1o/wy/+laQQ95x5FzAXDnOwQUHBmpTDI3tUJvQ+oy8XyBBbyC0LsBye2c2SLkPJ4Ai3IMR+Mh8puSzVywTbneiAQNBzJHlj5l85nCF2tUjvNo3dWC+9mU5sfXg11iEC6LRbg+icjpqRtTjmQURtciKDUbibWacwU5T/SVAGPXnW7adBOS0PZPIZQcSwjchOdOl0IjzBy6ofu7ODdn2CXZXi8zbevTICXsHvjnW4MAj5oXrStxK3LkWyM3YBOLe7sOfWvWz7n9TM3dHg032navQ',
- };
- let res = await decryptConfig(config, repository);
- expect(res.encrypted).toBeUndefined();
- expect(res.token).toBe('123');
- res = await decryptConfig(config, 'def/def');
- expect(res.encrypted).toBeUndefined();
- expect(res.token).toBe('123');
- await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
- CONFIG_VALIDATION,
- );
- });
});
});
diff --git a/lib/config/decrypt.ts b/lib/config/decrypt.ts
index 4ea8ddcd0ac004..1895b6dc8ff698 100644
--- a/lib/config/decrypt.ts
+++ b/lib/config/decrypt.ts
@@ -1,104 +1,50 @@
-import crypto from 'node:crypto';
import is from '@sindresorhus/is';
-import * as openpgp from 'openpgp';
import { logger } from '../logger';
import { maskToken } from '../util/mask';
import { regEx } from '../util/regex';
import { addSecretForSanitizing } from '../util/sanitize';
import { ensureTrailingSlash } from '../util/url';
+import { tryDecryptKbPgp } from './decrypt/kbpgp';
+import {
+ tryDecryptPublicKeyDefault,
+ tryDecryptPublicKeyPKCS1,
+} from './decrypt/legacy';
+import { tryDecryptOpenPgp } from './decrypt/openpgp';
import { GlobalConfig } from './global';
import { DecryptedObject } from './schema';
import type { RenovateConfig } from './types';
-export async function tryDecryptPgp(
- privateKey: string,
- encryptedStr: string,
-): Promise<string | null> {
- if (encryptedStr.length < 500) {
- // optimization during transition of public key -> pgp
- return null;
- }
- try {
- const pk = await openpgp.readPrivateKey({
- // prettier-ignore
- armoredKey: privateKey.replace(regEx(/\n[ \t]+/g), '\n'), // little massage to help a common problem
- });
- const startBlock = '-----BEGIN PGP MESSAGE-----\n\n';
- const endBlock = '\n-----END PGP MESSAGE-----';
- let armoredMessage = encryptedStr.trim();
- if (!armoredMessage.startsWith(startBlock)) {
- armoredMessage = `${startBlock}${armoredMessage}`;
- }
- if (!armoredMessage.endsWith(endBlock)) {
- armoredMessage = `${armoredMessage}${endBlock}`;
- }
- const message = await openpgp.readMessage({
- armoredMessage,
- });
- const { data } = await openpgp.decrypt({
- message,
- decryptionKeys: pk,
- });
- logger.debug('Decrypted config using openpgp');
- return data;
- } catch (err) {
- logger.debug({ err }, 'Could not decrypt using openpgp');
- return null;
- }
-}
-
-export function tryDecryptPublicKeyDefault(
- privateKey: string,
- encryptedStr: string,
-): string | null {
- let decryptedStr: string | null = null;
- try {
- decryptedStr = crypto
- .privateDecrypt(privateKey, Buffer.from(encryptedStr, 'base64'))
- .toString();
- logger.debug('Decrypted config using default padding');
- } catch (err) {
- logger.debug('Could not decrypt using default padding');
- }
- return decryptedStr;
-}
-
-export function tryDecryptPublicKeyPKCS1(
- privateKey: string,
- encryptedStr: string,
-): string | null {
- let decryptedStr: string | null = null;
- try {
- decryptedStr = crypto
- .privateDecrypt(
- {
- key: privateKey,
- padding: crypto.constants.RSA_PKCS1_PADDING,
- },
- Buffer.from(encryptedStr, 'base64'),
- )
- .toString();
- } catch (err) {
- logger.debug('Could not decrypt using PKCS1 padding');
- }
- return decryptedStr;
-}
-
export async function tryDecrypt(
privateKey: string,
encryptedStr: string,
repository: string,
+ keyName: string,
): Promise<string | null> {
let decryptedStr: string | null = null;
if (privateKey?.startsWith('-----BEGIN PGP PRIVATE KEY BLOCK-----')) {
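+    // RENOVATE_X_USE_OPENPGP switches PGP decryption from kbpgp (default) to openpgp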
- const decryptedObjStr = await tryDecryptPgp(privateKey, encryptedStr);
+ const decryptedObjStr =
+ process.env.RENOVATE_X_USE_OPENPGP === 'true'
+ ? await tryDecryptOpenPgp(privateKey, encryptedStr)
+ : await tryDecryptKbPgp(privateKey, encryptedStr);
if (decryptedObjStr) {
decryptedStr = validateDecryptedValue(decryptedObjStr, repository);
}
} else {
decryptedStr = tryDecryptPublicKeyDefault(privateKey, encryptedStr);
- if (!is.string(decryptedStr)) {
+ if (is.string(decryptedStr)) {
+ logger.warn(
+ { keyName },
+ 'Encrypted value is using deprecated default padding, please change to using PGP encryption.',
+ );
+ } else {
decryptedStr = tryDecryptPublicKeyPKCS1(privateKey, encryptedStr);
+ // istanbul ignore if
+ if (is.string(decryptedStr)) {
+ logger.warn(
+ { keyName },
+ 'Encrypted value is using deprecated PKCS1 padding, please change to using PGP encryption.',
+ );
+ }
}
}
return decryptedStr;
@@ -191,10 +137,20 @@ export async function decryptConfig(
if (privateKey) {
for (const [eKey, eVal] of Object.entries(val)) {
logger.debug('Trying to decrypt ' + eKey);
- let decryptedStr = await tryDecrypt(privateKey, eVal, repository);
+ let decryptedStr = await tryDecrypt(
+ privateKey,
+ eVal,
+ repository,
+ eKey,
+ );
if (privateKeyOld && !is.nonEmptyString(decryptedStr)) {
logger.debug(`Trying to decrypt with old private key`);
- decryptedStr = await tryDecrypt(privateKeyOld, eVal, repository);
+ decryptedStr = await tryDecrypt(
+ privateKeyOld,
+ eVal,
+ repository,
+ eKey,
+ );
}
if (!is.nonEmptyString(decryptedStr)) {
const error = new Error('config-validation');
diff --git a/lib/config/decrypt/kbpgp.spec.ts b/lib/config/decrypt/kbpgp.spec.ts
new file mode 100644
index 00000000000000..008ba46fa2ce08
--- /dev/null
+++ b/lib/config/decrypt/kbpgp.spec.ts
@@ -0,0 +1,132 @@
+import { Fixtures } from '../../../test/fixtures';
+import { CONFIG_VALIDATION } from '../../constants/error-messages';
+import { decryptConfig } from '../decrypt';
+import { GlobalConfig } from '../global';
+import type { RenovateConfig } from '../types';
+import { tryDecryptKbPgp } from './kbpgp';
+
+const privateKey = Fixtures.get('private-pgp.pem', '..');
+const repository = 'abc/def';
+
+describe('config/decrypt/kbpgp', () => {
+ describe('decryptConfig()', () => {
+ let config: RenovateConfig;
+
+ beforeEach(() => {
+ config = {};
+ GlobalConfig.reset();
+ });
+
+ it('returns null for invalid key', async () => {
+ expect(
+ await tryDecryptKbPgp(
+ 'invalid-key',
+ 'wcFMAw+4H7SgaqGOAQ/+Lz6RlbEymbnmMhrktuaGiDPWRNPEQFuMRwwYM6/B/r0JMZa9tskAA5RpyYKxGmJJeuRtlA8GkTw02GoZomlJf/KXJZ95FwSbkXMSRJRD8LJ2402Hw2TaOTaSvfamESnm8zhNo8cok627nkKQkyrpk64heVlU5LIbO2+UgYgbiSQjuXZiW+QuJ1hVRjx011FQgEYc59+22yuKYqd8rrni7TrVqhGRlHCAqvNAGjBI4H7uTFh0sP4auunT/JjxTeTkJoNu8KgS/LdrvISpO67TkQziZo9XD5FOzSN7N3e4f8vO4N4fpjgkIDH/9wyEYe0zYz34xMAFlnhZzqrHycRqzBJuMxGqlFQcKWp9IisLMoVJhLrnvbDLuwwcjeqYkhvODjSs7UDKwTE4X4WmvZr0x4kOclOeAAz/pM6oNVnjgWJd9SnYtoa67bZVkne0k6mYjVhosie8v8icijmJ4OyLZUGWnjZCRd/TPkzQUw+B0yvsop9FYGidhCI+4MVx6W5w7SRtCctxVfCjLpmU4kWaBUUJ5YIQ5xm55yxEYuAsQkxOAYDCMFlV8ntWStYwIG1FsBgJX6VPevXuPPMjWiPNedIpJwBH2PLB4blxMfzDYuCeaIqU4daDaEWxxpuFTTK9fLdJKuipwFG6rwE3OuijeSN+2SLszi834DXtUjQdikHSTQG392+oTmZCFPeffLk/OiV2VpdXF3gGL7sr5M9hOWIZ783q0vW1l6nAElZ7UA//kW+L6QRxbnBVTJK5eCmMY6RJmL76zjqC1jQ0FC10',
+ ),
+ ).toBeNull();
+ });
+
+ it('rejects invalid PGP message', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'long-but-wrong-wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ // Missing value
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ // Missing org scope
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//W38A3PmaZnE9XTCHGDQFD52Kz78UYnaiYeAT13cEqYWTwEvQ57B7D7I6i4jCLe7KwkUCS90kyoqd7twD75W/sO70MyIveKnMlqqnpkagQkFgmzMaXXNHaJXEkjzsflTELZu6UsUs/kZYmab7r14YLl9HbH/pqN9exil/9s3ym9URCPOyw/l04KWntdMAy0D+c5M4mE+obv6fz6nDb8tkdeT5Rt2uU+qw3gH1OsB2yu+zTWpI/xTGwDt5nB5txnNTsVrQ/ZK85MSktacGVcYuU9hsEDmSrShmtqlg6Myq+Hjb7cYAp2g4n13C/I3gGGaczl0PZaHD7ALMjI7p6O1q+Ix7vMxipiKMVjS3omJoqBCz3FKc6DVhyX4tfhxgLxFo0DpixNwGbBRbMBO8qZfUk7bicAl/oCRc2Ijmay5DDYuvtkw3G3Ou+sZTe6DNpWUFy6VA4ai7hhcLvcAuiYmLdwPISRR/X4ePa8ZrmSVPyVOvbmmwLhcDYSDlC9Mw4++7ELomlve5kvjVSHvPv9BPVb5sJF7gX4vOT4FrcKalQRPmhNCZrE8tY2lvlrXwV2EEhya8EYv4QTd3JUYEYW5FXiJrORK5KDTnISw+U02nFZjFlnoz9+R6h+aIT1crS3/+YjCHE/EIKvSftOnieYb02Gk7M9nqU19EYL9ApYw4+IjSRgFM3DShIrvuDwDkAwUfaq8mKtr9Vjg/r+yox//GKS3u3r4I3+dfCljA3OwskTPfbSD+huBk4mylIvaL5v8Fngxo979wiLw',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ // Impossible to parse
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//Wa/gHgQdH7tj3LQdW6rWKjzmkYVKZW9EbexJExu4WLaMgEKodlRMilcqCKfQZpjzoiC31J8Ly/x6Soury+lQnLVbtIQ4KWa/uCIz4lXCpPpGNgN2jPfOmdwWBMOcXIT+BgAMxRu3rAmvTtunrkACJ3J92eYNwJhTzp2Azn9LpT7kHnZ64z2SPhbdUgMMhCBwBG5BPArPzF5fdaqa8uUSbKhY0GMiqPXq6Zeq+EBNoPc/RJp2urpYTknO+nRb39avKjihd9MCZ/1d3QYymbRj7SZC3LJhenVF0hil3Uk8TBASnGQiDmBcIXQFhJ0cxavXqKjx+AEALq+kTdwGu5vuE2+2B820/o3lAXR9OnJHr8GodJ2ZBpzOaPrQe5zvxL0gLEeUUPatSOwuLhdo/6+bRCl2wNz23jIjDEFFTmsLqfEHcdVYVTH2QqvLjnUYcCRRuM32vS4rCMOEe0l6p0CV2rk22UZDIPcxqXjKucxse2Sow8ATWiPoIw7zWj7XBLqUKHFnMpPV2dCIKFKBsOKYgLjF4BvKzZJyhmVEPgMcKQLYqeT/2uWDR77NSWH0Cyiwk9M3KbOIMmV3pWh9PiXk6CvumECELbJHYH0Mc+P//BnbDq2Ie9dHdmKhFgRyHU7gWvkPhic9BX36xyldPcnhTgr1XWRoVe0ETGLDPCcqrQ/SUQGrLiujSOgxGu2K/6LDJhi4IKz1/nf7FUSj5eTIDqQiSPP5pXDjlH7oYxXXrHI/aYOCZ5sBx7mOzlEcENIrYblCHO/CYMTWdCJ4Wrftqk7K/A=',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ token: 'too-short',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP org constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOAQ/+Lz6RlbEymbnmMhrktuaGiDPWRNPEQFuMRwwYM6/B/r0JMZa9tskAA5RpyYKxGmJJeuRtlA8GkTw02GoZomlJf/KXJZ95FwSbkXMSRJRD8LJ2402Hw2TaOTaSvfamESnm8zhNo8cok627nkKQkyrpk64heVlU5LIbO2+UgYgbiSQjuXZiW+QuJ1hVRjx011FQgEYc59+22yuKYqd8rrni7TrVqhGRlHCAqvNAGjBI4H7uTFh0sP4auunT/JjxTeTkJoNu8KgS/LdrvISpO67TkQziZo9XD5FOzSN7N3e4f8vO4N4fpjgkIDH/9wyEYe0zYz34xMAFlnhZzqrHycRqzBJuMxGqlFQcKWp9IisLMoVJhLrnvbDLuwwcjeqYkhvODjSs7UDKwTE4X4WmvZr0x4kOclOeAAz/pM6oNVnjgWJd9SnYtoa67bZVkne0k6mYjVhosie8v8icijmJ4OyLZUGWnjZCRd/TPkzQUw+B0yvsop9FYGidhCI+4MVx6W5w7SRtCctxVfCjLpmU4kWaBUUJ5YIQ5xm55yxEYuAsQkxOAYDCMFlV8ntWStYwIG1FsBgJX6VPevXuPPMjWiPNedIpJwBH2PLB4blxMfzDYuCeaIqU4daDaEWxxpuFTTK9fLdJKuipwFG6rwE3OuijeSN+2SLszi834DXtUjQdikHSTQG392+oTmZCFPeffLk/OiV2VpdXF3gGL7sr5M9hOWIZ783q0vW1l6nAElZ7UA//kW+L6QRxbnBVTJK5eCmMY6RJmL76zjqC1jQ0FC10',
+ };
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP multi-org constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//Yk4RTQoLEhO0TKxN2IUBrCi88ts+CG1SXKeL06sJ2qikN/3n2JYAGGKgkHRICfu5dOnsjyFdLJ1XWUrbsM3XgVWikMbrmzD1Xe7N5DsoZXlt4Wa9pZ+IkZuE6XcKKu9whIJ22ciEwCzFwDmk/CBshdCCVVQ3IYuM6uibEHn/AHQ8K15XhraiSzF6DbJpevs5Cy7b5YHFyE936H25CVnouUQnMPsirpQq3pYeMq/oOtV/m4mfRUUQ7MUxvtrwE4lq4hLjFu5n9rwlcqaFPl7I7BEM++1c9LFpYsP5mTS7hHCZ9wXBqER8fa3fKYx0bK1ihCpjP4zUkR7P/uhWDArXamv7gHX2Kj/Qsbegn7KjTdZlggAmaJl/CuSgCbhySy+E55g3Z1QFajiLRpQ5+RsWFDbbI08YEgzyQ0yNCaRvrkgo7kZ1D95rEGRfY96duOQbjzOEqtvYmFChdemZ2+f9Kh/JH1+X9ynxY/zYe/0p/U7WD3QNTYN18loc4aXiB1adXD5Ka2QfNroLudQBmLaJpJB6wASFfuxddsD5yRnO32NSdRaqIWC1x6ti3ZYJZ2RsNwJExPDzjpQTuMOH2jtpu3q7NHmW3snRKy2YAL2UjI0YdeKIlhc/qLCJt9MRcOxWYvujTMD/yGprhG44qf0jjMkJBu7NjuVIMONujabl9b7SUQGfO/t+3rMuC68bQdCGLlO8gf3hvtD99utzXphi6idjC0HKSW/9KzuMkm+syGmIAYq/0L3EFvpZ38uq7z8KzwFFQHI3sBA34bNEr5zpU5OMWg',
+ };
+ let res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ res = await decryptConfig(config, 'def/ghi');
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP org/repo constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//Wp7N0PaDZp0uOdwsc1CuqAq0UPcq+IQdHyKpJs3tHiCecXBHogy4P+rY9nGaUrVneCr4HexuKGuyJf1yl0ZqFffAUac5PjF8eDvjukQGOUq4aBlOogJCEefnuuVxVJx+NRR5iF1P6v57bmI1c+zoqZI/EQB30KU6O1BsdGPLUA/+R3dwCZd5Mbd36s34eYBasqcY9/QbqFcpElXMEPMse3kMCsVXPbZ+UMjtPJiBPUmtJq+ifnu1LzDrfshusSQMwgd/QNk7nEsijiYKllkWhHTP6g7zigvJ46x0h6AYS108YiuK3B9XUhXN9m05Ac6KTEEUdRI3E/dK2dQuRkLjXC8wceQm4A19Gm0uHoMIJYOCbiVoBCH6ayvKbZWZV5lZ4D1JbDNGmKeIj6OX9XWEMKiwTx0Xe89V7BdJzwIGrL0TCLtXuYWZ/R2k+UuBqtgzr44BsBqMpKUA0pcGBoqsEou1M05Ae9fJMF6ADezF5UQZPxT1hrMldiTp3p9iHGfWN2tKHeoW/8CqlIqg9JEkTc+Pl/L9E6ndy5Zjf097PvcmSGhxUQBE7XlrZoIlGhiEU/1HPMen0UUIs0LUu1ywpjCex2yTWnU2YmEwy0MQI1sekSr96QFxDDz9JcynYOYbqR/X9pdxEWyzQ+NJ3n6K97nE1Dj9Sgwu7mFGiUdNkf/SUAF0eZi/eXg71qumpMGBd4eWPtgkeMPLHjvMSYw9vBUfcoKFz6RJ4woG0dw5HOFkPnIjXKWllnl/o01EoBp/o8uswsIS9Nb8i+bp27U6tAHE',
+ };
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP multi-org/repo constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOARAAibXL3zr0KZawiND868UGdPpGRo1aVZfn0NUBHpm8mXfgB1rBHaLsP7qa8vxDHpwH9DRD1IyB4vvPUwtu7wmuv1Vtr596tD40CCcCZYB5JjZLWRF0O0xaZFCOi7Z9SqqdaOQoMScyvPO+3/lJkS7zmLllJFH0mQoX5Cr+owUAMSWqbeCQ9r/KAXpnhmpraDjTav48WulcdTMc8iQ/DHimcdzHErLOAjtiQi4OUe1GnDCcN76KQ+c+ZHySnkXrYi/DhOOu9qB4glJ5n68NueFja+8iR39z/wqCI6V6TIUiOyjFN86iVyNPQ4Otem3KuNwrnwSABLDqP491eUNjT8DUDffsyhNC9lnjQLmtViK0EN2yLVpMdHq9cq8lszBChB7gobD9rm8nUHnTuLf6yJvZOj6toD5Yqj8Ibj58wN90Q8CUsBp9/qp0J+hBVUPOx4sT6kM2p6YarlgX3mrIW5c1U+q1eDbCddLjHiU5cW7ja7o+cqlA6mbDRu3HthjBweiXTicXZcRu1o/wy/+laQQ95x5FzAXDnOwQUHBmpTDI3tUJvQ+oy8XyBBbyC0LsBye2c2SLkPJ4Ai3IMR+Mh8puSzVywTbneiAQNBzJHlj5l85nCF2tUjvNo3dWC+9mU5sfXg11iEC6LRbg+icjpqRtTjmQURtciKDUbibWacwU5T/SVAGPXnW7adBOS0PZPIZQcSwjchOdOl0IjzBy6ofu7ODdn2CXZXi8zbevTICXsHvjnW4MAj5oXrStxK3LkWyM3YBOLe7sOfWvWz7n9TM3dHg032navQ',
+ };
+ let res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ res = await decryptConfig(config, 'def/def');
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+ });
+});
diff --git a/lib/config/decrypt/kbpgp.ts b/lib/config/decrypt/kbpgp.ts
new file mode 100644
index 00000000000000..8ed18fe1c5069c
--- /dev/null
+++ b/lib/config/decrypt/kbpgp.ts
@@ -0,0 +1,63 @@
+import * as kbpgp from '@renovatebot/kbpgp';
+import { logger } from '../../logger';
+import { regEx } from '../../util/regex';
+
+export async function tryDecryptKbPgp(
+ privateKey: string,
+ encryptedStr: string,
+): Promise<string | null> {
+ if (encryptedStr.length < 500) {
+ // optimization during transition of public key -> pgp
+ return null;
+ }
+ try {
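+    // kbpgp exposes callback-style APIs, so wrap key import and message unboxing in Promises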
+ const pk = await new Promise((resolve, reject) => {
+ kbpgp.KeyManager.import_from_armored_pgp(
+ {
+ armored: privateKey.replace(regEx(/\n[ \t]+/g), '\n'),
+ },
+ (err: Error, pk) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(pk);
+ }
+ },
+ );
+ });
+
+ const ring = new kbpgp.keyring.KeyRing();
+ ring.add_key_manager(pk);
+
+ const startBlock = '-----BEGIN PGP MESSAGE-----\n\n';
+ const endBlock = '\n-----END PGP MESSAGE-----';
+ let armoredMessage = encryptedStr.trim();
+ if (!armoredMessage.startsWith(startBlock)) {
+ armoredMessage = `${startBlock}${armoredMessage}`;
+ }
+ if (!armoredMessage.endsWith(endBlock)) {
+ armoredMessage = `${armoredMessage}${endBlock}`;
+ }
+
+ const data = await new Promise((resolve, reject) => {
+ kbpgp.unbox(
+ {
+ keyfetch: ring,
+ armored: armoredMessage,
+ },
+ (err: Error, literals: any) => {
+ if (err) {
+ reject(err);
+ } else {
+ resolve(literals[0].toString());
+ }
+ },
+ );
+ });
+    logger.debug('Decrypted config using kbpgp');
+ return data as string;
+ } catch (err) {
+    logger.debug({ err }, 'Could not decrypt using kbpgp');
+ return null;
+ }
+}
diff --git a/lib/config/decrypt/legacy.spec.ts b/lib/config/decrypt/legacy.spec.ts
new file mode 100644
index 00000000000000..c36943526e319c
--- /dev/null
+++ b/lib/config/decrypt/legacy.spec.ts
@@ -0,0 +1,97 @@
+import { Fixtures } from '../../../test/fixtures';
+import { CONFIG_VALIDATION } from '../../constants/error-messages';
+import { decryptConfig } from '../decrypt';
+import { GlobalConfig } from '../global';
+import type { RenovateConfig } from '../types';
+
+const privateKey = Fixtures.get('private.pem', '..');
+const repository = 'abc/def';
+
+describe('config/decrypt/legacy', () => {
+ describe('decryptConfig()', () => {
+ let config: RenovateConfig;
+
+ beforeEach(() => {
+ config = {};
+ GlobalConfig.reset();
+ });
+
+ it('handles invalid encrypted type', async () => {
+ config.encrypted = 1;
+ GlobalConfig.set({ privateKey });
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ });
+
+ it('handles invalid encrypted value', async () => {
+ config.encrypted = { a: 1 };
+ GlobalConfig.set({ privateKey, privateKeyOld: 'invalid-key' });
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('replaces npm token placeholder in npmrc', async () => {
+ GlobalConfig.set({
+ privateKey: 'invalid-key',
+ privateKeyOld: privateKey,
+ }); // test old key failover
+ config.npmrc =
+ '//registry.npmjs.org/:_authToken=${NPM_TOKEN}\n//registry.npmjs.org/:_authToken=${NPM_TOKEN}\n';
+ config.encrypted = {
+ npmToken:
+ 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
+ };
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.npmToken).toBeUndefined();
+ expect(res.npmrc).toBe(
+ '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n',
+ );
+ });
+
+ it('appends npm token in npmrc', async () => {
+ GlobalConfig.set({ privateKey });
+ config.npmrc = 'foo=bar\n';
+ config.encrypted = {
+ npmToken:
+ 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
+ };
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.npmToken).toBeUndefined();
+ expect(res.npmrc).toBe(
+ `foo=bar\n_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n`,
+ );
+ });
+
+ it('decrypts nested', async () => {
+ GlobalConfig.set({ privateKey });
+ config.packageFiles = [
+ {
+ packageFile: 'package.json',
+ devDependencies: {
+ encrypted: {
+ branchPrefix:
+ 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
+ npmToken:
+ 'FLA9YHIzpE7YetAg/P0X46npGRCMqn7hgyzwX5ZQ9wYgu9BRRbTiBVsUIFTyM5BuP1Q22slT2GkWvFvum7GU236Y6QiT7Nr8SLvtsJn2XUuq8H7REFKzdy3+wqyyWbCErYTFyY1dcPM7Ht+CaGDWdd8u/FsoX7AdMRs/X1jNUo6iSmlUiyGlYDKF+QMnCJom1VPVgZXWsGKdjI2MLny991QMaiv0VajmFIh4ENv4CtXOl/1twvIl/6XTXAaqpJJKDTPZEuydi+PHDZmal2RAOfrkH4m0UURa7SlfpUlIg+EaqbNGp85hCYXLwRcEET1OnYr3rH1oYkcYJ40any1tvQ==',
+ },
+ },
+ },
+ 'backend/package.json',
+ ];
+ // TODO: fix types #22198
+ const res = (await decryptConfig(config, repository)) as any;
+ expect(res.encrypted).toBeUndefined();
+ expect(res.packageFiles[0].devDependencies.encrypted).toBeUndefined();
+ expect(res.packageFiles[0].devDependencies.branchPrefix).toBe(
+ 'abcdef-ghijklm-nopqf-stuvwxyz',
+ );
+ expect(res.packageFiles[0].devDependencies.npmToken).toBeUndefined();
+ expect(res.packageFiles[0].devDependencies.npmrc).toBe(
+ '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n',
+ );
+ });
+ });
+});
diff --git a/lib/config/decrypt/legacy.ts b/lib/config/decrypt/legacy.ts
new file mode 100644
index 00000000000000..caa1e8d4058824
--- /dev/null
+++ b/lib/config/decrypt/legacy.ts
@@ -0,0 +1,40 @@
+/** istanbul ignore file */
+import crypto from 'node:crypto';
+import { logger } from '../../logger';
+
+export function tryDecryptPublicKeyPKCS1(
+ privateKey: string,
+ encryptedStr: string,
+): string | null {
+ let decryptedStr: string | null = null;
+ try {
+ decryptedStr = crypto
+ .privateDecrypt(
+ {
+ key: privateKey,
+ padding: crypto.constants.RSA_PKCS1_PADDING,
+ },
+ Buffer.from(encryptedStr, 'base64'),
+ )
+ .toString();
+ } catch (err) {
+ logger.debug('Could not decrypt using PKCS1 padding');
+ }
+ return decryptedStr;
+}
+
+export function tryDecryptPublicKeyDefault(
+ privateKey: string,
+ encryptedStr: string,
+): string | null {
+ let decryptedStr: string | null = null;
+ try {
+ decryptedStr = crypto
+ .privateDecrypt(privateKey, Buffer.from(encryptedStr, 'base64'))
+ .toString();
+ logger.debug('Decrypted config using default padding');
+ } catch (err) {
+ logger.debug('Could not decrypt using default padding');
+ }
+ return decryptedStr;
+}
diff --git a/lib/config/decrypt/openpgp.spec.ts b/lib/config/decrypt/openpgp.spec.ts
new file mode 100644
index 00000000000000..272b758df0c7d2
--- /dev/null
+++ b/lib/config/decrypt/openpgp.spec.ts
@@ -0,0 +1,145 @@
+import { Fixtures } from '../../../test/fixtures';
+import { CONFIG_VALIDATION } from '../../constants/error-messages';
+import { decryptConfig } from '../decrypt';
+import { GlobalConfig } from '../global';
+import type { RenovateConfig } from '../types';
+
+const privateKey = Fixtures.get('private-pgp.pem', '..');
+const repository = 'abc/def';
+
+describe('config/decrypt/openpgp', () => {
+ describe('decryptConfig()', () => {
+ let config: RenovateConfig;
+
+ beforeAll(() => {
+ process.env.RENOVATE_X_USE_OPENPGP = 'true';
+ });
+
+ beforeEach(() => {
+ jest.resetModules();
+ config = {};
+ GlobalConfig.reset();
+ });
+
+ it('rejects invalid PGP message', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'long-but-wrong-wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ // Missing value
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ // Missing org scope
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//W38A3PmaZnE9XTCHGDQFD52Kz78UYnaiYeAT13cEqYWTwEvQ57B7D7I6i4jCLe7KwkUCS90kyoqd7twD75W/sO70MyIveKnMlqqnpkagQkFgmzMaXXNHaJXEkjzsflTELZu6UsUs/kZYmab7r14YLl9HbH/pqN9exil/9s3ym9URCPOyw/l04KWntdMAy0D+c5M4mE+obv6fz6nDb8tkdeT5Rt2uU+qw3gH1OsB2yu+zTWpI/xTGwDt5nB5txnNTsVrQ/ZK85MSktacGVcYuU9hsEDmSrShmtqlg6Myq+Hjb7cYAp2g4n13C/I3gGGaczl0PZaHD7ALMjI7p6O1q+Ix7vMxipiKMVjS3omJoqBCz3FKc6DVhyX4tfhxgLxFo0DpixNwGbBRbMBO8qZfUk7bicAl/oCRc2Ijmay5DDYuvtkw3G3Ou+sZTe6DNpWUFy6VA4ai7hhcLvcAuiYmLdwPISRR/X4ePa8ZrmSVPyVOvbmmwLhcDYSDlC9Mw4++7ELomlve5kvjVSHvPv9BPVb5sJF7gX4vOT4FrcKalQRPmhNCZrE8tY2lvlrXwV2EEhya8EYv4QTd3JUYEYW5FXiJrORK5KDTnISw+U02nFZjFlnoz9+R6h+aIT1crS3/+YjCHE/EIKvSftOnieYb02Gk7M9nqU19EYL9ApYw4+IjSRgFM3DShIrvuDwDkAwUfaq8mKtr9Vjg/r+yox//GKS3u3r4I3+dfCljA3OwskTPfbSD+huBk4mylIvaL5v8Fngxo979wiLw',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ // Impossible to parse
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//Wa/gHgQdH7tj3LQdW6rWKjzmkYVKZW9EbexJExu4WLaMgEKodlRMilcqCKfQZpjzoiC31J8Ly/x6Soury+lQnLVbtIQ4KWa/uCIz4lXCpPpGNgN2jPfOmdwWBMOcXIT+BgAMxRu3rAmvTtunrkACJ3J92eYNwJhTzp2Azn9LpT7kHnZ64z2SPhbdUgMMhCBwBG5BPArPzF5fdaqa8uUSbKhY0GMiqPXq6Zeq+EBNoPc/RJp2urpYTknO+nRb39avKjihd9MCZ/1d3QYymbRj7SZC3LJhenVF0hil3Uk8TBASnGQiDmBcIXQFhJ0cxavXqKjx+AEALq+kTdwGu5vuE2+2B820/o3lAXR9OnJHr8GodJ2ZBpzOaPrQe5zvxL0gLEeUUPatSOwuLhdo/6+bRCl2wNz23jIjDEFFTmsLqfEHcdVYVTH2QqvLjnUYcCRRuM32vS4rCMOEe0l6p0CV2rk22UZDIPcxqXjKucxse2Sow8ATWiPoIw7zWj7XBLqUKHFnMpPV2dCIKFKBsOKYgLjF4BvKzZJyhmVEPgMcKQLYqeT/2uWDR77NSWH0Cyiwk9M3KbOIMmV3pWh9PiXk6CvumECELbJHYH0Mc+P//BnbDq2Ie9dHdmKhFgRyHU7gWvkPhic9BX36xyldPcnhTgr1XWRoVe0ETGLDPCcqrQ/SUQGrLiujSOgxGu2K/6LDJhi4IKz1/nf7FUSj5eTIDqQiSPP5pXDjlH7oYxXXrHI/aYOCZ5sBx7mOzlEcENIrYblCHO/CYMTWdCJ4Wrftqk7K/A=',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ config.encrypted = {
+ token: 'too-short',
+ };
+ await expect(decryptConfig(config, repository)).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP org constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOAQ/+Lz6RlbEymbnmMhrktuaGiDPWRNPEQFuMRwwYM6/B/r0JMZa9tskAA5RpyYKxGmJJeuRtlA8GkTw02GoZomlJf/KXJZ95FwSbkXMSRJRD8LJ2402Hw2TaOTaSvfamESnm8zhNo8cok627nkKQkyrpk64heVlU5LIbO2+UgYgbiSQjuXZiW+QuJ1hVRjx011FQgEYc59+22yuKYqd8rrni7TrVqhGRlHCAqvNAGjBI4H7uTFh0sP4auunT/JjxTeTkJoNu8KgS/LdrvISpO67TkQziZo9XD5FOzSN7N3e4f8vO4N4fpjgkIDH/9wyEYe0zYz34xMAFlnhZzqrHycRqzBJuMxGqlFQcKWp9IisLMoVJhLrnvbDLuwwcjeqYkhvODjSs7UDKwTE4X4WmvZr0x4kOclOeAAz/pM6oNVnjgWJd9SnYtoa67bZVkne0k6mYjVhosie8v8icijmJ4OyLZUGWnjZCRd/TPkzQUw+B0yvsop9FYGidhCI+4MVx6W5w7SRtCctxVfCjLpmU4kWaBUUJ5YIQ5xm55yxEYuAsQkxOAYDCMFlV8ntWStYwIG1FsBgJX6VPevXuPPMjWiPNedIpJwBH2PLB4blxMfzDYuCeaIqU4daDaEWxxpuFTTK9fLdJKuipwFG6rwE3OuijeSN+2SLszi834DXtUjQdikHSTQG392+oTmZCFPeffLk/OiV2VpdXF3gGL7sr5M9hOWIZ783q0vW1l6nAElZ7UA//kW+L6QRxbnBVTJK5eCmMY6RJmL76zjqC1jQ0FC10',
+ };
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP multi-org constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//Yk4RTQoLEhO0TKxN2IUBrCi88ts+CG1SXKeL06sJ2qikN/3n2JYAGGKgkHRICfu5dOnsjyFdLJ1XWUrbsM3XgVWikMbrmzD1Xe7N5DsoZXlt4Wa9pZ+IkZuE6XcKKu9whIJ22ciEwCzFwDmk/CBshdCCVVQ3IYuM6uibEHn/AHQ8K15XhraiSzF6DbJpevs5Cy7b5YHFyE936H25CVnouUQnMPsirpQq3pYeMq/oOtV/m4mfRUUQ7MUxvtrwE4lq4hLjFu5n9rwlcqaFPl7I7BEM++1c9LFpYsP5mTS7hHCZ9wXBqER8fa3fKYx0bK1ihCpjP4zUkR7P/uhWDArXamv7gHX2Kj/Qsbegn7KjTdZlggAmaJl/CuSgCbhySy+E55g3Z1QFajiLRpQ5+RsWFDbbI08YEgzyQ0yNCaRvrkgo7kZ1D95rEGRfY96duOQbjzOEqtvYmFChdemZ2+f9Kh/JH1+X9ynxY/zYe/0p/U7WD3QNTYN18loc4aXiB1adXD5Ka2QfNroLudQBmLaJpJB6wASFfuxddsD5yRnO32NSdRaqIWC1x6ti3ZYJZ2RsNwJExPDzjpQTuMOH2jtpu3q7NHmW3snRKy2YAL2UjI0YdeKIlhc/qLCJt9MRcOxWYvujTMD/yGprhG44qf0jjMkJBu7NjuVIMONujabl9b7SUQGfO/t+3rMuC68bQdCGLlO8gf3hvtD99utzXphi6idjC0HKSW/9KzuMkm+syGmIAYq/0L3EFvpZ38uq7z8KzwFFQHI3sBA34bNEr5zpU5OMWg',
+ };
+ let res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ res = await decryptConfig(config, 'def/ghi');
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP org/repo constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOAQ//Wp7N0PaDZp0uOdwsc1CuqAq0UPcq+IQdHyKpJs3tHiCecXBHogy4P+rY9nGaUrVneCr4HexuKGuyJf1yl0ZqFffAUac5PjF8eDvjukQGOUq4aBlOogJCEefnuuVxVJx+NRR5iF1P6v57bmI1c+zoqZI/EQB30KU6O1BsdGPLUA/+R3dwCZd5Mbd36s34eYBasqcY9/QbqFcpElXMEPMse3kMCsVXPbZ+UMjtPJiBPUmtJq+ifnu1LzDrfshusSQMwgd/QNk7nEsijiYKllkWhHTP6g7zigvJ46x0h6AYS108YiuK3B9XUhXN9m05Ac6KTEEUdRI3E/dK2dQuRkLjXC8wceQm4A19Gm0uHoMIJYOCbiVoBCH6ayvKbZWZV5lZ4D1JbDNGmKeIj6OX9XWEMKiwTx0Xe89V7BdJzwIGrL0TCLtXuYWZ/R2k+UuBqtgzr44BsBqMpKUA0pcGBoqsEou1M05Ae9fJMF6ADezF5UQZPxT1hrMldiTp3p9iHGfWN2tKHeoW/8CqlIqg9JEkTc+Pl/L9E6ndy5Zjf097PvcmSGhxUQBE7XlrZoIlGhiEU/1HPMen0UUIs0LUu1ywpjCex2yTWnU2YmEwy0MQI1sekSr96QFxDDz9JcynYOYbqR/X9pdxEWyzQ+NJ3n6K97nE1Dj9Sgwu7mFGiUdNkf/SUAF0eZi/eXg71qumpMGBd4eWPtgkeMPLHjvMSYw9vBUfcoKFz6RJ4woG0dw5HOFkPnIjXKWllnl/o01EoBp/o8uswsIS9Nb8i+bp27U6tAHE',
+ };
+ const res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('handles PGP multi-org/repo constraint', async () => {
+ GlobalConfig.set({ privateKey });
+ config.encrypted = {
+ token:
+ 'wcFMAw+4H7SgaqGOARAAibXL3zr0KZawiND868UGdPpGRo1aVZfn0NUBHpm8mXfgB1rBHaLsP7qa8vxDHpwH9DRD1IyB4vvPUwtu7wmuv1Vtr596tD40CCcCZYB5JjZLWRF0O0xaZFCOi7Z9SqqdaOQoMScyvPO+3/lJkS7zmLllJFH0mQoX5Cr+owUAMSWqbeCQ9r/KAXpnhmpraDjTav48WulcdTMc8iQ/DHimcdzHErLOAjtiQi4OUe1GnDCcN76KQ+c+ZHySnkXrYi/DhOOu9qB4glJ5n68NueFja+8iR39z/wqCI6V6TIUiOyjFN86iVyNPQ4Otem3KuNwrnwSABLDqP491eUNjT8DUDffsyhNC9lnjQLmtViK0EN2yLVpMdHq9cq8lszBChB7gobD9rm8nUHnTuLf6yJvZOj6toD5Yqj8Ibj58wN90Q8CUsBp9/qp0J+hBVUPOx4sT6kM2p6YarlgX3mrIW5c1U+q1eDbCddLjHiU5cW7ja7o+cqlA6mbDRu3HthjBweiXTicXZcRu1o/wy/+laQQ95x5FzAXDnOwQUHBmpTDI3tUJvQ+oy8XyBBbyC0LsBye2c2SLkPJ4Ai3IMR+Mh8puSzVywTbneiAQNBzJHlj5l85nCF2tUjvNo3dWC+9mU5sfXg11iEC6LRbg+icjpqRtTjmQURtciKDUbibWacwU5T/SVAGPXnW7adBOS0PZPIZQcSwjchOdOl0IjzBy6ofu7ODdn2CXZXi8zbevTICXsHvjnW4MAj5oXrStxK3LkWyM3YBOLe7sOfWvWz7n9TM3dHg032navQ',
+ };
+ let res = await decryptConfig(config, repository);
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ res = await decryptConfig(config, 'def/def');
+ expect(res.encrypted).toBeUndefined();
+ expect(res.token).toBe('123');
+ await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
+ CONFIG_VALIDATION,
+ );
+ });
+
+ it('fails to load openpgp', async () => {
+ jest.doMock('../../expose.cjs', () => ({
+ openpgp: () => {
+ throw new Error('openpgp error');
+ },
+ }));
+ const pgp = await import('./openpgp');
+ const { logger } = await import('../../logger');
+ expect(
+ await pgp.tryDecryptOpenPgp(
+ '',
+ 'wcFMAw+4H7SgaqGOAQ/+Lz6RlbEymbnmMhrktuaGiDPWRNPEQFuMRwwYM6/B/r0JMZa9tskAA5RpyYKxGmJJeuRtlA8GkTw02GoZomlJf/KXJZ95FwSbkXMSRJRD8LJ2402Hw2TaOTaSvfamESnm8zhNo8cok627nkKQkyrpk64heVlU5LIbO2+UgYgbiSQjuXZiW+QuJ1hVRjx011FQgEYc59+22yuKYqd8rrni7TrVqhGRlHCAqvNAGjBI4H7uTFh0sP4auunT/JjxTeTkJoNu8KgS/LdrvISpO67TkQziZo9XD5FOzSN7N3e4f8vO4N4fpjgkIDH/9wyEYe0zYz34xMAFlnhZzqrHycRqzBJuMxGqlFQcKWp9IisLMoVJhLrnvbDLuwwcjeqYkhvODjSs7UDKwTE4X4WmvZr0x4kOclOeAAz/pM6oNVnjgWJd9SnYtoa67bZVkne0k6mYjVhosie8v8icijmJ4OyLZUGWnjZCRd/TPkzQUw+B0yvsop9FYGidhCI+4MVx6W5w7SRtCctxVfCjLpmU4kWaBUUJ5YIQ5xm55yxEYuAsQkxOAYDCMFlV8ntWStYwIG1FsBgJX6VPevXuPPMjWiPNedIpJwBH2PLB4blxMfzDYuCeaIqU4daDaEWxxpuFTTK9fLdJKuipwFG6rwE3OuijeSN+2SLszi834DXtUjQdikHSTQG392+oTmZCFPeffLk/OiV2VpdXF3gGL7sr5M9hOWIZ783q0vW1l6nAElZ7UA//kW+L6QRxbnBVTJK5eCmMY6RJmL76zjqC1jQ0FC10',
+ ),
+ ).toBeNull();
+ expect(logger.warn).toHaveBeenCalled();
+ expect(logger.once.warn).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/lib/config/decrypt/openpgp.ts b/lib/config/decrypt/openpgp.ts
new file mode 100644
index 00000000000000..5ec485da13f991
--- /dev/null
+++ b/lib/config/decrypt/openpgp.ts
@@ -0,0 +1,56 @@
+import { openpgp } from '../../expose.cjs';
+import { logger } from '../../logger';
+import { regEx } from '../../util/regex';
+
+let pgp: typeof import('openpgp') | null | undefined = undefined;
+
+export async function tryDecryptOpenPgp(
+ privateKey: string,
+ encryptedStr: string,
+): Promise<string | null> {
+ if (encryptedStr.length < 500) {
+ // optimization during transition of public key -> pgp
+ return null;
+ }
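+  // Load openpgp lazily and only once; if it cannot be loaded, remember that as null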
+ if (pgp === undefined) {
+ try {
+ pgp = openpgp();
+ } catch (err) {
+      logger.warn({ err }, 'Could not load openpgp');
+ pgp = null;
+ }
+ }
+
+ if (pgp === null) {
+ logger.once.warn('Cannot load openpgp, skipping decryption');
+ return null;
+ }
+
+ try {
+ const pk = await pgp.readPrivateKey({
+ // prettier-ignore
+ armoredKey: privateKey.replace(regEx(/\n[ \t]+/g), '\n'), // little massage to help a common problem
+ });
+ const startBlock = '-----BEGIN PGP MESSAGE-----\n\n';
+ const endBlock = '\n-----END PGP MESSAGE-----';
+ let armoredMessage = encryptedStr.trim();
+ if (!armoredMessage.startsWith(startBlock)) {
+ armoredMessage = `${startBlock}${armoredMessage}`;
+ }
+ if (!armoredMessage.endsWith(endBlock)) {
+ armoredMessage = `${armoredMessage}${endBlock}`;
+ }
+ const message = await pgp.readMessage({
+ armoredMessage,
+ });
+ const { data } = await pgp.decrypt({
+ message,
+ decryptionKeys: pk,
+ });
+ logger.debug('Decrypted config using openpgp');
+ return data;
+ } catch (err) {
+ logger.debug({ err }, 'Could not decrypt using openpgp');
+ return null;
+ }
+}
diff --git a/lib/config/global.ts b/lib/config/global.ts
index e59c0ae0ca116a..1077299e45aca7 100644
--- a/lib/config/global.ts
+++ b/lib/config/global.ts
@@ -32,6 +32,12 @@ export class GlobalConfig {
'gitTimeout',
'platform',
'endpoint',
+ 'httpCacheTtlDays',
+ 'autodiscoverRepoSort',
+ 'autodiscoverRepoOrder',
+ 'mergeConfidenceEndpoint',
+ 'mergeConfidenceDatasources',
+ 'userAgent',
];
private static config: RepoGlobalConfig = {};
diff --git a/lib/config/index.spec.ts b/lib/config/index.spec.ts
index 9a3e6a40c61fe9..d8d81d4e262213 100644
--- a/lib/config/index.spec.ts
+++ b/lib/config/index.spec.ts
@@ -1,5 +1,10 @@
import { getConfig } from './defaults';
-import { filterConfig, getManagerConfig, mergeChildConfig } from './index';
+import {
+ filterConfig,
+ getManagerConfig,
+ mergeChildConfig,
+ removeGlobalConfig,
+} from './index';
jest.mock('../modules/datasource/npm');
jest.mock('../../config.js', () => ({}), { virtual: true });
@@ -131,4 +136,20 @@ describe('config/index', () => {
expect(config.vulnerabilitySeverity).toBe('CRITICAL');
});
});
+
+ describe('removeGlobalConfig()', () => {
+ it('removes all global config', () => {
+ const filteredConfig = removeGlobalConfig(defaultConfig, false);
+ expect(filteredConfig).not.toHaveProperty('onboarding');
+ expect(filteredConfig).not.toHaveProperty('binarySource');
+ expect(filteredConfig.prHourlyLimit).toBe(2);
+ });
+
+ it('retains inherited config', () => {
+ const filteredConfig = removeGlobalConfig(defaultConfig, true);
+ expect(filteredConfig).toHaveProperty('onboarding');
+ expect(filteredConfig).not.toHaveProperty('binarySource');
+ expect(filteredConfig.prHourlyLimit).toBe(2);
+ });
+ });
});
diff --git a/lib/config/index.ts b/lib/config/index.ts
index 869a8250e5a9da..0494348f4849a1 100644
--- a/lib/config/index.ts
+++ b/lib/config/index.ts
@@ -31,6 +31,22 @@ export function getManagerConfig(
return managerConfig;
}
+export function removeGlobalConfig(
+ config: RenovateConfig,
+ keepInherited: boolean,
+): RenovateConfig {
+ const outputConfig: RenovateConfig = { ...config };
+ for (const option of options.getOptions()) {
+ if (keepInherited && option.inheritConfigSupport) {
+ continue;
+ }
+ if (option.globalOnly) {
+ delete outputConfig[option.name];
+ }
+ }
+ return outputConfig;
+}
+
export function filterConfig(
inputConfig: AllConfig,
targetStage: RenovateConfigStage,
@@ -39,6 +55,7 @@ export function filterConfig(
const outputConfig: RenovateConfig = { ...inputConfig };
const stages: (string | undefined)[] = [
'global',
+ 'inherit',
'repository',
'package',
'branch',
diff --git a/lib/config/migrate-validate.ts b/lib/config/migrate-validate.ts
index a3d8f8b42c62a8..2ba19032334fe4 100644
--- a/lib/config/migrate-validate.ts
+++ b/lib/config/migrate-validate.ts
@@ -32,7 +32,7 @@ export async function migrateAndValidate(
}: {
warnings: ValidationMessage[];
errors: ValidationMessage[];
- } = await configValidation.validateConfig(false, massagedConfig);
+ } = await configValidation.validateConfig('repo', massagedConfig);
// istanbul ignore if
if (is.nonEmptyArray(warnings)) {
logger.warn({ warnings }, 'Found renovate config warnings');
diff --git a/lib/config/migrations/custom/node-migration.spec.ts b/lib/config/migrations/custom/node-migration.spec.ts
index f9a7ec48fadb80..7171ebc41db193 100644
--- a/lib/config/migrations/custom/node-migration.spec.ts
+++ b/lib/config/migrations/custom/node-migration.spec.ts
@@ -12,7 +12,7 @@ describe('config/migrations/custom/node-migration', () => {
);
});
- it('should not delete node incase it has more than one property', () => {
+ it('should not delete node in case it has more than one property', () => {
expect(NodeMigration).toMigrate(
{
node: { enabled: true, automerge: false },
diff --git a/lib/config/options/index.ts b/lib/config/options/index.ts
index 4b441f962d4770..812c4b53948be4 100644
--- a/lib/config/options/index.ts
+++ b/lib/config/options/index.ts
@@ -2,9 +2,17 @@ import { getManagers } from '../../modules/manager';
import { getCustomManagers } from '../../modules/manager/custom';
import { getPlatformList } from '../../modules/platform';
import { getVersioningList } from '../../modules/versioning';
+import { supportedDatasources } from '../presets/internal/merge-confidence';
import type { RenovateOptions } from '../types';
const options: RenovateOptions[] = [
+ {
+ name: 'mode',
+ description: 'Mode of operation.',
+ type: 'string',
+ default: 'full',
+ allowedValues: ['full', 'silent'],
+ },
{
name: 'allowedHeaders',
description:
@@ -13,6 +21,27 @@ const options: RenovateOptions[] = [
default: ['X-*'],
subType: 'string',
globalOnly: true,
+ patternMatch: true,
+ },
+ {
+ name: 'autodiscoverRepoOrder',
+ description:
+ 'The order method for autodiscover server side repository search.',
+ type: 'string',
+ default: null,
+ globalOnly: true,
+ allowedValues: ['asc', 'desc'],
+ supportedPlatforms: ['gitea'],
+ },
+ {
+ name: 'autodiscoverRepoSort',
+ description:
+ 'The sort method for autodiscover server side repository search.',
+ type: 'string',
+ default: null,
+ globalOnly: true,
+ allowedValues: ['alpha', 'created', 'updated', 'size', 'id'],
+ supportedPlatforms: ['gitea'],
},
{
name: 'allowedEnv',
@@ -22,6 +51,7 @@ const options: RenovateOptions[] = [
default: [],
subType: 'string',
globalOnly: true,
+ patternMatch: true,
},
{
name: 'detectGlobalManagerConfig',
@@ -39,6 +69,25 @@ const options: RenovateOptions[] = [
default: false,
globalOnly: true,
},
+ {
+ name: 'mergeConfidenceEndpoint',
+ description:
+ 'If set, Renovate will query this API for Merge Confidence data.',
+ type: 'string',
+ default: 'https://developer.mend.io/',
+ advancedUse: true,
+ globalOnly: true,
+ },
+ {
+ name: 'mergeConfidenceDatasources',
+ description:
+ 'If set, Renovate will query the merge-confidence JSON API only for datasources that are part of this list.',
+ allowedValues: supportedDatasources,
+ default: supportedDatasources,
+ type: 'array',
+ subType: 'string',
+ globalOnly: true,
+ },
{
name: 'useCloudMetadataServices',
description:
@@ -47,6 +96,14 @@ const options: RenovateOptions[] = [
default: true,
globalOnly: true,
},
+ {
+ name: 'userAgent',
+ description:
+ 'If set to any string, Renovate will use this as the `user-agent` it sends with HTTP requests.',
+ type: 'string',
+ default: null,
+ globalOnly: true,
+ },
{
name: 'allowPostUpgradeCommandTemplating',
description:
@@ -122,6 +179,7 @@ const options: RenovateOptions[] = [
type: 'string',
default: 'renovate/configure',
globalOnly: true,
+ inheritConfigSupport: true,
cli: false,
},
{
@@ -131,6 +189,7 @@ const options: RenovateOptions[] = [
type: 'string',
default: null,
globalOnly: true,
+ inheritConfigSupport: true,
cli: false,
},
{
@@ -140,6 +199,7 @@ const options: RenovateOptions[] = [
type: 'string',
default: 'renovate.json',
globalOnly: true,
+ inheritConfigSupport: true,
cli: false,
},
{
@@ -148,6 +208,7 @@ const options: RenovateOptions[] = [
type: 'boolean',
default: false,
globalOnly: true,
+ inheritConfigSupport: true,
},
{
name: 'onboardingPrTitle',
@@ -156,6 +217,7 @@ const options: RenovateOptions[] = [
type: 'string',
default: 'Configure Renovate',
globalOnly: true,
+ inheritConfigSupport: true,
cli: false,
},
{
@@ -289,7 +351,6 @@ const options: RenovateOptions[] = [
allowedValues: ['disabled', 'enabled', 'reset'],
stage: 'repository',
default: 'disabled',
- experimental: true,
},
{
name: 'repositoryCacheType',
@@ -299,6 +360,23 @@ const options: RenovateOptions[] = [
type: 'string',
stage: 'repository',
default: 'local',
+ },
+ {
+ name: 'reportType',
+ description: 'Set how, or if, reports should be generated.',
+ globalOnly: true,
+ type: 'string',
+ default: null,
+ experimental: true,
+ allowedValues: ['logging', 'file', 's3'],
+ },
+ {
+ name: 'reportPath',
+ description:
+ 'Path to where the file should be written. In case of `s3` this has to be a full S3 URI.',
+ globalOnly: true,
+ type: 'string',
+ default: null,
experimental: true,
},
{
@@ -434,7 +512,7 @@ const options: RenovateOptions[] = [
description:
'Change this value to override the default Renovate sidecar image.',
type: 'string',
- default: 'ghcr.io/containerbase/sidecar:10.2.1',
+ default: 'ghcr.io/containerbase/sidecar:10.6.14',
globalOnly: true,
},
{
@@ -491,6 +569,7 @@ const options: RenovateOptions[] = [
stage: 'repository',
type: 'boolean',
globalOnly: true,
+ inheritConfigSupport: true,
},
{
name: 'onboardingConfig',
@@ -499,6 +578,7 @@ const options: RenovateOptions[] = [
type: 'object',
default: { $schema: 'https://docs.renovatebot.com/renovate-schema.json' },
globalOnly: true,
+ inheritConfigSupport: true,
mergeable: true,
},
{
@@ -530,6 +610,17 @@ const options: RenovateOptions[] = [
supportedPlatforms: ['gitlab'],
globalOnly: true,
},
+ {
+ name: 'forkCreation',
+ description:
+ 'Whether to create forks as needed at runtime when running in "fork mode".',
+ stage: 'repository',
+ type: 'boolean',
+ globalOnly: true,
+ supportedPlatforms: ['github'],
+ experimental: true,
+ default: true,
+ },
{
name: 'forkToken',
description: 'Set a personal access token here to enable "fork mode".',
@@ -556,6 +647,38 @@ const options: RenovateOptions[] = [
default: true,
globalOnly: true,
},
+ {
+ name: 'inheritConfig',
+ description:
+ 'If `true`, Renovate will inherit configuration from the `inheritConfigFileName` file in `inheritConfigRepoName`.',
+ type: 'boolean',
+ default: false,
+ globalOnly: true,
+ },
+ {
+ name: 'inheritConfigRepoName',
+ description:
+ 'Renovate will look in this repo for the `inheritConfigFileName`.',
+ type: 'string',
+ default: '{{parentOrg}}/renovate-config',
+ globalOnly: true,
+ },
+ {
+ name: 'inheritConfigFileName',
+ description:
+ 'Renovate will look for this config file name in the `inheritConfigRepoName`.',
+ type: 'string',
+ default: 'org-inherited-config.json',
+ globalOnly: true,
+ },
+ {
+ name: 'inheritConfigStrict',
+ description:
+ 'If `true`, any `inheritedConfig` fetch error will result in an aborted run.',
+ type: 'boolean',
+ default: false,
+ globalOnly: true,
+ },
{
name: 'requireConfig',
description:
@@ -565,6 +688,7 @@ const options: RenovateOptions[] = [
default: 'required',
allowedValues: ['required', 'optional', 'ignored'],
globalOnly: true,
+ inheritConfigSupport: true,
},
{
name: 'optimizeForDisabled',
@@ -842,7 +966,6 @@ const options: RenovateOptions[] = [
'Skip installing modules/dependencies if lock file updating is possible without a full install.',
type: 'boolean',
default: null,
- globalOnly: true,
},
{
name: 'autodiscover',
@@ -871,7 +994,19 @@ const options: RenovateOptions[] = [
subType: 'string',
default: null,
globalOnly: true,
- supportedPlatforms: ['gitlab'],
+ supportedPlatforms: ['gitea', 'gitlab'],
+ },
+ {
+ name: 'autodiscoverProjects',
+ description:
+ 'Filter the list of autodiscovered repositories by project names.',
+ stage: 'global',
+ type: 'array',
+ subType: 'string',
+ default: null,
+ globalOnly: true,
+ supportedPlatforms: ['bitbucket'],
+ patternMatch: true,
},
{
name: 'autodiscoverTopics',
@@ -1000,18 +1135,23 @@ const options: RenovateOptions[] = [
default: {},
additionalProperties: {
type: 'string',
- format: 'uri',
},
supportedManagers: [
+ 'ansible',
+ 'bitbucket-pipelines',
+ 'crossplane',
+ 'devcontainer',
+ 'docker-compose',
+ 'dockerfile',
+ 'droneci',
+ 'gitlabci',
'helm-requirements',
- 'helmv3',
'helmfile',
- 'gitlabci',
- 'dockerfile',
- 'docker-compose',
+ 'helmv3',
'kubernetes',
- 'ansible',
- 'droneci',
+ 'kustomize',
+ 'terraform',
+ 'vendir',
'woodpecker',
],
},
@@ -1148,6 +1288,7 @@ const options: RenovateOptions[] = [
mergeable: true,
cli: false,
env: false,
+ patternMatch: true,
},
{
name: 'excludeRepositories',
@@ -1252,7 +1393,6 @@ const options: RenovateOptions[] = [
mergeable: true,
cli: false,
env: false,
- advancedUse: true,
},
{
name: 'excludeDepNames',
@@ -1294,6 +1434,34 @@ const options: RenovateOptions[] = [
cli: false,
env: false,
},
+ {
+ name: 'matchDepPrefixes',
+ description:
+ 'Dep name prefixes to match. Valid only within a `packageRules` object.',
+ type: 'array',
+ subType: 'string',
+ allowString: true,
+ stage: 'package',
+ parents: ['packageRules'],
+ mergeable: true,
+ cli: false,
+ env: false,
+ advancedUse: true,
+ },
+ {
+ name: 'excludeDepPrefixes',
+ description:
+ 'Dep name prefixes to exclude. Valid only within a `packageRules` object.',
+ type: 'array',
+ subType: 'string',
+ allowString: true,
+ stage: 'package',
+ parents: ['packageRules'],
+ mergeable: true,
+ cli: false,
+ env: false,
+ advancedUse: true,
+ },
{
name: 'matchPackagePatterns',
description:
@@ -1355,7 +1523,7 @@ const options: RenovateOptions[] = [
{
name: 'matchCurrentValue',
description:
- 'A regex to match against the raw `currentValue` string of a dependency. Valid only within a `packageRules` object.',
+ 'A regex or glob pattern to match against the raw `currentValue` string of a dependency. Valid only within a `packageRules` object.',
type: 'string',
stage: 'package',
parents: ['packageRules'],
@@ -1377,7 +1545,7 @@ const options: RenovateOptions[] = [
{
name: 'matchNewValue',
description:
- 'A regex to match against the raw `newValue` string of a dependency. Valid only within a `packageRules` object.',
+ 'A regex or glob pattern to match against the raw `newValue` string of a dependency. Valid only within a `packageRules` object.',
type: 'string',
stage: 'package',
parents: ['packageRules'],
@@ -1460,7 +1628,6 @@ const options: RenovateOptions[] = [
mergeable: true,
cli: false,
env: false,
- experimental: true,
},
{
name: 'matchUpdateTypes',
@@ -1498,7 +1665,7 @@ const options: RenovateOptions[] = [
cli: false,
env: false,
},
- // Version behaviour
+ // Version behavior
{
name: 'allowedVersions',
description:
@@ -1540,6 +1707,15 @@ const options: RenovateOptions[] = [
type: 'boolean',
default: false,
},
+ {
+ name: 'separateMultipleMinor',
+ description:
+ 'If set to `true`, Renovate creates separate PRs for each `minor` stream.',
+ stage: 'package',
+ type: 'boolean',
+ default: false,
+ experimental: true,
+ },
{
name: 'separateMinorPatch',
description:
@@ -1751,7 +1927,14 @@ const options: RenovateOptions[] = [
allowedValues: ['auto', 'never'],
default: 'auto',
},
- // PR Behaviour
+ // PR Behavior
+ {
+ name: 'keepUpdatedLabel',
+ description:
+ 'If set, users can add this label to PRs to request they be kept updated with the base branch.',
+ type: 'string',
+ supportedPlatforms: ['azure', 'gitea', 'github', 'gitlab'],
+ },
{
name: 'rollbackPrs',
description:
@@ -1857,6 +2040,7 @@ const options: RenovateOptions[] = [
'Set sorting priority for PR creation. PRs with higher priority are created first, negative priority last.',
type: 'integer',
default: 0,
+ parents: ['packageRules'],
cli: false,
env: false,
},
@@ -1874,6 +2058,7 @@ const options: RenovateOptions[] = [
default: false,
supportedPlatforms: ['bitbucket'],
globalOnly: true,
+ inheritConfigSupport: true,
},
// Automatic merging
{
@@ -1968,6 +2153,8 @@ const options: RenovateOptions[] = [
description: 'Branch name template.',
type: 'string',
default: '{{{branchPrefix}}}{{{additionalBranchPrefix}}}{{{branchTopic}}}',
+ deprecationMsg:
+ 'We strongly recommend that you avoid configuring this field directly. Please edit `branchPrefix`, `additionalBranchPrefix`, or `branchTopic` instead.',
cli: false,
},
{
@@ -1991,6 +2178,8 @@ const options: RenovateOptions[] = [
type: 'string',
default:
'{{{commitMessagePrefix}}} {{{commitMessageAction}}} {{{commitMessageTopic}}} {{{commitMessageExtra}}} {{{commitMessageSuffix}}}',
+ deprecationMsg:
+ 'We deprecated editing the `commitMessage` directly, and we recommend you stop using this config option. Instead use config options like `commitMessageAction`, `commitMessageExtra`, and so on, to create the commit message you want.',
cli: false,
},
{
@@ -2061,9 +2250,11 @@ const options: RenovateOptions[] = [
{
name: 'prTitle',
description:
- 'Pull Request title template (deprecated). Inherits from `commitMessage` if null.',
+ 'Pull Request title template. Inherits from `commitMessage` if null.',
type: 'string',
default: null,
+ deprecationMsg:
+ 'Direct editing of `prTitle` is now deprecated. Instead use config options like `commitMessageAction`, `commitMessageExtra`, and so on, as they will be passed through to `prTitle`.',
cli: false,
},
{
@@ -2092,6 +2283,7 @@ const options: RenovateOptions[] = [
description: 'Customize sections in the Dependency Dashboard issue.',
type: 'object',
default: {},
+ freeChoice: true,
additionalProperties: {
type: 'string',
},
@@ -2344,6 +2536,16 @@ const options: RenovateOptions[] = [
cli: false,
env: false,
},
+ {
+ name: 'readOnly',
+ description:
+ 'Match against requests that only read data and do not mutate anything.',
+ type: 'boolean',
+ stage: 'repository',
+ parents: ['hostRules'],
+ cli: false,
+ env: false,
+ },
{
name: 'timeout',
description: 'Timeout (in milliseconds) for queries to external endpoints.',
@@ -2538,13 +2740,13 @@ const options: RenovateOptions[] = [
Pending: '{{{displayPending}}}',
References: '{{{references}}}',
'Package file': '{{{packageFile}}}',
- Age: "[![age](https://developer.mend.io/api/mc/badges/age/{{datasource}}/{{replace '/' '%2f' depName}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/)",
+ Age: "{{#if newVersion}}[![age](https://developer.mend.io/api/mc/badges/age/{{datasource}}/{{replace '/' '%2f' depName}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/){{/if}}",
Adoption:
- "[![adoption](https://developer.mend.io/api/mc/badges/adoption/{{datasource}}/{{replace '/' '%2f' depName}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/)",
+ "{{#if newVersion}}[![adoption](https://developer.mend.io/api/mc/badges/adoption/{{datasource}}/{{replace '/' '%2f' depName}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/){{/if}}",
Passing:
- "[![passing](https://developer.mend.io/api/mc/badges/compatibility/{{datasource}}/{{replace '/' '%2f' depName}}/{{{currentVersion}}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/)",
+ "{{#if newVersion}}[![passing](https://developer.mend.io/api/mc/badges/compatibility/{{datasource}}/{{replace '/' '%2f' depName}}/{{{currentVersion}}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/){{/if}}",
Confidence:
- "[![confidence](https://developer.mend.io/api/mc/badges/confidence/{{datasource}}/{{replace '/' '%2f' depName}}/{{{currentVersion}}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/)",
+ "{{#if newVersion}}[![confidence](https://developer.mend.io/api/mc/badges/confidence/{{datasource}}/{{replace '/' '%2f' depName}}/{{{currentVersion}}}/{{{newVersion}}}?slim=true)](https://docs.renovatebot.com/merge-confidence/){{/if}}",
},
},
{
@@ -2878,6 +3080,14 @@ const options: RenovateOptions[] = [
default: null,
supportedPlatforms: ['github'],
},
+ {
+ name: 'httpCacheTtlDays',
+ description: 'Maximum duration in days to keep HTTP cache entries.',
+ type: 'integer',
+ stage: 'repository',
+ default: 90,
+ globalOnly: true,
+ },
];
export function getOptions(): RenovateOptions[] {
diff --git a/lib/config/parse.ts b/lib/config/parse.ts
new file mode 100644
index 00000000000000..45bbb8e651d434
--- /dev/null
+++ b/lib/config/parse.ts
@@ -0,0 +1,75 @@
+import jsonValidator from 'json-dup-key-validator';
+import JSON5 from 'json5';
+import upath from 'upath';
+import { logger } from '../logger';
+import { parseJson } from '../util/common';
+
+export function parseFileConfig(
+ fileName: string,
+ fileContents: string,
+):
+ | { success: true; parsedContents: unknown }
+ | { success: false; validationError: string; validationMessage: string } {
+ const fileType = upath.extname(fileName);
+
+ if (fileType === '.json5') {
+ try {
+ return { success: true, parsedContents: JSON5.parse(fileContents) };
+ } catch (err) /* istanbul ignore next */ {
+ logger.debug({ fileName, fileContents }, 'Error parsing JSON5 file');
+ const validationError = 'Invalid JSON5 (parsing failed)';
+ const validationMessage = `JSON5.parse error: \`${err.message.replaceAll(
+ '`',
+ "'",
+ )}\``;
+ return {
+ success: false,
+ validationError,
+ validationMessage,
+ };
+ }
+ } else {
+ let allowDuplicateKeys = true;
+ let jsonValidationError = jsonValidator.validate(
+ fileContents,
+ allowDuplicateKeys,
+ );
+ if (jsonValidationError) {
+ const validationError = 'Invalid JSON (parsing failed)';
+ const validationMessage = jsonValidationError;
+ return {
+ success: false,
+ validationError,
+ validationMessage,
+ };
+ }
+ allowDuplicateKeys = false;
+ jsonValidationError = jsonValidator.validate(
+ fileContents,
+ allowDuplicateKeys,
+ );
+ if (jsonValidationError) {
+ const validationError = 'Duplicate keys in JSON';
+ const validationMessage = JSON.stringify(jsonValidationError);
+ return {
+ success: false,
+ validationError,
+ validationMessage,
+ };
+ }
+ try {
+ return {
+ success: true,
+ parsedContents: parseJson(fileContents, fileName),
+ };
+ } catch (err) /* istanbul ignore next */ {
+ logger.debug({ fileContents }, 'Error parsing renovate config');
+ const validationError = 'Invalid JSON (parsing failed)';
+ const validationMessage = `JSON.parse error: \`${err.message.replaceAll(
+ '`',
+ "'",
+ )}\``;
+ return { success: false, validationError, validationMessage };
+ }
+ }
+}
diff --git a/lib/config/presets/__snapshots__/index.spec.ts.snap b/lib/config/presets/__snapshots__/index.spec.ts.snap
index 74ba7c4ff38165..b3fac71cdca566 100644
--- a/lib/config/presets/__snapshots__/index.spec.ts.snap
+++ b/lib/config/presets/__snapshots__/index.spec.ts.snap
@@ -119,8 +119,12 @@ exports[`config/presets/index resolvePreset resolves eslint 1`] = `
"matchPackageNames": [
"@types/eslint",
"babel-eslint",
+ "@babel/eslint-parser",
],
"matchPackagePrefixes": [
+ "@eslint/",
+ "@stylistic/eslint-plugin",
+ "@types/eslint__",
"@typescript-eslint/",
"eslint",
],
@@ -135,9 +139,11 @@ exports[`config/presets/index resolvePreset resolves linters 1`] = `
"matchPackageNames": [
"@types/eslint",
"babel-eslint",
+ "@babel/eslint-parser",
"friendsofphp/php-cs-fixer",
"squizlabs/php_codesniffer",
"symplify/easy-coding-standard",
+ "@stylistic/stylelint-plugin",
"codelyzer",
"prettier",
"remark-lint",
@@ -148,6 +154,9 @@ exports[`config/presets/index resolvePreset resolves linters 1`] = `
],
"matchPackagePrefixes": [
"ember-template-lint",
+ "@eslint/",
+ "@stylistic/eslint-plugin",
+ "@types/eslint__",
"@typescript-eslint/",
"eslint",
"stylelint",
@@ -169,9 +178,11 @@ exports[`config/presets/index resolvePreset resolves nested groups 1`] = `
"matchPackageNames": [
"@types/eslint",
"babel-eslint",
+ "@babel/eslint-parser",
"friendsofphp/php-cs-fixer",
"squizlabs/php_codesniffer",
"symplify/easy-coding-standard",
+ "@stylistic/stylelint-plugin",
"codelyzer",
"prettier",
"remark-lint",
@@ -182,6 +193,9 @@ exports[`config/presets/index resolvePreset resolves nested groups 1`] = `
],
"matchPackagePrefixes": [
"ember-template-lint",
+ "@eslint/",
+ "@stylistic/eslint-plugin",
+ "@types/eslint__",
"@typescript-eslint/",
"eslint",
"stylelint",
diff --git a/lib/config/presets/common.ts b/lib/config/presets/common.ts
index 52cf7aa26d45d4..4fb427a4a3cfe9 100644
--- a/lib/config/presets/common.ts
+++ b/lib/config/presets/common.ts
@@ -29,6 +29,19 @@ export const removedPresets: Record<string, string | null> = {
'helpers:oddIsUnstablePackages': null,
'group:jsTestMonMajor': 'group:jsTestNonMajor',
'github>whitesource/merge-confidence:beta': 'mergeConfidence:all-badges',
+ 'replacements:messageFormat-{{package}}-to-@messageformat/{{package}}':
+ 'replacements:messageFormat-to-scoped',
+ 'regexManagers:biomeVersions': 'customManagers:biomeVersions',
+ 'regexManagers:bitbucketPipelinesVersions':
+ 'customManagers:bitbucketPipelinesVersions',
+ 'regexManagers:dockerfileVersions': 'customManagers:dockerfileVersions',
+ 'regexManagers:githubActionsVersions': 'customManagers:githubActionsVersions',
+ 'regexManagers:gitlabPipelineVersions':
+ 'customManagers:gitlabPipelineVersions',
+ 'regexManagers:helmChartYamlAppVersions':
+ 'customManagers:helmChartYamlAppVersions',
+ 'regexManagers:mavenPropertyVersions': 'customManagers:mavenPropertyVersions',
+ 'regexManagers:tfvarsVersions': 'customManagers:tfvarsVersions',
};
const renamedMonorepos: Record<string, string> = {
diff --git a/lib/config/presets/index.spec.ts b/lib/config/presets/index.spec.ts
index 2a29243300c9c7..a55ae336459288 100644
--- a/lib/config/presets/index.spec.ts
+++ b/lib/config/presets/index.spec.ts
@@ -233,8 +233,20 @@ describe('config/presets/index', () => {
config.extends = ['packages:eslint', 'packages:stylelint'];
const res = await presets.resolveConfigPresets(config);
expect(res).toEqual({
- matchPackageNames: ['@types/eslint', 'babel-eslint'],
- matchPackagePrefixes: ['@typescript-eslint/', 'eslint', 'stylelint'],
+ matchPackageNames: [
+ '@types/eslint',
+ 'babel-eslint',
+ '@babel/eslint-parser',
+ '@stylistic/stylelint-plugin',
+ ],
+ matchPackagePrefixes: [
+ '@eslint/',
+ '@stylistic/eslint-plugin',
+ '@types/eslint__',
+ '@typescript-eslint/',
+ 'eslint',
+ 'stylelint',
+ ],
});
});
@@ -250,8 +262,18 @@ describe('config/presets/index', () => {
packageRules: [
{
groupName: 'eslint',
- matchPackageNames: ['@types/eslint', 'babel-eslint'],
- matchPackagePrefixes: ['@typescript-eslint/', 'eslint'],
+ matchPackageNames: [
+ '@types/eslint',
+ 'babel-eslint',
+ '@babel/eslint-parser',
+ ],
+ matchPackagePrefixes: [
+ '@eslint/',
+ '@stylistic/eslint-plugin',
+ '@types/eslint__',
+ '@typescript-eslint/',
+ 'eslint',
+ ],
},
],
});
@@ -261,16 +283,16 @@ describe('config/presets/index', () => {
config.extends = ['packages:eslint'];
const res = await presets.resolveConfigPresets(config);
expect(res).toMatchSnapshot();
- expect(res.matchPackagePrefixes).toHaveLength(2);
+ expect(res.matchPackagePrefixes).toHaveLength(5);
});
it('resolves linters', async () => {
config.extends = ['packages:linters'];
const res = await presets.resolveConfigPresets(config);
expect(res).toMatchSnapshot();
- expect(res.matchPackageNames).toHaveLength(9);
+ expect(res.matchPackageNames).toHaveLength(11);
expect(res.matchPackagePatterns).toHaveLength(1);
- expect(res.matchPackagePrefixes).toHaveLength(4);
+ expect(res.matchPackagePrefixes).toHaveLength(7);
});
it('resolves nested groups', async () => {
@@ -279,9 +301,9 @@ describe('config/presets/index', () => {
expect(res).toMatchSnapshot();
const rule = res.packageRules![0];
expect(rule.automerge).toBeTrue();
- expect(rule.matchPackageNames).toHaveLength(9);
+ expect(rule.matchPackageNames).toHaveLength(11);
expect(rule.matchPackagePatterns).toHaveLength(1);
- expect(rule.matchPackagePrefixes).toHaveLength(4);
+ expect(rule.matchPackagePrefixes).toHaveLength(7);
});
it('migrates automerge in presets', async () => {
@@ -314,6 +336,21 @@ describe('config/presets/index', () => {
expect(res).toMatchSnapshot();
});
+ it('resolves self-hosted preset with templating', async () => {
+ GlobalConfig.set({ customEnvVariables: { GIT_REF: 'abc123' } });
+ config.extends = ['local>username/preset-repo#{{ env.GIT_REF }}'];
+ local.getPreset.mockImplementationOnce(({ tag }) =>
+ tag === 'abc123'
+ ? Promise.resolve({ labels: ['self-hosted with template resolved'] })
+ : Promise.reject(new Error('Failed to resolve self-hosted preset')),
+ );
+
+ const res = await presets.resolveConfigPresets(config);
+
+ expect(res.labels).toEqual(['self-hosted with template resolved']);
+ expect(local.getPreset).toHaveBeenCalledOnce();
+ });
+
it('resolves self-hosted transitive presets without baseConfig', async () => {
config.platform = 'gitlab';
config.endpoint = 'https://dummy.example.com/api/v4';
@@ -1040,6 +1077,14 @@ describe('config/presets/index', () => {
`);
});
+ it('handles renamed regexManagers presets', async () => {
+ const res = await presets.getPreset(
+ 'regexManagers:dockerfileVersions',
+ {},
+ );
+ expect(res.customManagers).toHaveLength(1);
+ });
+
it('gets linters', async () => {
const res = await presets.getPreset('packages:linters', {});
expect(res).toMatchSnapshot();
diff --git a/lib/config/presets/index.ts b/lib/config/presets/index.ts
index 827b1eef0768b2..93e8e1fc082399 100644
--- a/lib/config/presets/index.ts
+++ b/lib/config/presets/index.ts
@@ -10,6 +10,7 @@ import * as packageCache from '../../util/cache/package';
import { getTtlOverride } from '../../util/cache/package/decorator';
import { clone } from '../../util/clone';
import { regEx } from '../../util/regex';
+import * as template from '../../util/template';
import { GlobalConfig } from '../global';
import * as massage from '../massage';
import * as migration from '../migration';
@@ -148,6 +149,7 @@ export function parsePreset(input: string): ParsedPreset {
const presetsPackages = [
'compatibility',
'config',
+ 'customManagers',
'default',
'docker',
'group',
@@ -157,7 +159,6 @@ export function parsePreset(input: string): ParsedPreset {
'npm',
'packages',
'preview',
- 'regexManagers',
'replacements',
'schedule',
'security',
@@ -320,6 +321,10 @@ export async function resolveConfigPresets(
let config: AllConfig = {};
// First, merge all the preset configs from left to right
if (inputConfig.extends?.length) {
+ // Compile templates
+ inputConfig.extends = inputConfig.extends.map((tmpl) =>
+ template.compile(tmpl, {}),
+ );
for (const preset of inputConfig.extends) {
if (shouldResolvePreset(preset, existingPresets, ignorePresets)) {
logger.trace(`Resolving preset "${preset}"`);
diff --git a/lib/config/presets/internal/regex-managers.spec.ts b/lib/config/presets/internal/custom-managers.spec.ts
similarity index 69%
rename from lib/config/presets/internal/regex-managers.spec.ts
rename to lib/config/presets/internal/custom-managers.spec.ts
index 2843cdc46bfb01..e869acdabe1e20 100644
--- a/lib/config/presets/internal/regex-managers.spec.ts
+++ b/lib/config/presets/internal/custom-managers.spec.ts
@@ -1,9 +1,170 @@
import { codeBlock } from 'common-tags';
import { regexMatches } from '../../../../test/util';
import { extractPackageFile } from '../../../modules/manager/custom/regex';
-import { presets } from './regex-managers';
+import { presets } from './custom-managers';
+
+describe('config/presets/internal/custom-managers', () => {
+ describe('Update `$schema` version in biome.json', () => {
+ const customManager = presets['biomeVersions'].customManagers?.[0];
+
+ it(`find dependencies in file`, async () => {
+ const fileContent = codeBlock`
+ {
+ "$schema": "https://biomejs.dev/schemas/1.7.3/schema.json",
+ }
+ `;
+
+ const res = await extractPackageFile(
+ fileContent,
+ 'biome.json',
+ customManager!,
+ );
+
+ expect(res?.deps).toMatchObject([
+ {
+ currentValue: '1.7.3',
+ datasource: 'npm',
+ depName: '@biomejs/biome',
+ replaceString: '"https://biomejs.dev/schemas/1.7.3/schema.json"',
+ },
+ ]);
+ });
+
+ describe('matches regexes patterns', () => {
+ it.each`
+ path | expected
+ ${'biome.json'} | ${true}
+ ${'biome.jsonc'} | ${true}
+ ${'foo/biome.json'} | ${true}
+ ${'foo/biome.jsonc'} | ${true}
+ ${'biome.yml'} | ${false}
+ `('$path', ({ path, expected }) => {
+ expect(regexMatches(path, customManager!.fileMatch)).toBe(expected);
+ });
+ });
+ });
+
+ describe('Update `_VERSION` variables in Bitbucket Pipelines', () => {
+ const customManager =
+ presets['bitbucketPipelinesVersions'].customManagers?.[0];
+
+ it(`find dependencies in file`, async () => {
+ const fileContent = codeBlock`
+ script:
+ # renovate: datasource=docker depName=node versioning=docker
+ - export NODE_VERSION=18
+
+ # renovate: datasource=npm depName=pnpm
+ - export PNPM_VERSION="7.25.1"
+
+ # renovate: datasource=npm depName=yarn
+ - export YARN_VERSION 3.3.1
+
+ # renovate: datasource=custom.hashicorp depName=consul
+ - export CONSUL_VERSION 1.3.1
+
+ # renovate: datasource=github-releases depName=kubernetes-sigs/kustomize versioning=regex:^(?<compatibility>.+)/v(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)$ extractVersion=^kustomize/(?<version>.+)$
+ - export KUSTOMIZE_VERSION v5.2.1
+
+ - pipe: something/cool:latest
+ variables:
+ # renovate: datasource=docker depName=node versioning=docker
+ NODE_VERSION: 18
+ # renovate: datasource=npm depName=pnpm
+ PNPM_VERSION:"7.25.1"
+ # renovate: datasource=npm depName=yarn
+ YARN_VERSION: '3.3.1'
+
+ - echo $NODE_VERSION
+ `;
+
+ const res = await extractPackageFile(
+ fileContent,
+ 'bitbucket-pipelines.yml',
+ customManager!,
+ );
+
+ expect(res?.deps).toMatchObject([
+ {
+ currentValue: '18',
+ datasource: 'docker',
+ depName: 'node',
+ replaceString:
+ '# renovate: datasource=docker depName=node versioning=docker\n - export NODE_VERSION=18\n',
+ versioning: 'docker',
+ },
+ {
+ currentValue: '7.25.1',
+ datasource: 'npm',
+ depName: 'pnpm',
+ replaceString:
+ '# renovate: datasource=npm depName=pnpm\n - export PNPM_VERSION="7.25.1"\n',
+ },
+ {
+ currentValue: '3.3.1',
+ datasource: 'npm',
+ depName: 'yarn',
+ replaceString:
+ '# renovate: datasource=npm depName=yarn\n - export YARN_VERSION 3.3.1\n',
+ },
+ {
+ currentValue: '1.3.1',
+ datasource: 'custom.hashicorp',
+ depName: 'consul',
+ replaceString:
+ '# renovate: datasource=custom.hashicorp depName=consul\n - export CONSUL_VERSION 1.3.1\n',
+ },
+ {
+ currentValue: 'v5.2.1',
+ datasource: 'github-releases',
+ depName: 'kubernetes-sigs/kustomize',
+ replaceString:
+ '# renovate: datasource=github-releases depName=kubernetes-sigs/kustomize versioning=regex:^(?<compatibility>.+)/v(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)$ extractVersion=^kustomize/(?<version>.+)$\n - export KUSTOMIZE_VERSION v5.2.1\n',
+ extractVersion: '^kustomize/(?<version>.+)$',
+ versioning:
+ 'regex:^(?<compatibility>.+)/v(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)$',
+ },
+ {
+ currentValue: '18',
+ datasource: 'docker',
+ depName: 'node',
+ replaceString:
+ '# renovate: datasource=docker depName=node versioning=docker\n NODE_VERSION: 18\n',
+ versioning: 'docker',
+ },
+ {
+ currentValue: '7.25.1',
+ datasource: 'npm',
+ depName: 'pnpm',
+ replaceString:
+ '# renovate: datasource=npm depName=pnpm\n PNPM_VERSION:"7.25.1"\n',
+ },
+ {
+ currentValue: '3.3.1',
+ datasource: 'npm',
+ depName: 'yarn',
+ replaceString:
+ "# renovate: datasource=npm depName=yarn\n YARN_VERSION: '3.3.1'\n",
+ },
+ ]);
+ });
+
+ describe('matches regexes patterns', () => {
+ it.each`
+ path | expected
+ ${'bitbucket-pipelines.yml'} | ${true}
+ ${'bitbucket-pipelines.yaml'} | ${true}
+ ${'foo/bitbucket-pipelines.yml'} | ${true}
+ ${'foo/bitbucket-pipelines.yaml'} | ${true}
+ ${'foo/bar/bitbucket-pipelines.yml'} | ${true}
+ ${'foo/bar/bitbucket-pipelines.yaml'} | ${true}
+ ${'bitbucket-pipelines'} | ${false}
+ `('$path', ({ path, expected }) => {
+ expect(regexMatches(path, customManager!.fileMatch)).toBe(expected);
+ });
+ });
+ });
-describe('config/presets/internal/regex-managers', () => {
describe('Update `_VERSION` variables in Dockerfiles', () => {
const customManager = presets['dockerfileVersions'].customManagers?.[0];
@@ -17,6 +178,9 @@ describe('config/presets/internal/regex-managers', () => {
# renovate: datasource=npm depName=pnpm
ENV PNPM_VERSION="7.25.1"
+ # renovate: datasource=npm depName=pnpm
+ ENV PNPM_VERSION='7.25.1'
+
# renovate: datasource=npm depName=yarn
ENV YARN_VERSION 3.3.1
@@ -51,6 +215,13 @@ describe('config/presets/internal/regex-managers', () => {
replaceString:
'# renovate: datasource=npm depName=pnpm\nENV PNPM_VERSION="7.25.1"\n',
},
+ {
+ currentValue: '7.25.1',
+ datasource: 'npm',
+ depName: 'pnpm',
+ replaceString:
+ "# renovate: datasource=npm depName=pnpm\nENV PNPM_VERSION='7.25.1'\n",
+ },
{
currentValue: '3.3.1',
datasource: 'npm',
diff --git a/lib/config/presets/internal/regex-managers.ts b/lib/config/presets/internal/custom-managers.ts
similarity index 73%
rename from lib/config/presets/internal/regex-managers.ts
rename to lib/config/presets/internal/custom-managers.ts
index 857b0e28db8c36..6d772d39debe06 100644
--- a/lib/config/presets/internal/regex-managers.ts
+++ b/lib/config/presets/internal/custom-managers.ts
@@ -3,6 +3,33 @@ import type { Preset } from '../types';
/* eslint sort-keys: ["error", "asc", {caseSensitive: false, natural: true}] */
export const presets: Record<string, Preset> = {
+ biomeVersions: {
+ customManagers: [
+ {
+ customType: 'regex',
+ datasourceTemplate: 'npm',
+ depNameTemplate: '@biomejs/biome',
+ fileMatch: ['(^|/)biome.jsonc?$'],
+ matchStrings: [
+ '"https://biomejs.dev/schemas/(?[^"]+)/schema.json"',
+ ],
+ },
+ ],
+ description:
+ 'Update `$schema` version in `biome.json` configuration files.',
+ },
+ bitbucketPipelinesVersions: {
+ customManagers: [
+ {
+ customType: 'regex',
+ fileMatch: ['(^|/)bitbucket-pipelines\\.ya?ml$'],
+ matchStrings: [
+ '# renovate: datasource=(?<datasource>[a-z-.]+?) depName=(?<depName>[^\\s]+?)(?: (lookupName|packageName)=(?<packageName>[^\\s]+?))?(?: versioning=(?<versioning>[^\\s]+?))?(?: extractVersion=(?<extractVersion>[^\\s]+?))?(?: registryUrl=(?<registryUrl>[^\\s]+?))?\\s+.*\\s+[A-Za-z0-9_]+?_VERSION[ =:]\\s?["\']?(?<currentValue>.+?)["\']?\\s',
+ ],
+ },
+ ],
+ description: 'Update `_VERSION` variables in Bitbucket Pipelines.',
+ },
dockerfileVersions: {
customManagers: [
{
@@ -12,7 +39,7 @@ export const presets: Record<string, Preset> = {
'(^|/)([Dd]ocker|[Cc]ontainer)file[^/]*$',
],
matchStrings: [
- '# renovate: datasource=(?<datasource>[a-z-.]+?) depName=(?<depName>[^\\s]+?)(?: (lookupName|packageName)=(?<packageName>[^\\s]+?))?(?: versioning=(?<versioning>[^\\s]+?))?(?: extractVersion=(?<extractVersion>[^\\s]+?))?(?: registryUrl=(?<registryUrl>[^\\s]+?))?\\s(?:ENV|ARG) .+?_VERSION[ =]"?(?<currentValue>.+?)"?\\s',
+ '# renovate: datasource=(?<datasource>[a-z-.]+?) depName=(?<depName>[^\\s]+?)(?: (lookupName|packageName)=(?<packageName>[^\\s]+?))?(?: versioning=(?<versioning>[^\\s]+?))?(?: extractVersion=(?<extractVersion>[^\\s]+?))?(?: registryUrl=(?<registryUrl>[^\\s]+?))?\\s(?:ENV|ARG)\\s+[A-Za-z0-9_]+?_VERSION[ =]["\']?(?<currentValue>.+?)["\']?\\s',
],
},
],
@@ -40,7 +67,7 @@ export const presets: Record<string, Preset> = {
customType: 'regex',
fileMatch: ['\\.gitlab-ci\\.ya?ml$'],
matchStrings: [
- '# renovate: datasource=(?<datasource>[a-z-.]+?) depName=(?<depName>[^\\s]+?)(?: (?:packageName)=(?<packageName>[^\\s]+?))?(?: versioning=(?<versioning>[^\\s]+?))?(?: extractVersion=(?<extractVersion>[^\\s]+?))?\\s+[A-Za-z0-9_]+?_VERSION\\s*:\\s*["\']?(?<currentValue>.+?)["\']?\\s',
+ '# renovate: datasource=(?<datasource>[a-z-.]+?) depName=(?<depName>[^\\s]+?)(?: (?:packageName)=(?<packageName>[^\\s]+?))?(?: versioning=(?<versioning>[^\\s]+?))?(?: extractVersion=(?<extractVersion>[^\\s]+?))?(?: registryUrl=(?<registryUrl>[^\\s]+?))?\\s+[A-Za-z0-9_]+?_VERSION\\s*:\\s*["\']?(?<currentValue>.+?)["\']?\\s',
],
},
],
diff --git a/lib/config/presets/internal/default.ts b/lib/config/presets/internal/default.ts
index 68895dc50d603e..29fe0081b4ce92 100644
--- a/lib/config/presets/internal/default.ts
+++ b/lib/config/presets/internal/default.ts
@@ -363,7 +363,7 @@ export const presets: Record<string, Preset> = {
},
pathSemanticCommitType: {
description:
- 'Use semanticCommitType `{{arg0}}` for all package files matching path `{{arg1}}`.',
+ 'Use semanticCommitType `{{arg1}}` for all package files matching path `{{arg0}}`.',
packageRules: [
{
matchFileNames: ['{{arg0}}'],
@@ -574,6 +574,11 @@ export const presets: Record<string, Preset> = {
separateMajorMinor: true,
separateMultipleMajor: true,
},
+ separateMultipleMinorReleases: {
+ description:
+ 'Separate each `minor` version of dependencies into individual branches/PRs.',
+ separateMultipleMinor: true,
+ },
separatePatchReleases: {
description:
'Separate `patch` and `minor` releases of dependencies into separate PRs.',
diff --git a/lib/config/presets/internal/index.spec.ts b/lib/config/presets/internal/index.spec.ts
index fd439b817579e8..1b978f01d0d2b4 100644
--- a/lib/config/presets/internal/index.spec.ts
+++ b/lib/config/presets/internal/index.spec.ts
@@ -30,7 +30,7 @@ describe('config/presets/internal/index', () => {
const config = await resolveConfigPresets(
massageConfig(presetConfig),
);
- const res = await validateConfig(false, config, true);
+ const res = await validateConfig('repo', config, true);
expect(res.errors).toHaveLength(0);
expect(res.warnings).toHaveLength(0);
} catch (err) {
@@ -43,4 +43,15 @@ describe('config/presets/internal/index', () => {
}
}
}
+
+ it('internal presets should not contain handlebars', () => {
+ Object.entries(internal.groups)
+ .map(([groupName, groupPresets]) =>
+ Object.entries(groupPresets).map(
+ ([presetName]) => `${groupName}:${presetName}`,
+ ),
+ )
+ .flat()
+ .forEach((preset) => expect(preset).not.toMatch(/{{.*}}/));
+ });
});
diff --git a/lib/config/presets/internal/index.ts b/lib/config/presets/internal/index.ts
index 37b564cdf1cf4e..5bc8c4181e2cee 100644
--- a/lib/config/presets/internal/index.ts
+++ b/lib/config/presets/internal/index.ts
@@ -1,5 +1,6 @@
import type { Preset, PresetConfig } from '../types';
import * as configPreset from './config';
+import * as customManagersPreset from './custom-managers';
import * as defaultPreset from './default';
import * as dockerPreset from './docker';
import * as groupPreset from './group';
@@ -9,7 +10,6 @@ import * as monorepoPreset from './monorepo';
import * as npm from './npm';
import * as packagesPreset from './packages';
import * as previewPreset from './preview';
-import * as regexManagersPreset from './regex-managers';
import * as replacements from './replacements';
import * as schedulePreset from './schedule';
import * as securityPreset from './security';
@@ -19,6 +19,7 @@ import * as workaroundsPreset from './workarounds';
export const groups: Record<string, Record<string, Preset>> = {
config: configPreset.presets,
+ customManagers: customManagersPreset.presets,
default: defaultPreset.presets,
docker: dockerPreset.presets,
group: groupPreset.presets,
@@ -28,7 +29,6 @@ export const groups: Record<string, Record<string, Preset>> = {
npm: npm.presets,
packages: packagesPreset.presets,
preview: previewPreset.presets,
- regexManagers: regexManagersPreset.presets,
replacements: replacements.presets,
schedule: schedulePreset.presets,
security: securityPreset.presets,
diff --git a/lib/config/presets/internal/monorepo.ts b/lib/config/presets/internal/monorepo.ts
index 6ab272fda827bc..d1b738816f5333 100644
--- a/lib/config/presets/internal/monorepo.ts
+++ b/lib/config/presets/internal/monorepo.ts
@@ -38,6 +38,7 @@ const repoGroups = {
'aspnet-api-versioning': 'https://github.com/Microsoft/aspnet-api-versioning',
'aspnet-health-checks':
'https://github.com/xabaril/AspNetCore.Diagnostics.HealthChecks',
+ astro: 'https://github.com/withastro/astro',
'automapper-dotnet': [
'https://github.com/AutoMapper/AutoMapper',
'https://github.com/AutoMapper/AutoMapper.Extensions.Microsoft.DependencyInjection',
@@ -47,6 +48,7 @@ const repoGroups = {
'https://github.com/awslabs/aws-lambda-powertools-typescript',
'https://github.com/aws-powertools/powertools-lambda-typescript',
],
+ 'aws-sdk-client-mock': 'https://github.com/m-radzikowski/aws-sdk-client-mock',
'aws-sdk-go': 'https://github.com/aws/aws-sdk-go',
'aws-sdk-go-v2': 'https://github.com/aws/aws-sdk-go-v2',
'aws-sdk-js-v3': 'https://github.com/aws/aws-sdk-js-v3',
@@ -70,6 +72,8 @@ const repoGroups = {
capacitor: 'https://github.com/ionic-team/capacitor',
'chakra-ui': 'https://github.com/chakra-ui/chakra-ui',
chromely: 'https://github.com/chromelyapps/Chromely',
+ 'citation-js': 'https://github.com/citation-js/citation-js',
+ ckeditor: 'https://github.com/ckeditor/ckeditor5',
clarity: 'https://github.com/vmware/clarity',
clearscript: [
'https://github.com/microsoft/ClearScript',
@@ -273,8 +277,10 @@ const repoGroups = {
'ember-decorators': 'https://github.com/ember-decorators/ember-decorators',
emojibase: 'https://github.com/milesj/emojibase',
emotion: 'https://github.com/emotion-js/emotion',
+ eslint: 'https://github.com/eslint/eslint',
'eslint-config-globex':
'https://github.com/GlobexDesignsInc/eslint-config-globex',
+ 'eslint-stylistic': 'https://github.com/eslint-stylistic/eslint-stylistic',
expo: 'https://github.com/expo/expo',
'fabric-chaincode-node':
'https://github.com/hyperledger/fabric-chaincode-node',
@@ -333,8 +339,10 @@ const repoGroups = {
'json-smart-v2': 'https://github.com/netplex/json-smart-v2',
jsplumb: 'https://github.com/jsplumb/jsplumb',
junit5: 'https://github.com/junit-team/junit5',
+ kernelmemory: 'https://github.com/microsoft/kernel-memory',
kotlin: 'https://github.com/JetBrains/kotlin',
kroki: 'https://github.com/yuzutech/kroki',
+ ktor: 'https://github.com/ktorio/ktor',
lamar: 'https://github.com/JasperFx/lamar',
lerna: 'https://github.com/lerna/lerna',
lexical: 'https://github.com/facebook/lexical',
@@ -468,6 +476,7 @@ const repoGroups = {
storybook: 'https://github.com/storybookjs/storybook',
'storybook-react-native': 'https://github.com/storybookjs/react-native',
strapi: 'https://github.com/strapi/strapi',
+ strum: 'https://github.com/Peternator7/strum',
'stryker-js': 'https://github.com/stryker-mutator/stryker-js',
surveyjs: 'https://github.com/surveyjs/surveyjs',
'swashbuckle-aspnetcore':
@@ -491,6 +500,9 @@ const repoGroups = {
'theme-ui': 'https://github.com/system-ui/theme-ui',
tika: 'https://github.com/apache/tika',
tiptap: 'https://github.com/ueberdosis/tiptap',
+ 'tokio-prost': 'https://github.com/tokio-rs/prost',
+ 'tokio-tracing': 'https://github.com/tokio-rs/tracing',
+ tonic: 'https://github.com/hyperium/tonic',
treat: 'https://github.com/seek-oss/treat',
trpc: 'https://github.com/trpc/trpc',
'trust-dns': 'https://github.com/bluejekyll/trust-dns',
@@ -510,6 +522,7 @@ const repoGroups = {
vue: ['https://github.com/vuejs/vue', 'https://github.com/vuejs/core'],
'vue-cli': 'https://github.com/vuejs/vue-cli',
vuepress: 'https://github.com/vuejs/vuepress',
+ weasel: 'https://github.com/JasperFx/weasel',
'web3-react': 'https://github.com/Uniswap/web3-react',
webdriverio: 'https://github.com/webdriverio/webdriverio',
wolverine: 'https://github.com/jasperfx/wolverine',
@@ -559,6 +572,7 @@ const patternGroups = {
spfx: ['^@microsoft/sp-', '^@microsoft/eslint-.+-spfx$'],
spock: '^org\\.spockframework:spock-',
'syncfusion-dotnet': '^Syncfusion\\.',
+ 'testing-library': '^@testing-library/',
wordpress: '^@wordpress/',
};
diff --git a/lib/config/presets/internal/packages.ts b/lib/config/presets/internal/packages.ts
index 227f715f862d49..daac49f540fa31 100644
--- a/lib/config/presets/internal/packages.ts
+++ b/lib/config/presets/internal/packages.ts
@@ -22,8 +22,18 @@ export const presets: Record<string, Preset> = {
},
eslint: {
description: 'All ESLint packages.',
- matchPackageNames: ['@types/eslint', 'babel-eslint'],
- matchPackagePrefixes: ['@typescript-eslint/', 'eslint'],
+ matchPackageNames: [
+ '@types/eslint',
+ 'babel-eslint',
+ '@babel/eslint-parser',
+ ],
+ matchPackagePrefixes: [
+ '@eslint/',
+ '@stylistic/eslint-plugin',
+ '@types/eslint__',
+ '@typescript-eslint/',
+ 'eslint',
+ ],
},
gatsby: {
description: 'All packages published by Gatsby.',
@@ -129,6 +139,7 @@ export const presets: Record<string, Preset> = {
},
stylelint: {
description: 'All Stylelint packages.',
+ matchPackageNames: ['@stylistic/stylelint-plugin'],
matchPackagePrefixes: ['stylelint'],
},
test: {
diff --git a/lib/config/presets/internal/replacements.ts b/lib/config/presets/internal/replacements.ts
index a02e26131e7d51..74335ad3acd1bd 100644
--- a/lib/config/presets/internal/replacements.ts
+++ b/lib/config/presets/internal/replacements.ts
@@ -15,6 +15,8 @@ export const presets: Record<string, Preset> = {
'replacements:containerbase',
'replacements:cpx-to-maintenance-fork',
'replacements:cucumber-to-scoped',
+ 'replacements:eslint-config-standard-with-typescript-to-eslint-config-love',
+ 'replacements:eslint-plugin-node-to-maintained-fork',
'replacements:fakerjs-to-scoped',
'replacements:fastify-to-scoped',
'replacements:hapi-to-scoped',
@@ -22,6 +24,7 @@ export const presets: Record<string, Preset> = {
'replacements:joi-to-scoped',
'replacements:joi-to-unscoped',
'replacements:k8s-registry-move',
+ 'replacements:mem-rename',
'replacements:middie-to-scoped',
'replacements:now-to-vercel',
'replacements:npm-run-all-to-maintenance-fork',
@@ -36,12 +39,15 @@ export const presets: Record<string, Preset> = {
'replacements:rollup-babel-to-scoped',
'replacements:rollup-json-to-scoped',
'replacements:rollup-node-resolve-to-scoped',
+ 'replacements:rollup-terser-to-scoped',
'replacements:rome-to-biome',
'replacements:semantic-release-replace-plugin-to-unscoped',
'replacements:spectre-cli-to-spectre-console-cli',
+ 'replacements:standard-version-to-commit-and-tag',
'replacements:vso-task-lib-to-azure-pipelines-task-lib',
'replacements:vsts-task-lib-to-azure-pipelines-task-lib',
'replacements:xmldom-to-scoped',
+ 'replacements:zap',
],
ignoreDeps: [], // Hack to improve onboarding PR description
},
@@ -169,6 +175,7 @@ export const presets: Record<string, Preset> = {
matchDatasources: ['docker'],
matchPackageNames: ['ghcr.io/renovatebot/renovate'],
matchPackagePatterns: ['^(?:docker\\.io/)?renovate/renovate$'],
+ versioning: 'semver',
},
],
},
@@ -194,6 +201,32 @@ export const presets: Record<string, Preset> = {
},
],
},
+ 'eslint-config-standard-with-typescript-to-eslint-config-love': {
+ description:
+ '`eslint-config-standard-with-typescript` was renamed to `eslint-config-love`.',
+ packageRules: [
+ {
+ matchCurrentVersion: '^43.0.1',
+ matchDatasources: ['npm'],
+ matchPackageNames: ['eslint-config-standard-with-typescript'],
+ replacementName: 'eslint-config-love',
+ replacementVersion: '43.1.0',
+ },
+ ],
+ },
+ 'eslint-plugin-node-to-maintained-fork': {
+ description:
+ 'Replace stale `eslint-plugin-node` with a maintained fork: `eslint-plugin-n`.',
+ packageRules: [
+ {
+ matchCurrentVersion: '^11.1.0',
+ matchDatasources: ['npm'],
+ matchPackageNames: ['eslint-plugin-node'],
+ replacementName: 'eslint-plugin-n',
+ replacementVersion: '14.0.0',
+ },
+ ],
+ },
'fakerjs-to-scoped': {
description: '`fakerjs` packages became scoped.',
packageRules: [
@@ -648,6 +681,18 @@ export const presets: Record<string, Preset> = {
},
],
},
+ 'mem-rename': {
+ description: '`mem` was renamed to `memoize`.',
+ packageRules: [
+ {
+ matchCurrentVersion: '^10.0.0',
+ matchDatasources: ['npm'],
+ matchPackageNames: ['mem'],
+ replacementName: 'memoize',
+ replacementVersion: '10.0.0',
+ },
+ ],
+ },
'middie-to-scoped': {
description: '`middie` became scoped.',
packageRules: [
@@ -810,6 +855,18 @@ export const presets: Record<string, Preset> = {
},
],
},
+ 'rollup-terser-to-scoped': {
+ description: 'The terser plugin for rollup became scoped.',
+ packageRules: [
+ {
+ matchCurrentVersion: '>=7.0.0',
+ matchDatasources: ['npm'],
+ matchPackageNames: ['rollup-plugin-terser'],
+ replacementName: '@rollup/plugin-terser',
+ replacementVersion: '0.1.0',
+ },
+ ],
+ },
'rome-to-biome': {
description:
'The Rome repository is archived, and Biome is the community replacement. Read [the Biome announcement](https://biomejs.dev/blog/annoucing-biome/) for migration instructions.',
@@ -846,6 +903,19 @@ export const presets: Record<string, Preset> = {
},
],
},
+ 'standard-version-to-commit-and-tag': {
+ description:
+ '`standard-version` is now maintained as `commit-and-tag-version`.',
+ packageRules: [
+ {
+ matchCurrentVersion: '^9.0.0',
+ matchDatasources: ['npm'],
+ matchPackageNames: ['standard-version'],
+ replacementName: 'commit-and-tag-version',
+ replacementVersion: '9.5.0',
+ },
+ ],
+ },
'vso-task-lib-to-azure-pipelines-task-lib': {
description:
'The `vso-task-lib` package is now published as `azure-pipelines-task-lib`.',
@@ -881,6 +951,35 @@ export const presets: Record<string, Preset> = {
},
],
},
+ zap: {
+ description: 'Replace ZAP dependencies.',
+ packageRules: [
+ {
+ description:
+ 'The `zap-stable` image has moved to the `zaproxy` organization.',
+ matchCurrentVersion: '>=2.0.0 <2.14.0',
+ matchDatasources: ['docker'],
+ matchPackagePatterns: [
+ '^(?:docker\\.io/)?owasp/zap2docker-stable$',
+ '^(?:docker\\.io/)?softwaresecurityproject/zap-stable$',
+ ],
+ replacementName: 'zaproxy/zap-stable',
+ replacementVersion: '2.14.0',
+ },
+ {
+ description:
+ 'The `zap-bare` image has moved to the `zaproxy` organization.',
+ matchCurrentVersion: '>=2.0.0 <2.14.0',
+ matchDatasources: ['docker'],
+ matchPackagePatterns: [
+ '^(?:docker\\.io/)?owasp/zap2docker-bare$',
+ '^(?:docker\\.io/)?softwaresecurityproject/zap-bare$',
+ ],
+ replacementName: 'zaproxy/zap-bare',
+ replacementVersion: '2.14.0',
+ },
+ ],
+ },
};
const muiReplacement: Replacement[] = [
@@ -913,7 +1012,7 @@ const mui: PresetTemplate = {
const messageFormat: PresetTemplate = {
description:
- 'The `messageformat` monorepo package naming scheme changed from `messageFormat-{{package}}`-to-`@messageformat/{{package}}`.',
+ 'The `messageformat` monorepo package naming scheme changed from `messageFormat-{{package}}` to `@messageformat/{{package}}`.',
packageRules: [
{
matchCurrentVersion: '>=2.0.0 <3.0.0',
@@ -940,7 +1039,7 @@ const messageFormat: PresetTemplate = {
replacementVersion: '5.0.0',
},
],
- title: 'messageFormat-{{package}}-to-@messageformat/{{package}}',
+ title: 'messageFormat-to-scoped',
};
addPresets(presets, messageFormat, mui);
diff --git a/lib/config/presets/internal/security.ts b/lib/config/presets/internal/security.ts
index 6c19f6aaa62cb5..a5e72a45e21eec 100644
--- a/lib/config/presets/internal/security.ts
+++ b/lib/config/presets/internal/security.ts
@@ -21,4 +21,19 @@ export const presets: Record<string, Preset> = {
},
],
},
+ 'only-security-updates': {
+ description:
+ 'Only update dependencies if vulnerabilities have been detected.',
+ extends: ['config:recommended'],
+ packageRules: [
+ {
+ enabled: false,
+ matchPackageNames: ['*'],
+ },
+ ],
+ vulnerabilityAlerts: {
+ enabled: true,
+ },
+ osvVulnerabilityAlerts: true,
+ },
};
diff --git a/lib/config/presets/internal/workarounds.spec.ts b/lib/config/presets/internal/workarounds.spec.ts
new file mode 100644
index 00000000000000..f0d8440bbb9917
--- /dev/null
+++ b/lib/config/presets/internal/workarounds.spec.ts
@@ -0,0 +1,45 @@
+import { regEx } from '../../../util/regex';
+import { presets } from './workarounds';
+
+describe('config/presets/internal/workarounds', () => {
+ describe('bitnamiDockerImageVersioning', () => {
+ const versioning = presets.bitnamiDockerImageVersioning.packageRules![0]
+ .versioning as string;
+ const versioningRe = regEx(versioning.substring(6));
+ const matchCurrentValue = presets.bitnamiDockerImageVersioning
+ .packageRules![0].matchCurrentValue as string;
+ const matchCurrentValueRe = regEx(
+ matchCurrentValue.substring(1, matchCurrentValue.length - 1),
+ );
+
+ it.each`
+ input | expected
+ ${'latest'} | ${false}
+ ${'20'} | ${true}
+ ${'20-debian'} | ${false}
+ ${'20-debian-12'} | ${true}
+ ${'1.24'} | ${true}
+ ${'1.24-debian-12'} | ${true}
+ ${'1.24.0'} | ${true}
+ ${'1.24.0-debian-12'} | ${true}
+ ${'1.24.0-debian-12-r24'} | ${true}
+ `('versioning("$input") == "$expected"', ({ input, expected }) => {
+ expect(versioningRe.test(input)).toEqual(expected);
+ });
+
+ it.each`
+ input | expected
+ ${'latest'} | ${false}
+ ${'20'} | ${false}
+ ${'20-debian'} | ${false}
+ ${'20-debian-12'} | ${true}
+ ${'1.24'} | ${false}
+ ${'1.24-debian-12'} | ${true}
+ ${'1.24.0'} | ${false}
+ ${'1.24.0-debian-12'} | ${true}
+ ${'1.24.0-debian-12-r24'} | ${true}
+ `('matchCurrentValue("$input") == "$expected"', ({ input, expected }) => {
+ expect(matchCurrentValueRe.test(input)).toEqual(expected);
+ });
+ });
+});
diff --git a/lib/config/presets/internal/workarounds.ts b/lib/config/presets/internal/workarounds.ts
index 7209fe67eb5844..158515222e43b0 100644
--- a/lib/config/presets/internal/workarounds.ts
+++ b/lib/config/presets/internal/workarounds.ts
@@ -21,9 +21,27 @@ export const presets: Record<string, Preset> = {
'workarounds:disableEclipseLifecycleMapping',
'workarounds:disableMavenParentRoot',
'workarounds:containerbase',
+ 'workarounds:bitnamiDockerImageVersioning',
],
ignoreDeps: [], // Hack to improve onboarding PR description
},
+ bitnamiDockerImageVersioning: {
+ description: 'Use custom regex versioning for `bitnami` images.',
+ packageRules: [
+ {
+ matchCurrentValue:
+ '/^(?<major>\\d+)(?:\\.(?<minor>\\d+)(?:\\.(?<patch>\\d+))?)?-(?<compatibility>.+)-(?<build>\\d+)(?:-r(?<revision>\\d+))?$/',
+ matchDatasources: ['docker'],
+ matchPackagePrefixes: [
+ 'bitnami/',
+ 'docker.io/bitnami/',
+ 'gcr.io/bitnami-containers/',
+ ],
+ versioning:
+ 'regex:^(?<major>\\d+)(?:\\.(?<minor>\\d+)(?:\\.(?<patch>\\d+))?)?(:?-(?<compatibility>.+)-(?<build>\\d+)(?:-r(?<revision>\\d+))?)?$',
+ },
+ ],
+ },
containerbase: {
description: 'Add some containerbase overrides.',
packageRules: [
@@ -62,6 +80,12 @@ export const presets: Record<string, Preset> = {
doNotUpgradeFromAlpineStableToEdge: {
description: 'Do not upgrade from Alpine stable to edge.',
packageRules: [
+ {
+ allowedVersions: '<20000000',
+ matchCurrentVersion: '!/^\\d{8}$/',
+ matchDatasources: ['docker'],
+ matchDepNames: ['alpine'],
+ },
{
allowedVersions: '<20000000',
matchCurrentVersion: '!/^\\d{8}$/',
@@ -125,7 +149,24 @@ export const presets: Record<string, Preset> = {
'^cimg/openjdk',
],
versioning:
- 'regex:^(?<major>\\d+)?(\\.(?<minor>\\d+))?(\\.(?<patch>\\d+))?([\\._+](?<build>\\d+))?(-(?<compatibility>.*))?$',
+ 'regex:^(?<major>\\d+)?(\\.(?<minor>\\d+))?(\\.(?<patch>\\d+))?([\\._+](?<build>(\\d\\.?)+)(LTS)?)?(-(?<compatibility>.*))?$',
+ },
+ {
+ allowedVersions: '/^(?:8|11|17|21)(?:\\.|-|$)/',
+ description:
+ 'Limit Java runtime versions to LTS releases. To receive all major releases add `workarounds:javaLTSVersions` to the `ignorePresets` array.',
+ matchDatasources: ['docker', 'java-version'],
+ matchDepNames: [
+ 'eclipse-temurin',
+ 'amazoncorretto',
+ 'adoptopenjdk',
+ 'openjdk',
+ 'java',
+ 'java-jre',
+ 'sapmachine',
+ ],
+ versioning:
+ 'regex:^(?<major>\\d+)?(\\.(?<minor>\\d+))?(\\.(?<patch>\\d+))?([\\._+](?<build>(\\d\\.?)+)(LTS)?)?(-(?<compatibility>.*))?$',
},
],
},
diff --git a/lib/config/types.ts b/lib/config/types.ts
index 6e0715dabd2d60..2129fd9b7ecc2b 100644
--- a/lib/config/types.ts
+++ b/lib/config/types.ts
@@ -2,12 +2,14 @@ import type { LogLevel } from 'bunyan';
import type { PlatformId } from '../constants';
import type { LogLevelRemap } from '../logger/types';
import type { CustomManager } from '../modules/manager/custom/types';
-import type { HostRule } from '../types';
+import type { RepoSortMethod, SortMethod } from '../modules/platform/types';
+import type { HostRule, SkipReason } from '../types';
import type { GitNoVerifyOption } from '../util/git/types';
import type { MergeConfidence } from '../util/merge-confidence/types';
export type RenovateConfigStage =
| 'global'
+ | 'inherit'
| 'repository'
| 'package'
| 'branch'
@@ -28,69 +30,70 @@ export type RecreateWhen = 'auto' | 'never' | 'always';
// TODO: Proper typings
export interface RenovateSharedConfig {
$schema?: string;
+ addLabels?: string[];
+ autoReplaceGlobalMatch?: boolean;
automerge?: boolean;
+ automergeSchedule?: string[];
automergeStrategy?: MergeStrategy;
- autoReplaceGlobalMatch?: boolean;
- pruneBranchAfterAutomerge?: boolean;
- branchPrefix?: string;
- branchPrefixOld?: string;
branchName?: string;
branchNameStrict?: boolean;
- manager?: string;
+ branchPrefix?: string;
+ branchPrefixOld?: string;
commitMessage?: string;
- commitMessagePrefix?: string;
- commitMessageTopic?: string;
commitMessageAction?: string;
commitMessageExtra?: string;
+ commitMessageLowerCase?: 'auto' | 'never';
+ commitMessagePrefix?: string;
+ commitMessageTopic?: string;
confidential?: boolean;
customChangelogUrl?: string;
+ dependencyDashboardApproval?: boolean;
draftPR?: boolean;
enabled?: boolean;
enabledManagers?: string[];
extends?: string[];
fileMatch?: string[];
force?: RenovateConfig;
+ gitIgnoredAuthors?: string[];
group?: GroupConfig;
groupName?: string;
groupSlug?: string;
- includePaths?: string[];
+ hashedBranchLength?: number;
ignoreDeps?: string[];
ignorePaths?: string[];
ignoreTests?: boolean;
+ includePaths?: string[];
internalChecksAsSuccess?: boolean;
+ keepUpdatedLabel?: string;
labels?: string[];
- addLabels?: string[];
- dependencyDashboardApproval?: boolean;
- hashedBranchLength?: number;
+ manager?: string;
+ milestone?: number;
npmrc?: string;
npmrcMerge?: boolean;
+ platformCommit?: boolean;
postUpgradeTasks?: PostUpgradeTasks;
prBodyColumns?: string[];
prBodyDefinitions?: Record<string, string>;
prCreation?: 'immediate' | 'not-pending' | 'status-success' | 'approval';
- productLinks?: Record<string, string>;
prPriority?: number;
+ productLinks?: Record<string, string>;
+ pruneBranchAfterAutomerge?: boolean;
rebaseLabel?: string;
- respectLatest?: boolean;
- stopUpdatingLabel?: string;
rebaseWhen?: string;
- recreateWhen?: RecreateWhen;
recreateClosed?: boolean;
+ recreateWhen?: RecreateWhen;
repository?: string;
repositoryCache?: RepositoryCacheConfig;
repositoryCacheType?: RepositoryCacheType;
+ respectLatest?: boolean;
schedule?: string[];
- automergeSchedule?: string[];
- semanticCommits?: 'auto' | 'enabled' | 'disabled';
semanticCommitScope?: string | null;
- commitMessageLowerCase?: 'auto' | 'never';
semanticCommitType?: string;
+ semanticCommits?: 'auto' | 'enabled' | 'disabled';
+ stopUpdatingLabel?: string;
suppressNotifications?: string[];
timezone?: string;
unicodeEmoji?: boolean;
- gitIgnoredAuthors?: string[];
- platformCommit?: boolean;
- milestone?: number;
}
// Config options used only within the global worker
@@ -99,26 +102,27 @@ export interface GlobalOnlyConfig {
autodiscover?: boolean;
autodiscoverFilter?: string[] | string;
autodiscoverNamespaces?: string[];
+ autodiscoverProjects?: string[];
autodiscoverTopics?: string[];
baseDir?: string;
cacheDir?: string;
containerbaseDir?: string;
detectHostRulesFromEnv?: boolean;
dockerCliOptions?: string;
+ endpoint?: string;
forceCli?: boolean;
gitNoVerify?: GitNoVerifyOption[];
gitPrivateKey?: string;
globalExtends?: string[];
logFile?: string;
logFileLevel?: LogLevel;
+ platform?: PlatformId;
prCommitsPerRunLimit?: number;
privateKeyPath?: string;
privateKeyPathOld?: string;
- redisUrl?: string;
redisPrefix?: string;
+ redisUrl?: string;
repositories?: RenovateRepository[];
- platform?: PlatformId;
- endpoint?: string;
useCloudMetadataServices?: boolean;
}
@@ -129,32 +133,38 @@ export interface RepoGlobalConfig {
allowPlugins?: boolean;
allowPostUpgradeCommandTemplating?: boolean;
allowScripts?: boolean;
+ allowedEnv?: string[];
allowedHeaders?: string[];
allowedPostUpgradeCommands?: string[];
binarySource?: 'docker' | 'global' | 'install' | 'hermit';
+ cacheDir?: string;
cacheHardTtlMinutes?: number;
cacheTtlOverride?: Record<string, number>;
+ containerbaseDir?: string;
customEnvVariables?: Record<string, string>;
dockerChildPrefix?: string;
dockerCliOptions?: string;
dockerSidecarImage?: string;
dockerUser?: string;
dryRun?: DryRunConfig;
+ endpoint?: string;
executionTimeout?: number;
- gitTimeout?: number;
exposeAllEnv?: boolean;
+ gitTimeout?: number;
githubTokenWarn?: boolean;
+ includeMirrors?: boolean;
+ localDir?: string;
+ mergeConfidenceEndpoint?: string;
+ mergeConfidenceDatasources?: string[];
migratePresets?: Record<string, string>;
+ platform?: PlatformId;
presetCachePersistence?: boolean;
privateKey?: string;
privateKeyOld?: string;
- localDir?: string;
- cacheDir?: string;
- containerbaseDir?: string;
- platform?: PlatformId;
- endpoint?: string;
- includeMirrors?: boolean;
- allowedEnv?: string[];
+ httpCacheTtlDays?: number;
+ autodiscoverRepoSort?: RepoSortMethod;
+ autodiscoverRepoOrder?: SortMethod;
+ userAgent?: string;
}
export interface LegacyAdminConfig {
@@ -213,6 +223,8 @@ export interface RenovateConfig
AssigneesAndReviewersConfig,
ConfigMigration,
Record<string, unknown> {
+ reportPath?: string;
+ reportType?: 'logging' | 'file' | 's3' | null;
depName?: string;
baseBranches?: string[];
commitBody?: string;
@@ -228,6 +240,11 @@ export interface RenovateConfig
hostRules?: HostRule[];
+ inheritConfig?: boolean;
+ inheritConfigFileName?: string;
+ inheritConfigRepoName?: string;
+ inheritConfigStrict?: boolean;
+
ignorePresets?: string[];
forkProcessing?: 'auto' | 'enabled' | 'disabled';
isFork?: boolean;
@@ -340,33 +357,35 @@ export interface PackageRule
UpdateConfig,
Record<string, unknown> {
description?: string | string[];
+ excludeDepNames?: string[];
+ excludeDepPatterns?: string[];
+ excludeDepPrefixes?: string[];
+ excludePackageNames?: string[];
+ excludePackagePatterns?: string[];
+ excludePackagePrefixes?: string[];
+ excludeRepositories?: string[];
isVulnerabilityAlert?: boolean;
- matchFileNames?: string[];
matchBaseBranches?: string[];
+ matchCategories?: string[];
+ matchConfidence?: MergeConfidence[];
matchCurrentAge?: string;
- matchManagers?: string[];
+ matchCurrentValue?: string;
+ matchCurrentVersion?: string;
matchDatasources?: string[];
- matchDepTypes?: string[];
matchDepNames?: string[];
matchDepPatterns?: string[];
+ matchDepPrefixes?: string[];
+ matchDepTypes?: string[];
+ matchFileNames?: string[];
+ matchManagers?: string[];
+ matchNewValue?: string;
matchPackageNames?: string[];
matchPackagePatterns?: string[];
matchPackagePrefixes?: string[];
matchRepositories?: string[];
- excludeDepNames?: string[];
- excludeDepPatterns?: string[];
- excludePackageNames?: string[];
- excludePackagePatterns?: string[];
- excludePackagePrefixes?: string[];
- excludeRepositories?: string[];
- matchNewValue?: string;
- matchCurrentValue?: string;
- matchCurrentVersion?: string;
matchSourceUrlPrefixes?: string[];
matchSourceUrls?: string[];
matchUpdateTypes?: UpdateType[];
- matchCategories?: string[];
- matchConfidence?: MergeConfidence[];
registryUrls?: string[] | null;
vulnerabilitySeverity?: string;
}
@@ -390,6 +409,8 @@ export interface RenovateOptionBase {
*/
globalOnly?: boolean;
+ inheritConfigSupport?: boolean;
+
allowedValues?: string[];
allowString?: boolean;
@@ -422,6 +443,16 @@ export interface RenovateOptionBase {
experimentalIssues?: number[];
advancedUse?: boolean;
+
+ /**
+ * This is used to add depreciation message in the docs
+ */
+ deprecationMsg?: string;
+
+ /**
+ * For internal use only: add it to any config option that supports regex or glob matching
+ */
+ patternMatch?: boolean;
}
export interface RenovateArrayOption<
@@ -515,6 +546,8 @@ export interface PackageRuleInputConfig extends Record<string, unknown> {
releaseTimestamp?: string | null;
repository?: string;
currentVersionTimestamp?: string;
+ enabled?: boolean;
+ skipReason?: SkipReason;
}
export interface ConfigMigration {
diff --git a/lib/config/validation-helpers/regex-glob-matchers.spec.ts b/lib/config/validation-helpers/regex-glob-matchers.spec.ts
new file mode 100644
index 00000000000000..570128326ec09c
--- /dev/null
+++ b/lib/config/validation-helpers/regex-glob-matchers.spec.ts
@@ -0,0 +1,33 @@
+import { check } from './regex-glob-matchers';
+
+describe('config/validation-helpers/regex-glob-matchers', () => {
+ it('should error for multiple match alls', () => {
+ const res = check({
+ val: ['*', '**'],
+ currentPath: 'hostRules[0].allowedHeaders',
+ });
+ expect(res).toHaveLength(1);
+ });
+
+ it('should error for invalid regex', () => {
+ const res = check({
+ val: ['[', '/[/', '/.*[/'],
+ currentPath: 'hostRules[0].allowedHeaders',
+ });
+ expect(res).toHaveLength(2);
+ });
+
+ it('should error for non-strings', () => {
+ const res = check({
+ val: ['*', 2],
+ currentPath: 'hostRules[0].allowedHeaders',
+ });
+ expect(res).toMatchObject([
+ {
+ message:
+ 'hostRules[0].allowedHeaders: should be an array of strings. You have included object.',
+ topic: 'Configuration Error',
+ },
+ ]);
+ });
+});
diff --git a/lib/config/validation-helpers/regex-glob-matchers.ts b/lib/config/validation-helpers/regex-glob-matchers.ts
new file mode 100644
index 00000000000000..a1c25cb82f3839
--- /dev/null
+++ b/lib/config/validation-helpers/regex-glob-matchers.ts
@@ -0,0 +1,44 @@
+import is from '@sindresorhus/is';
+import { getRegexPredicate, isRegexMatch } from '../../util/string-match';
+import type { ValidationMessage } from '../types';
+import type { CheckMatcherArgs } from './types';
+
+/**
+ * Returns validation errors only when the value is not an array of strings or when its patterns violate the matching rules; otherwise the result is empty.
+ */
+export function check({
+ val: matchers,
+ currentPath,
+}: CheckMatcherArgs): ValidationMessage[] {
+ const res: ValidationMessage[] = [];
+
+ if (is.array(matchers, is.string)) {
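+ // A lone "*" or "**" already matches everything, so combining it with other patterns is reported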
+ if (
+ (matchers.includes('*') || matchers.includes('**')) &&
+ matchers.length > 1
+ ) {
+ res.push({
+ topic: 'Configuration Error',
+ message: `${currentPath}: Your input contains * or ** along with other patterns. Please remove them, as * or ** matches all patterns.`,
+ });
+ }
+ for (const matcher of matchers) {
+ // Validate regex pattern
+ if (isRegexMatch(matcher)) {
+ if (!getRegexPredicate(matcher)) {
+ res.push({
+ topic: 'Configuration Error',
+ message: `Failed to parse regex pattern "${matcher}"`,
+ });
+ }
+ }
+ }
+ } else {
+ res.push({
+ topic: 'Configuration Error',
+ message: `${currentPath}: should be an array of strings. You have included ${typeof matchers}.`,
+ });
+ }
+
+ return res;
+}
diff --git a/lib/config/validation-helpers/types.ts b/lib/config/validation-helpers/types.ts
index 05f70826cfe420..68e10825820310 100644
--- a/lib/config/validation-helpers/types.ts
+++ b/lib/config/validation-helpers/types.ts
@@ -4,3 +4,8 @@ export interface CheckManagerArgs {
resolvedRule: PackageRule;
currentPath: string;
}
+
+export interface CheckMatcherArgs {
+ val: unknown;
+ currentPath: string;
+}
diff --git a/lib/config/validation.spec.ts b/lib/config/validation.spec.ts
index 67943b9aa6b52a..f0bfcda1f90fe2 100644
--- a/lib/config/validation.spec.ts
+++ b/lib/config/validation.spec.ts
@@ -25,7 +25,10 @@ describe('config/validation', () => {
const config = {
prTitle: 'something',
};
- const { warnings } = await configValidation.validateConfig(false, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'repo',
+ config,
+ );
expect(warnings).toHaveLength(1);
expect(warnings).toMatchSnapshot();
});
@@ -35,14 +38,37 @@ describe('config/validation', () => {
binarySource: 'something',
username: 'user',
};
- const { warnings } = await configValidation.validateConfig(false, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'repo',
+ config,
+ );
+ expect(warnings).toHaveLength(2);
+ expect(warnings).toMatchObject([
+ {
+ message: `The "binarySource" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
+ },
+ {
+ message: `The "username" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
+ },
+ ]);
+ });
+
+ it('catches global options in inherit config', async () => {
+ const config = {
+ binarySource: 'something',
+ username: 'user',
+ };
+ const { warnings } = await configValidation.validateConfig(
+ 'inherit',
+ config,
+ );
expect(warnings).toHaveLength(2);
expect(warnings).toMatchObject([
{
- message: `The "binarySource" option is a global option reserved only for Renovate's global configuration and cannot be configured within repository config file.`,
+ message: `The "binarySource" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
},
{
- message: `The "username" option is a global option reserved only for Renovate's global configuration and cannot be configured within repository config file.`,
+ message: `The "username" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
},
]);
});
@@ -57,7 +83,21 @@ describe('config/validation', () => {
},
],
};
- const { warnings } = await configValidation.validateConfig(false, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'repo',
+ config,
+ );
+ expect(warnings).toHaveLength(0);
+ });
+
+ it('does not warn for valid inheritConfig', async () => {
+ const config = {
+ onboarding: false,
+ };
+ const { warnings } = await configValidation.validateConfig(
+ 'inherit',
+ config,
+ );
expect(warnings).toHaveLength(0);
});
@@ -65,7 +105,7 @@ describe('config/validation', () => {
const config = {
commitMessage: '{{{something}}',
};
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toHaveLength(1);
expect(errors).toMatchSnapshot();
});
@@ -95,7 +135,7 @@ describe('config/validation', () => {
},
],
};
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toHaveLength(2);
expect(errors).toMatchSnapshot();
});
@@ -123,10 +163,15 @@ describe('config/validation', () => {
matchCurrentValue: '/^2/i',
enabled: true,
},
+ {
+ matchPackageNames: ['bad'],
+ matchNewValue: '/^2(/',
+ enabled: true,
+ },
],
};
- const { errors } = await configValidation.validateConfig(false, config);
- expect(errors).toHaveLength(2);
+ const { errors } = await configValidation.validateConfig('repo', config);
+ expect(errors).toHaveLength(1);
});
it('catches invalid matchNewValue', async () => {
@@ -152,10 +197,15 @@ describe('config/validation', () => {
matchNewValue: '/^2/i',
enabled: true,
},
+ {
+ matchPackageNames: ['bad'],
+ matchNewValue: '/^2(/',
+ enabled: true,
+ },
],
};
- const { errors } = await configValidation.validateConfig(false, config);
- expect(errors).toHaveLength(2);
+ const { errors } = await configValidation.validateConfig('repo', config);
+ expect(errors).toHaveLength(1);
});
it('catches invalid matchCurrentVersion regex', async () => {
@@ -188,7 +238,7 @@ describe('config/validation', () => {
},
],
};
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toHaveLength(2);
expect(errors).toMatchSnapshot();
});
@@ -197,25 +247,30 @@ describe('config/validation', () => {
const config = {
customDatasources: {
foo: {
+ description: 3,
randomKey: '',
defaultRegistryUrlTemplate: [],
transformTemplates: [{}],
},
},
} as any;
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toMatchObject([
{
message:
- 'Invalid `customDatasources.customDatasources.defaultRegistryUrlTemplate` configuration: is a string',
+ 'Invalid `customDatasources.defaultRegistryUrlTemplate` configuration: is a string',
},
{
message:
- 'Invalid `customDatasources.customDatasources.randomKey` configuration: key is not allowed',
+ 'Invalid `customDatasources.description` configuration: is not an array of strings',
},
{
message:
- 'Invalid `customDatasources.customDatasources.transformTemplates` configuration: is not an array of string',
+ 'Invalid `customDatasources.randomKey` configuration: key is not allowed',
+ },
+ {
+ message:
+ 'Invalid `customDatasources.transformTemplates` configuration: is not an array of string',
},
]);
});
@@ -230,7 +285,7 @@ describe('config/validation', () => {
},
};
// @ts-expect-error invalid options
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toMatchObject([
{
message:
@@ -250,7 +305,7 @@ describe('config/validation', () => {
randomKey: '',
},
} as any;
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toMatchObject([
{
message:
@@ -263,7 +318,7 @@ describe('config/validation', () => {
const config = {
baseBranches: ['/***$}{]][/', '/branch/i'],
};
- const { errors } = await configValidation.validateConfig(false, config);
+ const { errors } = await configValidation.validateConfig('repo', config);
expect(errors).toEqual([
{
topic: 'Configuration Error',
@@ -290,7 +345,7 @@ describe('config/validation', () => {
major: null,
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -308,7 +363,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -326,7 +381,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as any,
);
expect(warnings).toHaveLength(0);
@@ -346,7 +401,7 @@ describe('config/validation', () => {
],
])('validates enabled managers for %s', async (_case, config) => {
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -364,7 +419,7 @@ describe('config/validation', () => {
'errors if included not supported enabled managers for %s',
async (_case, config) => {
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -405,7 +460,7 @@ describe('config/validation', () => {
major: null,
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(1);
@@ -434,7 +489,7 @@ describe('config/validation', () => {
},
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(4);
@@ -453,7 +508,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -472,7 +527,7 @@ describe('config/validation', () => {
},
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -493,7 +548,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -512,7 +567,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as any,
true,
);
@@ -540,7 +595,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as any,
true,
);
@@ -569,7 +624,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as any,
true,
);
@@ -606,7 +661,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as RenovateConfig,
true,
);
@@ -637,7 +692,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as any,
true,
);
@@ -659,7 +714,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -680,7 +735,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -701,10 +756,20 @@ describe('config/validation', () => {
extractVersionTemplate: '^(?<version>v\\d+\\.\\d+)',
depTypeTemplate: 'apple',
},
+ {
+ customType: 'regex',
+ fileMatch: ['Dockerfile'],
+ matchStrings: ['ENV (?<currentValue>.*?)\\s'],
+ packageNameTemplate: 'foo',
+ datasourceTemplate: 'bar',
+ registryUrlTemplate: 'foobar',
+ extractVersionTemplate: '^(?<version>v\\d+\\.\\d+)',
+ depTypeTemplate: 'apple',
+ },
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -727,7 +792,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as any,
true,
);
@@ -748,7 +813,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -762,7 +827,7 @@ describe('config/validation', () => {
$schema: 'renovate.json',
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -775,7 +840,7 @@ describe('config/validation', () => {
extends: [':timezone', ':timezone(Europe/Berlin)'],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -788,7 +853,7 @@ describe('config/validation', () => {
constraints: { packageRules: [{}] },
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config as never, // TODO: #15963
true,
);
@@ -801,7 +866,7 @@ describe('config/validation', () => {
prBodyDefinitions: {},
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -817,7 +882,7 @@ describe('config/validation', () => {
},
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -833,7 +898,7 @@ describe('config/validation', () => {
},
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -854,7 +919,7 @@ describe('config/validation', () => {
},
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -887,7 +952,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(errors).toHaveLength(1);
@@ -908,7 +973,7 @@ describe('config/validation', () => {
},
} as never;
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(errors).toHaveLength(1);
@@ -921,7 +986,7 @@ describe('config/validation', () => {
hostType: 'npm',
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(errors).toHaveLength(0);
@@ -934,7 +999,7 @@ describe('config/validation', () => {
extends: ['foo', 'bar', 42] as never,
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -949,7 +1014,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -968,7 +1033,7 @@ describe('config/validation', () => {
],
} as any;
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -988,7 +1053,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
true,
);
@@ -1004,11 +1069,14 @@ describe('config/validation', () => {
example2: 123,
},
};
- const { warnings } = await configValidation.validateConfig(false, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'repo',
+ config,
+ );
expect(warnings).toMatchObject([
{
topic: 'Configuration Error',
- message: `The "customEnvVariables" option is a global option reserved only for Renovate's global configuration and cannot be configured within repository config file.`,
+ message: `The "customEnvVariables" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
},
]);
});
@@ -1018,7 +1086,7 @@ describe('config/validation', () => {
schedule: ['30 5 * * *'],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -1046,7 +1114,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -1073,7 +1141,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -1100,7 +1168,7 @@ describe('config/validation', () => {
],
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(warnings).toHaveLength(0);
@@ -1123,7 +1191,7 @@ describe('config/validation', () => {
},
};
const { errors, warnings } = await configValidation.validateConfig(
- false,
+ 'repo',
// @ts-expect-error: testing invalid values in env object
config,
);
@@ -1159,7 +1227,7 @@ describe('config/validation', () => {
],
};
const { errors, warnings } = await configValidation.validateConfig(
- false,
+ 'repo',
config,
);
expect(errors).toMatchObject([
@@ -1177,6 +1245,34 @@ describe('config/validation', () => {
expect(warnings).toHaveLength(0);
expect(errors).toHaveLength(2);
});
+
+ it('catches when * or ** is combined with other patterns in a regexOrGlob option', async () => {
+ const config = {
+ packageRules: [
+ {
+ matchRepositories: ['groupA/**', 'groupB/**'], // valid
+ enabled: false,
+ },
+ {
+ matchRepositories: ['*', 'repo'], // invalid
+ enabled: true,
+ },
+ ],
+ };
+ const { errors, warnings } = await configValidation.validateConfig(
+ 'repo',
+ config,
+ );
+ expect(errors).toMatchObject([
+ {
+ message:
+ 'packageRules[1].matchRepositories: Your input contains * or ** along with other patterns. Please remove them, as * or ** matches all patterns.',
+ topic: 'Configuration Error',
+ },
+ ]);
+ expect(errors).toHaveLength(1);
+ expect(warnings).toHaveLength(0);
+ });
});
describe('validateConfig() -> globaOnly options', () => {
@@ -1193,7 +1289,7 @@ describe('config/validation', () => {
allowedHeaders: ['X-Auth-Token'],
};
const { warnings, errors } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toHaveLength(0);
@@ -1211,7 +1307,10 @@ describe('config/validation', () => {
},
],
};
- const { errors } = await configValidation.validateConfig(true, config);
+ const { errors } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
expect(errors).toMatchObject([
{
message:
@@ -1229,7 +1328,7 @@ describe('config/validation', () => {
allowedEnv: ['SOME*'],
};
const { warnings, errors } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toHaveLength(0);
@@ -1242,7 +1341,10 @@ describe('config/validation', () => {
SOME_VAR: 'SOME_VALUE',
},
};
- const { errors } = await configValidation.validateConfig(true, config);
+ const { errors } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
expect(errors).toMatchObject([
{
message:
@@ -1277,7 +1379,7 @@ describe('config/validation', () => {
reviewersSampleSize: null,
};
const { warnings, errors } = await configValidation.validateConfig(
- false,
+ 'repo',
// @ts-expect-error: contains invalid values
config,
);
@@ -1293,7 +1395,7 @@ describe('config/validation', () => {
binarySource: 'invalid' as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1310,7 +1412,7 @@ describe('config/validation', () => {
baseDir: false as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1326,7 +1428,7 @@ describe('config/validation', () => {
requireConfig: 'invalid' as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1343,7 +1445,7 @@ describe('config/validation', () => {
dryRun: 'invalid' as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1360,7 +1462,7 @@ describe('config/validation', () => {
repositoryCache: 'invalid' as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1377,7 +1479,7 @@ describe('config/validation', () => {
onboardingConfigFileName: 'invalid' as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1397,7 +1499,7 @@ describe('config/validation', () => {
},
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1408,7 +1510,7 @@ describe('config/validation', () => {
},
{
topic: 'Configuration Error',
- message: `The "binarySource" option is a global option reserved only for Renovate's global configuration and cannot be configured within repository config file.`,
+ message: `The "binarySource" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
},
]);
});
@@ -1425,7 +1527,7 @@ describe('config/validation', () => {
},
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1442,7 +1544,7 @@ describe('config/validation', () => {
gitUrl: 'invalid' as never,
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toEqual([
@@ -1460,7 +1562,10 @@ describe('config/validation', () => {
unicodeEmoji: false,
detectGlobalManagerConfig: 'invalid-type',
};
- const { warnings } = await configValidation.validateConfig(true, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
expect(warnings).toMatchObject([
{
message: `Configuration option \`detectGlobalManagerConfig\` should be a boolean. Found: ${JSON.stringify(
@@ -1476,7 +1581,10 @@ describe('config/validation', () => {
prCommitsPerRunLimit: 2,
gitTimeout: 'invalid-type',
};
- const { warnings } = await configValidation.validateConfig(true, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
expect(warnings).toMatchObject([
{
message: `Configuration option \`gitTimeout\` should be an integer. Found: ${JSON.stringify(
@@ -1492,9 +1600,10 @@ describe('config/validation', () => {
allowedPostUpgradeCommands: ['cmd'],
checkedBranches: 'invalid-type',
gitNoVerify: ['invalid'],
+ mergeConfidenceDatasources: [1],
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
// @ts-expect-error: contains invalid values
config,
);
@@ -1504,6 +1613,11 @@ describe('config/validation', () => {
'Configuration option `checkedBranches` should be a list (Array).',
topic: 'Configuration Error',
},
+ {
+ topic: 'Configuration Error',
+ message:
+ 'Invalid value `1` for `mergeConfidenceDatasources`. The allowed values are go, maven, npm, nuget, packagist, pypi, rubygems.',
+ },
{
message:
'Invalid value for `gitNoVerify`. The allowed values are commit, push.',
@@ -1525,7 +1639,7 @@ describe('config/validation', () => {
},
};
const { warnings } = await configValidation.validateConfig(
- true,
+ 'global',
// @ts-expect-error: contains invalid values
config,
);
@@ -1549,7 +1663,10 @@ describe('config/validation', () => {
example2: 123,
},
};
- const { warnings } = await configValidation.validateConfig(true, config);
+ const { warnings } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
expect(warnings).toMatchObject([
{
message:
@@ -1567,7 +1684,7 @@ describe('config/validation', () => {
},
};
const { warnings, errors } = await configValidation.validateConfig(
- true,
+ 'global',
config,
);
expect(warnings).toHaveLength(0);
@@ -1586,12 +1703,102 @@ describe('config/validation', () => {
autodiscoverTopics: null,
};
const { warnings, errors } = await configValidation.validateConfig(
- true,
+ 'global',
// @ts-expect-error: contains invalid values
config,
);
expect(warnings).toHaveLength(0);
expect(errors).toHaveLength(0);
});
+
+ it('fails for missing reportPath if reportType is "s3"', async () => {
+ const config: RenovateConfig = {
+ reportType: 's3',
+ };
+ const { warnings, errors } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
+ expect(warnings).toHaveLength(0);
+ expect(errors).toHaveLength(1);
+ });
+
+ it('validates reportPath if reportType is "s3"', async () => {
+ const config: RenovateConfig = {
+ reportType: 's3',
+ reportPath: 's3://bucket-name/key-name',
+ };
+ const { warnings, errors } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
+ expect(warnings).toHaveLength(0);
+ expect(errors).toHaveLength(0);
+ });
+
+ it('fails for missing reportPath if reportType is "file"', async () => {
+ const config: RenovateConfig = {
+ reportType: 'file',
+ };
+ const { warnings, errors } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
+ expect(warnings).toHaveLength(0);
+ expect(errors).toHaveLength(1);
+ });
+
+ it('validates reportPath if reportType is "file"', async () => {
+ const config: RenovateConfig = {
+ reportType: 'file',
+ reportPath: './report.json',
+ };
+ const { warnings, errors } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
+ expect(warnings).toHaveLength(0);
+ expect(errors).toHaveLength(0);
+ });
+
+ it('catches when * or ** is combined with other patterns in a regexOrGlob option', async () => {
+ const config = {
+ packageRules: [
+ {
+ matchRepositories: ['*', 'repo'], // invalid
+ enabled: false,
+ },
+ ],
+ allowedHeaders: ['*', '**'], // invalid
+ autodiscoverProjects: ['**', 'project'], // invalid
+ allowedEnv: ['env_var'], // valid
+ };
+ const { errors, warnings } = await configValidation.validateConfig(
+ 'global',
+ config,
+ );
+ expect(warnings).toMatchObject([
+ {
+ message:
+ 'allowedHeaders: Your input contains * or ** along with other patterns. Please remove them, as * or ** matches all patterns.',
+ topic: 'Configuration Error',
+ },
+ {
+ message:
+ 'autodiscoverProjects: Your input contains * or ** along with other patterns. Please remove them, as * or ** matches all patterns.',
+ topic: 'Configuration Error',
+ },
+ ]);
+
+ expect(errors).toMatchObject([
+ {
+ message:
+ 'packageRules[0].matchRepositories: Your input contains * or ** along with other patterns. Please remove them, as * or ** matches all patterns.',
+ topic: 'Configuration Error',
+ },
+ ]);
+ expect(warnings).toHaveLength(2);
+ expect(errors).toHaveLength(1);
+ });
});
});
diff --git a/lib/config/validation.ts b/lib/config/validation.ts
index 1a5b269cda6671..f7050c2ac7925f 100644
--- a/lib/config/validation.ts
+++ b/lib/config/validation.ts
@@ -24,6 +24,7 @@ import { GlobalConfig } from './global';
import { migrateConfig } from './migration';
import { getOptions } from './options';
import { resolveConfigPresets } from './presets';
+import { supportedDatasources } from './presets/internal/merge-confidence';
import {
AllowedParents,
type RenovateConfig,
@@ -34,12 +35,16 @@ import {
allowedStatusCheckStrings,
} from './types';
import * as managerValidator from './validation-helpers/managers';
+import * as regexOrGlobValidator from './validation-helpers/regex-glob-matchers';
const options = getOptions();
+let optionsInitialized = false;
let optionTypes: Record<string, string>;
let optionParents: Record<string, AllowedParents[]>;
let optionGlobals: Set<string>;
+let optionInherits: Set<string>;
+let optionRegexOrGlob: Set<string>;
const managerList = getManagerList();
@@ -98,16 +103,50 @@ function getDeprecationMessage(option: string): string | undefined {
return deprecatedOptions[option];
}
+function isInhertConfigOption(key: string): boolean {
+ return optionInherits.has(key);
+}
+
+function isRegexOrGlobOption(key: string): boolean {
+ return optionRegexOrGlob.has(key);
+}
+
function isGlobalOption(key: string): boolean {
- if (!optionGlobals) {
- optionGlobals = new Set();
- for (const option of options) {
- if (option.globalOnly) {
- optionGlobals.add(option.name);
- }
+ return optionGlobals.has(key);
+}
+
+function initOptions(): void {
+ if (optionsInitialized) {
+ return;
+ }
+
+ optionParents = {};
+ optionInherits = new Set();
+ optionTypes = {};
+ optionRegexOrGlob = new Set();
+ optionGlobals = new Set();
+
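+ // Build all option lookup tables in a single pass; later validateConfig() calls reuse them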
+ for (const option of options) {
+ optionTypes[option.name] = option.type;
+
+ if (option.parents) {
+ optionParents[option.name] = option.parents;
+ }
+
+ if (option.inheritConfigSupport) {
+ optionInherits.add(option.name);
+ }
+
+ if (option.patternMatch) {
+ optionRegexOrGlob.add(option.name);
+ }
+
+ if (option.globalOnly) {
+ optionGlobals.add(option.name);
}
}
- return optionGlobals.has(key);
+
+ optionsInitialized = true;
}
export function getParentName(parentPath: string | undefined): string {
@@ -121,25 +160,13 @@ export function getParentName(parentPath: string | undefined): string {
}
export async function validateConfig(
- isGlobalConfig: boolean,
+ configType: 'global' | 'inherit' | 'repo',
config: RenovateConfig,
isPreset?: boolean,
parentPath?: string,
): Promise<ValidationResult> {
- if (!optionTypes) {
- optionTypes = {};
- options.forEach((option) => {
- optionTypes[option.name] = option.type;
- });
- }
- if (!optionParents) {
- optionParents = {};
- options.forEach((option) => {
- if (option.parents) {
- optionParents[option.name] = option.parents;
- }
- });
- }
+ initOptions();
+
let errors: ValidationMessage[] = [];
let warnings: ValidationMessage[] = [];
@@ -164,20 +191,25 @@ export async function validateConfig(
});
}
- if (isGlobalConfig && isGlobalOption(key)) {
- await validateGlobalConfig(
- key,
- val,
- optionTypes[key],
- warnings,
- currentPath,
- );
- continue;
- } else {
- if (isGlobalOption(key) && !isFalseGlobal(key, parentPath)) {
+ if (isGlobalOption(key)) {
+ if (configType === 'global') {
+ await validateGlobalConfig(
+ key,
+ val,
+ optionTypes[key],
+ warnings,
+ errors,
+ currentPath,
+ config,
+ );
+ continue;
+ } else if (
+ !isFalseGlobal(key, parentPath) &&
+ !(configType === 'inherit' && isInhertConfigOption(key))
+ ) {
warnings.push({
topic: 'Configuration Error',
- message: `The "${key}" option is a global option reserved only for Renovate's global configuration and cannot be configured within repository config file.`,
+ message: `The "${key}" option is a global option reserved only for Renovate's global configuration and cannot be configured within a repository's config file.`,
});
continue;
}
@@ -267,7 +299,12 @@ export async function validateConfig(
});
}
} else if (
- ['allowedVersions', 'matchCurrentVersion'].includes(key) &&
+ [
+ 'allowedVersions',
+ 'matchCurrentVersion',
+ 'matchCurrentValue',
+ 'matchNewValue',
+ ].includes(key) &&
isRegexMatch(val)
) {
if (!getRegexPredicate(val)) {
@@ -276,24 +313,6 @@ export async function validateConfig(
message: `Invalid regExp for ${currentPath}: \`${val}\``,
});
}
- } else if (
- key === 'matchCurrentValue' &&
- is.string(val) &&
- !getRegexPredicate(val)
- ) {
- errors.push({
- topic: 'Configuration Error',
- message: `Invalid regExp for ${currentPath}: \`${val}\``,
- });
- } else if (
- key === 'matchNewValue' &&
- is.string(val) &&
- !getRegexPredicate(val)
- ) {
- errors.push({
- topic: 'Configuration Error',
- message: `Invalid regExp for ${currentPath}: \`${val}\``,
- });
} else if (key === 'timezone' && val !== null) {
const [validTimezone, errorMessage] = hasValidTimezone(val as string);
if (!validTimezone) {
@@ -327,7 +346,7 @@ export async function validateConfig(
for (const [subIndex, subval] of val.entries()) {
if (is.object(subval)) {
const subValidation = await validateConfig(
- isGlobalConfig,
+ configType,
subval as RenovateConfig,
isPreset,
`${currentPath}[${subIndex}]`,
@@ -336,6 +355,14 @@ export async function validateConfig(
errors = errors.concat(subValidation.errors);
}
}
+ if (isRegexOrGlobOption(key)) {
+ errors.push(
+ ...regexOrGlobValidator.check({
+ val,
+ currentPath,
+ }),
+ );
+ }
if (key === 'extends') {
for (const subval of val) {
if (is.string(subval)) {
@@ -378,11 +405,13 @@ export async function validateConfig(
'matchDepTypes',
'matchDepNames',
'matchDepPatterns',
+ 'matchDepPrefixes',
'matchPackageNames',
'matchPackagePatterns',
'matchPackagePrefixes',
'excludeDepNames',
'excludeDepPatterns',
+ 'excludeDepPrefixes',
'excludePackageNames',
'excludePackagePatterns',
'excludePackagePrefixes',
@@ -446,6 +475,7 @@ export async function validateConfig(
'separateMajorMinor',
'separateMinorPatch',
'separateMultipleMajor',
+ 'separateMultipleMinor',
'versioning',
];
if (is.nonEmptyArray(resolvedRule.matchUpdateTypes)) {
@@ -627,9 +657,10 @@ export async function validateConfig(
});
}
} else if (key === 'env') {
- const allowedEnvVars = isGlobalConfig
- ? (config.allowedEnv as string[]) ?? []
- : GlobalConfig.get('allowedEnv', []);
+ const allowedEnvVars =
+ configType === 'global'
+ ? (config.allowedEnv as string[]) ?? []
+ : GlobalConfig.get('allowedEnv', []);
for (const [envVarName, envVarValue] of Object.entries(val)) {
if (!is.string(envVarValue)) {
errors.push({
@@ -692,19 +723,28 @@ export async function validateConfig(
if (!allowedKeys.includes(subKey)) {
errors.push({
topic: 'Configuration Error',
- message: `Invalid \`${currentPath}.${key}.${subKey}\` configuration: key is not allowed`,
+ message: `Invalid \`${currentPath}.${subKey}\` configuration: key is not allowed`,
});
} else if (subKey === 'transformTemplates') {
if (!is.array(subValue, is.string)) {
errors.push({
topic: 'Configuration Error',
- message: `Invalid \`${currentPath}.${key}.${subKey}\` configuration: is not an array of string`,
+ message: `Invalid \`${currentPath}.${subKey}\` configuration: is not an array of string`,
+ });
+ }
+ } else if (subKey === 'description') {
+ if (
+ !(is.string(subValue) || is.array(subValue, is.string))
+ ) {
+ errors.push({
+ topic: 'Configuration Error',
+ message: `Invalid \`${currentPath}.${subKey}\` configuration: is not an array of strings`,
});
}
} else if (!is.string(subValue)) {
errors.push({
topic: 'Configuration Error',
- message: `Invalid \`${currentPath}.${key}.${subKey}\` configuration: is a string`,
+ message: `Invalid \`${currentPath}.${subKey}\` configuration: is a string`,
});
}
}
@@ -715,7 +755,7 @@ export async function validateConfig(
.map((option) => option.name);
if (!ignoredObjects.includes(key)) {
const subValidation = await validateConfig(
- isGlobalConfig,
+ configType,
val,
isPreset,
currentPath,
@@ -735,9 +775,10 @@ export async function validateConfig(
}
if (key === 'hostRules' && is.array(val)) {
- const allowedHeaders = isGlobalConfig
- ? (config.allowedHeaders as string[]) ?? []
- : GlobalConfig.get('allowedHeaders', []);
+ const allowedHeaders =
+ configType === 'global'
+ ? (config.allowedHeaders as string[]) ?? []
+ : GlobalConfig.get('allowedHeaders', []);
for (const rule of val as HostRule[]) {
if (!rule.headers) {
continue;
@@ -774,6 +815,19 @@ export async function validateConfig(
return { errors, warnings };
}
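+/**
+ * Returns true when the custom manager defines the field via a template option
+ * or via a named capture group in one of its matchStrings.
+ */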
+function hasField(
+ customManager: Partial<RegexManagerConfig>,
+ field: string,
+): boolean {
+ const templateField = `${field}Template` as keyof RegexManagerTemplates;
+ return !!(
+ customManager[templateField] ??
+ customManager.matchStrings?.some((matchString) =>
+ matchString.includes(`(?<${field}>`),
+ )
+ );
+}
+
function validateRegexManagerFields(
customManager: Partial<RegexManagerConfig>,
currentPath: string,
@@ -801,21 +855,23 @@ function validateRegexManagerFields(
});
}
- const mandatoryFields = ['depName', 'currentValue', 'datasource'];
+ const mandatoryFields = ['currentValue', 'datasource'];
for (const field of mandatoryFields) {
- const templateField = `${field}Template` as keyof RegexManagerTemplates;
- if (
- !customManager[templateField] &&
- !customManager.matchStrings?.some((matchString) =>
- matchString.includes(`(?<${field}>`),
- )
- ) {
+ if (!hasField(customManager, field)) {
errors.push({
topic: 'Configuration Error',
message: `Regex Managers must contain ${field}Template configuration or regex group named ${field}`,
});
}
}
+
+ const nameFields = ['depName', 'packageName'];
+ if (!nameFields.some((field) => hasField(customManager, field))) {
+ errors.push({
+ topic: 'Configuration Error',
+ message: `Regex Managers must contain depName or packageName regex groups or templates`,
+ });
+ }
}
/**
@@ -826,7 +882,9 @@ async function validateGlobalConfig(
val: unknown,
type: string,
warnings: ValidationMessage[],
+ errors: ValidationMessage[],
currentPath: string | undefined,
+ config: RenovateConfig,
): Promise<void> {
if (val !== null) {
if (type === 'string') {
@@ -880,6 +938,17 @@ async function validateGlobalConfig(
message: `Invalid value \`${val}\` for \`${currentPath}\`. The allowed values are ${['default', 'ssh', 'endpoint'].join(', ')}.`,
});
}
+
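+ // 'file' and 's3' report types need a reportPath destination to write to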
+ if (
+ key === 'reportType' &&
+ ['s3', 'file'].includes(val) &&
+ !is.string(config.reportPath)
+ ) {
+ errors.push({
+ topic: 'Configuration Error',
+ message: `reportType '${val}' requires a configured reportPath`,
+ });
+ }
} else {
warnings.push({
topic: 'Configuration Error',
@@ -906,6 +975,14 @@ async function validateGlobalConfig(
}
} else if (type === 'array') {
if (is.array(val)) {
+ if (isRegexOrGlobOption(key)) {
+ warnings.push(
+ ...regexOrGlobValidator.check({
+ val,
+ currentPath: currentPath!,
+ }),
+ );
+ }
if (key === 'gitNoVerify') {
const allowedValues = ['commit', 'push'];
for (const value of val as string[]) {
@@ -917,6 +994,17 @@ async function validateGlobalConfig(
}
}
}
+ if (key === 'mergeConfidenceDatasources') {
+ const allowedValues = supportedDatasources;
+ for (const value of val as string[]) {
+ if (!allowedValues.includes(value)) {
+ warnings.push({
+ topic: 'Configuration Error',
+ message: `Invalid value \`${value}\` for \`${currentPath}\`. The allowed values are ${allowedValues.join(', ')}.`,
+ });
+ }
+ }
+ }
} else {
warnings.push({
topic: 'Configuration Error',
@@ -926,14 +1014,14 @@ async function validateGlobalConfig(
} else if (type === 'object') {
if (is.plainObject(val)) {
if (key === 'onboardingConfig') {
- const subValidation = await validateConfig(false, val);
+ const subValidation = await validateConfig('repo', val);
for (const warning of subValidation.warnings.concat(
subValidation.errors,
)) {
warnings.push(warning);
}
} else if (key === 'force') {
- const subValidation = await validateConfig(true, val);
+ const subValidation = await validateConfig('global', val);
for (const warning of subValidation.warnings.concat(
subValidation.errors,
)) {
diff --git a/lib/constants/error-messages.ts b/lib/constants/error-messages.ts
index 95daa453ee8ecb..434c39926f9d10 100644
--- a/lib/constants/error-messages.ts
+++ b/lib/constants/error-messages.ts
@@ -17,6 +17,8 @@ export const CONFIG_PRESETS_INVALID = 'config-presets-invalid';
export const CONFIG_SECRETS_EXPOSED = 'config-secrets-exposed';
export const CONFIG_SECRETS_INVALID = 'config-secrets-invalid';
export const CONFIG_GIT_URL_UNAVAILABLE = 'config-git-url-unavailable';
+export const CONFIG_INHERIT_NOT_FOUND = 'config-inherit-not-found';
+export const CONFIG_INHERIT_PARSE_ERROR = 'config-inherit-parse-error';
// Repository Errors - causes repo to be considered as disabled
export const REPOSITORY_ACCESS_FORBIDDEN = 'forbidden';
@@ -28,6 +30,8 @@ export const REPOSITORY_CLOSED_ONBOARDING = 'disabled-closed-onboarding';
export const REPOSITORY_DISABLED_BY_CONFIG = 'disabled-by-config';
export const REPOSITORY_NO_CONFIG = 'disabled-no-config';
export const REPOSITORY_EMPTY = 'empty';
+export const REPOSITORY_FORK_MISSING = 'fork-missing';
+export const REPOSITORY_FORK_MODE_FORKED = 'fork-mode-forked';
export const REPOSITORY_FORKED = 'fork';
export const REPOSITORY_MIRRORED = 'mirror';
export const REPOSITORY_NOT_FOUND = 'not-found';
diff --git a/lib/constants/platforms.ts b/lib/constants/platforms.ts
index d1ee8156312ba6..65d5c60a7e931d 100644
--- a/lib/constants/platforms.ts
+++ b/lib/constants/platforms.ts
@@ -14,7 +14,6 @@ export const GITEA_API_USING_HOST_TYPES = [
'gitea-changelog',
'gitea-releases',
'gitea-tags',
- 'custom',
];
export const GITHUB_API_USING_HOST_TYPES = [
@@ -26,7 +25,6 @@ export const GITHUB_API_USING_HOST_TYPES = [
'hermit',
'github-changelog',
'conan',
- 'custom',
];
export const GITLAB_API_USING_HOST_TYPES = [
@@ -36,12 +34,10 @@ export const GITLAB_API_USING_HOST_TYPES = [
'gitlab-packages',
'gitlab-changelog',
'pypi',
- 'custom',
];
export const BITBUCKET_API_USING_HOST_TYPES = [
'bitbucket',
'bitbucket-changelog',
'bitbucket-tags',
- 'custom',
];
diff --git a/lib/expose.cjs b/lib/expose.cjs
index b5ee8169d57ba1..c2d3c5d22e6002 100644
--- a/lib/expose.cjs
+++ b/lib/expose.cjs
@@ -22,4 +22,26 @@ function prettier() {
return require('prettier');
}
-module.exports = { re2, pkg, prettier };
+/**
+ * returns openpgp
+ * @returns {typeof import('openpgp')}
+ */
+function openpgp() {
+ return require('openpgp');
+}
+
+/**
+ * returns sqlite
+ * @returns {typeof import('better-sqlite3')}
+ */
+function sqlite() {
+ return require('better-sqlite3');
+}
+
+module.exports = {
+ re2,
+ pkg,
+ openpgp,
+ prettier,
+ sqlite,
+};
diff --git a/lib/instrumentation/__mocks__/index.ts b/lib/instrumentation/__mocks__/index.ts
deleted file mode 100644
index be8ac341cd32d3..00000000000000
--- a/lib/instrumentation/__mocks__/index.ts
+++ /dev/null
@@ -1,5 +0,0 @@
-import { NoopTracer } from '@opentelemetry/api/build/src/trace/NoopTracer';
-import { NoopTracerProvider } from '@opentelemetry/api/build/src/trace/NoopTracerProvider';
-
-export const getTracerProvider = jest.fn(args => new NoopTracerProvider());
-export const getTracer = jest.fn(args => new NoopTracer());
diff --git a/lib/instrumentation/index.spec.ts b/lib/instrumentation/index.spec.ts
index ddb347e0155c6b..6e3927311bdcef 100644
--- a/lib/instrumentation/index.spec.ts
+++ b/lib/instrumentation/index.spec.ts
@@ -1,6 +1,5 @@
import { ProxyTracerProvider } from '@opentelemetry/api';
import * as api from '@opentelemetry/api';
-import { NoopTracerProvider } from '@opentelemetry/api/build/src/trace/NoopTracerProvider';
import { NodeTracerProvider } from '@opentelemetry/sdk-trace-node';
import {
disableInstrumentations,
@@ -28,7 +27,7 @@ describe('instrumentation/index', () => {
const traceProvider = getTracerProvider();
expect(traceProvider).toBeInstanceOf(ProxyTracerProvider);
const provider = traceProvider as ProxyTracerProvider;
- expect(provider.getDelegate()).toBeInstanceOf(NoopTracerProvider);
+ expect(provider.constructor.name).toBe('ProxyTracerProvider');
});
it('activate console logger', () => {
diff --git a/lib/instrumentation/reporting.spec.ts b/lib/instrumentation/reporting.spec.ts
new file mode 100644
index 00000000000000..494e4fd203b0d5
--- /dev/null
+++ b/lib/instrumentation/reporting.spec.ts
@@ -0,0 +1,227 @@
+import type { S3Client } from '@aws-sdk/client-s3';
+import { mockDeep } from 'jest-mock-extended';
+import { s3 } from '../../test/s3';
+import { fs, logger } from '../../test/util';
+import type { RenovateConfig } from '../config/types';
+import type { PackageFile } from '../modules/manager/types';
+import type { BranchCache } from '../util/cache/repository/types';
+import {
+ addBranchStats,
+ addExtractionStats,
+ exportStats,
+ finalizeReport,
+ getReport,
+} from './reporting';
+
+jest.mock('../util/fs', () => mockDeep());
+jest.mock('../util/s3', () => mockDeep());
+jest.mock('../logger', () => mockDeep());
+
+describe('instrumentation/reporting', () => {
+ const branchInformation: Partial<BranchCache>[] = [
+ {
+ branchName: 'a-branch-name',
+ prNo: 20,
+ upgrades: [
+ {
+ currentVersion: '21.1.1',
+ currentValue: 'v21.1.1',
+ newVersion: '22.0.0',
+ newValue: 'v22.0.0',
+ },
+ ],
+ },
+ ];
+ const packageFiles: Record<string, PackageFile[]> = {
+ terraform: [
+ {
+ packageFile: 'terraform/versions.tf',
+ deps: [
+ {
+ currentValue: 'v21.1.1',
+ currentVersion: '4.4.3',
+ updates: [
+ {
+ bucket: 'non-major',
+ newVersion: '4.7.0',
+ newValue: '~> 4.7.0',
+ },
+ ],
+ },
+ ],
+ },
+ ],
+ };
+
+ const expectedReport = {
+ problems: [],
+ repositories: {
+ 'myOrg/myRepo': {
+ problems: [],
+ branches: branchInformation,
+ packageFiles,
+ },
+ },
+ };
+
+ it('return empty report if no stats have been added', () => {
+ const config = {};
+ addBranchStats(config, []);
+ addExtractionStats(config, {
+ branchList: [],
+ branches: [],
+ packageFiles: {},
+ });
+
+ expect(getReport()).toEqual({
+ problems: [],
+ repositories: {},
+ });
+ });
+
+ it('return report if reportType is set to logging', () => {
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 'logging',
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ expect(getReport()).toEqual(expectedReport);
+ });
+
+ it('log report if reportType is set to logging', async () => {
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 'logging',
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ await exportStats(config);
+ expect(logger.logger.info).toHaveBeenCalledWith(
+ { report: expectedReport },
+ 'Printing report',
+ );
+ });
+
+ it('write report if reportType is set to file', async () => {
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 'file',
+ reportPath: './report.json',
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ await exportStats(config);
+ expect(fs.writeSystemFile).toHaveBeenCalledWith(
+ config.reportPath,
+ JSON.stringify(expectedReport),
+ );
+ });
+
+ it('send report to an S3 bucket if reportType is s3', async () => {
+ const mockClient = mockDeep<S3Client>();
+ s3.parseS3Url.mockReturnValue({ Bucket: 'bucket-name', Key: 'key-name' });
+ // @ts-expect-error TS2589
+ s3.getS3Client.mockReturnValue(mockClient);
+
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 's3',
+ reportPath: 's3://bucket-name/key-name',
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ await exportStats(config);
+ expect(mockClient.send.mock.calls[0][0]).toMatchObject({
+ input: {
+ Body: JSON.stringify(expectedReport),
+ },
+ });
+ });
+
+ it('handle failed parsing of S3 url', async () => {
+ s3.parseS3Url.mockReturnValue(null);
+
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 's3',
+ reportPath: 'aPath',
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ await exportStats(config);
+ expect(logger.logger.warn).toHaveBeenCalledWith(
+ { reportPath: config.reportPath },
+ 'Failed to parse s3 URL',
+ );
+ });
+
+ it('catch exception', async () => {
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 'file',
+ reportPath: './report.json',
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ fs.writeSystemFile.mockRejectedValue(null);
+ await expect(exportStats(config)).toResolve();
+ });
+
+ it('should add problems to report', () => {
+ const config: RenovateConfig = {
+ repository: 'myOrg/myRepo',
+ reportType: 'logging',
+ };
+ const expectedReport = {
+ problems: [
+ {
+ level: 30,
+ msg: 'a root problem',
+ },
+ ],
+ repositories: {
+ 'myOrg/myRepo': {
+ problems: [
+ {
+ level: 30,
+ msg: 'a repo problem',
+ },
+ ],
+ branches: branchInformation,
+ packageFiles,
+ },
+ },
+ };
+
+ addBranchStats(config, branchInformation);
+ addExtractionStats(config, { branchList: [], branches: [], packageFiles });
+
+ logger.getProblems.mockReturnValue([
+ {
+ repository: 'myOrg/myRepo',
+ level: 30,
+ msg: 'a repo problem',
+ },
+ {
+ level: 30,
+ msg: 'a root problem',
+ },
+ ]);
+ finalizeReport();
+
+ expect(getReport()).toEqual(expectedReport);
+ });
+});
diff --git a/lib/instrumentation/reporting.ts b/lib/instrumentation/reporting.ts
new file mode 100644
index 00000000000000..8b422f3d3d9b41
--- /dev/null
+++ b/lib/instrumentation/reporting.ts
@@ -0,0 +1,115 @@
+import { PutObjectCommand, PutObjectCommandInput } from '@aws-sdk/client-s3';
+import is from '@sindresorhus/is';
+import type { RenovateConfig } from '../config/types';
+import { getProblems, logger } from '../logger';
+import type { BranchCache } from '../util/cache/repository/types';
+import { writeSystemFile } from '../util/fs';
+import { getS3Client, parseS3Url } from '../util/s3';
+import type { ExtractResult } from '../workers/repository/process/extract-update';
+import type { Report } from './types';
+
+const report: Report = {
+ problems: [],
+ repositories: {},
+};
+
+export function addBranchStats(
+ config: RenovateConfig,
+ branchesInformation: Partial<BranchCache>[],
+): void {
+ if (is.nullOrUndefined(config.reportType)) {
+ return;
+ }
+
+ coerceRepo(config.repository!);
+ report.repositories[config.repository!].branches = branchesInformation;
+}
+
+export function addExtractionStats(
+ config: RenovateConfig,
+ extractResult: ExtractResult,
+): void {
+ if (is.nullOrUndefined(config.reportType)) {
+ return;
+ }
+
+ coerceRepo(config.repository!);
+ report.repositories[config.repository!].packageFiles =
+ extractResult.packageFiles;
+}
+
+export function finalizeReport(): void {
+ const allProblems = structuredClone(getProblems());
+ for (const problem of allProblems) {
+ const repository = problem.repository;
+ delete problem.repository;
+
+ // if the problem can be connected to a repository, add it there, else add it to the root list
+ if (repository) {
+ coerceRepo(repository);
+ report.repositories[repository].problems.push(problem);
+ } else {
+ report.problems.push(problem);
+ }
+ }
+}
+
+export async function exportStats(config: RenovateConfig): Promise<void> {
+ try {
+ if (is.nullOrUndefined(config.reportType)) {
+ return;
+ }
+
+ if (config.reportType === 'logging') {
+ logger.info({ report }, 'Printing report');
+ return;
+ }
+
+ if (config.reportType === 'file') {
+ const path = config.reportPath!;
+ await writeSystemFile(path, JSON.stringify(report));
+ logger.debug({ path }, 'Writing report');
+ return;
+ }
+
+ if (config.reportType === 's3') {
+ const s3Url = parseS3Url(config.reportPath!);
+ if (is.nullOrUndefined(s3Url)) {
+ logger.warn(
+ { reportPath: config.reportPath },
+ 'Failed to parse s3 URL',
+ );
+ return;
+ }
+
+ const s3Params: PutObjectCommandInput = {
+ Bucket: s3Url.Bucket,
+ Key: s3Url.Key,
+ Body: JSON.stringify(report),
+ ContentType: 'application/json',
+ };
+
+ const client = getS3Client();
+ const command = new PutObjectCommand(s3Params);
+ await client.send(command);
+ }
+ } catch (err) {
+ logger.warn({ err }, 'Reporting.exportStats() - failure');
+ }
+}
+
+export function getReport(): Report {
+ return structuredClone(report);
+}
+
+function coerceRepo(repository: string): void {
+ if (!is.undefined(report.repositories[repository])) {
+ return;
+ }
+
+ report.repositories[repository] = {
+ problems: [],
+ branches: [],
+ packageFiles: {},
+ };
+}
diff --git a/lib/instrumentation/types.ts b/lib/instrumentation/types.ts
index a753ecb56d1436..d4e86c1c7ea6cc 100644
--- a/lib/instrumentation/types.ts
+++ b/lib/instrumentation/types.ts
@@ -1,4 +1,7 @@
import type { Attributes, SpanKind } from '@opentelemetry/api';
+import type { BunyanRecord } from '../logger/types';
+import type { PackageFile } from '../modules/manager/types';
+import type { BranchCache } from '../util/cache/repository/types';
/**
* The instrumentation decorator parameters.
@@ -24,3 +27,14 @@ export interface SpanParameters {
*/
kind?: SpanKind;
}
+
+export interface Report {
+ problems: BunyanRecord[];
+ repositories: Record<string, RepoReport>;
+}
+
+interface RepoReport {
+ problems: BunyanRecord[];
+ branches: Partial<BranchCache>[];
+ packageFiles: Record<string, PackageFile[]>;
+}
diff --git a/lib/modules/datasource/__snapshots__/metadata.spec.ts.snap b/lib/modules/datasource/__snapshots__/metadata.spec.ts.snap
index 74602e1015d630..3954823e05f0fb 100644
--- a/lib/modules/datasource/__snapshots__/metadata.spec.ts.snap
+++ b/lib/modules/datasource/__snapshots__/metadata.spec.ts.snap
@@ -34,7 +34,7 @@ exports[`modules/datasource/metadata Should handle failed parsing of sourceUrls
exports[`modules/datasource/metadata Should handle manualChangelogUrls 1`] = `
{
- "changelogUrl": "https://github.com/django/django/tree/master/docs/releases",
+ "changelogUrl": "https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt",
"releases": [
{
"releaseTimestamp": "2018-07-13T10:14:17.000Z",
@@ -53,7 +53,7 @@ exports[`modules/datasource/metadata Should handle manualChangelogUrls 1`] = `
"version": "2.2.0",
},
],
- "sourceUrl": "https://github.com/django/django",
+ "sourceUrl": "https://github.com/flyingcircusio/pycountry",
}
`;
diff --git a/lib/modules/datasource/api.ts b/lib/modules/datasource/api.ts
index 4ba206675060c8..ff31d16c158362 100644
--- a/lib/modules/datasource/api.ts
+++ b/lib/modules/datasource/api.ts
@@ -51,6 +51,7 @@ import { PackagistDatasource } from './packagist';
import { PodDatasource } from './pod';
import { PuppetForgeDatasource } from './puppet-forge';
import { PypiDatasource } from './pypi';
+import { PythonVersionDatasource } from './python-version';
import { RepologyDatasource } from './repology';
import { RubyVersionDatasource } from './ruby-version';
import { RubyGemsDatasource } from './rubygems';
@@ -59,6 +60,7 @@ import { SbtPluginDatasource } from './sbt-plugin';
import { TerraformModuleDatasource } from './terraform-module';
import { TerraformProviderDatasource } from './terraform-provider';
import type { DatasourceApi } from './types';
+import { Unity3dDatasource } from './unity3d';
const api = new Map<string, DatasourceApi>();
export default api;
@@ -119,6 +121,7 @@ api.set(PackagistDatasource.id, new PackagistDatasource());
api.set(PodDatasource.id, new PodDatasource());
api.set(PuppetForgeDatasource.id, new PuppetForgeDatasource());
api.set(PypiDatasource.id, new PypiDatasource());
+api.set(PythonVersionDatasource.id, new PythonVersionDatasource());
api.set(RepologyDatasource.id, new RepologyDatasource());
api.set(RubyVersionDatasource.id, new RubyVersionDatasource());
api.set(RubyGemsDatasource.id, new RubyGemsDatasource());
@@ -126,3 +129,4 @@ api.set(SbtPackageDatasource.id, new SbtPackageDatasource());
api.set(SbtPluginDatasource.id, new SbtPluginDatasource());
api.set(TerraformModuleDatasource.id, new TerraformModuleDatasource());
api.set(TerraformProviderDatasource.id, new TerraformProviderDatasource());
+api.set(Unity3dDatasource.id, new Unity3dDatasource());
diff --git a/lib/modules/datasource/artifactory/index.ts b/lib/modules/datasource/artifactory/index.ts
index 86fe6918b04b44..e66ffb4ebaac59 100644
--- a/lib/modules/datasource/artifactory/index.ts
+++ b/lib/modules/datasource/artifactory/index.ts
@@ -21,6 +21,10 @@ export class ArtifactoryDatasource extends Datasource {
override readonly registryStrategy = 'merge';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the date-like text, next to the version hyperlink tag in the results.';
+
@cache({
namespace: `datasource-${datasource}`,
key: ({ registryUrl, packageName }: GetReleasesConfig) =>
diff --git a/lib/modules/datasource/aws-machine-image/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/aws-machine-image/__snapshots__/index.spec.ts.snap
new file mode 100644
index 00000000000000..1f010f02364d44
--- /dev/null
+++ b/lib/modules/datasource/aws-machine-image/__snapshots__/index.spec.ts.snap
@@ -0,0 +1,115 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`modules/datasource/aws-machine-image/index getSortedAwsMachineImages() with 1 returned image 1`] = `
+[
+ DescribeImagesCommand {
+ "deserialize": [Function],
+ "input": {
+ "Filters": [
+ {
+ "Name": "owner-id",
+ "Values": [
+ "602401143452",
+ ],
+ },
+ {
+ "Name": "name",
+ "Values": [
+ "1image",
+ ],
+ },
+ ],
+ },
+ "middlewareStack": {
+ "add": [Function],
+ "addRelativeTo": [Function],
+ "applyToStack": [Function],
+ "clone": [Function],
+ "concat": [Function],
+ "identify": [Function],
+ "identifyOnResolve": [Function],
+ "remove": [Function],
+ "removeByTag": [Function],
+ "resolve": [Function],
+ "use": [Function],
+ },
+ "serialize": [Function],
+ },
+]
+`;
+
+exports[`modules/datasource/aws-machine-image/index getSortedAwsMachineImages() with 3 returned images 1`] = `
+[
+ DescribeImagesCommand {
+ "deserialize": [Function],
+ "input": {
+ "Filters": [
+ {
+ "Name": "owner-id",
+ "Values": [
+ "602401143452",
+ ],
+ },
+ {
+ "Name": "name",
+ "Values": [
+ "3images",
+ ],
+ },
+ ],
+ },
+ "middlewareStack": {
+ "add": [Function],
+ "addRelativeTo": [Function],
+ "applyToStack": [Function],
+ "clone": [Function],
+ "concat": [Function],
+ "identify": [Function],
+ "identifyOnResolve": [Function],
+ "remove": [Function],
+ "removeByTag": [Function],
+ "resolve": [Function],
+ "use": [Function],
+ },
+ "serialize": [Function],
+ },
+]
+`;
+
+exports[`modules/datasource/aws-machine-image/index getSortedAwsMachineImages() without returned images 1`] = `
+[
+ DescribeImagesCommand {
+ "deserialize": [Function],
+ "input": {
+ "Filters": [
+ {
+ "Name": "owner-id",
+ "Values": [
+ "602401143452",
+ ],
+ },
+ {
+ "Name": "name",
+ "Values": [
+ "noiamge",
+ ],
+ },
+ ],
+ },
+ "middlewareStack": {
+ "add": [Function],
+ "addRelativeTo": [Function],
+ "applyToStack": [Function],
+ "clone": [Function],
+ "concat": [Function],
+ "identify": [Function],
+ "identifyOnResolve": [Function],
+ "remove": [Function],
+ "removeByTag": [Function],
+ "resolve": [Function],
+ "use": [Function],
+ },
+ "serialize": [Function],
+ },
+]
+`;
diff --git a/lib/modules/datasource/aws-machine-image/index.spec.ts b/lib/modules/datasource/aws-machine-image/index.spec.ts
index c0b3ad7b8569de..c966a7b8570923 100644
--- a/lib/modules/datasource/aws-machine-image/index.spec.ts
+++ b/lib/modules/datasource/aws-machine-image/index.spec.ts
@@ -146,40 +146,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
);
expect(res).toStrictEqual([image1, image2, image3]);
expect(ec2Mock.calls()).toHaveLength(1);
- expect(ec2Mock.calls()[0].args).toMatchInlineSnapshot(`
- [
- DescribeImagesCommand {
- "input": {
- "Filters": [
- {
- "Name": "owner-id",
- "Values": [
- "602401143452",
- ],
- },
- {
- "Name": "name",
- "Values": [
- "3images",
- ],
- },
- ],
- },
- "middlewareStack": {
- "add": [Function],
- "addRelativeTo": [Function],
- "applyToStack": [Function],
- "clone": [Function],
- "concat": [Function],
- "identify": [Function],
- "remove": [Function],
- "removeByTag": [Function],
- "resolve": [Function],
- "use": [Function],
- },
- },
- ]
- `);
+ expect(ec2Mock.calls()[0].args).toMatchSnapshot();
});
it('with 1 returned image', async () => {
@@ -190,40 +157,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
);
expect(res).toStrictEqual([image3]);
expect(ec2Mock.calls()).toHaveLength(1);
- expect(ec2Mock.calls()[0].args).toMatchInlineSnapshot(`
- [
- DescribeImagesCommand {
- "input": {
- "Filters": [
- {
- "Name": "owner-id",
- "Values": [
- "602401143452",
- ],
- },
- {
- "Name": "name",
- "Values": [
- "1image",
- ],
- },
- ],
- },
- "middlewareStack": {
- "add": [Function],
- "addRelativeTo": [Function],
- "applyToStack": [Function],
- "clone": [Function],
- "concat": [Function],
- "identify": [Function],
- "remove": [Function],
- "removeByTag": [Function],
- "resolve": [Function],
- "use": [Function],
- },
- },
- ]
- `);
+ expect(ec2Mock.calls()[0].args).toMatchSnapshot();
});
it('without returned images', async () => {
@@ -234,40 +168,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
);
expect(res).toStrictEqual([]);
expect(ec2Mock.calls()).toHaveLength(1);
- expect(ec2Mock.calls()[0].args).toMatchInlineSnapshot(`
- [
- DescribeImagesCommand {
- "input": {
- "Filters": [
- {
- "Name": "owner-id",
- "Values": [
- "602401143452",
- ],
- },
- {
- "Name": "name",
- "Values": [
- "noiamge",
- ],
- },
- ],
- },
- "middlewareStack": {
- "add": [Function],
- "addRelativeTo": [Function],
- "applyToStack": [Function],
- "clone": [Function],
- "concat": [Function],
- "identify": [Function],
- "remove": [Function],
- "removeByTag": [Function],
- "resolve": [Function],
- "use": [Function],
- },
- },
- ]
- `);
+ expect(ec2Mock.calls()[0].args).toMatchSnapshot();
});
});
diff --git a/lib/modules/datasource/aws-machine-image/index.ts b/lib/modules/datasource/aws-machine-image/index.ts
index 8c15677505b0aa..455e1d33bad46e 100644
--- a/lib/modules/datasource/aws-machine-image/index.ts
+++ b/lib/modules/datasource/aws-machine-image/index.ts
@@ -18,6 +18,10 @@ export class AwsMachineImageDataSource extends Datasource {
override readonly caching = true;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `CreationDate` field in the results.';
+
override readonly defaultConfig = {
// Because AMIs don't follow any versioning scheme, we override commitMessageExtra to remove the 'v'
commitMessageExtra: 'to {{{newVersion}}}',
diff --git a/lib/modules/datasource/azure-bicep-resource/index.spec.ts b/lib/modules/datasource/azure-bicep-resource/index.spec.ts
index 4afbff7d53e9ee..0b1dff2be12647 100644
--- a/lib/modules/datasource/azure-bicep-resource/index.spec.ts
+++ b/lib/modules/datasource/azure-bicep-resource/index.spec.ts
@@ -28,7 +28,7 @@ describe('modules/datasource/azure-bicep-resource/index', () => {
expect(result).toBeNull();
});
- it('should return versions when package is a function', async () => {
+ it('should return null when package is a function', async () => {
httpMock
.scope(gitHubHost)
.get(indexPath)
@@ -57,23 +57,10 @@ describe('modules/datasource/azure-bicep-resource/index', () => {
const azureBicepResourceDatasource = new AzureBicepResourceDatasource();
const result = await azureBicepResourceDatasource.getReleases({
- packageName: 'Microsoft.Billing/billingAccounts',
+ packageName: 'unknown',
});
- expect(result).toEqual({
- releases: [
- {
- version: '2019-10-01-preview',
- changelogUrl:
- 'https://learn.microsoft.com/en-us/azure/templates/microsoft.billing/change-log/billingaccounts#2019-10-01-preview',
- },
- {
- version: '2020-05-01',
- changelogUrl:
- 'https://learn.microsoft.com/en-us/azure/templates/microsoft.billing/change-log/billingaccounts#2020-05-01',
- },
- ],
- });
+ expect(result).toBeNull();
});
it('should return versions when package is a resource', async () => {
@@ -117,4 +104,46 @@ describe('modules/datasource/azure-bicep-resource/index', () => {
],
});
});
+
+ it('should return versions when package is a resource and a function', async () => {
+ httpMock
+ .scope(gitHubHost)
+ .get(indexPath)
+ .reply(
+ 200,
+ codeBlock`
+ {
+ "resources": {
+ "Microsoft.OperationalInsights/workspaces@2023-09-01": {
+ "$ref": "operationalinsights/microsoft.operationalinsights/2023-09-01/types.json#/31"
+ }
+ },
+ "resourceFunctions": {
+ "microsoft.operationalinsights/workspaces": {
+ "2015-03-20": [
+ {
+ "$ref": "operationalinsights/workspaces/2015-03-20/types.json#/304"
+ }
+ ]
+ }
+ }
+ }
+ `,
+ );
+
+ const azureBicepResourceDatasource = new AzureBicepResourceDatasource();
+ const result = await azureBicepResourceDatasource.getReleases({
+ packageName: 'Microsoft.OperationalInsights/workspaces',
+ });
+
+ expect(result).toEqual({
+ releases: [
+ {
+ version: '2023-09-01',
+ changelogUrl:
+ 'https://learn.microsoft.com/en-us/azure/templates/microsoft.operationalinsights/change-log/workspaces#2023-09-01',
+ },
+ ],
+ });
+ });
});
diff --git a/lib/modules/datasource/azure-bicep-resource/schema.ts b/lib/modules/datasource/azure-bicep-resource/schema.ts
index 70dd2938f54f03..c94db6faea33de 100644
--- a/lib/modules/datasource/azure-bicep-resource/schema.ts
+++ b/lib/modules/datasource/azure-bicep-resource/schema.ts
@@ -3,9 +3,8 @@ import { z } from 'zod';
export const BicepResourceVersionIndex = z
.object({
resources: z.record(z.string(), z.unknown()),
- resourceFunctions: z.record(z.string(), z.record(z.string(), z.unknown())),
})
- .transform(({ resources, resourceFunctions }) => {
+ .transform(({ resources }) => {
const releaseMap = new Map<string, string[]>();
for (const resourceReference of Object.keys(resources)) {
@@ -15,11 +14,6 @@ export const BicepResourceVersionIndex = z
releaseMap.set(type, versions);
}
- for (const [type, versionMap] of Object.entries(resourceFunctions)) {
- const versions = Object.keys(versionMap);
- releaseMap.set(type, versions);
- }
-
return Object.fromEntries(releaseMap);
});
diff --git a/lib/modules/datasource/bitbucket-tags/index.spec.ts b/lib/modules/datasource/bitbucket-tags/index.spec.ts
index 4a8780d689e67f..08f8e8d9a682f0 100644
--- a/lib/modules/datasource/bitbucket-tags/index.spec.ts
+++ b/lib/modules/datasource/bitbucket-tags/index.spec.ts
@@ -62,7 +62,11 @@ describe('modules/datasource/bitbucket-tags/index', () => {
httpMock
.scope('https://api.bitbucket.org')
.get('/2.0/repositories/some/dep2')
- .reply(200, { mainbranch: { name: 'master' } });
+ .reply(200, {
+ mainbranch: { name: 'master' },
+ uuid: '123',
+ full_name: 'some/repo',
+ });
httpMock
.scope('https://api.bitbucket.org')
.get('/2.0/repositories/some/dep2/commits/master')
@@ -87,7 +91,11 @@ describe('modules/datasource/bitbucket-tags/index', () => {
httpMock
.scope('https://api.bitbucket.org')
.get('/2.0/repositories/some/dep2')
- .reply(200, { mainbranch: { name: 'master' } });
+ .reply(200, {
+ mainbranch: { name: 'master' },
+ uuid: '123',
+ full_name: 'some/repo',
+ });
httpMock
.scope('https://api.bitbucket.org')
.get('/2.0/repositories/some/dep2/commits/master')
diff --git a/lib/modules/datasource/bitbucket-tags/index.ts b/lib/modules/datasource/bitbucket-tags/index.ts
index 7a2457c0c3a324..165336cce165b9 100644
--- a/lib/modules/datasource/bitbucket-tags/index.ts
+++ b/lib/modules/datasource/bitbucket-tags/index.ts
@@ -2,7 +2,8 @@ import { cache } from '../../../util/cache/package/decorator';
import type { PackageCacheNamespace } from '../../../util/cache/package/types';
import { BitbucketHttp } from '../../../util/http/bitbucket';
import { ensureTrailingSlash } from '../../../util/url';
-import type { PagedResult, RepoInfoBody } from '../../platform/bitbucket/types';
+import { RepoInfo } from '../../platform/bitbucket/schema';
+import type { PagedResult } from '../../platform/bitbucket/types';
import { Datasource } from '../datasource';
import type { DigestConfig, GetReleasesConfig, ReleaseResult } from '../types';
import type { BitbucketCommit, BitbucketTag } from './types';
@@ -18,6 +19,13 @@ export class BitbucketTagsDatasource extends Datasource {
static readonly defaultRegistryUrls = ['https://bitbucket.org'];
+ static readonly releaseTimestampSupport = true;
+ static readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `date` field in the results.';
+ static readonly sourceUrlSupport = 'package';
+ static readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
static readonly cacheNamespace: PackageCacheNamespace = `datasource-${BitbucketTagsDatasource.id}`;
constructor() {
@@ -102,10 +110,8 @@ export class BitbucketTagsDatasource extends Datasource {
})
async getMainBranch(repo: string): Promise<string> {
return (
- await this.bitbucketHttp.getJson<RepoInfoBody>(
- `/2.0/repositories/${repo}`,
- )
- ).body.mainbranch.name;
+ await this.bitbucketHttp.getJson(`/2.0/repositories/${repo}`, RepoInfo)
+ ).body.mainbranch;
}
// getDigest fetched the latest commit for repository main branch
diff --git a/lib/modules/datasource/cdnjs/index.ts b/lib/modules/datasource/cdnjs/index.ts
index 6a142235e97fe3..daf17b6961b725 100644
--- a/lib/modules/datasource/cdnjs/index.ts
+++ b/lib/modules/datasource/cdnjs/index.ts
@@ -28,6 +28,10 @@ export class CdnJsDatasource extends Datasource {
override readonly defaultRegistryUrls = ['https://api.cdnjs.com/'];
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `repository` field in the results.';
+
@cache({
namespace: `datasource-${CdnJsDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => packageName.split('/')[0],
diff --git a/lib/modules/datasource/clojure/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/clojure/__snapshots__/index.spec.ts.snap
index 54e15f3e176c63..483eac0146289c 100644
--- a/lib/modules/datasource/clojure/__snapshots__/index.spec.ts.snap
+++ b/lib/modules/datasource/clojure/__snapshots__/index.spec.ts.snap
@@ -6,6 +6,7 @@ exports[`modules/datasource/clojure/index falls back to next registry url 1`] =
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://clojars.org/repo",
"releases": [
{
@@ -53,6 +54,7 @@ exports[`modules/datasource/clojure/index returns releases from custom repositor
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://custom.registry.renovatebot.com",
"releases": [
{
@@ -80,6 +82,7 @@ exports[`modules/datasource/clojure/index skips registry with invalid XML 1`] =
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://clojars.org/repo",
"releases": [
{
@@ -107,6 +110,7 @@ exports[`modules/datasource/clojure/index skips registry with invalid metadata s
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://clojars.org/repo",
"releases": [
{
diff --git a/lib/modules/datasource/common.spec.ts b/lib/modules/datasource/common.spec.ts
index 703283f9c0e4a7..a86c6a2d3f43de 100644
--- a/lib/modules/datasource/common.spec.ts
+++ b/lib/modules/datasource/common.spec.ts
@@ -226,6 +226,25 @@ describe('modules/datasource/common', () => {
releases: [{ version: '1.0.0' }, { version: '2.0.0' }],
});
});
+
+ it('should match exact constraints', () => {
+ const config = {
+ datasource: 'pypi',
+ packageName: 'bar',
+ versioning: 'pep440',
+ constraintsFiltering: 'strict' as const,
+ constraints: { python: '>=3.8' },
+ };
+ const releaseResult = {
+ releases: [
+ { version: '1.0.0', constraints: { python: ['^1.0.0'] } },
+ { version: '2.0.0', constraints: { python: ['>=3.8'] } },
+ ],
+ };
+ expect(applyConstraintsFiltering(releaseResult, config)).toEqual({
+ releases: [{ version: '2.0.0' }],
+ });
+ });
});
describe('applyVersionCompatibility', () => {
diff --git a/lib/modules/datasource/common.ts b/lib/modules/datasource/common.ts
index 6b952da4f49ac7..efda4b39c46799 100644
--- a/lib/modules/datasource/common.ts
+++ b/lib/modules/datasource/common.ts
@@ -208,6 +208,11 @@ export function applyConstraintsFiltering<
break;
}
+ if (configConstraint === releaseConstraint) {
+ satisfiesConstraints = true;
+ break;
+ }
+
if (versioning.subset?.(configConstraint, releaseConstraint)) {
satisfiesConstraints = true;
break;
diff --git a/lib/modules/datasource/conan/index.ts b/lib/modules/datasource/conan/index.ts
index c4a7179b781ae6..2a82502dc00790 100644
--- a/lib/modules/datasource/conan/index.ts
+++ b/lib/modules/datasource/conan/index.ts
@@ -38,6 +38,10 @@ export class ConanDatasource extends Datasource {
githubHttp: GithubHttp;
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is supported only if the package is served from the Artifactory servers, in which case we determine it from the `properties[conan.package.url]` field in the results.';
+
constructor(id = ConanDatasource.id) {
super(id);
this.githubHttp = new GithubHttp(id);
diff --git a/lib/modules/datasource/conda/index.ts b/lib/modules/datasource/conda/index.ts
index b32cd4de63748b..980eb8de9be103 100644
--- a/lib/modules/datasource/conda/index.ts
+++ b/lib/modules/datasource/conda/index.ts
@@ -23,6 +23,10 @@ export class CondaDatasource extends Datasource {
override readonly caching = true;
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `dev_url` field in the results.';
+
@cache({
namespace: `datasource-${datasource}`,
key: ({ registryUrl, packageName }: GetReleasesConfig) =>
diff --git a/lib/modules/datasource/cpan/index.ts b/lib/modules/datasource/cpan/index.ts
index ee8a808da54bf9..85436a1d925463 100644
--- a/lib/modules/datasource/cpan/index.ts
+++ b/lib/modules/datasource/cpan/index.ts
@@ -18,6 +18,10 @@ export class CpanDatasource extends Datasource {
override readonly defaultVersioning = perlVersioning.id;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `date` field in the results.';
+
@cache({
namespace: `datasource-${CpanDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => `${packageName}`,
diff --git a/lib/modules/datasource/crate/__fixtures__/amethyst b/lib/modules/datasource/crate/__fixtures__/amethyst
index 486375eaedf9a3..e65826c2c196e7 100644
--- a/lib/modules/datasource/crate/__fixtures__/amethyst
+++ b/lib/modules/datasource/crate/__fixtures__/amethyst
@@ -16,4 +16,4 @@
{"name":"amethyst","vers":"0.8.0","deps":[{"name":"amethyst_animation","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_assets","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_audio","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_config","req":"^0.7.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_controls","req":"^0.2.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_core","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_input","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_locale","req":"^0.2.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_renderer","req":"^0.8.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_ui","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_utils","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"derivative","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"fern","req":"^0.5","features":["colored"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"log","req":"^0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rayon","req":"^1.0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rustc_version_runtime","req":"^0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"thread_profiler","req":"^0.1","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"},{"name":"winit","req":"^0.15","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_gltf","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"env_logger","req":"^0.5.10","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"genmesh","req":"^0.6","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"ron","req":"^0.2","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"serde","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"serde_derive","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"vergen","req":"^0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"build"}],"cksum":"f92e4a150ee7d2c22d4dfc1b921b22316e2b5a2823e255ed0c573bcd0f3e5e76","features":{"sdl_controller":["amethyst_input/sdl_controller"],"profiler":["thread_profiler","thread_profiler/thread_profiler","amethyst_animation/profiler","amethyst_assets/profiler","amethyst_audio/profiler","amethyst_config/profiler","amethyst_core/profiler","amethyst_controls/profiler","amethyst_input/profiler","amethyst_locale/profiler","amethyst_renderer/profiler","amethyst_ui/
profiler","amethyst_utils/profiler"],"nightly":["amethyst_animation/nightly","amethyst_assets/nightly","amethyst_audio/nightly","amethyst_config/nightly","amethyst_core/nightly","amethyst_controls/nightly","amethyst_renderer/nightly","amethyst_input/nightly","amethyst_ui/nightly","amethyst_utils/nightly"]},"yanked":false,"links":null}
{"name":"amethyst","vers":"0.9.0","deps":[{"name":"amethyst_animation","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_assets","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_audio","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_config","req":"^0.8.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_controls","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_core","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_derive","req":"^0.2.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_input","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_locale","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_network","req":"^0.2.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_renderer","req":"^0.9.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_ui","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_utils","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"derivative","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"fern","req":"^0.5","features":["colored"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"log","req":"^0.4","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rayon","req":"^1.0.2","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rustc_version_runtime","req":"^0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde_derive","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"thread_profiler","req":"^0.3","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"},{"name":"winit","req":"^0.17","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_gltf","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"env_logger","req":"^0.5.13","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"genmesh","req":"^0.6","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"ron","req":"^0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"vergen","req":"^2.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"build"}],"cksum":"c596939802d52ecceff96a0ea507d4b8f1a268b15797e445df94f5307027a923","features":{"sdl_controller":["amethyst_input/sdl_controller"],"profiler":["thread_profiler","thread_profiler/thread_p
rofiler","amethyst_animation/profiler","amethyst_assets/profiler","amethyst_audio/profiler","amethyst_config/profiler","amethyst_core/profiler","amethyst_controls/profiler","amethyst_input/profiler","amethyst_locale/profiler","amethyst_renderer/profiler","amethyst_ui/profiler","amethyst_utils/profiler"],"saveload":["amethyst_core/saveload"],"json":["amethyst_assets/json"],"nightly":["amethyst_animation/nightly","amethyst_assets/nightly","amethyst_audio/nightly","amethyst_config/nightly","amethyst_core/nightly","amethyst_controls/nightly","amethyst_network/nightly","amethyst_renderer/nightly","amethyst_input/nightly","amethyst_ui/nightly","amethyst_utils/nightly"]},"yanked":false,"links":null}
{"name":"amethyst","vers":"0.10.0","deps":[{"name":"amethyst_animation","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_assets","req":"^0.6.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_audio","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_config","req":"^0.9.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_controls","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_core","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_derive","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_input","req":"^0.6.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_locale","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_network","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_renderer","req":"^0.10.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_ui","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_utils","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"crossbeam-channel","req":"^0.3.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"derivative","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"fern","req":"^0.5","features":["colored"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"log","req":"^0.4","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rayon","req":"^1.0.2","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rustc_version_runtime","req":"^0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde_derive","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"thread_profiler","req":"^0.3","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"},{"name":"winit","req":"^0.18","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_gltf","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"env_logger","req":"^0.5.13","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"genmesh","req":"^0.6","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"ron","req":"^0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"vergen","req":"^2.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"build"}],"cksum":"2aeb884ea509846b98408d1c5c5524a894533bd147e66d
29b7efc95c4047b73b","features":{"sdl_controller":["amethyst_input/sdl_controller"],"nightly":["amethyst_animation/nightly","amethyst_assets/nightly","amethyst_audio/nightly","amethyst_config/nightly","amethyst_core/nightly","amethyst_controls/nightly","amethyst_network/nightly","amethyst_renderer/nightly","amethyst_input/nightly","amethyst_ui/nightly","amethyst_utils/nightly"],"profiler":["thread_profiler","thread_profiler/thread_profiler","amethyst_animation/profiler","amethyst_assets/profiler","amethyst_audio/profiler","amethyst_config/profiler","amethyst_core/profiler","amethyst_controls/profiler","amethyst_input/profiler","amethyst_locale/profiler","amethyst_renderer/profiler","amethyst_ui/profiler","amethyst_utils/profiler"],"saveload":["amethyst_core/saveload"],"json":["amethyst_assets/json"]},"yanked":false,"links":null}
-{"name":"amethyst","vers":"0.10.1","deps":[{"name":"amethyst_animation","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_assets","req":"^0.6.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_audio","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_config","req":"^0.9.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_controls","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_core","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_derive","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_input","req":"^0.6.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_locale","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_network","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_renderer","req":"^0.10.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_ui","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_utils","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"crossbeam-channel","req":"^0.3.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"derivative","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"fern","req":"^0.5","features":["colored"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"log","req":"^0.4.6","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rayon","req":"^1.0.2","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rustc_version_runtime","req":"^0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde_derive","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"thread_profiler","req":"^0.3","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"},{"name":"winit","req":"^0.18","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_gltf","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"env_logger","req":"^0.5.13","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"genmesh","req":"^0.6","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"ron","req":"^0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"vergen","req":"^3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"build"}],"cksum":"ab13cb760f6ff4b6a23f82599a8c64a77a45ca21cc6
6b27e1a72a863152747be","features":{"json":["amethyst_assets/json"],"saveload":["amethyst_core/saveload"],"sdl_controller":["amethyst_input/sdl_controller"],"nightly":["amethyst_animation/nightly","amethyst_assets/nightly","amethyst_audio/nightly","amethyst_config/nightly","amethyst_core/nightly","amethyst_controls/nightly","amethyst_network/nightly","amethyst_renderer/nightly","amethyst_input/nightly","amethyst_ui/nightly","amethyst_utils/nightly"],"profiler":["thread_profiler","thread_profiler/thread_profiler","amethyst_animation/profiler","amethyst_assets/profiler","amethyst_audio/profiler","amethyst_config/profiler","amethyst_core/profiler","amethyst_controls/profiler","amethyst_input/profiler","amethyst_locale/profiler","amethyst_renderer/profiler","amethyst_ui/profiler","amethyst_utils/profiler"]},"yanked":true,"links":null}
+{"name":"amethyst","vers":"0.10.1","deps":[{"name":"amethyst_animation","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_assets","req":"^0.6.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_audio","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_config","req":"^0.9.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_controls","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_core","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_derive","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_input","req":"^0.6.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_locale","req":"^0.4.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_network","req":"^0.3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_renderer","req":"^0.10.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_ui","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_utils","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"crossbeam-channel","req":"^0.3.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"derivative","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"fern","req":"^0.5","features":["colored"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"log","req":"^0.4.6","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rayon","req":"^1.0.2","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"rustc_version_runtime","req":"^0.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"serde_derive","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"thread_profiler","req":"^0.3","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"},{"name":"winit","req":"^0.18","features":["serde"],"optional":false,"default_features":true,"target":null,"kind":"normal"},{"name":"amethyst_gltf","req":"^0.5.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"env_logger","req":"^0.5.13","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"genmesh","req":"^0.6","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"ron","req":"^0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev"},{"name":"vergen","req":"^3.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"build"}],"cksum":"ab13cb760f6ff4b6a23f82599a8c64a77a45ca21cc6
6b27e1a72a863152747be","features":{"json":["amethyst_assets/json"],"saveload":["amethyst_core/saveload"],"sdl_controller":["amethyst_input/sdl_controller"],"nightly":["amethyst_animation/nightly","amethyst_assets/nightly","amethyst_audio/nightly","amethyst_config/nightly","amethyst_core/nightly","amethyst_controls/nightly","amethyst_network/nightly","amethyst_renderer/nightly","amethyst_input/nightly","amethyst_ui/nightly","amethyst_utils/nightly"],"profiler":["thread_profiler","thread_profiler/thread_profiler","amethyst_animation/profiler","amethyst_assets/profiler","amethyst_audio/profiler","amethyst_config/profiler","amethyst_core/profiler","amethyst_controls/profiler","amethyst_input/profiler","amethyst_locale/profiler","amethyst_renderer/profiler","amethyst_ui/profiler","amethyst_utils/profiler"]},"yanked":true,"links":null,"rust_version":"1.60.0"}
diff --git a/lib/modules/datasource/crate/__fixtures__/libc b/lib/modules/datasource/crate/__fixtures__/libc
index 346b382c83a3e5..9ba974fda052cd 100644
--- a/lib/modules/datasource/crate/__fixtures__/libc
+++ b/lib/modules/datasource/crate/__fixtures__/libc
@@ -62,4 +62,4 @@
{"name":"libc","vers":"0.2.48","deps":[{"name":"rustc-std-workspace-core","req":"^1.0.0","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"}],"cksum":"e962c7641008ac010fa60a7dfdc1712449f29c44ef2d4702394aea943ee75047","features":{"use_std":[],"default":["use_std"],"rustc-dep-of-std":["align","rustc-std-workspace-core"],"align":[]},"yanked":false,"links":null}
{"name":"libc","vers":"0.2.49","deps":[{"name":"rustc-std-workspace-core","req":"^1.0.0","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"}],"cksum":"413f3dfc802c5dc91dc570b05125b6cda9855edfaa9825c9849807876376e70e","features":{"use_std":[],"align":[],"extra_traits":[],"default":["use_std"],"rustc-dep-of-std":["align","rustc-std-workspace-core"]},"yanked":false,"links":null}
{"name":"libc","vers":"0.2.50","deps":[{"name":"rustc-std-workspace-core","req":"^1.0.0","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"}],"cksum":"aab692d7759f5cd8c859e169db98ae5b52c924add2af5fbbca11d12fefb567c1","features":{"extra_traits":[],"use_std":[],"rustc-dep-of-std":["align","rustc-std-workspace-core"],"align":[],"default":["use_std"]},"yanked":false,"links":null}
-{"name":"libc","vers":"0.2.51","deps":[{"name":"rustc-std-workspace-core","req":"^1.0.0","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"}],"cksum":"bedcc7a809076656486ffe045abeeac163da1b558e963a31e29fbfbeba916917","features":{"align":[],"rustc-dep-of-std":["align","rustc-std-workspace-core"],"extra_traits":[],"use_std":[],"default":["use_std"]},"yanked":false,"links":null}
+{"name":"libc","vers":"0.2.51+metadata","deps":[{"name":"rustc-std-workspace-core","req":"^1.0.0","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal"}],"cksum":"bedcc7a809076656486ffe045abeeac163da1b558e963a31e29fbfbeba916917","features":{"align":[],"rustc-dep-of-std":["align","rustc-std-workspace-core"],"extra_traits":[],"use_std":[],"default":["use_std"]},"yanked":false,"links":null}
diff --git a/lib/modules/datasource/crate/index.ts b/lib/modules/datasource/crate/index.ts
index 5e6d07b2529c8c..24ec9ce47e02c8 100644
--- a/lib/modules/datasource/crate/index.ts
+++ b/lib/modules/datasource/crate/index.ts
@@ -35,6 +35,10 @@ export class CrateDatasource extends Datasource {
static readonly CRATES_IO_API_BASE_URL = 'https://crates.io/api/v1/';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `repository` field in the results.';
+
@cache({
namespace: `datasource-${CrateDatasource.id}`,
key: ({ registryUrl, packageName }: GetReleasesConfig) =>
@@ -97,11 +101,16 @@ export class CrateDatasource extends Datasource {
result.releases = lines
.map((version) => {
const release: Release = {
- version: version.vers,
+ version: version.vers.replace(/\+.*$/, ''),
};
if (version.yanked) {
release.isDeprecated = true;
}
+ if (version.rust_version) {
+ release.constraints = {
+ rust: [version.rust_version],
+ };
+ }
return release;
})
.filter((release) => release.version);
diff --git a/lib/modules/datasource/crate/types.ts b/lib/modules/datasource/crate/types.ts
index a7b59efbcb1145..0011fadd204449 100644
--- a/lib/modules/datasource/crate/types.ts
+++ b/lib/modules/datasource/crate/types.ts
@@ -27,6 +27,7 @@ export interface RegistryInfo {
export interface CrateRecord {
vers: string;
yanked: boolean;
+ rust_version?: string;
}
export interface CrateMetadata {
diff --git a/lib/modules/datasource/custom/index.spec.ts b/lib/modules/datasource/custom/index.spec.ts
index 001e807c47fe4d..10976ae507f7fb 100644
--- a/lib/modules/datasource/custom/index.spec.ts
+++ b/lib/modules/datasource/custom/index.spec.ts
@@ -697,4 +697,13 @@ describe('modules/datasource/custom/index', () => {
expect(result).toEqual(expected);
});
});
+
+ describe('getDigest', () => {
+ it('returns null as digest should be provided in releases', async () => {
+ const digest = await new CustomDatasource().getDigest({
+ packageName: 'my-package',
+ });
+ expect(digest).toBeNull();
+ });
+ });
});
diff --git a/lib/modules/datasource/custom/index.ts b/lib/modules/datasource/custom/index.ts
index 64d0fcfff9d757..f9ca969710b2ba 100644
--- a/lib/modules/datasource/custom/index.ts
+++ b/lib/modules/datasource/custom/index.ts
@@ -2,7 +2,7 @@ import is from '@sindresorhus/is';
import jsonata from 'jsonata';
import { logger } from '../../../logger';
import { Datasource } from '../datasource';
-import type { GetReleasesConfig, ReleaseResult } from '../types';
+import type { DigestConfig, GetReleasesConfig, ReleaseResult } from '../types';
import { fetchers } from './formats';
import { ReleaseResultZodSchema } from './schema';
import { getCustomConfig } from './utils';
@@ -57,4 +57,12 @@ export class CustomDatasource extends Datasource {
return null;
}
}
+
+ override getDigest(
+ { packageName }: DigestConfig,
+ newValue?: string,
+ ): Promise<string | null> {
+ // Return null here to support setting a digest: a digest value can be provided in getReleases
+ return Promise.resolve(null);
+ }
}
diff --git a/lib/modules/datasource/custom/readme.md b/lib/modules/datasource/custom/readme.md
index 1a6facfa034016..7391076f901b65 100644
--- a/lib/modules/datasource/custom/readme.md
+++ b/lib/modules/datasource/custom/readme.md
@@ -7,11 +7,11 @@ This example shows how to update the `k3s.version` file with a custom datasource
Options:
-| option | default | description |
-| -------------------------- | ------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
-| defaultRegistryUrlTemplate | "" | URL used if no `registryUrl` is provided when looking up new releases |
-| format | "json" | format used by the API. Available values are: `json`, `plain`, `yaml`, `html` |
-| transformTemplates | [] | [JSONata rules](https://docs.jsonata.org/simple) to transform the API output. Each rule will be evaluated after another and the result will be used as input to the next |
+| option | default | description |
+| -------------------------- | -------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |
+| defaultRegistryUrlTemplate | `""` | URL used if no `registryUrl` is provided when looking up new releases |
+| format | `"json"` | format used by the API. Available values are: `json`, `plain`, `yaml`, `html` |
+| transformTemplates | `[]` | [JSONata rules](https://docs.jsonata.org/simple) to transform the API output. Each rule will be evaluated after another and the result will be used as input to the next |
Available template variables:
diff --git a/lib/modules/datasource/dart-version/index.ts b/lib/modules/datasource/dart-version/index.ts
index a3aa8c45687a51..23f95ccab89124 100644
--- a/lib/modules/datasource/dart-version/index.ts
+++ b/lib/modules/datasource/dart-version/index.ts
@@ -21,6 +21,10 @@ export class DartVersionDatasource extends Datasource {
private readonly channels = ['stable', 'beta', 'dev'];
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/dart-lang/sdk.';
+
async getReleases({
registryUrl,
}: GetReleasesConfig): Promise<ReleaseResult | null> {
diff --git a/lib/modules/datasource/dart/index.ts b/lib/modules/datasource/dart/index.ts
index 30da6a53c23e5c..88e549cfeb8903 100644
--- a/lib/modules/datasource/dart/index.ts
+++ b/lib/modules/datasource/dart/index.ts
@@ -15,6 +15,13 @@ export class DartDatasource extends Datasource {
override readonly defaultRegistryUrls = ['https://pub.dartlang.org/'];
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `published` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `repository` field of the latest release object in the results.';
+
async getReleases({
packageName,
registryUrl,
diff --git a/lib/modules/datasource/datasource.ts b/lib/modules/datasource/datasource.ts
index 2d88417201d244..fce559644110b4 100644
--- a/lib/modules/datasource/datasource.ts
+++ b/lib/modules/datasource/datasource.ts
@@ -6,6 +6,7 @@ import type {
GetReleasesConfig,
RegistryStrategy,
ReleaseResult,
+ SourceUrlSupport,
} from './types';
export abstract class Datasource implements DatasourceApi {
@@ -25,6 +26,12 @@ export abstract class Datasource implements DatasourceApi {
registryStrategy: RegistryStrategy | undefined = 'first';
+ releaseTimestampSupport = false;
+ releaseTimestampNote?: string | undefined;
+
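+ /**
+ * Whether a source URL can be determined per package, per release, or not
+ * at all (`'none'`); `sourceUrlNote` can describe how it is derived.
+ */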
+ sourceUrlSupport: SourceUrlSupport = 'none';
+ sourceUrlNote?: string | undefined;
+
protected http: Http;
abstract getReleases(
diff --git a/lib/modules/datasource/deno/index.ts b/lib/modules/datasource/deno/index.ts
index 425cf975c065f7..e396c3b7d8a8b4 100644
--- a/lib/modules/datasource/deno/index.ts
+++ b/lib/modules/datasource/deno/index.ts
@@ -22,6 +22,13 @@ export class DenoDatasource extends Datasource {
override readonly defaultRegistryUrls = ['https://apiland.deno.dev'];
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `uploaded_at` field in the results.';
+ override readonly sourceUrlSupport = 'release';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `repository` field in the results.';
+
constructor() {
super(DenoDatasource.id);
}
diff --git a/lib/modules/datasource/docker/common.ts b/lib/modules/datasource/docker/common.ts
index 99945705ce7e7c..1ccc12c66a648d 100644
--- a/lib/modules/datasource/docker/common.ts
+++ b/lib/modules/datasource/docker/common.ts
@@ -32,11 +32,11 @@ import { googleRegex } from './google';
import type { OciHelmConfig } from './schema';
import type { RegistryRepository } from './types';
-export const dockerDatasourceId = 'docker' as const;
+export const dockerDatasourceId = 'docker';
-export const imageUrlLabel = 'org.opencontainers.image.url' as const;
+export const imageUrlLabel = 'org.opencontainers.image.url';
-export const sourceLabel = 'org.opencontainers.image.source' as const;
+export const sourceLabel = 'org.opencontainers.image.source';
export const sourceLabels = [sourceLabel, 'org.label-schema.vcs-url'] as const;
export const gitRefLabel = 'org.opencontainers.image.revision';
diff --git a/lib/modules/datasource/docker/dockerhub-cache.spec.ts b/lib/modules/datasource/docker/dockerhub-cache.spec.ts
new file mode 100644
index 00000000000000..fe8ad65504229e
--- /dev/null
+++ b/lib/modules/datasource/docker/dockerhub-cache.spec.ts
@@ -0,0 +1,176 @@
+import { mocked } from '../../../../test/util';
+import * as _packageCache from '../../../util/cache/package';
+import { DockerHubCache, DockerHubCacheData } from './dockerhub-cache';
+import type { DockerHubTag } from './schema';
+
+jest.mock('../../../util/cache/package');
+const packageCache = mocked(_packageCache);
+
+function oldCacheData(): DockerHubCacheData {
+ return {
+ items: {
+ 1: {
+ id: 1,
+ last_updated: '2022-01-01',
+ name: '1',
+ tag_last_pushed: '2022-01-01',
+ digest: 'sha256:111',
+ },
+ 2: {
+ id: 2,
+ last_updated: '2022-01-02',
+ name: '2',
+ tag_last_pushed: '2022-01-02',
+ digest: 'sha256:222',
+ },
+ 3: {
+ id: 3,
+ last_updated: '2022-01-03',
+ name: '3',
+ tag_last_pushed: '2022-01-03',
+ digest: 'sha256:333',
+ },
+ },
+ updatedAt: '2022-01-01',
+ };
+}
+
+function newItem(): DockerHubTag {
+ return {
+ id: 4,
+ last_updated: '2022-01-04',
+ name: '4',
+ tag_last_pushed: '2022-01-04',
+ digest: 'sha256:444',
+ };
+}
+
+function newCacheData(): DockerHubCacheData {
+ const { items } = oldCacheData();
+ const item = newItem();
+ return {
+ items: {
+ ...items,
+ [item.id]: item,
+ },
+ updatedAt: '2022-01-04',
+ };
+}
+
+describe('modules/datasource/docker/dockerhub-cache', () => {
+ beforeEach(() => {
+ jest.resetAllMocks();
+ });
+
+ const dockerRepository = 'foo/bar';
+
+ it('initializes empty cache', async () => {
+ packageCache.get.mockResolvedValue(undefined);
+
+ const res = await DockerHubCache.init(dockerRepository);
+
+ expect(res).toEqual({
+ dockerRepository,
+ cache: {
+ items: {},
+ updatedAt: null,
+ },
+ isChanged: false,
+ });
+ });
+
+ it('initializes cache with data', async () => {
+ const oldCache = oldCacheData();
+ packageCache.get.mockResolvedValue(oldCache);
+
+ const res = await DockerHubCache.init(dockerRepository);
+
+ expect(res).toEqual({
+ dockerRepository,
+ cache: oldCache,
+ isChanged: false,
+ });
+ });
+
+ it('reconciles new items', async () => {
+ const oldCache = oldCacheData();
+ const newCache = newCacheData();
+
+ packageCache.get.mockResolvedValue(oldCache);
+ const cache = await DockerHubCache.init(dockerRepository);
+ const newItems: DockerHubTag[] = [newItem()];
+
+ const needNextPage = cache.reconcile(newItems);
+
+ expect(needNextPage).toBe(true);
+ expect(cache).toEqual({
+ cache: newCache,
+ dockerRepository: 'foo/bar',
+ isChanged: true,
+ });
+
+ const res = cache.getItems();
+ expect(res).toEqual(Object.values(newCache.items));
+
+ await cache.save();
+ expect(packageCache.set).toHaveBeenCalledWith(
+ 'datasource-docker-hub-cache',
+ 'foo/bar',
+ newCache,
+ 3 * 60 * 24 * 30,
+ );
+ });
+
+ it('reconciles existing items', async () => {
+ const oldCache = oldCacheData();
+
+ packageCache.get.mockResolvedValue(oldCache);
+ const cache = await DockerHubCache.init(dockerRepository);
+ const items: DockerHubTag[] = Object.values(oldCache.items);
+
+ const needNextPage = cache.reconcile(items);
+
+ expect(needNextPage).toBe(false);
+ expect(cache).toEqual({
+ cache: oldCache,
+ dockerRepository: 'foo/bar',
+ isChanged: false,
+ });
+
+ const res = cache.getItems();
+ expect(res).toEqual(items);
+
+ await cache.save();
+ expect(packageCache.set).not.toHaveBeenCalled();
+ });
+
+ it('reconciles from empty cache', async () => {
+ const item = newItem();
+ const expectedCache = {
+ items: {
+ [item.id]: item,
+ },
+ updatedAt: item.last_updated,
+ };
+ const cache = await DockerHubCache.init(dockerRepository);
+
+ const needNextPage = cache.reconcile([item]);
+ expect(needNextPage).toBe(true);
+ expect(cache).toEqual({
+ cache: expectedCache,
+ dockerRepository: 'foo/bar',
+ isChanged: true,
+ });
+
+ const res = cache.getItems();
+ expect(res).toEqual([item]);
+
+ await cache.save();
+ expect(packageCache.set).toHaveBeenCalledWith(
+ 'datasource-docker-hub-cache',
+ 'foo/bar',
+ expectedCache,
+ 3 * 60 * 24 * 30,
+ );
+ });
+});
diff --git a/lib/modules/datasource/docker/dockerhub-cache.ts b/lib/modules/datasource/docker/dockerhub-cache.ts
new file mode 100644
index 00000000000000..0e97726fc01fb2
--- /dev/null
+++ b/lib/modules/datasource/docker/dockerhub-cache.ts
@@ -0,0 +1,78 @@
+import { dequal } from 'dequal';
+import { DateTime } from 'luxon';
+import * as packageCache from '../../../util/cache/package';
+import type { DockerHubTag } from './schema';
+
+export interface DockerHubCacheData {
+ items: Record<number, DockerHubTag>;
+ updatedAt: string | null;
+}
+
+const cacheNamespace = 'datasource-docker-hub-cache';
+
+export class DockerHubCache {
+ private isChanged = false;
+
+ private constructor(
+ private dockerRepository: string,
+ private cache: DockerHubCacheData,
+ ) {}
+
+ static async init(dockerRepository: string): Promise<DockerHubCache> {
+ let repoCache = await packageCache.get<DockerHubCacheData>(
+ cacheNamespace,
+ dockerRepository,
+ );
+
+ repoCache ??= {
+ items: {},
+ updatedAt: null,
+ };
+
+ return new DockerHubCache(dockerRepository, repoCache);
+ }
+
+ reconcile(items: DockerHubTag[]): boolean {
+ let needNextPage = true;
+
+ let { updatedAt } = this.cache;
+ let latestDate = updatedAt ? DateTime.fromISO(updatedAt) : null;
+
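+ // Pages are expected to arrive ordered by `last_updated` (newest first).
+ // Once a page contains an item that is already cached and unchanged, the
+ // remaining, older pages are assumed to be cached too and pagination can stop.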
+ for (const newItem of items) {
+ const id = newItem.id;
+ const oldItem = this.cache.items[id];
+
+ if (dequal(oldItem, newItem)) {
+ needNextPage = false;
+ continue;
+ }
+
+ this.cache.items[newItem.id] = newItem;
+ const newItemDate = DateTime.fromISO(newItem.last_updated);
+ if (!latestDate || latestDate < newItemDate) {
+ updatedAt = newItem.last_updated;
+ latestDate = newItemDate;
+ }
+
+ this.isChanged = true;
+ }
+
+ this.cache.updatedAt = updatedAt;
+ return needNextPage;
+ }
+
+ async save(): Promise<void> {
+ if (this.isChanged) {
+ await packageCache.set(
+ cacheNamespace,
+ this.dockerRepository,
+ this.cache,
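+ // TTL is in minutes: 3 * 60 * 24 * 30, i.e. roughly three months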
+ 3 * 60 * 24 * 30,
+ );
+ }
+ }
+
+ getItems(): DockerHubTag[] {
+ return Object.values(this.cache.items);
+ }
+}
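For orientation, a minimal sketch (not part of the patch) of how the new `DockerHubCache` is driven by a caller; the tag data below is invented:

```typescript
import { DockerHubCache } from './dockerhub-cache';
import type { DockerHubTag } from './schema';

// Invented page of tags, shaped like items from the Docker Hub tags API.
const page: DockerHubTag[] = [
  {
    id: 42,
    last_updated: '2024-01-04T00:00:00.000Z',
    name: '1.2.3',
    tag_last_pushed: '2024-01-04T00:00:00.000Z',
    digest: 'sha256:abc',
  },
];

async function example(): Promise<void> {
  const cache = await DockerHubCache.init('library/node');
  // Merge the freshly fetched page; `false` means older pages are already cached.
  const needNextPage = cache.reconcile(page);
  if (needNextPage) {
    // a real caller would fetch and reconcile further pages here
  }
  await cache.save(); // persists only when something changed
  console.log(cache.getItems().map((tag) => tag.name));
}

void example();
```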
diff --git a/lib/modules/datasource/docker/ecr.ts b/lib/modules/datasource/docker/ecr.ts
index 005c7405767b15..f90a6608495a5b 100644
--- a/lib/modules/datasource/docker/ecr.ts
+++ b/lib/modules/datasource/docker/ecr.ts
@@ -6,7 +6,9 @@ import type { HttpResponse } from '../../../util/http/types';
import { regEx } from '../../../util/regex';
import { addSecretForSanitizing } from '../../../util/sanitize';
-export const ecrRegex = regEx(/\d+\.dkr\.ecr\.([-a-z0-9]+)\.amazonaws\.com/);
+export const ecrRegex = regEx(
+ /\d+\.dkr\.ecr(?:-fips)?\.([-a-z0-9]+)\.amazonaws\.com/,
+);
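+// The pattern above matches both standard and FIPS ECR hosts, e.g.
+// `123456789.dkr.ecr.us-east-1.amazonaws.com` and
+// `123456789.dkr.ecr-fips.us-east-1.amazonaws.com`.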
export const ecrPublicRegex = regEx(/public\.ecr\.aws/);
export async function getECRAuthToken(
diff --git a/lib/modules/datasource/docker/index.spec.ts b/lib/modules/datasource/docker/index.spec.ts
index bbef484cbd3465..e5805c7ed906e0 100644
--- a/lib/modules/datasource/docker/index.spec.ts
+++ b/lib/modules/datasource/docker/index.spec.ts
@@ -491,14 +491,16 @@ describe('modules/datasource/docker/index', () => {
.scope(gcrUrl)
.get('/')
.reply(200)
- .head('/some-project/some-package/manifests/some-tag')
+ .head('/google.com/some-project/some-package/manifests/some-tag')
.reply(200, '', { 'docker-content-digest': 'some-digest' });
hostRules.find.mockReturnValue({});
const res = await getDigest(
{
datasource: 'docker',
- packageName: 'eu.gcr.io/some-project/some-package',
+ registryUrl: 'https://eu.gcr.io',
+ lookupName: 'google.com/some-project/some-package',
+ packageName: 'eu.gcr.io/google.com/some-project/some-package',
},
'some-tag',
);
@@ -742,6 +744,83 @@ describe('modules/datasource/docker/index', () => {
);
});
+ it('supports architecture-specific digest without manifest list', async () => {
+ const currentDigest =
+ 'sha256:81c09f6d42c2db8121bcd759565ea244cedc759f36a0f090ec7da9de4f7f8fe4';
+
+ httpMock
+ .scope(authUrl)
+ .get(
+ '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
+ )
+ .times(4)
+ .reply(200, { token: 'some-token' });
+ httpMock
+ .scope(baseUrl)
+ .get('/')
+ .times(3)
+ .reply(401, '', {
+ 'www-authenticate':
+ 'Bearer realm="https://auth.docker.io/token",service="registry.docker.io",scope="repository:library/some-dep:pull"',
+ })
+ .head('/library/some-dep/manifests/' + currentDigest)
+ .reply(200, '', {
+ 'content-type':
+ 'application/vnd.docker.distribution.manifest.v2+json',
+ })
+ .get('/library/some-dep/manifests/' + currentDigest)
+ .reply(200, {
+ schemaVersion: 2,
+ mediaType: 'application/vnd.docker.distribution.manifest.v2+json',
+ config: {
+ digest: 'some-config-digest',
+ mediaType: 'application/vnd.docker.container.image.v1+json',
+ },
+ })
+ .get('/library/some-dep/blobs/some-config-digest')
+ .reply(200, {
+ architecture: 'amd64',
+ });
+ httpMock
+ .scope(baseUrl)
+ .get('/')
+ .reply(401, '', {
+ 'www-authenticate':
+ 'Bearer realm="https://auth.docker.io/token",service="registry.docker.io",scope="repository:library/some-dep:pull"',
+ })
+ .get('/library/some-dep/manifests/some-new-value')
+ .reply(
+ 200,
+ {
+ schemaVersion: 2,
+ mediaType: 'application/vnd.docker.distribution.manifest.v2+json',
+ config: {
+ mediaType: 'application/vnd.docker.container.image.v1+json',
+ size: 2917,
+ digest:
+ 'sha256:4591c431eb2fcf90ebb32476db6cfe342617fc3d3ca9653b9e0c47859cac1cf9',
+ },
+ },
+ {
+ 'docker-content-digest': 'some-new-digest',
+ },
+ );
+
+ const res = await getDigest(
+ {
+ datasource: 'docker',
+ packageName: 'some-dep',
+ currentDigest,
+ },
+ 'some-new-value',
+ );
+
+ expect(logger.logger.debug).toHaveBeenCalledWith(
+ `Current digest ${currentDigest} relates to architecture amd64`,
+ );
+ expect(res).toBe('some-new-digest');
+ });
+
it('handles missing architecture-specific digest', async () => {
const currentDigest =
'sha256:81c09f6d42c2db8121bcd759565ea244cedc759f36a0f090ec7da9de4f7f8fe4';
@@ -1457,6 +1536,7 @@ describe('modules/datasource/docker/index', () => {
packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
}),
).toEqual({
+ lookupName: 'node',
registryUrl: 'https://123456789.dkr.ecr.us-east-1.amazonaws.com',
releases: [],
});
@@ -1509,6 +1589,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'public.ecr.aws/amazonlinux/amazonlinux',
}),
).toEqual({
+ lookupName: 'amazonlinux/amazonlinux',
registryUrl: 'https://public.ecr.aws',
releases: [],
});
@@ -1567,6 +1648,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'ecr-proxy.company.com/node',
}),
).toEqual({
+ lookupName: 'node',
registryUrl: 'https://ecr-proxy.company.com',
releases: [],
sourceUrl: 'https://github.com/renovatebot/renovate',
@@ -1791,21 +1873,25 @@ describe('modules/datasource/docker/index', () => {
process.env.RENOVATE_X_DOCKER_HUB_TAGS = 'true';
httpMock
.scope(dockerHubUrl)
- .get('/library/node/tags?page_size=1000')
+ .get('/library/node/tags?page_size=1000&ordering=last_updated')
.reply(200, {
- next: `${dockerHubUrl}/library/node/tags?page=2&page_size=1000`,
+ next: `${dockerHubUrl}/library/node/tags?page=2&page_size=1000&ordering=last_updated`,
results: [
{
+ id: 2,
+ last_updated: '2021-01-01T00:00:00.000Z',
name: '1.0.0',
tag_last_pushed: '2021-01-01T00:00:00.000Z',
digest: 'aaa',
},
],
})
- .get('/library/node/tags?page=2&page_size=1000')
+ .get('/library/node/tags?page=2&page_size=1000&ordering=last_updated')
.reply(200, {
results: [
{
+ id: 1,
+ last_updated: '2020-01-01T00:00:00.000Z',
name: '0.9.0',
tag_last_pushed: '2020-01-01T00:00:00.000Z',
digest: 'bbb',
@@ -1833,7 +1919,7 @@ describe('modules/datasource/docker/index', () => {
const tags = ['1.0.0'];
httpMock
.scope(dockerHubUrl)
- .get('/library/node/tags?page_size=1000')
+ .get('/library/node/tags?page_size=1000&ordering=last_updated')
.reply(404);
httpMock
.scope(baseUrl)
@@ -1861,21 +1947,25 @@ describe('modules/datasource/docker/index', () => {
process.env.RENOVATE_X_DOCKER_HUB_TAGS = 'true';
httpMock
.scope(dockerHubUrl)
- .get('/library/node/tags?page_size=1000')
+ .get('/library/node/tags?page_size=1000&ordering=last_updated')
.reply(200, {
- next: `${dockerHubUrl}/library/node/tags?page=2&page_size=1000`,
+ next: `${dockerHubUrl}/library/node/tags?page=2&page_size=1000&ordering=last_updated`,
results: [
{
+ id: 2,
+ last_updated: '2021-01-01T00:00:00.000Z',
name: '1.0.0',
tag_last_pushed: '2021-01-01T00:00:00.000Z',
digest: 'aaa',
},
],
})
- .get('/library/node/tags?page=2&page_size=1000')
+ .get('/library/node/tags?page=2&page_size=1000&ordering=last_updated')
.reply(200, {
results: [
{
+ id: 1,
+ last_updated: '2020-01-01T00:00:00.000Z',
name: '0.9.0',
tag_last_pushed: '2020-01-01T00:00:00.000Z',
digest: 'bbb',
@@ -2026,6 +2116,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [
{
@@ -2081,6 +2172,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [
{
@@ -2144,6 +2236,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [],
sourceUrl: 'https://github.com/renovatebot/renovate',
@@ -2171,6 +2264,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [],
});
@@ -2195,6 +2289,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [],
});
@@ -2216,6 +2311,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [],
});
@@ -2266,6 +2362,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [
{
@@ -2320,6 +2417,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [
{
@@ -2350,6 +2448,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [],
});
@@ -2404,6 +2503,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'registry.company.com/node',
});
expect(res).toEqual({
+ lookupName: 'node',
registryUrl: 'https://registry.company.com',
releases: [],
});
@@ -2463,6 +2563,7 @@ describe('modules/datasource/docker/index', () => {
packageName: 'ghcr.io/visualon/drone-git',
});
expect(res).toEqual({
+ lookupName: 'visualon/drone-git',
registryUrl: 'https://ghcr.io',
sourceUrl: 'https://github.com/visualon/drone-git',
releases: [{ version: '1.0.0' }],
diff --git a/lib/modules/datasource/docker/index.ts b/lib/modules/datasource/docker/index.ts
index 0653cc22f6f4b4..c0266ac7a09a03 100644
--- a/lib/modules/datasource/docker/index.ts
+++ b/lib/modules/datasource/docker/index.ts
@@ -37,6 +37,7 @@ import {
sourceLabel,
sourceLabels,
} from './common';
+import { DockerHubCache } from './dockerhub-cache';
import { ecrPublicRegex, ecrRegex, isECRMaxResultsError } from './ecr';
import {
DistributionManifest,
@@ -80,6 +81,13 @@ export class DockerDatasource extends Datasource {
override readonly defaultConfig = defaultConfig;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `tag_last_pushed` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `org.opencontainers.image.source` and `org.label-schema.vcs-url` labels present in the metadata of the **latest stable** image found on the Docker registry.';
+
constructor() {
super(DockerDatasource.id);
}
@@ -291,7 +299,14 @@ export class DockerDatasource extends Datasource {
const parsed = ManifestJson.safeParse(manifestResponse.body);
if (!parsed.success) {
logger.debug(
- { registry, dockerRepository, tag, err: parsed.error },
+ {
+ registry,
+ dockerRepository,
+ tag,
+ body: manifestResponse.body,
+ headers: manifestResponse.headers,
+ err: parsed.error,
+ },
'Invalid manifest response',
);
return null;
@@ -788,13 +803,22 @@ export class DockerDatasource extends Datasource {
},
})
override async getDigest(
- { registryUrl, packageName, currentDigest }: DigestConfig,
+ { registryUrl, lookupName, packageName, currentDigest }: DigestConfig,
newValue?: string,
): Promise<string | null> {
- const { registryHost, dockerRepository } = getRegistryRepository(
- packageName,
- registryUrl!,
- );
+ let registryHost: string;
+ let dockerRepository: string;
+ if (registryUrl && lookupName) {
+ // Reuse the resolved values from getReleases()
+ registryHost = registryUrl;
+ dockerRepository = lookupName;
+ } else {
+ // Resolve values independently
+ ({ registryHost, dockerRepository } = getRegistryRepository(
+ packageName,
+ registryUrl!,
+ ));
+ }
logger.debug(
// TODO: types (#22198)
`getDigest(${registryHost}, ${dockerRepository}, ${newValue})`,
@@ -846,23 +870,45 @@ export class DockerDatasource extends Datasource {
);
if (architecture && manifestResponse) {
- const parse = ManifestJson.safeParse(manifestResponse.body);
- const manifestList = parse.success
- ? parse.data
- : /* istanbul ignore next: hard to test */ null;
- if (
- manifestList &&
- (manifestList.mediaType ===
- 'application/vnd.docker.distribution.manifest.list.v2+json' ||
+ const parsed = ManifestJson.safeParse(manifestResponse.body);
+ /* istanbul ignore else: hard to test */
+ if (parsed.success) {
+ const manifestList = parsed.data;
+ if (
+ manifestList.mediaType ===
+ 'application/vnd.docker.distribution.manifest.list.v2+json' ||
manifestList.mediaType ===
- 'application/vnd.oci.image.index.v1+json')
- ) {
- for (const manifest of manifestList.manifests) {
- if (manifest.platform?.architecture === architecture) {
- digest = manifest.digest;
- break;
+ 'application/vnd.oci.image.index.v1+json'
+ ) {
+ for (const manifest of manifestList.manifests) {
+ if (manifest.platform?.architecture === architecture) {
+ digest = manifest.digest;
+ break;
+ }
}
+ // TODO: return null if no matching architecture digest found
+ // https://github.com/renovatebot/renovate/discussions/22639
+ } else if (
+ hasKey('docker-content-digest', manifestResponse.headers)
+ ) {
+ // TODO: return null if no matching architecture, requires to fetch the config manifest
+ // https://github.com/renovatebot/renovate/discussions/22639
+ digest = manifestResponse.headers[
+ 'docker-content-digest'
+ ] as string;
}
+ } else {
+ logger.debug(
+ {
+ registryHost,
+ dockerRepository,
+ newTag,
+ body: manifestResponse.body,
+ headers: manifestResponse.headers,
+ err: parsed.error,
+ },
+ 'Failed to parse manifest response',
+ );
}
}
@@ -918,10 +964,11 @@ export class DockerDatasource extends Datasource {
key: (dockerRepository: string) => `${dockerRepository}`,
})
async getDockerHubTags(dockerRepository: string): Promise<Release[] | null> {
- const result: Release[] = [];
- let url: null | string =
- `https://hub.docker.com/v2/repositories/${dockerRepository}/tags?page_size=1000`;
- while (url) {
+ let url = `https://hub.docker.com/v2/repositories/${dockerRepository}/tags?page_size=1000&ordering=last_updated`;
+
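+ // Tags are requested ordered by `last_updated` so that reconcile() can
+ // report when already-cached tags are reached and pagination can stop early.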
+ const cache = await DockerHubCache.init(dockerRepository);
+ let needNextPage: boolean = true;
+ while (needNextPage) {
const { val, err } = await this.http
.getJsonSafe(url, DockerHubTagsPage)
.unwrap();
@@ -931,11 +978,39 @@ export class DockerDatasource extends Datasource {
return null;
}
- result.push(...val.items);
- url = val.nextPage;
+ const { results, next } = val;
+
+ needNextPage = cache.reconcile(results);
+
+ if (!next) {
+ break;
+ }
+
+ url = next;
}
- return result;
+ await cache.save();
+
+ const items = cache.getItems();
+ return items.map(
+ ({
+ name: version,
+ tag_last_pushed: releaseTimestamp,
+ digest: newDigest,
+ }) => {
+ const release: Release = { version };
+
+ if (releaseTimestamp) {
+ release.releaseTimestamp = releaseTimestamp;
+ }
+
+ if (newDigest) {
+ release.newDigest = newDigest;
+ }
+
+ return release;
+ },
+ );
}
/**
@@ -1006,6 +1081,10 @@ export class DockerDatasource extends Datasource {
registryUrl: registryHost,
releases,
};
+ if (dockerRepository !== packageName) {
+ // This will be reused later if a getDigest() call is made
+ ret.lookupName = dockerRepository;
+ }
const tags = releases.map((release) => release.version);
const latestTag = tags.includes('latest')
diff --git a/lib/modules/datasource/docker/schema.ts b/lib/modules/datasource/docker/schema.ts
index 1af87da24b49d4..6a58e08abc513a 100644
--- a/lib/modules/datasource/docker/schema.ts
+++ b/lib/modules/datasource/docker/schema.ts
@@ -1,7 +1,6 @@
import { z } from 'zod';
import { logger } from '../../../logger';
import { Json, LooseArray } from '../../../util/schema-utils';
-import type { Release } from '../types';
// OCI manifests
@@ -155,39 +154,23 @@ export const Manifest = ManifestObject.passthrough()
export type Manifest = z.infer<typeof Manifest>;
export const ManifestJson = Json.pipe(Manifest);
-export const DockerHubTag = z
- .object({
- name: z.string(),
- tag_last_pushed: z.string().datetime().nullable().catch(null),
- digest: z.string().nullable().catch(null),
- })
- .transform(({ name, tag_last_pushed, digest }) => {
- const release: Release = { version: name };
-
- if (tag_last_pushed) {
- release.releaseTimestamp = tag_last_pushed;
- }
-
- if (digest) {
- release.newDigest = digest;
- }
-
- return release;
- });
-
-export const DockerHubTagsPage = z
- .object({
- next: z.string().nullable().catch(null),
- results: LooseArray(DockerHubTag, {
- onError: /* istanbul ignore next */ ({ error }) => {
- logger.debug(
- { error },
- 'Docker: Failed to parse some tags from Docker Hub',
- );
- },
- }),
- })
- .transform(({ next, results }) => ({
- nextPage: next,
- items: results,
- }));
+export const DockerHubTag = z.object({
+ id: z.number(),
+ last_updated: z.string().datetime(),
+ name: z.string(),
+ tag_last_pushed: z.string().datetime().nullable().catch(null),
+ digest: z.string().nullable().catch(null),
+});
+export type DockerHubTag = z.infer<typeof DockerHubTag>;
+
+export const DockerHubTagsPage = z.object({
+ next: z.string().nullable().catch(null),
+ results: LooseArray(DockerHubTag, {
+ onError: /* istanbul ignore next */ ({ error }) => {
+ logger.debug(
+ { error },
+ 'Docker: Failed to parse some tags from Docker Hub',
+ );
+ },
+ }),
+});
diff --git a/lib/modules/datasource/dotnet-version/index.spec.ts b/lib/modules/datasource/dotnet-version/index.spec.ts
index e41ddf85995e30..84ed99c2e9d495 100644
--- a/lib/modules/datasource/dotnet-version/index.spec.ts
+++ b/lib/modules/datasource/dotnet-version/index.spec.ts
@@ -128,16 +128,31 @@ describe('modules/datasource/dotnet-version/index', () => {
expect(res?.sourceUrl).toBe('https://github.com/dotnet/sdk');
expect(res?.releases).toHaveLength(19);
expect(res?.releases).toIncludeAllPartialMembers([
- { version: '3.1.100-preview1-014459' },
- { version: '3.1.423' },
- { version: '5.0.100-preview.1.20155.7' },
- { version: '5.0.408' },
- { version: '6.0.100-preview.1.21103.13' },
- { version: '6.0.401' },
- { version: '6.0.304' },
- { version: '6.0.109' },
- { version: '7.0.100-preview.1.22110.4' },
- { version: '7.0.100-rc.1.22431.12' },
+ {
+ version: '3.1.100-preview1-014459',
+ releaseTimestamp: '2019-10-15T00:00:00.000Z',
+ },
+ { version: '3.1.423', releaseTimestamp: '2022-09-13T00:00:00.000Z' },
+ {
+ version: '5.0.100-preview.1.20155.7',
+ releaseTimestamp: '2020-03-16T00:00:00.000Z',
+ },
+ { version: '5.0.408', releaseTimestamp: '2022-05-10T00:00:00.000Z' },
+ {
+ version: '6.0.100-preview.1.21103.13',
+ releaseTimestamp: '2021-02-17T00:00:00.000Z',
+ },
+ { version: '6.0.401', releaseTimestamp: '2022-09-13T00:00:00.000Z' },
+ { version: '6.0.304', releaseTimestamp: '2022-09-13T00:00:00.000Z' },
+ { version: '6.0.109', releaseTimestamp: '2022-09-13T00:00:00.000Z' },
+ {
+ version: '7.0.100-preview.1.22110.4',
+ releaseTimestamp: '2022-02-17T00:00:00.000Z',
+ },
+ {
+ version: '7.0.100-rc.1.22431.12',
+ releaseTimestamp: '2022-09-14T00:00:00.000Z',
+ },
]);
});
@@ -164,14 +179,29 @@ describe('modules/datasource/dotnet-version/index', () => {
expect(res?.sourceUrl).toBe('https://github.com/dotnet/runtime');
expect(res?.releases).toHaveLength(17);
expect(res?.releases).toIncludeAllPartialMembers([
- { version: '3.1.0-preview1.19506.1' },
- { version: '3.1.29' },
- { version: '5.0.0-preview.1.20120.5' },
- { version: '5.0.17' },
- { version: '6.0.0-preview.1.21102.12' },
- { version: '6.0.9' },
- { version: '7.0.0-preview.1.22076.8' },
- { version: '7.0.0-rc.1.22426.10' },
+ {
+ version: '3.1.0-preview1.19506.1',
+ releaseTimestamp: '2019-10-15T00:00:00.000Z',
+ },
+ { version: '3.1.29', releaseTimestamp: '2022-09-13T00:00:00.000Z' },
+ {
+ version: '5.0.0-preview.1.20120.5',
+ releaseTimestamp: '2020-03-16T00:00:00.000Z',
+ },
+ { version: '5.0.17', releaseTimestamp: '2022-05-10T00:00:00.000Z' },
+ {
+ version: '6.0.0-preview.1.21102.12',
+ releaseTimestamp: '2021-02-17T00:00:00.000Z',
+ },
+ { version: '6.0.9', releaseTimestamp: '2022-09-13T00:00:00.000Z' },
+ {
+ version: '7.0.0-preview.1.22076.8',
+ releaseTimestamp: '2022-02-17T00:00:00.000Z',
+ },
+ {
+ version: '7.0.0-rc.1.22426.10',
+ releaseTimestamp: '2022-09-14T00:00:00.000Z',
+ },
]);
});
});
diff --git a/lib/modules/datasource/dotnet-version/index.ts b/lib/modules/datasource/dotnet-version/index.ts
index c7051630d6223c..f5a799dc5f123f 100644
--- a/lib/modules/datasource/dotnet-version/index.ts
+++ b/lib/modules/datasource/dotnet-version/index.ts
@@ -23,6 +23,13 @@ export class DotnetVersionDatasource extends Datasource {
'https://dotnetcli.blob.core.windows.net/dotnet/release-metadata/releases-index.json',
];
+ override releaseTimestampSupport = true;
+ override releaseTimestampNote =
+ 'The release timestamp is determined from the `release-date` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL https://github.com/dotnet/sdk for the `dotnet-sdk` package and the https://github.com/dotnet/runtime URL for the `dotnet-runtime` package.';
+
@cache({
namespace: `datasource-${DotnetVersionDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => packageName,
diff --git a/lib/modules/datasource/endoflife-date/index.ts b/lib/modules/datasource/endoflife-date/index.ts
index 036a9f3e719635..630129d977ba99 100644
--- a/lib/modules/datasource/endoflife-date/index.ts
+++ b/lib/modules/datasource/endoflife-date/index.ts
@@ -14,6 +14,10 @@ export class EndoflifeDatePackagesource extends Datasource {
override readonly caching = true;
override readonly defaultVersioning = 'loose';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `releaseDate` field in the results.';
+
constructor() {
super(EndoflifeDatePackagesource.id);
}
diff --git a/lib/modules/datasource/flutter-version/index.ts b/lib/modules/datasource/flutter-version/index.ts
index bead276e7a3f48..46dd2ce47e8d44 100644
--- a/lib/modules/datasource/flutter-version/index.ts
+++ b/lib/modules/datasource/flutter-version/index.ts
@@ -21,6 +21,13 @@ export class FlutterVersionDatasource extends Datasource {
override readonly defaultVersioning = semverId;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `release_date` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/flutter/flutter.';
+
async getReleases({
registryUrl,
}: GetReleasesConfig): Promise<ReleaseResult | null> {
diff --git a/lib/modules/datasource/galaxy-collection/index.ts b/lib/modules/datasource/galaxy-collection/index.ts
index 3fd7e63ace7b37..ec109bc450a7a5 100644
--- a/lib/modules/datasource/galaxy-collection/index.ts
+++ b/lib/modules/datasource/galaxy-collection/index.ts
@@ -28,6 +28,15 @@ export class GalaxyCollectionDatasource extends Datasource {
override readonly defaultVersioning = pep440Versioning.id;
+ override readonly releaseTimestampSupport = true;
+ override releaseTimestampNote =
+ 'The release timestamp is determined from the `created_at` field in the results.';
+ // sourceUrl is returned in each release as well as in the ReleaseResult;
+ // the one in the ReleaseResult is the sourceUrl of the latest release
+ override readonly sourceUrlSupport = 'release';
+ override readonly sourceUrlNote =
+ 'The `sourceUrl` is determined from the `repository` field in the results.';
+
@cache({
namespace: `datasource-${GalaxyCollectionDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => packageName,
diff --git a/lib/modules/datasource/galaxy/index.ts b/lib/modules/datasource/galaxy/index.ts
index 698d9ede78f73c..64e0412f51cc2b 100644
--- a/lib/modules/datasource/galaxy/index.ts
+++ b/lib/modules/datasource/galaxy/index.ts
@@ -19,6 +19,13 @@ export class GalaxyDatasource extends Datasource {
override readonly defaultVersioning = pep440Versioning.id;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `created` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `github_user` and `github_repo` fields in the results.';
+
@cache({
namespace: 'datasource-galaxy',
key: (getReleasesConfig: GetReleasesConfig) =>
diff --git a/lib/modules/datasource/git-refs/index.ts b/lib/modules/datasource/git-refs/index.ts
index d6f93ba1a12753..a5271f9e02c68a 100644
--- a/lib/modules/datasource/git-refs/index.ts
+++ b/lib/modules/datasource/git-refs/index.ts
@@ -17,6 +17,10 @@ export class GitRefsDatasource extends GitDatasource {
override readonly customRegistrySupport = false;
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
@cache({
namespace: `datasource-${GitRefsDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => packageName,
@@ -51,7 +55,7 @@ export class GitRefsDatasource extends GitDatasource {
releases: uniqueRefs.map((ref) => ({
version: ref,
gitRef: ref,
- newDigest: rawRefs!.find((rawRef) => rawRef.value === ref)?.hash,
+ newDigest: rawRefs.find((rawRef) => rawRef.value === ref)?.hash,
})),
};
diff --git a/lib/modules/datasource/git-tags/index.ts b/lib/modules/datasource/git-tags/index.ts
index 09f084d09b345e..d5300c7ba4ba63 100644
--- a/lib/modules/datasource/git-tags/index.ts
+++ b/lib/modules/datasource/git-tags/index.ts
@@ -11,6 +11,9 @@ export class GitTagsDatasource extends GitDatasource {
}
override readonly customRegistrySupport = false;
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
@cache({
namespace: `datasource-${GitTagsDatasource.id}`,
diff --git a/lib/modules/datasource/gitea-releases/index.ts b/lib/modules/datasource/gitea-releases/index.ts
index 66d41f61c11ca9..a2febc4d922b28 100644
--- a/lib/modules/datasource/gitea-releases/index.ts
+++ b/lib/modules/datasource/gitea-releases/index.ts
@@ -16,6 +16,13 @@ export class GiteaReleasesDatasource extends Datasource {
private static readonly cacheNamespace: PackageCacheNamespace = `datasource-${GiteaReleasesDatasource.id}`;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `published_at` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
constructor() {
super(GiteaReleasesDatasource.id);
}
diff --git a/lib/modules/datasource/gitea-tags/index.ts b/lib/modules/datasource/gitea-tags/index.ts
index d177a50f560e41..fb893b0c33c59a 100644
--- a/lib/modules/datasource/gitea-tags/index.ts
+++ b/lib/modules/datasource/gitea-tags/index.ts
@@ -16,6 +16,13 @@ export class GiteaTagsDatasource extends Datasource {
private static readonly cacheNamespace: PackageCacheNamespace = `datasource-${GiteaTagsDatasource.id}`;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `created` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
constructor() {
super(GiteaTagsDatasource.id);
}
diff --git a/lib/modules/datasource/github-release-attachments/index.ts b/lib/modules/datasource/github-release-attachments/index.ts
index d79242124e5acb..da41d74df40512 100644
--- a/lib/modules/datasource/github-release-attachments/index.ts
+++ b/lib/modules/datasource/github-release-attachments/index.ts
@@ -38,6 +38,14 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
override http: GithubHttp;
+ override readonly releaseTimestampSupport = true;
+ // Note: not sure
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `releaseTimestamp` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
constructor() {
super(GithubReleaseAttachmentsDatasource.id);
this.http = new GithubHttp(GithubReleaseAttachmentsDatasource.id);
@@ -222,7 +230,7 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
}
/**
- * This function can be used to fetch releases with a customisable versioning
+ * This function can be used to fetch releases with a customizable versioning
* (e.g. semver) and with releases.
*
* This function will:
diff --git a/lib/modules/datasource/github-releases/index.ts b/lib/modules/datasource/github-releases/index.ts
index b13d0e15000eed..1963f34f4089bd 100644
--- a/lib/modules/datasource/github-releases/index.ts
+++ b/lib/modules/datasource/github-releases/index.ts
@@ -21,6 +21,14 @@ export class GithubReleasesDatasource extends Datasource {
override http: GithubHttp;
+ override readonly releaseTimestampSupport = true;
+ // Note: not sure
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `releaseTimestamp` field in the response.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
constructor() {
super(GithubReleasesDatasource.id);
this.http = new GithubHttp(GithubReleasesDatasource.id);
diff --git a/lib/modules/datasource/github-runners/index.spec.ts b/lib/modules/datasource/github-runners/index.spec.ts
index 0875f1dabfc9ab..aa2bcbd3e8effe 100644
--- a/lib/modules/datasource/github-runners/index.spec.ts
+++ b/lib/modules/datasource/github-runners/index.spec.ts
@@ -15,6 +15,7 @@ describe('modules/datasource/github-runners/index', () => {
{ version: '18.04', isDeprecated: true },
{ version: '20.04' },
{ version: '22.04' },
+ { version: '24.04', isStable: false },
],
sourceUrl: 'https://github.com/actions/runner-images',
});
@@ -35,9 +36,9 @@ describe('modules/datasource/github-runners/index', () => {
{ version: '13-xlarge' },
{ version: '13-large' },
{ version: '13' },
- { version: '14-xlarge', isStable: false },
- { version: '14-large', isStable: false },
- { version: '14', isStable: false },
+ { version: '14-xlarge' },
+ { version: '14-large' },
+ { version: '14' },
],
sourceUrl: 'https://github.com/actions/runner-images',
});
diff --git a/lib/modules/datasource/github-runners/index.ts b/lib/modules/datasource/github-runners/index.ts
index 9748f5db90c495..d77eda75b178f3 100644
--- a/lib/modules/datasource/github-runners/index.ts
+++ b/lib/modules/datasource/github-runners/index.ts
@@ -5,20 +5,25 @@ import type { GetReleasesConfig, Release, ReleaseResult } from '../types';
export class GithubRunnersDatasource extends Datasource {
static readonly id = 'github-runners';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/actions/runner-images.';
+
/**
* Only add stable runners to the datasource. See datasource readme for details.
*/
private static readonly releases: Record<string, Release[]> = {
ubuntu: [
+ { version: '24.04', isStable: false },
{ version: '22.04' },
{ version: '20.04' },
{ version: '18.04', isDeprecated: true },
{ version: '16.04', isDeprecated: true },
],
macos: [
- { version: '14', isStable: false },
- { version: '14-large', isStable: false },
- { version: '14-xlarge', isStable: false },
+ { version: '14' },
+ { version: '14-large' },
+ { version: '14-xlarge' },
{ version: '13' },
{ version: '13-large' },
{ version: '13-xlarge' },
diff --git a/lib/modules/datasource/github-runners/readme.md b/lib/modules/datasource/github-runners/readme.md
index 1cc9b6f758ec9a..4c3d92bb3249e9 100644
--- a/lib/modules/datasource/github-runners/readme.md
+++ b/lib/modules/datasource/github-runners/readme.md
@@ -1,7 +1,7 @@
This datasource returns a list of all runners that are hosted by GitHub.
The datasource is based on [GitHub's `runner-images` repository](https://github.com/actions/runner-images).
-Examples: `windows-2022` / `ubuntu-22.04` / `macos-13`
+Examples: `windows-2022` / `ubuntu-24.04` / `macos-14`
## Maintenance
diff --git a/lib/modules/datasource/github-tags/index.ts b/lib/modules/datasource/github-tags/index.ts
index 328374cb91db5d..0fdca9b6b3020d 100644
--- a/lib/modules/datasource/github-tags/index.ts
+++ b/lib/modules/datasource/github-tags/index.ts
@@ -20,6 +20,14 @@ export class GithubTagsDatasource extends Datasource {
override readonly registryStrategy = 'hunt';
+ override readonly releaseTimestampSupport = true;
+ // Note: not sure
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `releaseTimestamp` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
override http: GithubHttp;
constructor() {
diff --git a/lib/modules/datasource/gitlab-packages/index.ts b/lib/modules/datasource/gitlab-packages/index.ts
index e30bc997a3e0c3..28d0483aae5069 100644
--- a/lib/modules/datasource/gitlab-packages/index.ts
+++ b/lib/modules/datasource/gitlab-packages/index.ts
@@ -19,6 +19,10 @@ export class GitlabPackagesDatasource extends Datasource {
override defaultRegistryUrls = ['https://gitlab.com'];
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `created_at` field in the results.';
+
constructor() {
super(datasource);
this.http = new GitlabHttp(datasource);
diff --git a/lib/modules/datasource/gitlab-releases/index.ts b/lib/modules/datasource/gitlab-releases/index.ts
index a6458d35e02ff2..54ba562796a795 100644
--- a/lib/modules/datasource/gitlab-releases/index.ts
+++ b/lib/modules/datasource/gitlab-releases/index.ts
@@ -11,6 +11,13 @@ export class GitlabReleasesDatasource extends Datasource {
static readonly registryStrategy = 'first';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `released_at` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
constructor() {
super(GitlabReleasesDatasource.id);
this.http = new GitlabHttp(GitlabReleasesDatasource.id);
diff --git a/lib/modules/datasource/gitlab-tags/index.ts b/lib/modules/datasource/gitlab-tags/index.ts
index d1317c5f803569..247b4d5e69bfe8 100644
--- a/lib/modules/datasource/gitlab-tags/index.ts
+++ b/lib/modules/datasource/gitlab-tags/index.ts
@@ -12,6 +12,13 @@ export class GitlabTagsDatasource extends Datasource {
protected override http: GitlabHttp;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'To get the release timestamp we use the `created_at` field from the response.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined by using the `packageName` and `registryUrl`.';
+
constructor() {
super(GitlabTagsDatasource.id);
this.http = new GitlabHttp(GitlabTagsDatasource.id);
diff --git a/lib/modules/datasource/go/base.ts b/lib/modules/datasource/go/base.ts
index 3bde8e040f1ab3..f963490eebf5a6 100644
--- a/lib/modules/datasource/go/base.ts
+++ b/lib/modules/datasource/go/base.ts
@@ -255,7 +255,7 @@ export class BaseGoDatasource {
if (datasource !== null) {
return datasource;
}
- // fall back to old behaviour if detection did not work
+ // fall back to old behavior if detection did not work
switch (detectPlatform(goImportURL)) {
case 'github': {
diff --git a/lib/modules/datasource/go/goproxy-parser.spec.ts b/lib/modules/datasource/go/goproxy-parser.spec.ts
new file mode 100644
index 00000000000000..3dc0cd067f6e5e
--- /dev/null
+++ b/lib/modules/datasource/go/goproxy-parser.spec.ts
@@ -0,0 +1,135 @@
+import * as memCache from '../../../util/cache/memory';
+import { parseGoproxy, parseNoproxy } from './goproxy-parser';
+
+describe('modules/datasource/go/goproxy-parser', () => {
+ beforeEach(() => {
+ memCache.init();
+ });
+
+ describe('parseGoproxy', () => {
+ it('parses single url', () => {
+ const result = parseGoproxy('foo');
+ expect(result).toMatchObject([{ url: 'foo' }]);
+ });
+
+ it('parses multiple urls', () => {
+ const result = parseGoproxy('foo,bar|baz,qux');
+ expect(result).toMatchObject([
+ { url: 'foo', fallback: ',' },
+ { url: 'bar', fallback: '|' },
+ { url: 'baz', fallback: ',' },
+ { url: 'qux' },
+ ]);
+ });
+
+ it('ignores everything starting from "direct" and "off" keywords', () => {
+ expect(parseGoproxy(undefined)).toBeEmpty();
+ expect(parseGoproxy(undefined)).toBeEmpty();
+ expect(parseGoproxy('')).toBeEmpty();
+ expect(parseGoproxy('off')).toMatchObject([
+ { url: 'off', fallback: '|' },
+ ]);
+ expect(parseGoproxy('direct')).toMatchObject([
+ { url: 'direct', fallback: '|' },
+ ]);
+ expect(parseGoproxy('foo,off|direct,qux')).toMatchObject([
+ { url: 'foo', fallback: ',' },
+ { url: 'off', fallback: '|' },
+ { url: 'direct', fallback: ',' },
+ { url: 'qux', fallback: '|' },
+ ]);
+ });
+
+ it('caches results', () => {
+ expect(parseGoproxy('foo,bar')).toBe(parseGoproxy('foo,bar'));
+ });
+ });
+
+ describe('parseNoproxy', () => {
+ it('produces regex', () => {
+ expect(parseNoproxy(undefined)).toBeNull();
+ expect(parseNoproxy(null)).toBeNull();
+ expect(parseNoproxy('')).toBeNull();
+ expect(parseNoproxy('/')).toBeNull();
+ expect(parseNoproxy('*')?.source).toBe('^(?:[^\\/]*)(?:\\/.*)?$');
+ expect(parseNoproxy('?')?.source).toBe('^(?:[^\\/])(?:\\/.*)?$');
+ expect(parseNoproxy('foo')?.source).toBe('^(?:foo)(?:\\/.*)?$');
+ expect(parseNoproxy('\\f\\o\\o')?.source).toBe('^(?:foo)(?:\\/.*)?$');
+ expect(parseNoproxy('foo,bar')?.source).toBe('^(?:foo|bar)(?:\\/.*)?$');
+ expect(parseNoproxy('[abc]')?.source).toBe('^(?:[abc])(?:\\/.*)?$');
+ expect(parseNoproxy('[a-c]')?.source).toBe('^(?:[a-c])(?:\\/.*)?$');
+ expect(parseNoproxy('[\\a-\\c]')?.source).toBe('^(?:[a-c])(?:\\/.*)?$');
+ expect(parseNoproxy('a.b.c')?.source).toBe('^(?:a\\.b\\.c)(?:\\/.*)?$');
+ expect(parseNoproxy('trailing/')?.source).toBe(
+ '^(?:trailing)(?:\\/.*)?$',
+ );
+ });
+
+ it('matches on real package prefixes', () => {
+ expect(parseNoproxy('ex.co')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('ex.co/')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar')).toBeTrue();
+ expect(parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar')).toBeTrue();
+ expect(parseNoproxy('*/foo/*')?.test('example.com/foo/bar')).toBeTrue();
+ expect(parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar')).toBeTrue();
+ expect(parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz')).toBeTrue();
+ expect(parseNoproxy('ex.co')?.test('ex.co/foo/v2')).toBeTrue();
+
+ expect(parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar')).toBeTrue();
+ expect(parseNoproxy('*/foo/*')?.test('example.com/foo/bar')).toBeTrue();
+ expect(parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar')).toBeTrue();
+ expect(parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz')).toBeTrue();
+ expect(
+ parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test('ex.co/foo/bar'),
+ ).toBeTrue();
+ expect(
+ parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test('ex.co/foo/baz'),
+ ).toBeTrue();
+ expect(
+ parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test('ex.co/foo/qux'),
+ ).toBeFalse();
+
+ expect(parseNoproxy('ex')?.test('ex.co/foo')).toBeFalse();
+
+ expect(parseNoproxy('aba')?.test('x/aba')).toBeFalse();
+ expect(parseNoproxy('x/b')?.test('x/aba')).toBeFalse();
+ expect(parseNoproxy('x/ab')?.test('x/aba')).toBeFalse();
+ expect(parseNoproxy('x/ab[a-b]')?.test('x/aba')).toBeTrue();
+ });
+
+ it('matches on wildcards', () => {
+ expect(parseNoproxy('/*/')?.test('ex.co/foo')).toBeFalse();
+ expect(parseNoproxy('*/foo')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('*/fo')?.test('ex.co/foo')).toBeFalse();
+ expect(parseNoproxy('*/fo?')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('*/fo*')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('*fo*')?.test('ex.co/foo')).toBeFalse();
+
+ expect(parseNoproxy('*.co')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('ex*')?.test('ex.co/foo')).toBeTrue();
+ expect(parseNoproxy('*/foo')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/foo/')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/foo/*')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/foo/*/')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/v2')?.test('ex.co/foo/v2')).toBeFalse();
+ expect(parseNoproxy('*/*/v2')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/*/*')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/*/*/')?.test('ex.co/foo/v2')).toBeTrue();
+ expect(parseNoproxy('*/*/*')?.test('ex.co/foo')).toBeFalse();
+ expect(parseNoproxy('*/*/*/')?.test('ex.co/foo')).toBeFalse();
+
+ expect(parseNoproxy('*/*/*,,')?.test('ex.co/repo')).toBeFalse();
+ expect(parseNoproxy('*/*/*,,*/repo')?.test('ex.co/repo')).toBeTrue();
+ expect(parseNoproxy(',,*/repo')?.test('ex.co/repo')).toBeTrue();
+ });
+
+ it('matches on character ranges', () => {
+ expect(parseNoproxy('x/ab[a-b]')?.test('x/aba')).toBeTrue();
+ expect(parseNoproxy('x/ab[a-b]')?.test('x/abc')).toBeFalse();
+ });
+
+ it('caches results', () => {
+ expect(parseNoproxy('foo/bar')).toBe(parseNoproxy('foo/bar'));
+ });
+ });
+});
diff --git a/lib/modules/datasource/go/goproxy-parser.ts b/lib/modules/datasource/go/goproxy-parser.ts
new file mode 100644
index 00000000000000..78f8bdd4feae4a
--- /dev/null
+++ b/lib/modules/datasource/go/goproxy-parser.ts
@@ -0,0 +1,115 @@
+import is from '@sindresorhus/is';
+import moo from 'moo';
+import * as memCache from '../../../util/cache/memory';
+import { regEx } from '../../../util/regex';
+import type { GoproxyItem } from './types';
+
+/**
+ * Parse `GOPROXY` to the sequence of url + fallback strategy tags.
+ *
+ * @example
+ * parseGoproxy('foo.example.com|bar.example.com,baz.example.com')
+ * // [
+ * // { url: 'foo.example.com', fallback: '|' },
+ * // { url: 'bar.example.com', fallback: ',' },
+ * // { url: 'baz.example.com', fallback: '|' },
+ * // ]
+ *
+ * @see https://golang.org/ref/mod#goproxy-protocol
+ */
+export function parseGoproxy(
+ input: string | undefined = process.env.GOPROXY,
+): GoproxyItem[] {
+ if (!is.string(input)) {
+ return [];
+ }
+
+ const cacheKey = `goproxy::${input}`;
+ const cachedResult = memCache.get<GoproxyItem[]>(cacheKey);
+ if (cachedResult) {
+ return cachedResult;
+ }
+
+ const result: GoproxyItem[] = input
+ .split(regEx(/([^,|]*(?:,|\|))/))
+ .filter(Boolean)
+ .map((s) => s.split(/(?=,|\|)/)) // TODO: #12872 lookahead
+ .map(([url, separator]) => ({
+ url,
+ fallback: separator === ',' ? ',' : '|',
+ }));
+
+ memCache.set(cacheKey, result);
+ return result;
+}
+
+// https://golang.org/pkg/path/#Match
+const noproxyLexer = moo.states({
+ main: {
+ separator: {
+ match: /\s*?,\s*?/, // TODO #12870
+ value: (_: string) => '|',
+ },
+ asterisk: {
+ match: '*',
+ value: (_: string) => '[^/]*',
+ },
+ qmark: {
+ match: '?',
+ value: (_: string) => '[^/]',
+ },
+ characterRangeOpen: {
+ match: '[',
+ push: 'characterRange',
+ value: (_: string) => '[',
+ },
+ trailingSlash: {
+ match: /\/$/,
+ value: (_: string) => '',
+ },
+ char: {
+ match: /[^*?\\[\n]/,
+ value: (s: string) => s.replace(regEx('\\.', 'g'), '\\.'),
+ },
+ escapedChar: {
+ match: /\\./, // TODO #12870
+ value: (s: string) => s.slice(1),
+ },
+ },
+ characterRange: {
+ char: /[^\\\]\n]/, // TODO #12870
+ escapedChar: {
+ match: /\\./, // TODO #12870
+ value: (s: string) => s.slice(1),
+ },
+ characterRangeEnd: {
+ match: ']',
+ pop: 1,
+ },
+ },
+});
+
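+// Converts `GONOPROXY`/`GOPRIVATE` glob patterns into a single anchored RegExp,
+// e.g. parseNoproxy('ex.co')?.test('ex.co/foo') === true.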
+export function parseNoproxy(
+ input: unknown = process.env.GONOPROXY ?? process.env.GOPRIVATE,
+): RegExp | null {
+ if (!is.string(input)) {
+ return null;
+ }
+
+ const cacheKey = `noproxy::${input}`;
+ const cachedResult = memCache.get<RegExp | null>(cacheKey);
+ if (cachedResult !== undefined) {
+ return cachedResult;
+ }
+
+ const noproxyPattern = [...noproxyLexer.reset(input)]
+ .map(({ value }) => value)
+ .join('');
+
+ const result = noproxyPattern
+ ? regEx(`^(?:${noproxyPattern})(?:/.*)?$`)
+ : null;
+
+ memCache.set(cacheKey, result);
+ return result;
+}
diff --git a/lib/modules/datasource/go/index.spec.ts b/lib/modules/datasource/go/index.spec.ts
index dad705a914dc18..18c3d831ab775c 100644
--- a/lib/modules/datasource/go/index.spec.ts
+++ b/lib/modules/datasource/go/index.spec.ts
@@ -193,5 +193,20 @@ describe('modules/datasource/go/index', () => {
expect(res).not.toBeNull();
expect(res).toBeDefined();
});
+
+ describe('GOPROXY', () => {
+ afterEach(() => {
+ delete process.env.GOPROXY;
+ });
+
+ it('returns null when GOPROXY contains off', async () => {
+ process.env.GOPROXY = 'https://proxy.golang.org,off';
+ const res = await datasource.getDigest(
+ { packageName: 'golang.org/x/text' },
+ 'v1.2.3',
+ );
+ expect(res).toBeNull();
+ });
+ });
});
});
diff --git a/lib/modules/datasource/go/index.ts b/lib/modules/datasource/go/index.ts
index 48702e2d2090b9..a3abb57cf753b9 100644
--- a/lib/modules/datasource/go/index.ts
+++ b/lib/modules/datasource/go/index.ts
@@ -1,4 +1,5 @@
import is from '@sindresorhus/is';
+import { logger } from '../../../logger';
import { cache } from '../../../util/cache/package/decorator';
import { regEx } from '../../../util/regex';
import { addSecretForSanitizing } from '../../../util/sanitize';
@@ -11,6 +12,7 @@ import { GithubTagsDatasource } from '../github-tags';
import { GitlabTagsDatasource } from '../gitlab-tags';
import type { DigestConfig, GetReleasesConfig, ReleaseResult } from '../types';
import { BaseGoDatasource } from './base';
+import { parseGoproxy } from './goproxy-parser';
import { GoDirectDatasource } from './releases-direct';
import { GoProxyDatasource } from './releases-goproxy';
@@ -29,6 +31,13 @@ export class GoDatasource extends Datasource {
override readonly customRegistrySupport = false;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'If the release timestamp is not returned from the respective datasource used to fetch the releases, then Renovate uses the `Time` field in the results instead.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `packageName` and `registryUrl`.';
+
readonly goproxy = new GoProxyDatasource();
readonly direct = new GoDirectDatasource();
@@ -49,7 +58,7 @@ export class GoDatasource extends Datasource {
* go.getDigest
*
* This datasource resolves a go module URL into its source repository
- * and then fetches the digest it if it is on GitHub.
+ * and then fetches the digest if it is on GitHub.
*
* This function will:
* - Determine the source URL for the module
@@ -63,6 +72,13 @@ export class GoDatasource extends Datasource {
{ packageName }: DigestConfig,
value?: string | null,
): Promise<string | null> {
+ if (parseGoproxy().some(({ url }) => url === 'off')) {
+ logger.debug(
+ `Skip digest fetch for ${packageName} with GOPROXY containing "off"`,
+ );
+ return null;
+ }
+
const source = await BaseGoDatasource.getDatasource(packageName);
if (!source) {
return null;
diff --git a/lib/modules/datasource/go/readme.md b/lib/modules/datasource/go/readme.md
index 1546b270d1796c..df5dea4828834e 100644
--- a/lib/modules/datasource/go/readme.md
+++ b/lib/modules/datasource/go/readme.md
@@ -1,5 +1,26 @@
-This datasource will default to using the `GOPROXY` settings `https://proxy.golang.org,direct` if there is no value defined in environment variables.
+The best way to look up Go modules is by using Go proxies.
-To override this default and use a different proxy, simply configure `GOPROXY` to an alternative setting in env.
+## GOPROXY settings
+
+This datasource will use the default `GOPROXY` setting of `https://proxy.golang.org,direct` if the environment variable is unset.
+
+To override this default and use a different proxy in self-hosted environments, set `GOPROXY` to an alternative value in the environment.
To override this default and stop using any proxy at all, set `GOPROXY` to the value `direct`.
+
+## Pseudo versions
+
+Go proxies return an empty list of versions when queried (`@v/list`) for a package which uses pseudo versions, but return the latest pseudo-version when queried for `@latest`.
+
+If the `@latest` endpoint returns a pseudo-version, and the release list is empty, then this datasource will return the latest pseudo-version as the only release/version for the package.
+
+## Checking for new major releases
+
+When a Go proxy is queried for `@v/list`, it returns only versions for v0 or v1 of a package.
+Therefore Renovate will also query `@v2/list`, in case a v2 of the package also exists.
+Similarly, if the dependency is already on a higher major version such as `v5`, Renovate will check whether even higher major versions exist.
+You do not need to worry about any 404 responses resulting from such checks - they are the only way for Renovate to know whether newer major releases exist.
+
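+As a rough sketch (the module path below is only illustrative), these extra checks are ordinary `@v/list` requests against versioned module paths, and a 404 for one of them simply means that major version does not exist:
+
+```
+https://proxy.golang.org/github.com/example/mod/@v/list      <- v0/v1 versions
+https://proxy.golang.org/github.com/example/mod/v2/@v/list   <- 404 if there is no v2
+```
+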
+## Fallback to direct lookups
+
+If no result is found from Go proxy lookups then Renovate will fall back to direct lookups.
diff --git a/lib/modules/datasource/go/releases-goproxy.spec.ts b/lib/modules/datasource/go/releases-goproxy.spec.ts
index 708c8e39ab41b2..22f6c8d1596723 100644
--- a/lib/modules/datasource/go/releases-goproxy.spec.ts
+++ b/lib/modules/datasource/go/releases-goproxy.spec.ts
@@ -59,223 +59,6 @@ describe('modules/datasource/go/releases-goproxy', () => {
});
});
- describe('parseGoproxy', () => {
- it('parses single url', () => {
- const result = datasource.parseGoproxy('foo');
- expect(result).toMatchObject([{ url: 'foo' }]);
- });
-
- it('parses multiple urls', () => {
- const result = datasource.parseGoproxy('foo,bar|baz,qux');
- expect(result).toMatchObject([
- { url: 'foo', fallback: ',' },
- { url: 'bar', fallback: '|' },
- { url: 'baz', fallback: ',' },
- { url: 'qux' },
- ]);
- });
-
- it('ignores everything starting from "direct" and "off" keywords', () => {
- expect(datasource.parseGoproxy(undefined)).toBeEmpty();
- expect(datasource.parseGoproxy(undefined)).toBeEmpty();
- expect(datasource.parseGoproxy('')).toBeEmpty();
- expect(datasource.parseGoproxy('off')).toMatchObject([
- { url: 'off', fallback: '|' },
- ]);
- expect(datasource.parseGoproxy('direct')).toMatchObject([
- { url: 'direct', fallback: '|' },
- ]);
- expect(datasource.parseGoproxy('foo,off|direct,qux')).toMatchObject([
- { url: 'foo', fallback: ',' },
- { url: 'off', fallback: '|' },
- { url: 'direct', fallback: ',' },
- { url: 'qux', fallback: '|' },
- ]);
- });
- });
-
- describe('GoProxyDatasource.parseNoproxy', () => {
- it('produces regex', () => {
- expect(GoProxyDatasource.parseNoproxy(undefined)).toBeNull();
- expect(GoProxyDatasource.parseNoproxy(null)).toBeNull();
- expect(GoProxyDatasource.parseNoproxy('')).toBeNull();
- expect(GoProxyDatasource.parseNoproxy('/')).toBeNull();
- expect(GoProxyDatasource.parseNoproxy('*')?.source).toBe(
- '^(?:[^\\/]*)(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('?')?.source).toBe(
- '^(?:[^\\/])(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('foo')?.source).toBe(
- '^(?:foo)(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('\\f\\o\\o')?.source).toBe(
- '^(?:foo)(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('foo,bar')?.source).toBe(
- '^(?:foo|bar)(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('[abc]')?.source).toBe(
- '^(?:[abc])(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('[a-c]')?.source).toBe(
- '^(?:[a-c])(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('[\\a-\\c]')?.source).toBe(
- '^(?:[a-c])(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('a.b.c')?.source).toBe(
- '^(?:a\\.b\\.c)(?:\\/.*)?$',
- );
- expect(GoProxyDatasource.parseNoproxy('trailing/')?.source).toBe(
- '^(?:trailing)(?:\\/.*)?$',
- );
- });
-
- it('matches on real package prefixes', () => {
- expect(
- GoProxyDatasource.parseNoproxy('ex.co')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo/*')?.test('example.com/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co')?.test('ex.co/foo/v2'),
- ).toBeTrue();
-
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo/*')?.test('example.com/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test(
- 'ex.co/foo/bar',
- ),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test(
- 'ex.co/foo/baz',
- ),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test(
- 'ex.co/foo/qux',
- ),
- ).toBeFalse();
-
- expect(
- GoProxyDatasource.parseNoproxy('ex')?.test('ex.co/foo'),
- ).toBeFalse();
-
- expect(GoProxyDatasource.parseNoproxy('aba')?.test('x/aba')).toBeFalse();
- expect(GoProxyDatasource.parseNoproxy('x/b')?.test('x/aba')).toBeFalse();
- expect(GoProxyDatasource.parseNoproxy('x/ab')?.test('x/aba')).toBeFalse();
- expect(
- GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/aba'),
- ).toBeTrue();
- });
-
- it('matches on wildcards', () => {
- expect(
- GoProxyDatasource.parseNoproxy('/*/')?.test('ex.co/foo'),
- ).toBeFalse();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/fo')?.test('ex.co/foo'),
- ).toBeFalse();
- expect(
- GoProxyDatasource.parseNoproxy('*/fo?')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/fo*')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*fo*')?.test('ex.co/foo'),
- ).toBeFalse();
-
- expect(
- GoProxyDatasource.parseNoproxy('*.co')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('ex*')?.test('ex.co/foo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo/')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo/*')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/foo/*/')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/v2')?.test('ex.co/foo/v2'),
- ).toBeFalse();
- expect(
- GoProxyDatasource.parseNoproxy('*/*/v2')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/*/*')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/*/*/')?.test('ex.co/foo/v2'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('*/*/*')?.test('ex.co/foo'),
- ).toBeFalse();
- expect(
- GoProxyDatasource.parseNoproxy('*/*/*/')?.test('ex.co/foo'),
- ).toBeFalse();
-
- expect(
- GoProxyDatasource.parseNoproxy('*/*/*,,')?.test('ex.co/repo'),
- ).toBeFalse();
- expect(
- GoProxyDatasource.parseNoproxy('*/*/*,,*/repo')?.test('ex.co/repo'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy(',,*/repo')?.test('ex.co/repo'),
- ).toBeTrue();
- });
-
- it('matches on character ranges', () => {
- expect(
- GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/aba'),
- ).toBeTrue();
- expect(
- GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/abc'),
- ).toBeFalse();
- });
- });
-
describe('getReleases', () => {
const baseUrl = 'https://proxy.golang.org';
@@ -767,7 +550,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
expect(res).toBeNull();
});
- it('returns latest even if package has no releases', async () => {
+ it('uses latest if package has no releases', async () => {
process.env.GOPROXY = baseUrl;
httpMock
@@ -784,7 +567,13 @@ describe('modules/datasource/go/releases-goproxy', () => {
});
expect(res).toEqual({
- releases: [],
+ releases: [
+ {
+ newDigest: '921286631fa9',
+ releaseTimestamp: '2023-09-05T20:02:55Z',
+ version: 'v0.0.0-20230905200255-921286631fa9',
+ },
+ ],
sourceUrl: 'https://github.com/google/btree',
tags: { latest: 'v0.0.0-20230905200255-921286631fa9' },
});
diff --git a/lib/modules/datasource/go/releases-goproxy.ts b/lib/modules/datasource/go/releases-goproxy.ts
index fc98b95b23a714..432bf48ebffcc8 100644
--- a/lib/modules/datasource/go/releases-goproxy.ts
+++ b/lib/modules/datasource/go/releases-goproxy.ts
@@ -1,6 +1,5 @@
import is from '@sindresorhus/is';
import { DateTime } from 'luxon';
-import moo from 'moo';
import { logger } from '../../../logger';
import { cache } from '../../../util/cache/package/decorator';
import { filterMap } from '../../../util/filter-map';
@@ -12,10 +11,9 @@ import { Datasource } from '../datasource';
import type { GetReleasesConfig, Release, ReleaseResult } from '../types';
import { BaseGoDatasource } from './base';
import { getSourceUrl } from './common';
+import { parseGoproxy, parseNoproxy } from './goproxy-parser';
import { GoDirectDatasource } from './releases-direct';
-import type { GoproxyItem, VersionInfo } from './types';
-
-const parsedGoproxy: Record<string, GoproxyItem[]> = {};
+import type { VersionInfo } from './types';
const modRegex = regEx(/^(?.*?)(?:[./]v(?\d+))?$/);
@@ -26,6 +24,24 @@ const pseudoVersionRegex = regEx(
/v\d+\.\d+\.\d+-(?:\w+\.)?(?:0\.)?(?<timestamp>\d{14})-(?<digest>[a-f0-9]{12})/i,
);
+export function pseudoVersionToRelease(pseudoVersion: string): Release | null {
+ const match = pseudoVersion.match(pseudoVersionRegex)?.groups;
+ if (!match) {
+ return null;
+ }
+
+ const { digest: newDigest, timestamp } = match;
+ const releaseTimestamp = DateTime.fromFormat(timestamp, 'yyyyMMddHHmmss', {
+ zone: 'UTC',
+ }).toISO({ suppressMilliseconds: true });
+
+ return {
+ version: pseudoVersion,
+ newDigest,
+ releaseTimestamp,
+ };
+}
+
export class GoProxyDatasource extends Datasource {
static readonly id = 'go-proxy';
@@ -46,8 +62,8 @@ export class GoProxyDatasource extends Datasource {
if (goproxy === 'direct') {
return this.direct.getReleases(config);
}
- const proxyList = this.parseGoproxy(goproxy);
- const noproxy = GoProxyDatasource.parseNoproxy();
+ const proxyList = parseGoproxy(goproxy);
+ const noproxy = parseNoproxy();
let result: ReleaseResult | null = null;
@@ -71,9 +87,6 @@ export class GoProxyDatasource extends Datasource {
result = res;
break;
}
- if (res.tags?.latest) {
- result = res;
- }
} catch (err) {
const statusCode = err?.response?.statusCode;
const canFallback =
@@ -103,106 +116,6 @@ export class GoProxyDatasource extends Datasource {
return result;
}
- /**
- * Parse `GOPROXY` to the sequence of url + fallback strategy tags.
- *
- * @example
- * parseGoproxy('foo.example.com|bar.example.com,baz.example.com')
- * // [
- * // { url: 'foo.example.com', fallback: '|' },
- * // { url: 'bar.example.com', fallback: ',' },
- * // { url: 'baz.example.com', fallback: '|' },
- * // ]
- *
- * @see https://golang.org/ref/mod#goproxy-protocol
- */
- parseGoproxy(input: string | undefined = process.env.GOPROXY): GoproxyItem[] {
- if (!is.string(input)) {
- return [];
- }
-
- if (parsedGoproxy[input]) {
- return parsedGoproxy[input];
- }
-
- const result: GoproxyItem[] = input
- .split(regEx(/([^,|]*(?:,|\|))/))
- .filter(Boolean)
- .map((s) => s.split(/(?=,|\|)/)) // TODO: #12872 lookahead
- .map(([url, separator]) => ({
- url,
- fallback: separator === ',' ? ',' : '|',
- }));
-
- parsedGoproxy[input] = result;
- return result;
- }
- // https://golang.org/pkg/path/#Match
- static lexer = moo.states({
- main: {
- separator: {
- match: /\s*?,\s*?/, // TODO #12870
- value: (_: string) => '|',
- },
- asterisk: {
- match: '*',
- value: (_: string) => '[^/]*',
- },
- qmark: {
- match: '?',
- value: (_: string) => '[^/]',
- },
- characterRangeOpen: {
- match: '[',
- push: 'characterRange',
- value: (_: string) => '[',
- },
- trailingSlash: {
- match: /\/$/,
- value: (_: string) => '',
- },
- char: {
- match: /[^*?\\[\n]/,
- value: (s: string) => s.replace(regEx('\\.', 'g'), '\\.'),
- },
- escapedChar: {
- match: /\\./, // TODO #12870
- value: (s: string) => s.slice(1),
- },
- },
- characterRange: {
- char: /[^\\\]\n]/, // TODO #12870
- escapedChar: {
- match: /\\./, // TODO #12870
- value: (s: string) => s.slice(1),
- },
- characterRangeEnd: {
- match: ']',
- pop: 1,
- },
- },
- });
-
- static parsedNoproxy: Record<string, RegExp | null> = {};
-
- static parseNoproxy(
- input: unknown = process.env.GONOPROXY ?? process.env.GOPRIVATE,
- ): RegExp | null {
- if (!is.string(input)) {
- return null;
- }
- if (this.parsedNoproxy[input] !== undefined) {
- return this.parsedNoproxy[input];
- }
- this.lexer.reset(input);
- const noproxyPattern = [...this.lexer].map(({ value }) => value).join('');
- const result = noproxyPattern
- ? regEx(`^(?:${noproxyPattern})(?:/.*)?$`)
- : null;
- this.parsedNoproxy[input] = result;
- return result;
- }
-
/**
* Avoid ambiguity when serving from case-insensitive file systems.
*
@@ -221,30 +134,12 @@ export class GoProxyDatasource extends Datasource {
}
const [version, releaseTimestamp] = str.trim().split(regEx(/\s+/));
- const release: Release = { version };
+ const release: Release = pseudoVersionToRelease(version) ?? { version };
if (releaseTimestamp) {
release.releaseTimestamp = releaseTimestamp;
}
- const pseudoVersionMatch = version.match(pseudoVersionRegex)?.groups;
- if (pseudoVersionMatch) {
- const { digest: newDigest, timestamp } = pseudoVersionMatch;
-
- if (newDigest) {
- release.newDigest = newDigest;
- }
-
- const pseudoVersionReleaseTimestamp = DateTime.fromFormat(
- timestamp,
- 'yyyyMMddHHmmss',
- { zone: 'UTC' },
- ).toISO({ suppressMilliseconds: true });
- if (pseudoVersionReleaseTimestamp) {
- release.releaseTimestamp = pseudoVersionReleaseTimestamp;
- }
- }
-
return release;
});
}
@@ -336,6 +231,12 @@ export class GoProxyDatasource extends Datasource {
if (goVersioning.isGreaterThan(latestVersion, result.tags.latest)) {
result.tags.latest = latestVersion;
}
+ if (!result.releases.length) {
+ const releaseFromLatest = pseudoVersionToRelease(latestVersion);
+ if (releaseFromLatest) {
+ result.releases.push(releaseFromLatest);
+ }
+ }
}
}
@@ -344,7 +245,7 @@ export class GoProxyDatasource extends Datasource {
static getCacheKey({ packageName }: GetReleasesConfig): string {
const goproxy = process.env.GOPROXY;
- const noproxy = GoProxyDatasource.parseNoproxy();
+ const noproxy = parseNoproxy();
// TODO: types (#22198)
return `${packageName}@@${goproxy}@@${noproxy?.toString()}`;
}
diff --git a/lib/modules/datasource/golang-version/index.ts b/lib/modules/datasource/golang-version/index.ts
index 79e907f3418136..65e7d9b87b37c3 100644
--- a/lib/modules/datasource/golang-version/index.ts
+++ b/lib/modules/datasource/golang-version/index.ts
@@ -32,6 +32,13 @@ export class GolangVersionDatasource extends Datasource {
override readonly defaultVersioning = semverVersioningId;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `Date` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/golang/go.';
+
@cache({ namespace: `datasource-${GolangVersionDatasource.id}`, key: 'all' })
async getReleases({
registryUrl,
diff --git a/lib/modules/datasource/gradle-version/index.ts b/lib/modules/datasource/gradle-version/index.ts
index 069b51f7190a77..bb6368e19803f1 100644
--- a/lib/modules/datasource/gradle-version/index.ts
+++ b/lib/modules/datasource/gradle-version/index.ts
@@ -20,6 +20,13 @@ export class GradleVersionDatasource extends Datasource {
override readonly registryStrategy = 'merge';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `buildTime` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/gradle/gradle.';
+
private static readonly buildTimeRegex = regEx(
'^(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\+\\d\\d\\d\\d)$',
);
diff --git a/lib/modules/datasource/helm/index.ts b/lib/modules/datasource/helm/index.ts
index ada0c21779c1b8..e5ccc32b5113d1 100644
--- a/lib/modules/datasource/helm/index.ts
+++ b/lib/modules/datasource/helm/index.ts
@@ -25,6 +25,13 @@ export class HelmDatasource extends Datasource {
override readonly defaultVersioning = helmVersioning.id;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `created` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `home` field or the `sources` field in the results.';
+
@cache({
namespace: `datasource-${HelmDatasource.id}`,
key: (helmRepository: string) => helmRepository,
diff --git a/lib/modules/datasource/hermit/index.ts b/lib/modules/datasource/hermit/index.ts
index 4db90e81b975ec..167c1544e64a2a 100644
--- a/lib/modules/datasource/hermit/index.ts
+++ b/lib/modules/datasource/hermit/index.ts
@@ -30,6 +30,10 @@ export class HermitDatasource extends Datasource {
'https://github.com/cashapp/hermit-packages',
];
+ override readonly sourceUrlSupport = 'release';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `Repository` field in the results.';
+
pathRegex: RegExp;
constructor() {
diff --git a/lib/modules/datasource/hex/index.ts b/lib/modules/datasource/hex/index.ts
index e6ee7d927cc364..c21b87785402ee 100644
--- a/lib/modules/datasource/hex/index.ts
+++ b/lib/modules/datasource/hex/index.ts
@@ -18,6 +18,13 @@ export class HexDatasource extends Datasource {
override readonly defaultVersioning = hexVersioning.id;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `inserted_at` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `Github` field in the results.';
+
@cache({
namespace: `datasource-${HexDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => packageName,
diff --git a/lib/modules/datasource/hexpm-bob/index.ts b/lib/modules/datasource/hexpm-bob/index.ts
index 5d1f0a89ee1086..c841f26e9a38d4 100644
--- a/lib/modules/datasource/hexpm-bob/index.ts
+++ b/lib/modules/datasource/hexpm-bob/index.ts
@@ -24,6 +24,13 @@ export class HexpmBobDatasource extends Datasource {
override readonly defaultVersioning = semverId;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `buildDate` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL https://github.com/elixir-lang/elixir.git for the `elixir` package and the https://github.com/erlang/otp.git URL for the `erlang` package.';
+
@cache({
namespace: `datasource-${datasource}`,
key: ({ registryUrl, packageName }: GetReleasesConfig) =>
diff --git a/lib/modules/datasource/index.ts b/lib/modules/datasource/index.ts
index 743552e57ea2b8..a8a7d1c1c6502a 100644
--- a/lib/modules/datasource/index.ts
+++ b/lib/modules/datasource/index.ts
@@ -398,15 +398,18 @@ function getDigestConfig(
datasource: DatasourceApi,
config: GetDigestInputConfig,
): DigestConfig {
- const { currentValue, currentDigest } = config;
+ const { lookupName, currentValue, currentDigest } = config;
const packageName = config.replacementName ?? config.packageName;
- const [registryUrl] = resolveRegistryUrls(
- datasource,
- config.defaultRegistryUrls,
- config.registryUrls,
- config.additionalRegistryUrls,
- );
- return { packageName, registryUrl, currentValue, currentDigest };
+ // Prefer registryUrl from getReleases() lookup if it has been passed
+ const registryUrl =
+ config.registryUrl ??
+ resolveRegistryUrls(
+ datasource,
+ config.defaultRegistryUrls,
+ config.registryUrls,
+ config.additionalRegistryUrls,
+ )[0];
+ return { lookupName, packageName, registryUrl, currentValue, currentDigest };
}
export function getDigest(
diff --git a/lib/modules/datasource/jenkins-plugins/index.ts b/lib/modules/datasource/jenkins-plugins/index.ts
index a19ead3bb18e33..28cbca46d5831d 100644
--- a/lib/modules/datasource/jenkins-plugins/index.ts
+++ b/lib/modules/datasource/jenkins-plugins/index.ts
@@ -25,6 +25,13 @@ export class JenkinsPluginsDatasource extends Datasource {
private static readonly packageInfoPath = 'current/update-center.actual.json';
private static readonly packageVersionsPath = 'current/plugin-versions.json';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `releaseTimestamp` or `buildDate` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `scm` field in the results.';
+
async getReleases({
packageName,
registryUrl,
diff --git a/lib/modules/datasource/maven/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/maven/__snapshots__/index.spec.ts.snap
index bdc86e88e02ff1..d6ca5371d2e484 100644
--- a/lib/modules/datasource/maven/__snapshots__/index.spec.ts.snap
+++ b/lib/modules/datasource/maven/__snapshots__/index.spec.ts.snap
@@ -6,6 +6,7 @@ exports[`modules/datasource/maven/index falls back to next registry url 1`] = `
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://repo.maven.apache.org/maven2",
"releases": [
{
@@ -53,6 +54,7 @@ exports[`modules/datasource/maven/index removes authentication header after redi
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://frontend_for_private_s3_repository/maven2",
"releases": [
{
@@ -89,6 +91,7 @@ exports[`modules/datasource/maven/index returns releases 1`] = `
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://repo.maven.apache.org/maven2",
"releases": [
{
@@ -116,6 +119,7 @@ exports[`modules/datasource/maven/index returns releases from custom repository
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://custom.registry.renovatebot.com",
"releases": [
{
@@ -143,6 +147,7 @@ exports[`modules/datasource/maven/index skips registry with invalid XML 1`] = `
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://repo.maven.apache.org/maven2",
"releases": [
{
@@ -170,6 +175,7 @@ exports[`modules/datasource/maven/index skips registry with invalid metadata str
"group": "org.example",
"homepage": "https://package.example.org/about",
"name": "package",
+ "packageScope": "org.example",
"registryUrl": "https://repo.maven.apache.org/maven2",
"releases": [
{
diff --git a/lib/modules/datasource/maven/index.spec.ts b/lib/modules/datasource/maven/index.spec.ts
index f0d931a336450e..d51f6f1f025a1d 100644
--- a/lib/modules/datasource/maven/index.spec.ts
+++ b/lib/modules/datasource/maven/index.spec.ts
@@ -240,6 +240,7 @@ describe('modules/datasource/maven/index', () => {
group: 'org.example',
homepage: 'https://package.example.org/about',
name: 'package',
+ packageScope: 'org.example',
registryUrl: 'https://repo.maven.apache.org/maven2',
releases: [
{
@@ -268,6 +269,7 @@ describe('modules/datasource/maven/index', () => {
group: 'org.example',
homepage: 'https://package.example.org/about',
name: 'package',
+ packageScope: 'org.example',
registryUrl: 'https://repo.maven.apache.org/maven2',
releases: [
{ version: '1.0.0', releaseTimestamp: '2021-02-22T14:43:00.000Z' },
@@ -484,6 +486,7 @@ describe('modules/datasource/maven/index', () => {
group: 'org.example',
homepage: 'https://package.example.org/about',
name: 'package',
+ packageScope: 'org.example',
registryUrl:
'artifactregistry://maven.pkg.dev/some-project/some-repository',
releases: [
@@ -535,6 +538,7 @@ describe('modules/datasource/maven/index', () => {
group: 'org.example',
homepage: 'https://package.example.org/about',
name: 'package',
+ packageScope: 'org.example',
registryUrl:
'artifactregistry://maven.pkg.dev/some-project/some-repository',
releases: [
diff --git a/lib/modules/datasource/maven/index.ts b/lib/modules/datasource/maven/index.ts
index 3b2c752d8714ce..3232823257c48e 100644
--- a/lib/modules/datasource/maven/index.ts
+++ b/lib/modules/datasource/maven/index.ts
@@ -69,6 +69,13 @@ export class MavenDatasource extends Datasource {
override readonly registryStrategy: RegistryStrategy = 'merge';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `Last-Modified` header or the `lastModified` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `scm` tags in the results.';
+
constructor(id = MavenDatasource.id) {
super(id);
}
@@ -114,6 +121,10 @@ export class MavenDatasource extends Datasource {
dependency: MavenDependency,
repoUrl: string,
): Promise {
+ if (!repoUrl.startsWith(MAVEN_REPO)) {
+ return inputReleaseMap;
+ }
+
const cacheNs = 'datasource-maven:index-html-releases';
const cacheKey = `${repoUrl}${dependency.dependencyUrl}`;
let workingReleaseMap = await packageCache.get(
@@ -124,27 +135,21 @@ export class MavenDatasource extends Datasource {
workingReleaseMap = {};
let retryEarlier = false;
try {
- if (repoUrl.startsWith(MAVEN_REPO)) {
- const indexUrl = getMavenUrl(dependency, repoUrl, 'index.html');
- const res = await downloadHttpProtocol(this.http, indexUrl);
- const { body = '' } = res;
- for (const line of body.split(newlineRegex)) {
- const match = line.trim().match(mavenCentralHtmlVersionRegex);
- if (match) {
- const { version, releaseTimestamp: timestamp } =
- match?.groups ?? /* istanbul ignore next: hard to test */ {};
- if (version && timestamp) {
- const date = DateTime.fromFormat(
- timestamp,
- 'yyyy-MM-dd HH:mm',
- {
- zone: 'UTC',
- },
- );
- if (date.isValid) {
- const releaseTimestamp = date.toISO();
- workingReleaseMap[version] = { version, releaseTimestamp };
- }
+ const indexUrl = getMavenUrl(dependency, repoUrl, 'index.html');
+ const res = await downloadHttpProtocol(this.http, indexUrl);
+ const { body = '' } = res;
+ for (const line of body.split(newlineRegex)) {
+ const match = line.trim().match(mavenCentralHtmlVersionRegex);
+ if (match) {
+ const { version, releaseTimestamp: timestamp } =
+ match?.groups ?? /* istanbul ignore next: hard to test */ {};
+ if (version && timestamp) {
+ const date = DateTime.fromFormat(timestamp, 'yyyy-MM-dd HH:mm', {
+ zone: 'UTC',
+ });
+ if (date.isValid) {
+ const releaseTimestamp = date.toISO();
+ workingReleaseMap[version] = { version, releaseTimestamp };
}
}
}
diff --git a/lib/modules/datasource/maven/util.ts b/lib/modules/datasource/maven/util.ts
index 072699cc2cb2ab..24b92cc02dfbef 100644
--- a/lib/modules/datasource/maven/util.ts
+++ b/lib/modules/datasource/maven/util.ts
@@ -6,11 +6,7 @@ import { HOST_DISABLED } from '../../../constants/error-messages';
import { logger } from '../../../logger';
import { ExternalHostError } from '../../../types/errors/external-host-error';
import type { Http } from '../../../util/http';
-import type {
- HttpOptions,
- HttpRequestOptions,
- HttpResponse,
-} from '../../../util/http/types';
+import type { HttpOptions, HttpResponse } from '../../../util/http/types';
import { regEx } from '../../../util/regex';
import { getS3Client, parseS3Url } from '../../../util/s3';
import { streamToString } from '../../../util/streams';
@@ -69,7 +65,7 @@ function isUnsupportedHostError(err: { name: string }): boolean {
export async function downloadHttpProtocol(
http: Http,
pkgUrl: URL | string,
- opts: HttpOptions & HttpRequestOptions = {},
+ opts: HttpOptions = {},
): Promise> {
let raw: HttpResponse;
try {
@@ -449,6 +445,11 @@ export async function getDependencyInfo(
}
}
+ const groupId = pomContent.valueWithPath('groupId');
+ if (groupId) {
+ result.packageScope = groupId;
+ }
+
const parent = pomContent.childNamed('parent');
if (recursionLimit > 0 && parent && (!result.sourceUrl || !result.homepage)) {
// if we found a parent and are missing some information
diff --git a/lib/modules/datasource/metadata-manual.ts b/lib/modules/datasource/metadata-manual.ts
index eb77f8583340c9..7b59ad571eea0a 100644
--- a/lib/modules/datasource/metadata-manual.ts
+++ b/lib/modules/datasource/metadata-manual.ts
@@ -17,17 +11,11 @@ export const manualChangelogUrls: Record<string, Record<string, string>> = {
'https://github.com/angular/angular/blob/master/packages/zone.js/CHANGELOG.md',
},
pypi: {
- alembic: 'https://alembic.sqlalchemy.org/en/latest/changelog.html',
beautifulsoup4:
'https://bazaar.launchpad.net/~leonardr/beautifulsoup/bs4/view/head:/CHANGELOG',
- django: 'https://github.com/django/django/tree/master/docs/releases',
- djangorestframework:
- 'https://www.django-rest-framework.org/community/release-notes/',
flake8: 'https://flake8.pycqa.org/en/latest/release-notes/index.html',
'django-storages':
'https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst',
- hypothesis:
- 'https://github.com/HypothesisWorks/hypothesis/blob/master/hypothesis-python/docs/changes.rst',
lxml: 'https://git.launchpad.net/lxml/plain/CHANGES.txt',
mypy: 'https://mypy-lang.blogspot.com/',
phonenumbers:
@@ -38,12 +32,9 @@ export const manualChangelogUrls: Record<string, Record<string, string>> = {
'https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt',
'django-debug-toolbar':
'https://django-debug-toolbar.readthedocs.io/en/latest/changes.html',
- 'firebase-admin':
- 'https://firebase.google.com/support/release-notes/admin/python',
requests: 'https://github.com/psf/requests/blob/master/HISTORY.md',
sqlalchemy: 'https://docs.sqlalchemy.org/en/latest/changelog/',
uwsgi: 'https://uwsgi-docs.readthedocs.io/en/latest/#release-notes',
- wagtail: 'https://github.com/wagtail/wagtail/tree/master/docs/releases',
},
docker: {
'gitlab/gitlab-ce':
diff --git a/lib/modules/datasource/metadata.spec.ts b/lib/modules/datasource/metadata.spec.ts
index 73fc900561cdb3..0b49c803ce8db8 100644
--- a/lib/modules/datasource/metadata.spec.ts
+++ b/lib/modules/datasource/metadata.spec.ts
@@ -27,12 +27,12 @@ describe('modules/datasource/metadata', () => {
};
const datasource = PypiDatasource.id;
- const packageName = 'django';
+ const packageName = 'pycountry';
addMetaData(dep, datasource, packageName);
expect(dep).toMatchSnapshot({
changelogUrl:
- 'https://github.com/django/django/tree/master/docs/releases',
+ 'https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt',
});
});
@@ -512,13 +512,13 @@ describe('modules/datasource/metadata', () => {
const dep = partial({});
const datasource = PypiDatasource.id;
- const packageName = 'django';
+ const packageName = 'pycountry';
addMetaData(dep, datasource, packageName);
expect(dep).toEqual({
changelogUrl:
- 'https://github.com/django/django/tree/master/docs/releases',
- sourceUrl: 'https://github.com/django/django',
+ 'https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt',
+ sourceUrl: 'https://github.com/flyingcircusio/pycountry',
});
});
diff --git a/lib/modules/datasource/metadata.ts b/lib/modules/datasource/metadata.ts
index cbe96e040eb5ad..c0b1cd6a5de6f3 100644
--- a/lib/modules/datasource/metadata.ts
+++ b/lib/modules/datasource/metadata.ts
@@ -5,7 +5,7 @@ import { detectPlatform } from '../../util/common';
import { parseGitUrl } from '../../util/git/url';
import * as hostRules from '../../util/host-rules';
import { regEx } from '../../util/regex';
-import { parseUrl, trimTrailingSlash, validateUrl } from '../../util/url';
+import { isHttpUrl, parseUrl, trimTrailingSlash } from '../../util/url';
import { manualChangelogUrls, manualSourceUrls } from './metadata-manual';
import type { ReleaseResult } from './types';
@@ -195,7 +195,7 @@ export function addMetaData(
];
for (const urlKey of urlKeys) {
const urlVal = dep[urlKey];
- if (is.string(urlVal) && validateUrl(urlVal.trim())) {
+ if (is.string(urlVal) && isHttpUrl(urlVal.trim())) {
dep[urlKey] = urlVal.trim() as never;
} else {
delete dep[urlKey];
diff --git a/lib/modules/datasource/node-version/index.ts b/lib/modules/datasource/node-version/index.ts
index f93b88bfa8716d..c37c707c32cabf 100644
--- a/lib/modules/datasource/node-version/index.ts
+++ b/lib/modules/datasource/node-version/index.ts
@@ -13,14 +13,19 @@ export class NodeVersionDatasource extends Datasource {
super(datasource);
}
- override readonly customRegistrySupport = false;
-
override readonly defaultRegistryUrls = [defaultRegistryUrl];
override readonly defaultVersioning = versioning;
override readonly caching = true;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `date` field.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/nodejs/node';
+
@cache({
namespace: `datasource-${datasource}`,
// TODO: types (#22198)
diff --git a/lib/modules/datasource/npm/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/npm/__snapshots__/index.spec.ts.snap
index 02cf5b7c7e009e..643d0d75ba63df 100644
--- a/lib/modules/datasource/npm/__snapshots__/index.spec.ts.snap
+++ b/lib/modules/datasource/npm/__snapshots__/index.spec.ts.snap
@@ -221,6 +221,7 @@ Marking the latest version of an npm package as deprecated results in the entire
"dependencies": undefined,
"devDependencies": undefined,
"gitRef": undefined,
+ "isDeprecated": true,
"releaseTimestamp": "2018-05-06T05:21:53.000Z",
"version": "0.0.1",
},
diff --git a/lib/modules/datasource/npm/get.spec.ts b/lib/modules/datasource/npm/get.spec.ts
index b101911166ee7b..b8a3809bc4bc28 100644
--- a/lib/modules/datasource/npm/get.spec.ts
+++ b/lib/modules/datasource/npm/get.spec.ts
@@ -4,7 +4,7 @@ import { ExternalHostError } from '../../../types/errors/external-host-error';
import * as _packageCache from '../../../util/cache/package';
import * as hostRules from '../../../util/host-rules';
import { Http } from '../../../util/http';
-import { getDependency } from './get';
+import { CACHE_REVISION, getDependency } from './get';
import { resolveRegistryUrl, setNpmrc } from './npmrc';
jest.mock('../../../util/cache/package');
@@ -245,6 +245,91 @@ describe('modules/datasource/npm/get', () => {
expect(await getDependency(http, registryUrl, 'npm-error-402')).toBeNull();
});
+ it('throw ExternalHostError when error happens on registry.npmjs.org', async () => {
+ httpMock
+ .scope('https://registry.npmjs.org')
+ .get('/npm-parse-error')
+ .reply(200, 'not-a-json');
+ const registryUrl = resolveRegistryUrl('npm-parse-error');
+ await expect(
+ getDependency(http, registryUrl, 'npm-parse-error'),
+ ).rejects.toThrow(ExternalHostError);
+ });
+
+ it('redact body for ExternalHostError when error happens on registry.npmjs.org', async () => {
+ httpMock
+ .scope('https://registry.npmjs.org')
+ .get('/npm-parse-error')
+ .reply(200, 'not-a-json');
+ const registryUrl = resolveRegistryUrl('npm-parse-error');
+ let thrownError;
+ try {
+ await getDependency(http, registryUrl, 'npm-parse-error');
+ } catch (error) {
+ thrownError = error;
+ }
+ expect(thrownError.err.name).toBe('ParseError');
+ expect(thrownError.err.body).toBe('err.body deleted by Renovate');
+ });
+
+ it('do not throw ExternalHostError when error happens on custom host', async () => {
+ setNpmrc('registry=https://test.org');
+ httpMock
+ .scope('https://test.org')
+ .get('/npm-parse-error')
+ .reply(200, 'not-a-json');
+ const registryUrl = resolveRegistryUrl('npm-parse-error');
+ expect(
+ await getDependency(http, registryUrl, 'npm-parse-error'),
+ ).toBeNull();
+ });
+
+ it('do not throw ExternalHostError when error happens on registry.npmjs.org when hostRules disables abortOnError', async () => {
+ hostRules.add({
+ matchHost: 'https://registry.npmjs.org',
+ abortOnError: false,
+ });
+ httpMock
+ .scope('https://registry.npmjs.org')
+ .get('/npm-parse-error')
+ .reply(200, 'not-a-json');
+ const registryUrl = resolveRegistryUrl('npm-parse-error');
+ expect(
+ await getDependency(http, registryUrl, 'npm-parse-error'),
+ ).toBeNull();
+ });
+
+ it('do not throw ExternalHostError when error happens on registry.npmjs.org when hostRules without protocol disables abortOnError', async () => {
+ hostRules.add({
+ matchHost: 'registry.npmjs.org',
+ abortOnError: false,
+ });
+ httpMock
+ .scope('https://registry.npmjs.org')
+ .get('/npm-parse-error')
+ .reply(200, 'not-a-json');
+ const registryUrl = resolveRegistryUrl('npm-parse-error');
+ expect(
+ await getDependency(http, registryUrl, 'npm-parse-error'),
+ ).toBeNull();
+ });
+
+ it('throw ExternalHostError when error happens on custom host when hostRules enables abortOnError', async () => {
+ setNpmrc('registry=https://test.org');
+ hostRules.add({
+ matchHost: 'https://test.org',
+ abortOnError: true,
+ });
+ httpMock
+ .scope('https://test.org')
+ .get('/npm-parse-error')
+ .reply(200, 'not-a-json');
+ const registryUrl = resolveRegistryUrl('npm-parse-error');
+ await expect(
+ getDependency(http, registryUrl, 'npm-parse-error'),
+ ).rejects.toThrow(ExternalHostError);
+ });
+
it('massages non-compliant repository urls', async () => {
setNpmrc('registry=https://test.org\n_authToken=XXX');
@@ -473,16 +558,31 @@ describe('modules/datasource/npm/get', () => {
`);
});
- it('returns cached legacy', async () => {
- packageCache.get.mockResolvedValueOnce({ some: 'result' });
- const dep = await getDependency(http, 'https://some.url', 'some-package');
- expect(dep).toMatchObject({ some: 'result' });
+ it('discards cache with no revision', async () => {
+ setNpmrc('registry=https://test.org\n_authToken=XXX');
+
+ packageCache.get.mockResolvedValueOnce({
+ some: 'result',
+ cacheData: { softExpireAt: '2099' },
+ });
+
+ httpMock
+ .scope('https://test.org')
+ .get('/@neutrinojs%2Freact')
+ .reply(200, {
+ name: '@neutrinojs/react',
+ versions: { '1.0.0': {} },
+ });
+ const registryUrl = resolveRegistryUrl('@neutrinojs/react');
+ const dep = await getDependency(http, registryUrl, '@neutrinojs/react');
+
+ expect(dep?.releases).toHaveLength(1);
});
it('returns unexpired cache', async () => {
packageCache.get.mockResolvedValueOnce({
some: 'result',
- cacheData: { softExpireAt: '2099' },
+ cacheData: { revision: CACHE_REVISION, softExpireAt: '2099' },
});
const dep = await getDependency(http, 'https://some.url', 'some-package');
expect(dep).toMatchObject({ some: 'result' });
@@ -492,6 +592,7 @@ describe('modules/datasource/npm/get', () => {
packageCache.get.mockResolvedValueOnce({
some: 'result',
cacheData: {
+ revision: CACHE_REVISION,
softExpireAt: '2020',
etag: 'some-etag',
},
@@ -508,6 +609,7 @@ describe('modules/datasource/npm/get', () => {
packageCache.get.mockResolvedValueOnce({
some: 'result',
cacheData: {
+ revision: CACHE_REVISION,
softExpireAt: '2020',
etag: 'some-etag',
},
diff --git a/lib/modules/datasource/npm/get.ts b/lib/modules/datasource/npm/get.ts
index 6018d41efbac4b..cc8024ebdd0e80 100644
--- a/lib/modules/datasource/npm/get.ts
+++ b/lib/modules/datasource/npm/get.ts
@@ -7,13 +7,17 @@ import { HOST_DISABLED } from '../../../constants/error-messages';
import { logger } from '../../../logger';
import { ExternalHostError } from '../../../types/errors/external-host-error';
import * as packageCache from '../../../util/cache/package';
+import * as hostRules from '../../../util/host-rules';
import type { Http } from '../../../util/http';
import type { HttpOptions } from '../../../util/http/types';
import { regEx } from '../../../util/regex';
+import { HttpCacheStats } from '../../../util/stats';
import { joinUrlParts } from '../../../util/url';
import type { Release, ReleaseResult } from '../types';
import type { CachedReleaseResult, NpmResponse } from './types';
+export const CACHE_REVISION = 1;
+
const SHORT_REPO_REGEX = regEx(
/^((?bitbucket|github|gitlab):)?(?[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+)$/,
);
@@ -82,20 +86,25 @@ export async function getDependency(
cacheNamespace,
packageUrl,
);
- if (cachedResult) {
- if (cachedResult.cacheData) {
+ if (cachedResult?.cacheData) {
+ if (cachedResult.cacheData.revision === CACHE_REVISION) {
const softExpireAt = DateTime.fromISO(
cachedResult.cacheData.softExpireAt,
);
if (softExpireAt.isValid && softExpireAt > DateTime.local()) {
logger.trace('Cached result is not expired - reusing');
+ HttpCacheStats.incLocalHits(packageUrl);
delete cachedResult.cacheData;
return cachedResult;
}
+
logger.trace('Cached result is soft expired');
+ HttpCacheStats.incLocalMisses(packageUrl);
} else {
- logger.trace('Reusing legacy cached result');
- return cachedResult;
+ logger.trace(
+ `Package cache for npm package "${packageName}" is from an old revision - discarding`,
+ );
+ delete cachedResult.cacheData;
}
}
const cacheMinutes = process.env.RENOVATE_CACHE_NPM_MINUTES
@@ -120,9 +129,27 @@ export async function getDependency(
logger.trace({ packageName }, 'Using cached etag');
options.headers = { 'If-None-Match': cachedResult.cacheData.etag };
}
+
+ // set abortOnError for registry.npmjs.org if no hostRule with explicit abortOnError exists
+ if (
+ registryUrl === 'https://registry.npmjs.org' &&
+ hostRules.find({ url: 'https://registry.npmjs.org' })?.abortOnError ===
+ undefined
+ ) {
+ logger.trace(
+ { packageName, registry: 'https://registry.npmjs.org' },
+ 'setting abortOnError hostRule for well known host',
+ );
+ hostRules.add({
+ matchHost: 'https://registry.npmjs.org',
+ abortOnError: true,
+ });
+ }
+
const raw = await http.getJson(packageUrl, options);
if (cachedResult?.cacheData && raw.statusCode === 304) {
logger.trace(`Cached npm result for ${packageName} is revalidated`);
+ HttpCacheStats.incRemoteHits(packageUrl);
cachedResult.cacheData.softExpireAt = softExpireAt;
await packageCache.set(
cacheNamespace,
@@ -133,6 +160,7 @@ export async function getDependency(
delete cachedResult.cacheData;
return cachedResult;
}
+ HttpCacheStats.incRemoteMisses(packageUrl);
const etag = raw.headers.etag;
const res = raw.body;
if (!res.versions || !Object.keys(res.versions).length) {
@@ -193,6 +221,9 @@ export async function getDependency(
) {
release.sourceDirectory = source.sourceDirectory;
}
+ if (dep.deprecationMessage) {
+ release.isDeprecated = true;
+ }
return release;
});
logger.trace({ dep }, 'dep');
@@ -202,7 +233,7 @@ export async function getDependency(
regEx(/(^|,)\s*public\s*(,|$)/).test(cacheControl)
) {
dep.isPrivate = false;
- const cacheData = { softExpireAt, etag };
+ const cacheData = { revision: CACHE_REVISION, softExpireAt, etag };
await packageCache.set(
cacheNamespace,
packageUrl,
@@ -216,29 +247,32 @@ export async function getDependency(
}
return dep;
} catch (err) {
+ const actualError = err instanceof ExternalHostError ? err.err : err;
const ignoredStatusCodes = [401, 402, 403, 404];
const ignoredResponseCodes = ['ENOTFOUND'];
if (
- err.message === HOST_DISABLED ||
- ignoredStatusCodes.includes(err.statusCode) ||
- ignoredResponseCodes.includes(err.code)
+ actualError.message === HOST_DISABLED ||
+ ignoredStatusCodes.includes(actualError.statusCode) ||
+ ignoredResponseCodes.includes(actualError.code)
) {
return null;
}
- if (uri.host === 'registry.npmjs.org') {
+
+ if (err instanceof ExternalHostError) {
if (cachedResult) {
logger.warn(
- { err },
- 'npmjs error, reusing expired cached result instead',
+ { err, host: uri.host },
+ `npm host error, reusing expired cached result instead`,
);
delete cachedResult.cacheData;
return cachedResult;
}
- // istanbul ignore if
- if (err.name === 'ParseError' && err.body) {
- err.body = 'err.body deleted by Renovate';
+
+ if (actualError.name === 'ParseError' && actualError.body) {
+ actualError.body = 'err.body deleted by Renovate';
+ err.err = actualError;
}
- throw new ExternalHostError(err);
+ throw err;
}
logger.debug({ err }, 'Unknown npm lookup error');
return null;
diff --git a/lib/modules/datasource/npm/index.ts b/lib/modules/datasource/npm/index.ts
index 84795f0f7bc14f..65a4df8c1b8d7a 100644
--- a/lib/modules/datasource/npm/index.ts
+++ b/lib/modules/datasource/npm/index.ts
@@ -14,6 +14,13 @@ export class NpmDatasource extends Datasource {
override readonly defaultVersioning = npmVersioning.id;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `time` field in the results.';
+ override readonly sourceUrlSupport = 'release';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `repository` field in the results.';
+
constructor() {
super(NpmDatasource.id);
}
diff --git a/lib/modules/datasource/npm/npmrc.ts b/lib/modules/datasource/npm/npmrc.ts
index ad605fe4ff2bc7..73b2e1eef52896 100644
--- a/lib/modules/datasource/npm/npmrc.ts
+++ b/lib/modules/datasource/npm/npmrc.ts
@@ -8,7 +8,7 @@ import type { HostRule } from '../../../types';
import * as hostRules from '../../../util/host-rules';
import { regEx } from '../../../util/regex';
import { fromBase64 } from '../../../util/string';
-import { ensureTrailingSlash, validateUrl } from '../../../util/url';
+import { ensureTrailingSlash, isHttpUrl } from '../../../util/url';
import { defaultRegistryUrls } from './common';
import type { NpmrcRules } from './types';
@@ -89,7 +89,7 @@ export function convertNpmrcToRules(npmrc: Record): NpmrcRules {
const { registry } = npmrc;
// packageRules order matters, so look for a default registry first
if (is.nonEmptyString(registry)) {
- if (validateUrl(registry)) {
+ if (isHttpUrl(registry)) {
// Default registry
rules.packageRules?.push({
matchDatasources,
@@ -108,7 +108,7 @@ export function convertNpmrcToRules(npmrc: Record): NpmrcRules {
const keyType = keyParts.pop();
if (keyType === 'registry' && keyParts.length && is.nonEmptyString(value)) {
const scope = keyParts.join(':');
- if (validateUrl(value)) {
+ if (isHttpUrl(value)) {
rules.packageRules?.push({
matchDatasources,
matchPackagePrefixes: [scope + '/'],
diff --git a/lib/modules/datasource/npm/types.ts b/lib/modules/datasource/npm/types.ts
index e67dd987a0e0ef..2cd011d84bc072 100644
--- a/lib/modules/datasource/npm/types.ts
+++ b/lib/modules/datasource/npm/types.ts
@@ -35,6 +35,7 @@ export interface NpmResponse {
export interface CachedReleaseResult extends ReleaseResult {
cacheData?: {
+ revision?: number;
etag: string | undefined;
softExpireAt: string;
};
diff --git a/lib/modules/datasource/nuget/__fixtures__/nlog/NLog.4.7.3-no-repo.nupkg b/lib/modules/datasource/nuget/__fixtures__/nlog/NLog.4.7.3-no-repo.nupkg
new file mode 100644
index 00000000000000..77ce3d810f1ade
Binary files /dev/null and b/lib/modules/datasource/nuget/__fixtures__/nlog/NLog.4.7.3-no-repo.nupkg differ
diff --git a/lib/modules/datasource/nuget/__fixtures__/nlog/NLog.4.7.3.nupkg b/lib/modules/datasource/nuget/__fixtures__/nlog/NLog.4.7.3.nupkg
new file mode 100644
index 00000000000000..6ec85f1ec04651
Binary files /dev/null and b/lib/modules/datasource/nuget/__fixtures__/nlog/NLog.4.7.3.nupkg differ
diff --git a/lib/modules/datasource/nuget/v3.spec.ts b/lib/modules/datasource/nuget/common.spec.ts
similarity index 85%
rename from lib/modules/datasource/nuget/v3.spec.ts
rename to lib/modules/datasource/nuget/common.spec.ts
index 0a2bd8f315ec53..8296ca85e35345 100644
--- a/lib/modules/datasource/nuget/v3.spec.ts
+++ b/lib/modules/datasource/nuget/common.spec.ts
@@ -1,6 +1,6 @@
-import { sortNugetVersions } from './v3';
+import { sortNugetVersions } from './common';
-describe('modules/datasource/nuget/v3', () => {
+describe('modules/datasource/nuget/common', () => {
it.each<{ version: string; other: string; result: number }>`
version | other | result
${'invalid1'} | ${'invalid2'} | ${0}
diff --git a/lib/modules/datasource/nuget/common.ts b/lib/modules/datasource/nuget/common.ts
index 55ce19a78f768b..14bb38d6942111 100644
--- a/lib/modules/datasource/nuget/common.ts
+++ b/lib/modules/datasource/nuget/common.ts
@@ -1,6 +1,7 @@
import { logger } from '../../../logger';
import { regEx } from '../../../util/regex';
import { parseUrl } from '../../../util/url';
+import { api as versioning } from '../../versioning/nuget';
import type { ParsedRegistryUrl } from './types';
const buildMetaRe = regEx(/\+.+$/g);
@@ -11,10 +12,14 @@ export function removeBuildMeta(version: string): string {
const urlWhitespaceRe = regEx(/\s/g);
-export function massageUrl(url: string): string {
+export function massageUrl(url: string | null | undefined): string | null {
+ if (url === null || url === undefined) {
+ return null;
+ }
+
let resultUrl = url;
- // During `dotnet pack` certain URLs are being URL decoded which may introduce whitespaces
+ // During `dotnet pack` certain URLs are being URL decoded which may introduce whitespace
// and causes Markdown link generation problems.
resultUrl = resultUrl.replace(urlWhitespaceRe, '%20');
@@ -47,3 +52,23 @@ export function parseRegistryUrl(registryUrl: string): ParsedRegistryUrl {
const feedUrl = parsedUrl.href;
return { feedUrl, protocolVersion };
}
+
+/**
+ * Compare two versions. Return:
+ * - `1` if `a > b` or `b` is invalid
+ * - `-1` if `a < b` or `a` is invalid
+ * - `0` if `a == b` or both `a` and `b` are invalid
+ */
+export function sortNugetVersions(a: string, b: string): number {
+ if (versioning.isValid(a)) {
+ if (versioning.isValid(b)) {
+ return versioning.sortVersions(a, b);
+ } else {
+ return 1;
+ }
+ } else if (versioning.isValid(b)) {
+ return -1;
+ } else {
+ return 0;
+ }
+}
diff --git a/lib/modules/datasource/nuget/index.spec.ts b/lib/modules/datasource/nuget/index.spec.ts
index db7bbaa7b52d61..ce25cf7722b9c0 100644
--- a/lib/modules/datasource/nuget/index.spec.ts
+++ b/lib/modules/datasource/nuget/index.spec.ts
@@ -1,8 +1,12 @@
+import { Readable } from 'stream';
import { mockDeep } from 'jest-mock-extended';
+import { join } from 'upath';
import { getPkgReleases } from '..';
import { Fixtures } from '../../../../test/fixtures';
import * as httpMock from '../../../../test/http-mock';
-import { logger } from '../../../../test/util';
+import { logger, mocked } from '../../../../test/util';
+import { GlobalConfig } from '../../../config/global';
+import * as _packageCache from '../../../util/cache/package';
import * as _hostRules from '../../../util/host-rules';
import { id as versioning } from '../../versioning/nuget';
import { parseRegistryUrl } from './common';
@@ -14,6 +18,9 @@ const hostRules: any = _hostRules;
jest.mock('../../../util/host-rules', () => mockDeep());
+jest.mock('../../../util/cache/package', () => mockDeep());
+const packageCache = mocked(_packageCache);
+
const pkgInfoV3FromNuget = Fixtures.get('nunit/v3_nuget_org.xml');
const pkgListV3Registration = Fixtures.get('nunit/v3_registration.json');
@@ -105,6 +112,10 @@ const configV3AzureDevOps = {
};
describe('modules/datasource/nuget/index', () => {
+ beforeEach(() => {
+ GlobalConfig.reset();
+ });
+
describe('parseRegistryUrl', () => {
it('extracts feed version from registry URL hash (v3)', () => {
const parsed = parseRegistryUrl('https://my-registry#protocolVersion=3');
@@ -302,6 +313,160 @@ describe('modules/datasource/nuget/index', () => {
);
});
+ describe('determine source URL from nupkg', () => {
+ beforeEach(() => {
+ GlobalConfig.set({
+ cacheDir: join('/tmp/cache'),
+ });
+ process.env.RENOVATE_X_NUGET_DOWNLOAD_NUPKGS = 'true';
+ });
+
+ afterEach(() => {
+ delete process.env.RENOVATE_X_NUGET_DOWNLOAD_NUPKGS;
+ });
+
+ it('can determine source URL from nupkg when PackageBaseAddress is missing', async () => {
+ const nugetIndex = `
+ {
+ "version": "3.0.0",
+ "resources": [
+ {
+ "@id": "https://some-registry/v3/metadata",
+ "@type": "RegistrationsBaseUrl/3.0.0-beta",
+ "comment": "Get package metadata."
+ }
+ ]
+ }
+ `;
+ const nlogRegistration = `
+ {
+ "count": 1,
+ "items": [
+ {
+ "@id": "https://some-registry/v3/metadata/nlog/4.7.3.json",
+ "lower": "4.7.3",
+ "upper": "4.7.3",
+ "count": 1,
+ "items": [
+ {
+ "@id": "foo",
+ "catalogEntry": {
+ "id": "NLog",
+ "version": "4.7.3",
+ "packageContent": "https://some-registry/v3-flatcontainer/nlog/4.7.3/nlog.4.7.3.nupkg"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ `;
+ httpMock
+ .scope('https://some-registry')
+ .get('/v3/index.json')
+ .twice()
+ .reply(200, nugetIndex)
+ .get('/v3/metadata/nlog/index.json')
+ .reply(200, nlogRegistration)
+ .get('/v3-flatcontainer/nlog/4.7.3/nlog.4.7.3.nupkg')
+ .reply(200, () => {
+ const readableStream = new Readable();
+ readableStream.push(Fixtures.getBinary('nlog/NLog.4.7.3.nupkg'));
+ readableStream.push(null);
+ return readableStream;
+ });
+ const res = await getPkgReleases({
+ datasource,
+ versioning,
+ packageName: 'NLog',
+ registryUrls: ['https://some-registry/v3/index.json'],
+ });
+ expect(logger.logger.debug).toHaveBeenCalledWith(
+ 'Determined sourceUrl https://github.com/NLog/NLog.git from https://some-registry/v3-flatcontainer/nlog/4.7.3/nlog.4.7.3.nupkg',
+ );
+ expect(packageCache.set).toHaveBeenCalledWith(
+ 'datasource-nuget',
+ 'cache-decorator:source-url:https://some-registry/v3/index.json:NLog',
+ {
+ cachedAt: expect.any(String),
+ value: 'https://github.com/NLog/NLog.git',
+ },
+ 60 * 24 * 7,
+ );
+ expect(res?.sourceUrl).toBeDefined();
+ });
+
+ it('can handle nupkg without repository metadata', async () => {
+ const nugetIndex = `
+ {
+ "version": "3.0.0",
+ "resources": [
+ {
+ "@id": "https://some-registry/v3/metadata",
+ "@type": "RegistrationsBaseUrl/3.0.0-beta",
+ "comment": "Get package metadata."
+ }
+ ]
+ }
+ `;
+ const nlogRegistration = `
+ {
+ "count": 1,
+ "items": [
+ {
+ "@id": "https://some-registry/v3/metadata/nlog/4.7.3.json",
+ "lower": "4.7.3",
+ "upper": "4.7.3",
+ "count": 1,
+ "items": [
+ {
+ "@id": "foo",
+ "catalogEntry": {
+ "id": "NLog",
+ "version": "4.7.3",
+ "packageContent": "https://some-registry/v3-flatcontainer/nlog/4.7.3/nlog.4.7.3.nupkg"
+ }
+ }
+ ]
+ }
+ ]
+ }
+ `;
+ httpMock
+ .scope('https://some-registry')
+ .get('/v3/index.json')
+ .twice()
+ .reply(200, nugetIndex)
+ .get('/v3/metadata/nlog/index.json')
+ .reply(200, nlogRegistration)
+ .get('/v3-flatcontainer/nlog/4.7.3/nlog.4.7.3.nupkg')
+ .reply(200, () => {
+ const readableStream = new Readable();
+ readableStream.push(
+ Fixtures.getBinary('nlog/NLog.4.7.3-no-repo.nupkg'),
+ );
+ readableStream.push(null);
+ return readableStream;
+ });
+ const res = await getPkgReleases({
+ datasource,
+ versioning,
+ packageName: 'NLog',
+ registryUrls: ['https://some-registry/v3/index.json'],
+ });
+ expect(packageCache.set).toHaveBeenCalledWith(
+ 'datasource-nuget',
+ 'cache-decorator:source-url:https://some-registry/v3/index.json:NLog',
+ {
+ cachedAt: expect.any(String),
+ value: null,
+ },
+ 60 * 24 * 7,
+ );
+ expect(res?.sourceUrl).toBeUndefined();
+ });
+ });
+
it('returns null for non 200 (v3v2)', async () => {
httpMock.scope('https://api.nuget.org').get('/v3/index.json').reply(500);
httpMock
diff --git a/lib/modules/datasource/nuget/index.ts b/lib/modules/datasource/nuget/index.ts
index a34bcb00881adc..1307901f72ef36 100644
--- a/lib/modules/datasource/nuget/index.ts
+++ b/lib/modules/datasource/nuget/index.ts
@@ -3,8 +3,8 @@ import * as nugetVersioning from '../../versioning/nuget';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, ReleaseResult } from '../types';
import { parseRegistryUrl } from './common';
-import * as v2 from './v2';
-import * as v3 from './v3';
+import { NugetV2Api } from './v2';
+import { NugetV3Api } from './v3';
// https://api.nuget.org/v3/index.json is a default official nuget feed
export const nugetOrg = 'https://api.nuget.org/v3/index.json';
@@ -18,6 +18,21 @@ export class NugetDatasource extends Datasource {
override readonly registryStrategy = 'merge';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote = `
+ For the v2 API, the release timestamp is determined from the \`Published\` tag and,
+ for the v3 API, the release timestamp is determined from the \`published\` field in the results.
+ `;
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote = `
+ For the v2 API, the source URL is determined from the \`ProjectUrl\` tag and,
+ for the v3 API, the source URL is determined from the \`metadata.repository@url\` field in the results.
+ `;
+
+ readonly v2Api = new NugetV2Api();
+
+ readonly v3Api = new NugetV3Api();
+
constructor() {
super(NugetDatasource.id);
}
@@ -33,12 +48,17 @@ export class NugetDatasource extends Datasource {
}
const { feedUrl, protocolVersion } = parseRegistryUrl(registryUrl);
if (protocolVersion === 2) {
- return v2.getReleases(this.http, feedUrl, packageName);
+ return this.v2Api.getReleases(this.http, feedUrl, packageName);
}
if (protocolVersion === 3) {
- const queryUrl = await v3.getResourceUrl(this.http, feedUrl);
+ const queryUrl = await this.v3Api.getResourceUrl(this.http, feedUrl);
if (queryUrl) {
- return v3.getReleases(this.http, feedUrl, queryUrl, packageName);
+ return this.v3Api.getReleases(
+ this.http,
+ feedUrl,
+ queryUrl,
+ packageName,
+ );
}
}
return null;
diff --git a/lib/modules/datasource/nuget/types.ts b/lib/modules/datasource/nuget/types.ts
index 29aba2a5c6b4a5..36fa672e83b223 100644
--- a/lib/modules/datasource/nuget/types.ts
+++ b/lib/modules/datasource/nuget/types.ts
@@ -5,11 +5,13 @@ export interface ServicesIndexRaw {
}[];
}
+// See https://learn.microsoft.com/en-us/nuget/api/registration-base-url-resource#catalog-entry
export interface CatalogEntry {
version: string;
published?: string;
projectUrl?: string;
listed?: boolean;
+ packageContent?: string;
}
export interface CatalogPage {
diff --git a/lib/modules/datasource/nuget/v2.ts b/lib/modules/datasource/nuget/v2.ts
index 71629f90f8c436..618cd30c1283e3 100644
--- a/lib/modules/datasource/nuget/v2.ts
+++ b/lib/modules/datasource/nuget/v2.ts
@@ -1,70 +1,74 @@
import { XmlDocument, XmlElement } from 'xmldoc';
import { logger } from '../../../logger';
import type { Http } from '../../../util/http';
-import type { HttpResponse } from '../../../util/http/types';
import { regEx } from '../../../util/regex';
import type { ReleaseResult } from '../types';
import { massageUrl, removeBuildMeta } from './common';
-function getPkgProp(pkgInfo: XmlElement, propName: string): string | undefined {
- return pkgInfo.childNamed('m:properties')?.childNamed(`d:${propName}`)?.val;
-}
+export class NugetV2Api {
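+  // Reads a single property value from an OData entry's m:properties/d:<propName> element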
+ getPkgProp(pkgInfo: XmlElement, propName: string): string | undefined {
+ return pkgInfo.childNamed('m:properties')?.childNamed(`d:${propName}`)?.val;
+ }
-export async function getReleases(
- http: Http,
- feedUrl: string,
- pkgName: string,
-): Promise {
- const dep: ReleaseResult = {
- releases: [],
- };
- let pkgUrlList: string | null = `${feedUrl.replace(
- regEx(/\/+$/),
- '',
- )}/FindPackagesById()?id=%27${pkgName}%27&$select=Version,IsLatestVersion,ProjectUrl,Published`;
- while (pkgUrlList !== null) {
- // typescript issue
- const pkgVersionsListRaw: HttpResponse = await http.get(pkgUrlList);
- const pkgVersionsListDoc = new XmlDocument(pkgVersionsListRaw.body);
+ async getReleases(
+ http: Http,
+ feedUrl: string,
+ pkgName: string,
+  ): Promise<ReleaseResult | null> {
+ const dep: ReleaseResult = {
+ releases: [],
+ };
+ let pkgUrlList: string | null = `${feedUrl.replace(
+ regEx(/\/+$/),
+ '',
+ )}/FindPackagesById()?id=%27${pkgName}%27&$select=Version,IsLatestVersion,ProjectUrl,Published`;
+ while (pkgUrlList !== null) {
+ // typescript issue
+ const pkgVersionsListRaw = await http.get(pkgUrlList);
+ const pkgVersionsListDoc = new XmlDocument(pkgVersionsListRaw.body);
- const pkgInfoList = pkgVersionsListDoc.childrenNamed('entry');
+ const pkgInfoList = pkgVersionsListDoc.childrenNamed('entry');
- for (const pkgInfo of pkgInfoList) {
- const version = getPkgProp(pkgInfo, 'Version');
- const releaseTimestamp = getPkgProp(pkgInfo, 'Published');
- dep.releases.push({
- // TODO: types (#22198)
- version: removeBuildMeta(`${version}`),
- releaseTimestamp,
- });
- try {
- const pkgIsLatestVersion = getPkgProp(pkgInfo, 'IsLatestVersion');
- if (pkgIsLatestVersion === 'true') {
- dep['tags'] = { latest: removeBuildMeta(`${version}`) };
- const projectUrl = getPkgProp(pkgInfo, 'ProjectUrl');
- if (projectUrl) {
- dep.sourceUrl = massageUrl(projectUrl);
+ for (const pkgInfo of pkgInfoList) {
+ const version = this.getPkgProp(pkgInfo, 'Version');
+ const releaseTimestamp = this.getPkgProp(pkgInfo, 'Published');
+ dep.releases.push({
+ // TODO: types (#22198)
+ version: removeBuildMeta(`${version}`),
+ releaseTimestamp,
+ });
+ try {
+ const pkgIsLatestVersion = this.getPkgProp(
+ pkgInfo,
+ 'IsLatestVersion',
+ );
+ if (pkgIsLatestVersion === 'true') {
+ dep['tags'] = { latest: removeBuildMeta(`${version}`) };
+ const projectUrl = this.getPkgProp(pkgInfo, 'ProjectUrl');
+ if (projectUrl) {
+ dep.sourceUrl = massageUrl(projectUrl);
+ }
}
+ } catch (err) /* istanbul ignore next */ {
+ logger.debug(
+ { err, pkgName, feedUrl },
+ `nuget registry failure: can't parse pkg info for project url`,
+ );
}
- } catch (err) /* istanbul ignore next */ {
- logger.debug(
- { err, pkgName, feedUrl },
- `nuget registry failure: can't parse pkg info for project url`,
- );
}
- }
- const nextPkgUrlListLink = pkgVersionsListDoc
- .childrenNamed('link')
- .find((node) => node.attr.rel === 'next');
+ const nextPkgUrlListLink = pkgVersionsListDoc
+ .childrenNamed('link')
+ .find((node) => node.attr.rel === 'next');
- pkgUrlList = nextPkgUrlListLink ? nextPkgUrlListLink.attr.href : null;
- }
+ pkgUrlList = nextPkgUrlListLink ? nextPkgUrlListLink.attr.href : null;
+ }
- // dep not found if no release, so we can try next registry
- if (dep.releases.length === 0) {
- return null;
- }
+ // dep not found if no release, so we can try next registry
+ if (dep.releases.length === 0) {
+ return null;
+ }
- return dep;
+ return dep;
+ }
}
diff --git a/lib/modules/datasource/nuget/v3.ts b/lib/modules/datasource/nuget/v3.ts
index 6f53906b3afc6a..1d0dc587c01d3a 100644
--- a/lib/modules/datasource/nuget/v3.ts
+++ b/lib/modules/datasource/nuget/v3.ts
@@ -1,16 +1,21 @@
import is from '@sindresorhus/is';
+import extract from 'extract-zip';
import semver from 'semver';
+import upath from 'upath';
import { XmlDocument } from 'xmldoc';
import { logger } from '../../../logger';
import { ExternalHostError } from '../../../types/errors/external-host-error';
import * as packageCache from '../../../util/cache/package';
+import { cache } from '../../../util/cache/package/decorator';
+import * as fs from '../../../util/fs';
+import { ensureCacheDir } from '../../../util/fs';
import { Http, HttpError } from '../../../util/http';
import * as p from '../../../util/promises';
import { regEx } from '../../../util/regex';
import { ensureTrailingSlash } from '../../../util/url';
import { api as versioning } from '../../versioning/nuget';
import type { Release, ReleaseResult } from '../types';
-import { massageUrl, removeBuildMeta } from './common';
+import { massageUrl, removeBuildMeta, sortNugetVersions } from './common';
import type {
CatalogEntry,
CatalogPage,
@@ -18,227 +23,287 @@ import type {
ServicesIndexRaw,
} from './types';
-const cacheNamespace = 'datasource-nuget';
-
-export async function getResourceUrl(
- http: Http,
- url: string,
- resourceType = 'RegistrationsBaseUrl',
-): Promise {
- // https://docs.microsoft.com/en-us/nuget/api/service-index
- const resultCacheKey = `${url}:${resourceType}`;
- const cachedResult = await packageCache.get(
- cacheNamespace,
- resultCacheKey,
- );
-
- // istanbul ignore if
- if (cachedResult) {
- return cachedResult;
- }
- let servicesIndexRaw: ServicesIndexRaw | undefined;
- try {
- const responseCacheKey = url;
- servicesIndexRaw = await packageCache.get(
- cacheNamespace,
- responseCacheKey,
+export class NugetV3Api {
+ static readonly cacheNamespace = 'datasource-nuget';
+
+ async getResourceUrl(
+ http: Http,
+ url: string,
+ resourceType = 'RegistrationsBaseUrl',
+  ): Promise<string | null> {
+ // https://docs.microsoft.com/en-us/nuget/api/service-index
+ const resultCacheKey = `${url}:${resourceType}`;
+    const cachedResult = await packageCache.get<string>(
+ NugetV3Api.cacheNamespace,
+ resultCacheKey,
);
- // istanbul ignore else: currently not testable
- if (!servicesIndexRaw) {
- servicesIndexRaw = (await http.getJson(url)).body;
- await packageCache.set(
- cacheNamespace,
+
+ // istanbul ignore if
+ if (cachedResult) {
+ return cachedResult;
+ }
+ let servicesIndexRaw: ServicesIndexRaw | undefined;
+ try {
+ const responseCacheKey = url;
+      servicesIndexRaw = await packageCache.get<ServicesIndexRaw>(
+ NugetV3Api.cacheNamespace,
responseCacheKey,
- servicesIndexRaw,
- 3 * 24 * 60,
);
- }
+ // istanbul ignore else: currently not testable
+ if (!servicesIndexRaw) {
+        servicesIndexRaw = (await http.getJson<ServicesIndexRaw>(url)).body;
+ await packageCache.set(
+ NugetV3Api.cacheNamespace,
+ responseCacheKey,
+ servicesIndexRaw,
+ 3 * 24 * 60,
+ );
+ }
+
+ const services = servicesIndexRaw.resources
+ .map(({ '@id': serviceId, '@type': t }) => ({
+ serviceId,
+ type: t?.split('/')?.shift(),
+ version: t?.split('/')?.pop(),
+ }))
+ .filter(
+ ({ type, version }) => type === resourceType && semver.valid(version),
+ )
+ .sort((x, y) =>
+ x.version && y.version
+ ? semver.compare(x.version, y.version)
+ : /* istanbul ignore next: hard to test */ 0,
+ );
+
+ if (services.length === 0) {
+ await packageCache.set(
+ NugetV3Api.cacheNamespace,
+ resultCacheKey,
+ null,
+ 60,
+ );
+ logger.debug(
+ { url, servicesIndexRaw },
+ `no ${resourceType} services found`,
+ );
+ return null;
+ }
+
+ const { serviceId, version } = services.pop()!;
+
+ // istanbul ignore if
+ if (
+ resourceType === 'RegistrationsBaseUrl' &&
+ version &&
+ !version.startsWith('3.0.0-') &&
+ !semver.satisfies(version, '^3.0.0')
+ ) {
+ logger.warn(
+ { url, version },
+ `Nuget: Unknown version returned. Only v3 is supported`,
+ );
+ }
- const services = servicesIndexRaw.resources
- .map(({ '@id': serviceId, '@type': t }) => ({
+ await packageCache.set(
+ NugetV3Api.cacheNamespace,
+ resultCacheKey,
serviceId,
- type: t?.split('/')?.shift(),
- version: t?.split('/')?.pop(),
- }))
- .filter(
- ({ type, version }) => type === resourceType && semver.valid(version),
- )
- .sort((x, y) =>
- x.version && y.version
- ? semver.compare(x.version, y.version)
- : /* istanbul ignore next: hard to test */ 0,
+ 60,
);
-
- if (services.length === 0) {
- await packageCache.set(cacheNamespace, resultCacheKey, null, 60);
+ return serviceId;
+ } catch (err) {
+ // istanbul ignore if: not easy testable with nock
+ if (err instanceof ExternalHostError) {
+ throw err;
+ }
logger.debug(
- { url, servicesIndexRaw },
- `no ${resourceType} services found`,
+ { err, url, servicesIndexRaw },
+ `nuget registry failure: can't get ${resourceType}`,
);
return null;
}
+ }
- const { serviceId, version } = services.pop()!;
-
- // istanbul ignore if
- if (
- resourceType === 'RegistrationsBaseUrl' &&
- version &&
- !version.startsWith('3.0.0-') &&
- !semver.satisfies(version, '^3.0.0')
- ) {
- logger.warn(
- { url, version },
- `Nuget: Unknown version returned. Only v3 is supported`,
- );
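+  // Returns the catalog entries of a registration page, fetching the full page first if its items are not inlined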
+ async getCatalogEntry(
+ http: Http,
+ catalogPage: CatalogPage,
+  ): Promise<CatalogEntry[]> {
+ let items = catalogPage.items;
+ if (!items) {
+ const url = catalogPage['@id'];
+      const catalogPageFull = await http.getJson<CatalogPage>(url);
+ items = catalogPageFull.body.items;
}
+ return items.map(({ catalogEntry }) => catalogEntry);
+ }
- await packageCache.set(cacheNamespace, resultCacheKey, serviceId, 60);
- return serviceId;
- } catch (err) {
- // istanbul ignore if: not easy testable with nock
- if (err instanceof ExternalHostError) {
- throw err;
- }
- logger.debug(
- { err, url, servicesIndexRaw },
- `nuget registry failure: can't get ${resourceType}`,
+ async getReleases(
+ http: Http,
+ registryUrl: string,
+ feedUrl: string,
+ pkgName: string,
+  ): Promise<ReleaseResult | null> {
+ const baseUrl = feedUrl.replace(regEx(/\/*$/), '');
+ const url = `${baseUrl}/${pkgName.toLowerCase()}/index.json`;
+    const packageRegistration = await http.getJson<PackageRegistration>(url);
+ const catalogPages = packageRegistration.body.items || [];
+ const catalogPagesQueue = catalogPages.map(
+      (page) => (): Promise<CatalogEntry[]> => this.getCatalogEntry(http, page),
);
- return null;
- }
-}
+ const catalogEntries = (await p.all(catalogPagesQueue))
+ .flat()
+ .sort((a, b) => sortNugetVersions(a.version, b.version));
-async function getCatalogEntry(
- http: Http,
- catalogPage: CatalogPage,
-): Promise {
- let items = catalogPage.items;
- if (!items) {
- const url = catalogPage['@id'];
- const catalogPageFull = await http.getJson(url);
- items = catalogPageFull.body.items;
- }
- return items.map(({ catalogEntry }) => catalogEntry);
-}
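+    // Track the latest stable release's project URL and .nupkg URL so the source URL can be resolved below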
+ let homepage: string | null = null;
+ let latestStable: string | null = null;
+ let nupkgUrl: string | null = null;
+ const releases = catalogEntries.map(
+ ({
+ version,
+ published: releaseTimestamp,
+ projectUrl,
+ listed,
+ packageContent,
+ }) => {
+ const release: Release = { version: removeBuildMeta(version) };
+ if (releaseTimestamp) {
+ release.releaseTimestamp = releaseTimestamp;
+ }
+ if (versioning.isValid(version) && versioning.isStable(version)) {
+ latestStable = removeBuildMeta(version);
+ homepage = projectUrl ? massageUrl(projectUrl) : homepage;
+ nupkgUrl = massageUrl(packageContent);
+ }
+ if (listed === false) {
+ release.isDeprecated = true;
+ }
+ return release;
+ },
+ );
-/**
- * Compare two versions. Return:
- * - `1` if `a > b` or `b` is invalid
- * - `-1` if `a < b` or `a` is invalid
- * - `0` if `a == b` or both `a` and `b` are invalid
- */
-export function sortNugetVersions(a: string, b: string): number {
- if (versioning.isValid(a)) {
- if (versioning.isValid(b)) {
- return versioning.sortVersions(a, b);
- } else {
- return 1;
+ if (!releases.length) {
+ return null;
}
- } else if (versioning.isValid(b)) {
- return -1;
- } else {
- return 0;
- }
-}
-
-export async function getReleases(
- http: Http,
- registryUrl: string,
- feedUrl: string,
- pkgName: string,
-): Promise {
- const baseUrl = feedUrl.replace(regEx(/\/*$/), '');
- const url = `${baseUrl}/${pkgName.toLowerCase()}/index.json`;
- const packageRegistration = await http.getJson(url);
- const catalogPages = packageRegistration.body.items || [];
- const catalogPagesQueue = catalogPages.map(
- (page) => (): Promise => getCatalogEntry(http, page),
- );
- const catalogEntries = (await p.all(catalogPagesQueue))
- .flat()
- .sort((a, b) => sortNugetVersions(a.version, b.version));
-
- let homepage: string | null = null;
- let latestStable: string | null = null;
- const releases = catalogEntries.map(
- ({ version, published: releaseTimestamp, projectUrl, listed }) => {
- const release: Release = { version: removeBuildMeta(version) };
- if (releaseTimestamp) {
- release.releaseTimestamp = releaseTimestamp;
- }
- if (versioning.isValid(version) && versioning.isStable(version)) {
- latestStable = removeBuildMeta(version);
- homepage = projectUrl ? massageUrl(projectUrl) : homepage;
- }
- if (listed === false) {
- release.isDeprecated = true;
- }
- return release;
- },
- );
- if (!releases.length) {
- return null;
- }
-
- // istanbul ignore next: only happens when no stable version exists
- if (latestStable === null && catalogPages.length) {
- const last = catalogEntries.pop()!;
- latestStable = removeBuildMeta(last.version);
- homepage ??= last.projectUrl ?? null;
- }
+ // istanbul ignore next: only happens when no stable version exists
+ if (latestStable === null && catalogPages.length) {
+ const last = catalogEntries.pop()!;
+ latestStable = removeBuildMeta(last.version);
+ homepage ??= last.projectUrl ?? null;
+ nupkgUrl ??= massageUrl(last.packageContent);
+ }
- const dep: ReleaseResult = {
- releases,
- };
+ const dep: ReleaseResult = {
+ releases,
+ };
- try {
- const packageBaseAddress = await getResourceUrl(
- http,
- registryUrl,
- 'PackageBaseAddress',
- );
- // istanbul ignore else: this is a required v3 api
- if (is.nonEmptyString(packageBaseAddress)) {
- const nuspecUrl = `${ensureTrailingSlash(
- packageBaseAddress,
- )}${pkgName.toLowerCase()}/${
- // TODO: types (#22198)
- latestStable
- }/${pkgName.toLowerCase()}.nuspec`;
- const metaresult = await http.get(nuspecUrl);
- const nuspec = new XmlDocument(metaresult.body);
- const sourceUrl = nuspec.valueWithPath('metadata.repository@url');
- if (sourceUrl) {
- dep.sourceUrl = massageUrl(sourceUrl);
+ try {
+ const packageBaseAddress = await this.getResourceUrl(
+ http,
+ registryUrl,
+ 'PackageBaseAddress',
+ );
+ if (is.nonEmptyString(packageBaseAddress)) {
+ const nuspecUrl = `${ensureTrailingSlash(
+ packageBaseAddress,
+ )}${pkgName.toLowerCase()}/${
+ // TODO: types (#22198)
+ latestStable
+ }/${pkgName.toLowerCase()}.nuspec`;
+ const metaresult = await http.get(nuspecUrl);
+ const nuspec = new XmlDocument(metaresult.body);
+ const sourceUrl = nuspec.valueWithPath('metadata.repository@url');
+ if (sourceUrl) {
+ dep.sourceUrl = massageUrl(sourceUrl);
+ }
+ } else if (nupkgUrl) {
+ const sourceUrl = await this.getSourceUrlFromNupkg(
+ http,
+ registryUrl,
+ pkgName,
+ latestStable,
+ nupkgUrl,
+ );
+ if (sourceUrl) {
+ dep.sourceUrl = massageUrl(sourceUrl);
+ logger.debug(`Determined sourceUrl ${sourceUrl} from ${nupkgUrl}`);
+ }
+ }
+ } catch (err) {
+ // istanbul ignore if: not easy testable with nock
+ if (err instanceof ExternalHostError) {
+ throw err;
+ }
+ // ignore / silence 404. Seen on proget, if remote connector is used and package is not yet cached
+ if (err instanceof HttpError && err.response?.statusCode === 404) {
+ logger.debug(
+ { registryUrl, pkgName, pkgVersion: latestStable },
+ `package manifest (.nuspec) not found`,
+ );
+ return dep;
}
- }
- } catch (err) {
- // istanbul ignore if: not easy testable with nock
- if (err instanceof ExternalHostError) {
- throw err;
- }
- // ignore / silence 404. Seen on proget, if remote connector is used and package is not yet cached
- if (err instanceof HttpError && err.response?.statusCode === 404) {
logger.debug(
- { registryUrl, pkgName, pkgVersion: latestStable },
- `package manifest (.nuspec) not found`,
+ { err, registryUrl, pkgName, pkgVersion: latestStable },
+ `Cannot obtain sourceUrl`,
);
return dep;
}
- logger.debug(
- { err, registryUrl, pkgName, pkgVersion: latestStable },
- `Cannot obtain sourceUrl`,
- );
+
+ // istanbul ignore else: not easy testable
+ if (homepage) {
+ // only assign if not assigned
+ dep.sourceUrl ??= homepage;
+ dep.homepage ??= homepage;
+ }
+
return dep;
}
- // istanbul ignore else: not easy testable
- if (homepage) {
- // only assign if not assigned
- dep.sourceUrl ??= homepage;
- dep.homepage ??= homepage;
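+  // Downloads the .nupkg, extracts it, and reads the repository URL from the bundled .nuspec; results are cached per package for a week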
+ @cache({
+ namespace: NugetV3Api.cacheNamespace,
+ key: (
+ _http: Http,
+ registryUrl: string,
+ packageName: string,
+ _packageVersion: string | null,
+ _nupkgUrl: string,
+ ) => `source-url:${registryUrl}:${packageName}`,
+ ttlMinutes: 10080, // 1 week
+ })
+ async getSourceUrlFromNupkg(
+ http: Http,
+ _registryUrl: string,
+ packageName: string,
+ packageVersion: string | null,
+ nupkgUrl: string,
+  ): Promise<string | null> {
+ // istanbul ignore if: experimental feature
+ if (!process.env.RENOVATE_X_NUGET_DOWNLOAD_NUPKGS) {
+ logger.once.debug('RENOVATE_X_NUGET_DOWNLOAD_NUPKGS is not set');
+ return null;
+ }
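+    // Stream the package into the cache directory, unzip it, and parse the .nuspec; temporary files are cleaned up afterwards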
+ const cacheDir = await ensureCacheDir('nuget');
+ const nupkgFile = upath.join(
+ cacheDir,
+ `${packageName}.${packageVersion}.nupkg`,
+ );
+ const nupkgContentsDir = upath.join(
+ cacheDir,
+ `${packageName}.${packageVersion}`,
+ );
+ const readStream = http.stream(nupkgUrl);
+ try {
+ const writeStream = fs.createCacheWriteStream(nupkgFile);
+ await fs.pipeline(readStream, writeStream);
+ await extract(nupkgFile, { dir: nupkgContentsDir });
+ const nuspecFile = upath.join(nupkgContentsDir, `${packageName}.nuspec`);
+ const nuspec = new XmlDocument(
+ await fs.readCacheFile(nuspecFile, 'utf8'),
+ );
+ return nuspec.valueWithPath('metadata.repository@url') ?? null;
+ } finally {
+ await fs.rmCache(nupkgFile);
+ await fs.rmCache(nupkgContentsDir);
+ }
}
-
- return dep;
}
diff --git a/lib/modules/datasource/orb/index.ts b/lib/modules/datasource/orb/index.ts
index 98f0a2c7e8cdf4..2f2a1c20a711cd 100644
--- a/lib/modules/datasource/orb/index.ts
+++ b/lib/modules/datasource/orb/index.ts
@@ -29,6 +29,10 @@ export class OrbDatasource extends Datasource {
override readonly defaultRegistryUrls = ['https://circleci.com/'];
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `createdAt` field in the results.';
+
@cache({
namespace: `datasource-${OrbDatasource.id}`,
key: ({ packageName }: GetReleasesConfig) => packageName,
diff --git a/lib/modules/datasource/packagist/index.ts b/lib/modules/datasource/packagist/index.ts
index 1c4d4719ae717e..f427929c1e3051 100644
--- a/lib/modules/datasource/packagist/index.ts
+++ b/lib/modules/datasource/packagist/index.ts
@@ -32,6 +32,14 @@ export class PackagistDatasource extends Datasource {
override readonly registryStrategy = 'hunt';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `time` field in the results.';
+ // Note: this can be changed to 'release', as the source is present in each release but we remove it while processing
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+    'The source URL is determined from the `source` field in the results.';
+
// We calculate auth at this datasource layer so that we can know whether it's safe to cache or not
private static getHostOpts(url: string): HttpOptions {
const { username, password } = hostRules.find({
diff --git a/lib/modules/datasource/puppet-forge/index.ts b/lib/modules/datasource/puppet-forge/index.ts
index fed7e182eca36e..f656f4d5faad79 100644
--- a/lib/modules/datasource/puppet-forge/index.ts
+++ b/lib/modules/datasource/puppet-forge/index.ts
@@ -12,6 +12,10 @@ export class PuppetForgeDatasource extends Datasource {
override readonly defaultRegistryUrls = [PUPPET_FORGE];
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `created_at` field from the response.';
+
async getReleases({
packageName,
registryUrl,
@@ -50,6 +54,7 @@ export class PuppetForgeDatasource extends Datasource {
): ReleaseResult {
const result: ReleaseResult = {
releases,
+      // the homepage URL in the fixtures is a GitHub repo, so we can use it as the sourceUrl
homepage: module.homepage_url,
};
diff --git a/lib/modules/datasource/pypi/__snapshots__/index.spec.ts.snap b/lib/modules/datasource/pypi/__snapshots__/index.spec.ts.snap
index 42dc4dcea02587..1af50f1d6d055b 100644
--- a/lib/modules/datasource/pypi/__snapshots__/index.spec.ts.snap
+++ b/lib/modules/datasource/pypi/__snapshots__/index.spec.ts.snap
@@ -265,9 +265,6 @@ exports[`modules/datasource/pypi/index getReleases respects constraints 1`] = `
{
"registryUrl": "https://pypi.org/pypi",
"releases": [
- {
- "version": "0.4.0",
- },
{
"version": "0.4.1",
},
diff --git a/lib/modules/datasource/pypi/common.ts b/lib/modules/datasource/pypi/common.ts
index 17650cb4c12603..737b6a216e555d 100644
--- a/lib/modules/datasource/pypi/common.ts
+++ b/lib/modules/datasource/pypi/common.ts
@@ -7,6 +7,6 @@ export function isGitHubRepo(url: string): boolean {
}
// https://packaging.python.org/en/latest/specifications/name-normalization/
-export function normalizeDepName(name: string): string {
+export function normalizePythonDepName(name: string): string {
return name.replace(/[-_.]+/g, '-').toLowerCase();
}
diff --git a/lib/modules/datasource/pypi/index.ts b/lib/modules/datasource/pypi/index.ts
index 9ca84c592f6cee..477525bdd48fa5 100644
--- a/lib/modules/datasource/pypi/index.ts
+++ b/lib/modules/datasource/pypi/index.ts
@@ -1,4 +1,5 @@
import url from 'node:url';
+import is from '@sindresorhus/is';
import changelogFilenameRegex from 'changelog-filename-regex';
import { logger } from '../../../logger';
import { coerceArray } from '../../../util/array';
@@ -8,7 +9,7 @@ import { ensureTrailingSlash } from '../../../util/url';
import * as pep440 from '../../versioning/pep440';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, Release, ReleaseResult } from '../types';
-import { isGitHubRepo, normalizeDepName } from './common';
+import { isGitHubRepo, normalizePythonDepName } from './common';
import type { PypiJSON, PypiJSONRelease, Releases } from './types';
export class PypiDatasource extends Datasource {
@@ -30,6 +31,12 @@ export class PypiDatasource extends Datasource {
override readonly registryStrategy = 'merge';
+ override readonly releaseTimestampNote =
+    'The release timestamp is determined from the `upload_time` field in the results.';
+ override readonly sourceUrlSupport = 'release';
+ override readonly sourceUrlNote =
+    'The source URL is determined from the `homepage` field if it is a GitHub repository, otherwise we use the `project_urls` field.';
+
async getReleases({
packageName,
registryUrl,
@@ -85,7 +92,7 @@ export class PypiDatasource extends Datasource {
): Promise {
const lookupUrl = url.resolve(
hostUrl,
- `${normalizeDepName(packageName)}/json`,
+ `${normalizePythonDepName(packageName)}/json`,
);
const dependency: ReleaseResult = { releases: [] };
logger.trace({ lookupUrl }, 'Pypi api got lookup');
@@ -156,9 +163,11 @@ export class PypiDatasource extends Datasource {
result.isDeprecated = isDeprecated;
}
// There may be multiple releases with different requires_python, so we return all in an array
+ const pythonConstraints = releases
+ .map(({ requires_python }) => requires_python)
+ .filter(is.string);
result.constraints = {
- // TODO: string[] isn't allowed here
- python: releases.map(({ requires_python }) => requires_python) as any,
+ python: Array.from(new Set(pythonConstraints)),
};
return result;
});
@@ -223,7 +232,7 @@ export class PypiDatasource extends Datasource {
): Promise {
const lookupUrl = url.resolve(
hostUrl,
- ensureTrailingSlash(normalizeDepName(packageName)),
+ ensureTrailingSlash(normalizePythonDepName(packageName)),
);
const dependency: ReleaseResult = { releases: [] };
const response = await this.http.get(lookupUrl);
diff --git a/lib/modules/datasource/python-version/__fixtures__/eol.json b/lib/modules/datasource/python-version/__fixtures__/eol.json
new file mode 100644
index 00000000000000..4f9d6d19e384d9
--- /dev/null
+++ b/lib/modules/datasource/python-version/__fixtures__/eol.json
@@ -0,0 +1,4 @@
+[
+ {"cycle":"3.12","releaseDate":"2023-10-02","support":"2025-04-02","eol":"2028-10-31","latest":"3.12.2","latestReleaseDate":"2024-02-06","lts":false},
+ {"cycle":"3.7","releaseDate":"2018-06-26","support":"2020-06-27","eol":"2023-06-27","latest":"3.7.17","latestReleaseDate":"2023-06-05","lts":false}
+]
diff --git a/lib/modules/datasource/python-version/__fixtures__/release.json b/lib/modules/datasource/python-version/__fixtures__/release.json
new file mode 100644
index 00000000000000..af122e745d4e87
--- /dev/null
+++ b/lib/modules/datasource/python-version/__fixtures__/release.json
@@ -0,0 +1,7 @@
+[
+ {"name": "Python 3.12.0", "slug": "python-3120", "version": 3, "is_published": true, "is_latest": false, "release_date": "2023-10-02T12:50:09Z", "pre_release": false, "release_page": null, "release_notes_url": "https://docs.python.org/release/3.12.0/whatsnew/changelog.html#python-3-12-0", "show_on_download_page": true, "resource_uri": "https://www.python.org/api/v2/downloads/release/832/"},
+ {"name": "Python 3.12.0a1", "slug": "python-3120a1", "version": 3, "is_published": true, "is_latest": false, "release_date": "2022-10-25T02:16:12Z", "pre_release": true, "release_page": null, "release_notes_url": "", "show_on_download_page": false, "resource_uri": "https://www.python.org/api/v2/downloads/release/767/"},
+ {"name": "Python 3.12.2", "slug": "python-3122", "version": 3, "is_published": true, "is_latest": true, "release_date": "2024-02-06T21:40:35Z", "pre_release": false, "release_page": null, "release_notes_url": "https://docs.python.org/release/3.12.2/whatsnew/changelog.html#python-3-12-2", "show_on_download_page": true, "resource_uri": "https://www.python.org/api/v2/downloads/release/871/"},
+ {"name": "Python 3.7.8", "slug": "python-378", "version": 3, "is_published": true, "is_latest": false, "release_date": "2020-06-27T12:55:01Z", "pre_release": false, "release_page": null, "release_notes_url": "https://docs.python.org/release/3.7.8/whatsnew/changelog.html#changelog", "show_on_download_page": true, "resource_uri": "https://www.python.org/api/v2/downloads/release/442/"},
+ {"name": "Python 3.7.9", "slug": "python-379", "version": 3, "is_published": true, "is_latest": false, "release_date": "2020-08-17T22:00:00Z", "pre_release": false, "release_page": null, "release_notes_url": "https://docs.python.org/release/3.7.9/whatsnew/changelog.html#changelog", "show_on_download_page": true, "resource_uri": "https://www.python.org/api/v2/downloads/release/482/"}
+]
diff --git a/lib/modules/datasource/python-version/common.ts b/lib/modules/datasource/python-version/common.ts
new file mode 100644
index 00000000000000..304693714f1d7d
--- /dev/null
+++ b/lib/modules/datasource/python-version/common.ts
@@ -0,0 +1,5 @@
+export const defaultRegistryUrl =
+ 'https://www.python.org/api/v2/downloads/release';
+export const githubBaseUrl = 'https://api.github.com/';
+
+export const datasource = 'python-version';
diff --git a/lib/modules/datasource/python-version/index.spec.ts b/lib/modules/datasource/python-version/index.spec.ts
new file mode 100644
index 00000000000000..4333bfe0cec344
--- /dev/null
+++ b/lib/modules/datasource/python-version/index.spec.ts
@@ -0,0 +1,148 @@
+import { satisfies } from '@renovatebot/pep440';
+import { getPkgReleases } from '..';
+import { Fixtures } from '../../../../test/fixtures';
+import * as httpMock from '../../../../test/http-mock';
+import { EXTERNAL_HOST_ERROR } from '../../../constants/error-messages';
+import * as githubGraphql from '../../../util/github/graphql';
+import { registryUrl as eolRegistryUrl } from '../endoflife-date/common';
+import { datasource, defaultRegistryUrl } from './common';
+import { PythonVersionDatasource } from '.';
+
+describe('modules/datasource/python-version/index', () => {
+ describe('dependent datasources', () => {
+ it('returns Python EOL data', async () => {
+ const datasource = new PythonVersionDatasource();
+ httpMock
+ .scope(eolRegistryUrl)
+ .get('/python.json')
+ .reply(200, Fixtures.get('eol.json'));
+ const res = await datasource.getEolReleases();
+ expect(
+ res?.releases.find((release) => release.version === '3.7.17')
+ ?.isDeprecated,
+ ).toBeTrue();
+ });
+ });
+
+ describe('getReleases', () => {
+ beforeEach(() => {
+ httpMock
+ .scope('https://endoflife.date')
+ .get('/api/python.json')
+ .reply(200, Fixtures.get('eol.json'));
+
+ jest.spyOn(githubGraphql, 'queryReleases').mockResolvedValueOnce([
+ {
+ id: 1,
+ url: 'https://example.com',
+ name: 'containerbase/python-prebuild',
+ description: 'some description',
+ version: '3.12.1',
+ releaseTimestamp: '2020-03-09T13:00:00Z',
+ },
+ {
+ id: 2,
+ url: 'https://example.com',
+ name: 'containerbase/python-prebuild',
+ description: 'some description',
+ version: '3.12.0',
+ releaseTimestamp: '2020-03-09T13:00:00Z',
+ },
+ {
+ id: 3,
+ url: 'https://example.com',
+ name: 'containerbase/python-prebuild',
+ description: 'some description',
+ version: '3.7.8',
+ releaseTimestamp: '2020-03-09T13:00:00Z',
+ },
+ ]);
+ });
+
+ it('throws for 500', async () => {
+ httpMock.scope(defaultRegistryUrl).get('').reply(500);
+ await expect(
+ getPkgReleases({
+ datasource,
+ packageName: 'python',
+ }),
+ ).rejects.toThrow(EXTERNAL_HOST_ERROR);
+ });
+
+ it('returns null for error', async () => {
+ httpMock.scope(defaultRegistryUrl).get('').replyWithError('error');
+ expect(
+ await getPkgReleases({
+ datasource,
+ packageName: 'python',
+ }),
+ ).toBeNull();
+ });
+
+ it('returns null for empty 200 OK', async () => {
+ httpMock.scope(defaultRegistryUrl).get('').reply(200, []);
+ expect(
+ await getPkgReleases({
+ datasource,
+ packageName: 'python',
+ }),
+ ).toBeNull();
+ });
+
+ describe('processes real data', () => {
+ beforeEach(() => {
+ httpMock
+ .scope(defaultRegistryUrl)
+ .get('')
+ .reply(200, Fixtures.get('release.json'));
+ });
+
+ it('returns the correct data', async () => {
+ const res = await getPkgReleases({
+ datasource,
+ packageName: 'python',
+ });
+ expect(res?.releases[0]).toEqual({
+ isDeprecated: true,
+ isStable: true,
+ releaseTimestamp: '2020-06-27T12:55:01.000Z',
+ version: '3.7.8',
+ });
+ });
+
+ it('only returns stable versions', async () => {
+ const res = await getPkgReleases({
+ datasource,
+ packageName: 'python',
+ });
+ expect(res?.releases).toHaveLength(2);
+ for (const release of res?.releases ?? []) {
+ expect(release.isStable).toBeTrue();
+ }
+ });
+
+ it('only returns versions that are prebuilt', async () => {
+ const res = await getPkgReleases({
+ datasource,
+ packageName: 'python',
+ });
+ expect(
+ res?.releases.filter((release) =>
+ satisfies(release.version, '>3.12.1'),
+ ),
+ ).toHaveLength(0);
+ });
+
+ it('returns isDeprecated status for Python 3 minor releases', async () => {
+ const res = await getPkgReleases({
+ datasource,
+ packageName: 'python',
+ });
+ expect(res?.releases).toHaveLength(2);
+ for (const release of res?.releases ?? []) {
+ expect(release.isDeprecated).toBeBoolean();
+ }
+ });
+ });
+ });
+});
diff --git a/lib/modules/datasource/python-version/index.ts b/lib/modules/datasource/python-version/index.ts
new file mode 100644
index 00000000000000..682c6ceb928747
--- /dev/null
+++ b/lib/modules/datasource/python-version/index.ts
@@ -0,0 +1,92 @@
+import { cache } from '../../../util/cache/package/decorator';
+import { id as versioning } from '../../versioning/python';
+import { Datasource } from '../datasource';
+import { EndoflifeDatePackagesource } from '../endoflife-date';
+import { registryUrl as eolRegistryUrl } from '../endoflife-date/common';
+import { GithubReleasesDatasource } from '../github-releases';
+import type { GetReleasesConfig, ReleaseResult } from '../types';
+import { datasource, defaultRegistryUrl, githubBaseUrl } from './common';
+import { PythonRelease } from './schema';
+
+export class PythonVersionDatasource extends Datasource {
+ static readonly id = datasource;
+ pythonPrebuildDatasource: GithubReleasesDatasource;
+ pythonEolDatasource: EndoflifeDatePackagesource;
+
+ constructor() {
+ super(datasource);
+ this.pythonPrebuildDatasource = new GithubReleasesDatasource();
+ this.pythonEolDatasource = new EndoflifeDatePackagesource();
+ }
+
+ override readonly customRegistrySupport = false;
+
+ override readonly defaultRegistryUrls = [defaultRegistryUrl];
+
+ override readonly defaultVersioning = versioning;
+
+ override readonly caching = true;
+
+  async getPrebuildReleases(): Promise<ReleaseResult | null> {
+ return await this.pythonPrebuildDatasource.getReleases({
+ registryUrl: githubBaseUrl,
+ packageName: 'containerbase/python-prebuild',
+ });
+ }
+
+  async getEolReleases(): Promise<ReleaseResult | null> {
+ return await this.pythonEolDatasource.getReleases({
+ registryUrl: eolRegistryUrl,
+ packageName: 'python',
+ });
+ }
+
+ @cache({
+ namespace: `datasource-${datasource}`,
+ key: ({ registryUrl }: GetReleasesConfig) => `${registryUrl}`,
+ })
+ async getReleases({
+ registryUrl,
+  }: GetReleasesConfig): Promise<ReleaseResult | null> {
+ // istanbul ignore if
+ if (!registryUrl) {
+ return null;
+ }
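+    // Keep only versions that containerbase/python-prebuild has published, and flag end-of-life minor versions as deprecated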
+ const pythonPrebuildReleases = await this.getPrebuildReleases();
+ const pythonPrebuildVersions = new Set(
+ pythonPrebuildReleases?.releases.map((release) => release.version),
+ );
+ const pythonEolReleases = await this.getEolReleases();
+ const pythonEolVersions = new Map(
+ pythonEolReleases?.releases
+ .filter((release) => release.isDeprecated !== undefined)
+ .map((release) => [
+ release.version.split('.').slice(0, 2).join('.'),
+ release.isDeprecated,
+ ]),
+ );
+ const result: ReleaseResult = {
+ homepage: 'https://python.org',
+ sourceUrl: 'https://github.com/python/cpython',
+ registryUrl,
+ releases: [],
+ };
+ try {
+ const response = await this.http.getJson(registryUrl, PythonRelease);
+ result.releases.push(
+ ...response.body
+ .filter((release) => release.isStable)
+ .filter((release) => pythonPrebuildVersions.has(release.version)),
+ );
+ } catch (err) {
+ this.handleGenericErrors(err);
+ }
+ for (const release of result.releases) {
+ release.isDeprecated = pythonEolVersions.get(
+ release.version.split('.').slice(0, 2).join('.'),
+ );
+ }
+
+ return result.releases.length ? result : null;
+ }
+}
diff --git a/lib/modules/datasource/python-version/readme.md b/lib/modules/datasource/python-version/readme.md
new file mode 100644
index 00000000000000..f904048f539810
--- /dev/null
+++ b/lib/modules/datasource/python-version/readme.md
@@ -0,0 +1,36 @@
+This datasource returns Python releases from the [python.org API](https://www.python.org/api/v2/downloads/release/).
+
+It also fetches deprecated versions from the [Endoflife Date datasource](../endoflife-date/index.md).
+
+Because Renovate depends on [`containerbase/python-prebuild`](https://github.com/containerbase/python-prebuild/releases), it will also fetch releases from the GitHub API.
+
+## Example custom manager
+
+Below is a [custom regex manager](../../manager/regex/index.md) to update the Python versions in a Dockerfile.
+Python versions sometimes drop the dot that separates the major and minor numbers, so `3.11` becomes `311`.
+The example below handles this case.
+
+```dockerfile
+ARG PYTHON_VERSION=311
+FROM image-python${PYTHON_VERSION}-builder:1.0.0
+```
+
+```json
+{
+ "customManagers": [
+ {
+ "customType": "regex",
+ "fileMatch": ["^Dockerfile$"],
+ "matchStringsStrategy": "any",
+ "matchStrings": [
+ "ARG PYTHON_VERSION=\"?(?3(?\\d+))\"?\\s"
+ ],
+ "autoReplaceStringTemplate": "ARG PYTHON_VERSION={{{replace '\\.' '' newValue}}}\n",
+ "currentValueTemplate": "3.{{{minor}}}",
+ "datasourceTemplate": "python-version",
+ "versioningTemplate": "python",
+ "depNameTemplate": "python"
+ }
+ ]
+}
+```
diff --git a/lib/modules/datasource/python-version/schema.ts b/lib/modules/datasource/python-version/schema.ts
new file mode 100644
index 00000000000000..2f804ef8a42f4d
--- /dev/null
+++ b/lib/modules/datasource/python-version/schema.ts
@@ -0,0 +1,31 @@
+import { z } from 'zod';
+import type { Release } from '../types';
+
+export const PythonRelease = z
+ .object({
+ /** e.g: "Python 3.9.0b1" */
+ name: z.string(),
+ /** e.g: "python-390b1" */
+ slug: z.string(),
+ /** Major version e.g: 3 */
+ version: z.number(),
+ /** is latest major version, true for Python 2.7.18 and latest Python 3 */
+ is_latest: z.boolean(),
+ is_published: z.boolean(),
+ release_date: z.string(),
+ pre_release: z.boolean(),
+ release_page: z.string().nullable(),
+ show_on_download_page: z.boolean(),
+ /** Changelog e.g: "https://docs.python.org/…html#python-3-9-0-beta-1" */
+ release_notes_url: z.string(),
+ /** Download URL e.g: "https://www.python.org/api/v2/downloads/release/436/" */
+ resource_uri: z.string(),
+ })
+ .transform(
+ ({ name, release_date: releaseTimestamp, pre_release }): Release => {
+ const version = name?.replace('Python', '').trim();
+ const isStable = pre_release === false;
+ return { version, releaseTimestamp, isStable };
+ },
+ )
+ .array();
diff --git a/lib/modules/datasource/ruby-version/index.ts b/lib/modules/datasource/ruby-version/index.ts
index 60fae662e90a5c..7f2a2613c1d6f8 100644
--- a/lib/modules/datasource/ruby-version/index.ts
+++ b/lib/modules/datasource/ruby-version/index.ts
@@ -20,6 +20,13 @@ export class RubyVersionDatasource extends Datasource {
override readonly defaultVersioning = rubyVersioningId;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `release-list` table in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'We use the URL: https://github.com/ruby/ruby.';
+
@cache({ namespace: `datasource-${RubyVersionDatasource.id}`, key: 'all' })
async getReleases({
registryUrl,
diff --git a/lib/modules/datasource/rubygems/index.ts b/lib/modules/datasource/rubygems/index.ts
index 58833d513b67dc..942bfd6395bc97 100644
--- a/lib/modules/datasource/rubygems/index.ts
+++ b/lib/modules/datasource/rubygems/index.ts
@@ -47,6 +47,13 @@ export class RubyGemsDatasource extends Datasource {
private readonly versionsEndpointCache: VersionsEndpointCache;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `created_at` field in the results.';
+ override readonly sourceUrlSupport = 'release';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `source_code_uri` field in the results.';
+
@cache({
namespace: `datasource-${RubyGemsDatasource.id}`,
key: ({ packageName, registryUrl }: GetReleasesConfig) =>
diff --git a/lib/modules/datasource/sbt-package/index.ts b/lib/modules/datasource/sbt-package/index.ts
index c0b83d6c950146..b714aae786c7b6 100644
--- a/lib/modules/datasource/sbt-package/index.ts
+++ b/lib/modules/datasource/sbt-package/index.ts
@@ -24,6 +24,10 @@ export class SbtPackageDatasource extends MavenDatasource {
override readonly registryStrategy = 'hunt';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `scm` tags in the results.';
+
constructor(id = SbtPackageDatasource.id) {
super(id);
this.http = new Http('sbt');
diff --git a/lib/modules/datasource/sbt-plugin/index.ts b/lib/modules/datasource/sbt-plugin/index.ts
index 8fd574e56d227e..1d4c18a6ab039a 100644
--- a/lib/modules/datasource/sbt-plugin/index.ts
+++ b/lib/modules/datasource/sbt-plugin/index.ts
@@ -23,6 +23,10 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
override readonly defaultVersioning = ivyVersioning.id;
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+ 'The source URL is determined from the `scm` tags in the results.';
+
constructor() {
super(SbtPluginDatasource.id);
this.http = new Http('sbt');
diff --git a/lib/modules/datasource/terraform-module/index.ts b/lib/modules/datasource/terraform-module/index.ts
index 9476cef7e5c3c3..4e3b25e4bfd3ec 100644
--- a/lib/modules/datasource/terraform-module/index.ts
+++ b/lib/modules/datasource/terraform-module/index.ts
@@ -2,7 +2,7 @@ import { logger } from '../../../logger';
import { cache } from '../../../util/cache/package/decorator';
import { regEx } from '../../../util/regex';
import { coerceString } from '../../../util/string';
-import { validateUrl } from '../../../util/url';
+import { isHttpUrl } from '../../../util/url';
import * as hashicorpVersioning from '../../versioning/hashicorp';
import type { GetReleasesConfig, ReleaseResult } from '../types';
import { TerraformDatasource } from './base';
@@ -25,6 +25,13 @@ export class TerraformModuleDatasource extends TerraformDatasource {
override readonly defaultVersioning = hashicorpVersioning.id;
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is only supported for the latest version, and is determined from the `published_at` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+    'The source URL is determined from the `source` field in the results.';
+
readonly extendedApiRegistryUrls = [
'https://registry.terraform.io',
'https://app.terraform.io',
@@ -32,7 +39,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
/**
* This function will fetch a package from the specified Terraform registry and return all semver versions.
- * - `sourceUrl` is supported of "source" field is set
+ * - `sourceUrl` is supported if "source" field is set
* - `homepage` is set to the Terraform registry's page if it's on the official main registry
*/
@cache({
@@ -162,7 +169,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
};
// Add the source URL if given
- if (validateUrl(res.modules[0].source)) {
+ if (isHttpUrl(res.modules[0].source)) {
dep.sourceUrl = res.modules[0].source;
}
diff --git a/lib/modules/datasource/terraform-module/utils.ts b/lib/modules/datasource/terraform-module/utils.ts
index e3449faa23f039..b6f981f1dc6d88 100644
--- a/lib/modules/datasource/terraform-module/utils.ts
+++ b/lib/modules/datasource/terraform-module/utils.ts
@@ -1,4 +1,4 @@
-import { joinUrlParts, validateUrl } from '../../../util/url';
+import { isHttpUrl, joinUrlParts } from '../../../util/url';
import type {
ServiceDiscoveryEndpointType,
ServiceDiscoveryResult,
@@ -12,7 +12,7 @@ export function createSDBackendURL(
): string {
const sdEndpoint = sdResult[sdType] ?? '';
const fullPath = joinUrlParts(sdEndpoint, subPath);
- if (validateUrl(fullPath)) {
+ if (isHttpUrl(fullPath)) {
return fullPath;
}
return joinUrlParts(registryURL, fullPath);
diff --git a/lib/modules/datasource/terraform-provider/index.ts b/lib/modules/datasource/terraform-provider/index.ts
index d52a78198b1652..56c78a7522b68b 100644
--- a/lib/modules/datasource/terraform-provider/index.ts
+++ b/lib/modules/datasource/terraform-provider/index.ts
@@ -42,6 +42,13 @@ export class TerraformProviderDatasource extends TerraformDatasource {
override readonly registryStrategy = 'hunt';
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is only supported for the latest version, and is determined from the `published_at` field in the results.';
+ override readonly sourceUrlSupport = 'package';
+ override readonly sourceUrlNote =
+    'The source URL is determined from the `source` field in the results.';
+
@cache({
namespace: `datasource-${TerraformProviderDatasource.id}`,
key: (getReleasesConfig: GetReleasesConfig) =>
diff --git a/lib/modules/datasource/types.ts b/lib/modules/datasource/types.ts
index f848ffa89fba27..98f6279ebb2101 100644
--- a/lib/modules/datasource/types.ts
+++ b/lib/modules/datasource/types.ts
@@ -9,6 +9,8 @@ export interface GetDigestInputConfig {
packageName: string;
defaultRegistryUrls?: string[];
registryUrls?: string[] | null;
+ registryUrl?: string;
+ lookupName?: string;
additionalRegistryUrls?: string[];
currentValue?: string;
currentDigest?: string;
@@ -17,6 +19,7 @@ export interface GetDigestInputConfig {
export interface DigestConfig {
packageName: string;
+ lookupName?: string;
registryUrl?: string;
currentValue?: string;
currentDigest?: string;
@@ -83,10 +86,12 @@ export interface ReleaseResult {
registryUrl?: string;
replacementName?: string;
replacementVersion?: string;
+ lookupName?: string;
+ packageScope?: string;
}
export type RegistryStrategy = 'first' | 'hunt' | 'merge';
-
+export type SourceUrlSupport = 'package' | 'release' | 'none';
export interface DatasourceApi extends ModuleApi {
id: string;
getDigest?(config: DigestConfig, newValue?: string): Promise<string | null>;
@@ -108,6 +113,24 @@ export interface DatasourceApi extends ModuleApi {
*/
customRegistrySupport: boolean;
+ /**
+ * Whether release timestamp can be returned.
+ */
+ releaseTimestampSupport: boolean;
+ /**
+ * Notes on how release timestamp is determined.
+ */
+ releaseTimestampNote?: string;
+
+ /**
+ * Whether sourceURL can be returned.
+ */
+ sourceUrlSupport: SourceUrlSupport;
+ /**
+ * Notes on how sourceURL is determined.
+ */
+ sourceUrlNote?: string;
+
/**
* Whether to perform caching in the datasource index/wrapper or not.
* true: datasoure index wrapper should cache all results (based on registryUrl/packageName)
diff --git a/lib/modules/datasource/unity3d/__fixtures__/beta.xml b/lib/modules/datasource/unity3d/__fixtures__/beta.xml
new file mode 100644
index 00000000000000..ac53f15db16eff
--- /dev/null
+++ b/lib/modules/datasource/unity3d/__fixtures__/beta.xml
@@ -0,0 +1,21 @@
+
+
+
+ Latest Unity Beta Releases
+ https://unity.com/
+ Latest Unity Beta Releases
+ en
+
+
+ 2023.3.0b6
+ https://unity.com/releases/editor/beta/2023.3.0b6
+
+Beta description
+
+ 2024-02-07T07:24:40
+ Unity Technologies
+ 4ca2224a582d
+
+
+
+
diff --git a/lib/modules/datasource/unity3d/__fixtures__/lts.xml b/lib/modules/datasource/unity3d/__fixtures__/lts.xml
new file mode 100644
index 00000000000000..dc273bf6c8c232
--- /dev/null
+++ b/lib/modules/datasource/unity3d/__fixtures__/lts.xml
@@ -0,0 +1,21 @@
+
+
+
+ Latest Unity Lts Releases
+ https://unity.com/
+ Latest Unity LTS Releases
+ en
+
+
+ 2021.3.35f1
+ https://unity.com/releases/editor/whats-new/2021.3.35
+
+LTS description
+
+ 2024-02-06T15:40:15
+ Unity Technologies
+ 157b46ce122a
+
+
+
+
diff --git a/lib/modules/datasource/unity3d/__fixtures__/no_channel.xml b/lib/modules/datasource/unity3d/__fixtures__/no_channel.xml
new file mode 100644
index 00000000000000..5afd7008d27baa
--- /dev/null
+++ b/lib/modules/datasource/unity3d/__fixtures__/no_channel.xml
@@ -0,0 +1,4 @@
+
+
+
+
diff --git a/lib/modules/datasource/unity3d/__fixtures__/no_item.xml b/lib/modules/datasource/unity3d/__fixtures__/no_item.xml
new file mode 100644
index 00000000000000..604cbe1c269827
--- /dev/null
+++ b/lib/modules/datasource/unity3d/__fixtures__/no_item.xml
@@ -0,0 +1,11 @@
+
+
+
+ Latest Unity Beta Releases
+ https://unity.com/
+ Latest Unity Beta Releases
+ en
+
+
+
+
diff --git a/lib/modules/datasource/unity3d/__fixtures__/no_title.xml b/lib/modules/datasource/unity3d/__fixtures__/no_title.xml
new file mode 100644
index 00000000000000..35ac231d5b46fb
--- /dev/null
+++ b/lib/modules/datasource/unity3d/__fixtures__/no_title.xml
@@ -0,0 +1,20 @@
+
+
+
+ Latest Unity Full Releases
+ https://unity.com/
+ Latest Unity Full Releases
+ en
+
+
+ https://unity.com/releases/editor/whats-new/2021.3.35
+
+Stable description2
+
+ 2024-02-06T15:40:15
+ Unity Technologies
+ 157b46ce122a
+
+
+
+
diff --git a/lib/modules/datasource/unity3d/__fixtures__/stable.xml b/lib/modules/datasource/unity3d/__fixtures__/stable.xml
new file mode 100644
index 00000000000000..268e62ddec6ce4
--- /dev/null
+++ b/lib/modules/datasource/unity3d/__fixtures__/stable.xml
@@ -0,0 +1,31 @@
+
+
+
+ Latest Unity Full Releases
+ https://unity.com/
+ Latest Unity Full Releases
+ en
+
+
+ 2023.2.9f1
+ https://unity.com/releases/editor/whats-new/2023.2.9
+
+Stable description
+
+ 2024-02-07T06:56:57
+ Unity Technologies
+ 0c9c2e1f4bef
+
+
+ 2021.3.35f1
+ https://unity.com/releases/editor/whats-new/2021.3.35
+
+Stable description2
+
+ 2024-02-06T15:40:15
+ Unity Technologies
+ 157b46ce122a
+
+
+
+
diff --git a/lib/modules/datasource/unity3d/index.spec.ts b/lib/modules/datasource/unity3d/index.spec.ts
new file mode 100644
index 00000000000000..b706c72a7b92c2
--- /dev/null
+++ b/lib/modules/datasource/unity3d/index.spec.ts
@@ -0,0 +1,307 @@
+import { getPkgReleases } from '..';
+import { Fixtures } from '../../../../test/fixtures';
+import * as httpMock from '../../../../test/http-mock';
+import { Unity3dDatasource } from '.';
+
+describe('modules/datasource/unity3d/index', () => {
+ const fixtures = Object.fromEntries(
+ [
+ ...Object.keys(Unity3dDatasource.streams),
+ 'no_title',
+ 'no_channel',
+ 'no_item',
+ ].map((fixture) => [fixture, Fixtures.get(fixture + '.xml')]),
+ );
+
+ const mockRSSFeeds = (streams: { [keys: string]: string }) => {
+ Object.entries(streams).map(([stream, url]) => {
+ const content = fixtures[stream];
+
+ const uri = new URL(url);
+
+ httpMock.scope(uri.origin).get(uri.pathname).reply(200, content);
+ });
+ };
+
+ const stableStreamUrl = new URL(Unity3dDatasource.streams.stable);
+
+ it('handle 500 response', async () => {
+ httpMock
+ .scope(stableStreamUrl.origin)
+ .get(stableStreamUrl.pathname)
+ .reply(500, '500');
+
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ const response = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(response).toBeNull();
+ });
+
+ it('handle 200 with no XML', async () => {
+ httpMock
+ .scope(stableStreamUrl.origin)
+ .get(stableStreamUrl.pathname)
+ .reply(200, 'not xml');
+
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ const response = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(response).toBeNull();
+ });
+
+ it('handles missing title element', async () => {
+ const content = fixtures.no_title;
+ httpMock
+ .scope(stableStreamUrl.origin)
+ .get(stableStreamUrl.pathname)
+ .reply(200, content);
+
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ const responses = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(responses).toEqual({
+ releases: [],
+ homepage: 'https://unity.com/',
+ registryUrl: Unity3dDatasource.streams.stable,
+ });
+ });
+
+ it('handles missing channel element', async () => {
+ const content = fixtures.no_channel;
+ httpMock
+ .scope(stableStreamUrl.origin)
+ .get(stableStreamUrl.pathname)
+ .reply(200, content);
+
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ const responses = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(responses).toEqual({
+ releases: [],
+ homepage: 'https://unity.com/',
+ registryUrl: Unity3dDatasource.streams.stable,
+ });
+ });
+
+ it('handles missing item element', async () => {
+ const content = fixtures.no_item;
+ httpMock
+ .scope(stableStreamUrl.origin)
+ .get(stableStreamUrl.pathname)
+ .reply(200, content);
+
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ const responses = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(responses).toEqual({
+ releases: [],
+ homepage: 'https://unity.com/',
+ registryUrl: Unity3dDatasource.streams.stable,
+ });
+ });
+
+ it('returns beta if requested', async () => {
+ mockRSSFeeds({ beta: Unity3dDatasource.streams.beta });
+ const responses = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.beta],
+ });
+
+ expect(responses).toEqual({
+ registryUrl: Unity3dDatasource.streams.beta,
+ releases: [
+ {
+ changelogUrl: 'https://unity.com/releases/editor/beta/2023.3.0b6',
+ isStable: false,
+ registryUrl: Unity3dDatasource.streams.beta,
+ releaseTimestamp: '2024-02-07T07:24:40.000Z',
+ version: '2023.3.0b6',
+ },
+ ],
+ homepage: 'https://unity.com/',
+ });
+ });
+
+ it('returns stable and lts releases by default', async () => {
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ mockRSSFeeds(qualifyingStreams);
+ const responses = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ });
+
+ expect(responses).toEqual(
+ expect.objectContaining({
+ releases: [
+ {
+ changelogUrl:
+ 'https://unity.com/releases/editor/whats-new/2021.3.35',
+ isStable: true,
+ registryUrl: Unity3dDatasource.streams.stable,
+ releaseTimestamp: '2024-02-06T15:40:15.000Z',
+ version: '2021.3.35f1',
+ },
+ {
+ changelogUrl:
+ 'https://unity.com/releases/editor/whats-new/2023.2.9',
+ isStable: true,
+ registryUrl: Unity3dDatasource.streams.stable,
+ releaseTimestamp: '2024-02-07T06:56:57.000Z',
+ version: '2023.2.9f1',
+ },
+ ],
+ homepage: 'https://unity.com/',
+ }),
+ );
+
+ expect(responses).toEqual(
+ expect.objectContaining({
+ releases: expect.not.arrayContaining([
+ expect.objectContaining({
+ version: expect.stringMatching(/\(b\)/),
+ registryUrl: Unity3dDatasource.streams.beta,
+ }),
+ expect.objectContaining({
+ version: expect.stringMatching(/\(b\)/),
+ registryUrl: Unity3dDatasource.streams.beta,
+ }),
+ ]),
+ homepage: 'https://unity.com/',
+ }),
+ );
+ });
+
+ it('returns hash if requested', async () => {
+ mockRSSFeeds({ stable: Unity3dDatasource.streams.stable });
+ const responsesWithHash = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersionWithRevision',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(responsesWithHash).toEqual(
+ expect.objectContaining({
+ releases: expect.arrayContaining([
+ expect.objectContaining({
+ version: expect.stringMatching(/\(.*\)/),
+ }),
+ ]),
+ homepage: 'https://unity.com/',
+ registryUrl: Unity3dDatasource.streams.stable,
+ }),
+ );
+ });
+
+ it('returns no hash if not requested', async () => {
+ mockRSSFeeds({ stable: Unity3dDatasource.streams.stable });
+ const responsesWithoutHash = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams.stable],
+ });
+
+ expect(responsesWithoutHash).toEqual(
+ expect.objectContaining({
+ releases: expect.not.arrayContaining([
+ expect.objectContaining({
+ version: expect.stringMatching(/\(.*\)/),
+ }),
+ ]),
+ homepage: 'https://unity.com/',
+ registryUrl: Unity3dDatasource.streams.stable,
+ }),
+ );
+ });
+
+ it('returns different versions for each stream', async () => {
+ mockRSSFeeds(Unity3dDatasource.streams);
+ const responses: { [keys: string]: string[] } = Object.fromEntries(
+ await Promise.all(
+ Object.keys(Unity3dDatasource.streams).map(async (stream) => [
+ stream,
+ (
+ await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersion',
+ registryUrls: [Unity3dDatasource.streams[stream]],
+ })
+ )?.releases.map((release) => release.version),
+ ]),
+ ),
+ );
+
+ // none of the items in responses.beta are in responses.stable or responses.lts
+ expect(
+ responses.beta.every(
+ (betaVersion) =>
+ !responses.stable.includes(betaVersion) &&
+ !responses.lts.includes(betaVersion),
+ ),
+ ).toBe(true);
+ // some items in responses.stable are in responses.lts
+ expect(
+ responses.stable.some((stableVersion) =>
+ responses.lts.includes(stableVersion),
+ ),
+ ).toBe(true);
+ // not all items in responses.stable are in responses.lts
+ expect(
+ responses.stable.every((stableVersion) =>
+ responses.lts.includes(stableVersion),
+ ),
+ ).toBe(false);
+ });
+
+ it('returns only lts and stable by default', async () => {
+ const qualifyingStreams = { ...Unity3dDatasource.streams };
+ delete qualifyingStreams.beta;
+ mockRSSFeeds(qualifyingStreams);
+ const responses = await getPkgReleases({
+ datasource: Unity3dDatasource.id,
+ packageName: 'm_EditorVersionWithRevision',
+ });
+
+ expect(responses).toEqual(
+ expect.objectContaining({
+ releases: expect.arrayContaining([
+ expect.objectContaining({
+ version: expect.stringMatching(/[fp]/),
+ registryUrl: expect.stringMatching(/(releases|lts)/),
+ }),
+ expect.objectContaining({
+ version: expect.stringMatching(/[fp]/),
+ registryUrl: expect.stringMatching(/(releases|lts)/),
+ }),
+ ]),
+ homepage: 'https://unity.com/',
+ }),
+ );
+ });
+});
diff --git a/lib/modules/datasource/unity3d/index.ts b/lib/modules/datasource/unity3d/index.ts
new file mode 100644
index 00000000000000..24298d4a6c5e90
--- /dev/null
+++ b/lib/modules/datasource/unity3d/index.ts
@@ -0,0 +1,96 @@
+import { XmlDocument, XmlElement } from 'xmldoc';
+import { logger } from '../../../logger';
+import { cache } from '../../../util/cache/package/decorator';
+import * as Unity3dVersioning from '../../versioning/unity3d';
+import { Datasource } from '../datasource';
+import type { GetReleasesConfig, Release, ReleaseResult } from '../types';
+
+export class Unity3dDatasource extends Datasource {
+ static readonly homepage = 'https://unity.com/';
+ static readonly streams: Record<string, string> = {
+ lts: `${Unity3dDatasource.homepage}releases/editor/lts-releases.xml`,
+ stable: `${Unity3dDatasource.homepage}releases/editor/releases.xml`,
+ beta: `${Unity3dDatasource.homepage}releases/editor/beta/latest.xml`,
+ };
+
+ static readonly id = 'unity3d';
+
+ override readonly defaultRegistryUrls = [
+ Unity3dDatasource.streams.stable,
+ Unity3dDatasource.streams.lts,
+ ];
+
+ override readonly defaultVersioning = Unity3dVersioning.id;
+
+ override readonly registryStrategy = 'merge';
+
+ override readonly releaseTimestampSupport = true;
+ override readonly releaseTimestampNote =
+ 'The release timestamp is determined from the `pubDate` tag in the results.';
+
+ constructor() {
+ super(Unity3dDatasource.id);
+ }
+
+ async getByStream(
+ registryUrl: string | undefined,
+ withHash: boolean,
+ ): Promise<ReleaseResult | null> {
+ let channel: XmlElement | undefined = undefined;
+ try {
+ const response = await this.http.get(registryUrl!);
+ const document = new XmlDocument(response.body);
+ channel = document.childNamed('channel');
+ } catch (err) {
+ logger.error(
+ { err, registryUrl },
+ 'Failed to request releases from Unity3d datasource',
+ );
+ return null;
+ }
+
+ if (!channel) {
+ return {
+ releases: [],
+ homepage: Unity3dDatasource.homepage,
+ registryUrl,
+ };
+ }
+ const releases = channel
+ .childrenNamed('item')
+ .map((itemNode) => {
+ const versionWithHash = `${itemNode.childNamed('title')?.val} (${itemNode.childNamed('guid')?.val})`;
+ const versionWithoutHash = itemNode.childNamed('title')?.val;
+ const release: Release = {
+ version: withHash ? versionWithHash : versionWithoutHash!,
+ releaseTimestamp: itemNode.childNamed('pubDate')?.val,
+ changelogUrl: itemNode.childNamed('link')?.val,
+ isStable: registryUrl !== Unity3dDatasource.streams.beta,
+ registryUrl,
+ };
+ return release;
+ })
+ .filter((release) => !!release);
+
+ return {
+ releases,
+ homepage: Unity3dDatasource.homepage,
+ registryUrl,
+ };
+ }
+
+ @cache({
+ namespace: `datasource-${Unity3dDatasource.id}`,
+ key: ({ registryUrl, packageName }: GetReleasesConfig) =>
+ `${registryUrl}:${packageName}`,
+ })
+ async getReleases({
+ packageName,
+ registryUrl,
+ }: GetReleasesConfig): Promise<ReleaseResult | null> {
+ return await this.getByStream(
+ registryUrl,
+ packageName === 'm_EditorVersionWithRevision',
+ );
+ }
+}
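For readers who have not seen the Unity release feeds, here is a minimal standalone sketch of the mapping `getByStream()` performs. The feed shape (an RSS `channel` of `item` nodes carrying `title`, `guid`, `pubDate` and `link`) is inferred from the tests above; the sample values below are illustrative only, not taken from a live feed.

```ts
import { XmlDocument } from 'xmldoc';

// Illustrative feed only; real data comes from the unity.com release RSS URLs above.
const sampleFeed = `<?xml version="1.0" encoding="UTF-8"?>
<rss version="2.0">
  <channel>
    <item>
      <title>2023.2.9f1</title>
      <guid>abcdef123456</guid>
      <pubDate>Wed, 07 Feb 2024 06:56:57 GMT</pubDate>
      <link>https://unity.com/releases/editor/whats-new/2023.2.9</link>
    </item>
  </channel>
</rss>`;

const withHash = false; // true when packageName is 'm_EditorVersionWithRevision'
const channel = new XmlDocument(sampleFeed).childNamed('channel');
const releases = (channel?.childrenNamed('item') ?? []).map((item) => ({
  // with withHash=true the <guid> revision is appended, e.g. "2023.2.9f1 (abcdef123456)"
  version: withHash
    ? `${item.childNamed('title')?.val} (${item.childNamed('guid')?.val})`
    : item.childNamed('title')?.val,
  releaseTimestamp: item.childNamed('pubDate')?.val,
  changelogUrl: item.childNamed('link')?.val,
}));
console.log(releases); // [{ version: '2023.2.9f1', ... }]
```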
diff --git a/lib/modules/manager/ansible/readme.md b/lib/modules/manager/ansible/readme.md
index 17d2176b176557..0182734c67eab7 100644
--- a/lib/modules/manager/ansible/readme.md
+++ b/lib/modules/manager/ansible/readme.md
@@ -1,3 +1,3 @@
Supports Docker-type dependency extraction from Ansible configuration files.
-If you need to change the versioning format, read the [versioning](../../versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../versioning/index.md) documentation to learn more.
diff --git a/lib/modules/manager/api.ts b/lib/modules/manager/api.ts
index 6f618abaee49d2..66cac2fdf84bef 100644
--- a/lib/modules/manager/api.ts
+++ b/lib/modules/manager/api.ts
@@ -24,6 +24,7 @@ import * as conan from './conan';
import * as cpanfile from './cpanfile';
import * as crossplane from './crossplane';
import * as depsEdn from './deps-edn';
+import * as devContainer from './devcontainer';
import * as dockerCompose from './docker-compose';
import * as dockerfile from './dockerfile';
import * as droneci from './droneci';
@@ -75,6 +76,7 @@ import * as puppet from './puppet';
import * as pyenv from './pyenv';
import * as rubyVersion from './ruby-version';
import * as sbt from './sbt';
+import * as scalafmt from './scalafmt';
import * as setupCfg from './setup-cfg';
import * as swift from './swift';
import * as tekton from './tekton';
@@ -86,6 +88,7 @@ import * as tflintPlugin from './tflint-plugin';
import * as travis from './travis';
import type { ManagerApi } from './types';
import * as velaci from './velaci';
+import * as vendir from './vendir';
import * as woodpecker from './woodpecker';
const api = new Map<string, ManagerApi>();
@@ -117,6 +120,7 @@ api.set('conan', conan);
api.set('cpanfile', cpanfile);
api.set('crossplane', crossplane);
api.set('deps-edn', depsEdn);
+api.set('devcontainer', devContainer);
api.set('docker-compose', dockerCompose);
api.set('dockerfile', dockerfile);
api.set('droneci', droneci);
@@ -168,6 +172,7 @@ api.set('puppet', puppet);
api.set('pyenv', pyenv);
api.set('ruby-version', rubyVersion);
api.set('sbt', sbt);
+api.set('scalafmt', scalafmt);
api.set('setup-cfg', setupCfg);
api.set('swift', swift);
api.set('tekton', tekton);
@@ -178,4 +183,5 @@ api.set('terragrunt-version', terragruntVersion);
api.set('tflint-plugin', tflintPlugin);
api.set('travis', travis);
api.set('velaci', velaci);
+api.set('vendir', vendir);
api.set('woodpecker', woodpecker);
diff --git a/lib/modules/manager/argocd/__fixtures__/validApplication.yml b/lib/modules/manager/argocd/__fixtures__/validApplication.yml
index 4533b0b64491d7..fb88a817345c5a 100644
--- a/lib/modules/manager/argocd/__fixtures__/validApplication.yml
+++ b/lib/modules/manager/argocd/__fixtures__/validApplication.yml
@@ -128,4 +128,13 @@ spec:
helm:
valueFiles:
- $foo/values.yaml
-
+---
+{{- if .Values.enabled }}
+apiVersion: argoproj.io/v1alpha1
+kind: Application
+spec:
+ source:
+ chart: somechart
+ repoURL: https://git.example.com/foo/bar.git
+ targetRevision: 3.2.1
+{{- end }}
diff --git a/lib/modules/manager/argocd/extract.spec.ts b/lib/modules/manager/argocd/extract.spec.ts
index 0d4c70873a44af..b04165b347803a 100644
--- a/lib/modules/manager/argocd/extract.spec.ts
+++ b/lib/modules/manager/argocd/extract.spec.ts
@@ -165,6 +165,12 @@ spec:
depName: 'somechart',
registryUrls: ['https://foo.io/repo'],
},
+ {
+ currentValue: '3.2.1',
+ datasource: 'helm',
+ depName: 'somechart',
+ registryUrls: ['https://git.example.com/foo/bar.git'],
+ },
],
});
});
diff --git a/lib/modules/manager/argocd/extract.ts b/lib/modules/manager/argocd/extract.ts
index 49d3daa6c9134f..a4286fe8e7f17c 100644
--- a/lib/modules/manager/argocd/extract.ts
+++ b/lib/modules/manager/argocd/extract.ts
@@ -6,6 +6,7 @@ import { parseYaml } from '../../../util/yaml';
import { DockerDatasource } from '../../datasource/docker';
import { GitTagsDatasource } from '../../datasource/git-tags';
import { HelmDatasource } from '../../datasource/helm';
+import { isOCIRegistry, removeOCIPrefix } from '../helmv3/oci';
import type {
ExtractConfig,
PackageDependency,
@@ -36,6 +37,7 @@ export function extractPackageFile(
definitions = parseYaml(content, null, {
customSchema: ApplicationDefinition,
failureBehaviour: 'filter',
+ removeTemplates: true,
});
} catch (err) {
logger.debug({ err, packageFile }, 'Failed to parse ArgoCD definition.');
@@ -51,12 +53,8 @@ function processSource(source: ApplicationSource): PackageDependency | null {
// a chart variable is defined this is helm declaration
if (source.chart) {
// assume OCI helm chart if repoURL doesn't contain explicit protocol
- if (
- source.repoURL.startsWith('oci://') ||
- !source.repoURL.includes('://')
- ) {
- let registryURL = source.repoURL.replace('oci://', '');
- registryURL = trimTrailingSlash(registryURL);
+ if (isOCIRegistry(source.repoURL) || !source.repoURL.includes('://')) {
+ const registryURL = trimTrailingSlash(removeOCIPrefix(source.repoURL));
return {
depName: `${registryURL}/${source.chart}`,
diff --git a/lib/modules/manager/argocd/readme.md b/lib/modules/manager/argocd/readme.md
index f00c6f5bfd2447..2b86d4cdac6b10 100644
--- a/lib/modules/manager/argocd/readme.md
+++ b/lib/modules/manager/argocd/readme.md
@@ -2,7 +2,7 @@ To use the `argocd` manager you must set your own `fileMatch` pattern.
The `argocd` manager has no default `fileMatch` pattern, because there is no common filename or directory name convention for Argo CD YAML files.
By setting your own `fileMatch` Renovate avoids having to check each `*.yaml` file in a repository for a Argo CD definition.
-If you need to change the versioning format, read the [versioning](../../../modules/versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../../modules/versioning/index.md) documentation to learn more.
Some configuration examples:
diff --git a/lib/modules/manager/asdf/extract.spec.ts b/lib/modules/manager/asdf/extract.spec.ts
index ec528331a5a37e..0b168813e77f7e 100644
--- a/lib/modules/manager/asdf/extract.spec.ts
+++ b/lib/modules/manager/asdf/extract.spec.ts
@@ -53,6 +53,7 @@ bun 0.2.2
cargo-make 0.36.2
checkov 2.3.3
clojure 1.11.1.1182
+cosign 2.2.4
crystal 1.6.1
dart 2.19.3
deno 1.26.2
@@ -69,6 +70,7 @@ github-cli 2.32.1
gohugo extended_0.104.3
golang 1.19.2
golangci-lint 1.52.2
+gomplate 3.11.7
hadolint 2.12.0
haskell 9.4.2
helm 3.10.1
@@ -85,6 +87,7 @@ kustomize 4.5.7
lua 5.4.4
maven 3.9.6
mimirtool 2.11.0
+minikube 1.33.1
nim 1.6.8
nodejs 18.12.0
ocaml 4.14.0
@@ -96,6 +99,7 @@ poetry 1.3.2
pre-commit 3.3.1
pulumi 3.57.1
python 3.11.0
+rebar 3.23.0
ruby 3.1.2
rust 1.64.0
sbt 1.9.7
@@ -182,6 +186,13 @@ dummy 1.2.3
'regex:^(?<major>\\d+?)\\.(?<minor>\\d+?)\\.(?<patch>\\d+)\\.(?<build>\\d+)$',
depName: 'clojure',
},
+ {
+ currentValue: '2.2.4',
+ datasource: 'github-releases',
+ packageName: 'sigstore/cosign',
+ depName: 'cosign',
+ extractVersion: '^v(?<version>\\S+)',
+ },
{
currentValue: '1.6.1',
datasource: 'github-releases',
@@ -286,6 +297,13 @@ dummy 1.2.3
depName: 'golangci-lint',
extractVersion: '^v(?<version>.+)',
},
+ {
+ currentValue: '3.11.7',
+ datasource: 'github-releases',
+ packageName: 'hairyhenderson/gomplate',
+ depName: 'gomplate',
+ extractVersion: '^v(?<version>.+)',
+ },
{
currentValue: '2.12.0',
datasource: 'github-tags',
@@ -395,6 +413,13 @@ dummy 1.2.3
depName: 'mimirtool',
extractVersion: '^mimir-(?<version>\\S+)',
},
+ {
+ currentValue: '1.33.1',
+ datasource: 'github-releases',
+ packageName: 'kubernetes/minikube',
+ depName: 'minikube',
+ extractVersion: '^v(?<version>\\S+)',
+ },
{
currentValue: '1.6.8',
datasource: 'github-tags',
@@ -468,6 +493,12 @@ dummy 1.2.3
depName: 'python',
extractVersion: '^v(?<version>\\S+)',
},
+ {
+ currentValue: '3.23.0',
+ datasource: 'github-tags',
+ packageName: 'erlang/rebar3',
+ depName: 'rebar',
+ },
{
currentValue: '3.1.2',
datasource: 'ruby-version',
diff --git a/lib/modules/manager/asdf/upgradeable-tooling.ts b/lib/modules/manager/asdf/upgradeable-tooling.ts
index 45051832b7053a..d0a1426bd012b0 100644
--- a/lib/modules/manager/asdf/upgradeable-tooling.ts
+++ b/lib/modules/manager/asdf/upgradeable-tooling.ts
@@ -86,7 +86,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
},
},
'cargo-make': {
- asdfPluginUrl: 'https://github.com/kachick/asdf-cargo-make',
+ asdfPluginUrl: 'https://github.com/mise-plugins/asdf-cargo-make',
config: {
datasource: GithubReleasesDatasource.id,
packageName: 'sagiegurari/cargo-make',
@@ -107,6 +107,14 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
versioning: `${regexVersioning.id}:^(?<major>\\d+?)\\.(?<minor>\\d+?)\\.(?<patch>\\d+)\\.(?<build>\\d+)$`,
},
},
+ cosign: {
+ asdfPluginUrl: 'https://gitlab.com/wt0f/asdf-cosign',
+ config: {
+ datasource: GithubReleasesDatasource.id,
+ packageName: 'sigstore/cosign',
+ extractVersion: '^v(?<version>\\S+)',
+ },
+ },
crystal: {
asdfPluginUrl: 'https://github.com/asdf-community/asdf-crystal',
config: {
@@ -221,6 +229,14 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
extractVersion: '^v(?<version>.+)',
},
},
+ gomplate: {
+ asdfPluginUrl: 'https://github.com/sneakybeaky/asdf-gomplate',
+ config: {
+ datasource: GithubReleasesDatasource.id,
+ packageName: 'hairyhenderson/gomplate',
+ extractVersion: '^v(?<version>.+)',
+ },
+ },
hadolint: {
asdfPluginUrl: 'https://github.com/looztra/asdf-hadolint.git',
config: {
@@ -399,6 +415,14 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
extractVersion: '^mimir-(?<version>\\S+)',
},
},
+ minikube: {
+ asdfPluginUrl: 'https://github.com/alvarobp/asdf-minikube.git',
+ config: {
+ datasource: GithubReleasesDatasource.id,
+ packageName: 'kubernetes/minikube',
+ extractVersion: '^v(?<version>\\S+)',
+ },
+ },
nim: {
asdfPluginUrl: 'https://github.com/asdf-community/asdf-nim',
config: {
@@ -484,6 +508,13 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
extractVersion: '^v(?<version>\\S+)',
},
},
+ rebar: {
+ asdfPluginUrl: 'https://github.com/Stratus3D/asdf-rebar.git',
+ config: {
+ datasource: GithubTagsDatasource.id,
+ packageName: 'erlang/rebar3',
+ },
+ },
ruby: {
asdfPluginUrl: 'https://github.com/asdf-vm/asdf-ruby',
config: {
@@ -654,7 +685,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
},
},
yamlfmt: {
- asdfPluginUrl: 'https://github.com/kachick/asdf-yamlfmt',
+ asdfPluginUrl: 'https://github.com/mise-plugins/asdf-yamlfmt',
config: {
datasource: GithubReleasesDatasource.id,
packageName: 'google/yamlfmt',
diff --git a/lib/modules/manager/azure-pipelines/extract.spec.ts b/lib/modules/manager/azure-pipelines/extract.spec.ts
index e53b41639e5495..eba2d1114abaca 100644
--- a/lib/modules/manager/azure-pipelines/extract.spec.ts
+++ b/lib/modules/manager/azure-pipelines/extract.spec.ts
@@ -35,11 +35,14 @@ describe('modules/manager/azure-pipelines/extract', () => {
describe('extractRepository()', () => {
it('should extract repository information', () => {
expect(
- extractRepository({
- type: 'github',
- name: 'user/repo',
- ref: 'refs/tags/v1.0.0',
- }),
+ extractRepository(
+ {
+ type: 'github',
+ name: 'user/repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ 'user',
+ ),
).toMatchObject({
depName: 'user/repo',
packageName: 'https://github.com/user/repo.git',
@@ -48,30 +51,39 @@ describe('modules/manager/azure-pipelines/extract', () => {
it('should return null when repository type is not github', () => {
expect(
- extractRepository({
- type: 'bitbucket',
- name: 'user/repo',
- ref: 'refs/tags/v1.0.0',
- }),
+ extractRepository(
+ {
+ type: 'bitbucket',
+ name: 'user/repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ 'user/repo',
+ ),
).toBeNull();
});
it('should return null when reference is not defined specified', () => {
expect(
- extractRepository({
- type: 'github',
- name: 'user/repo',
- }),
+ extractRepository(
+ {
+ type: 'github',
+ name: 'user/repo',
+ },
+ 'user/repo',
+ ),
).toBeNull();
});
it('should return null when reference is invalid tag format', () => {
expect(
- extractRepository({
- type: 'github',
- name: 'user/repo',
- ref: 'refs/head/master',
- }),
+ extractRepository(
+ {
+ type: 'github',
+ name: 'user/repo',
+ ref: 'refs/head/master',
+ },
+ 'user/repo',
+ ),
).toBeNull();
});
@@ -82,43 +94,91 @@ describe('modules/manager/azure-pipelines/extract', () => {
});
expect(
- extractRepository({
- type: 'git',
- name: 'project/repo',
- ref: 'refs/tags/v1.0.0',
- }),
+ extractRepository(
+ {
+ type: 'git',
+ name: 'project/repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ 'otherProject/otherRepo',
+ ),
).toMatchObject({
depName: 'project/repo',
packageName: 'https://dev.azure.com/renovate-org/project/_git/repo',
});
});
- it('should return null if repository type is git and project not in name', () => {
+ it('should extract Azure repository information if project is not in name but is in the config repository', () => {
GlobalConfig.set({
platform: 'azure',
endpoint: 'https://dev.azure.com/renovate-org',
});
expect(
- extractRepository({
- type: 'git',
- name: 'repo',
- ref: 'refs/tags/v1.0.0',
- }),
+ extractRepository(
+ {
+ type: 'git',
+ name: 'repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ 'project/otherrepo',
+ ),
+ ).toMatchObject({
+ depName: 'project/repo',
+ packageName: 'https://dev.azure.com/renovate-org/project/_git/repo',
+ });
+ });
+
+ it('should return null if repository type is git and project not in name nor in config repository name', () => {
+ GlobalConfig.set({
+ platform: 'azure',
+ endpoint: 'https://dev.azure.com/renovate-org',
+ });
+
+ expect(
+ extractRepository(
+ {
+ type: 'git',
+ name: 'repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ '',
+ ),
+ ).toBeNull();
+ });
+
+ it('should return null if repository type is git and currentRepository is undefined', () => {
+ GlobalConfig.set({
+ platform: 'azure',
+ endpoint: 'https://dev.azure.com/renovate-org',
+ });
+
+ expect(
+ extractRepository(
+ {
+ type: 'git',
+ name: 'repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ undefined,
+ ),
).toBeNull();
});
- it('should extract return null for git repo type if platform not Azure', () => {
+ it('should return null for git repo type if platform not Azure', () => {
GlobalConfig.set({
platform: 'github',
});
expect(
- extractRepository({
- type: 'git',
- name: 'project/repo',
- ref: 'refs/tags/v1.0.0',
- }),
+ extractRepository(
+ {
+ type: 'git',
+ name: 'project/repo',
+ ref: 'refs/tags/v1.0.0',
+ },
+ '',
+ ),
).toBeNull();
});
});
@@ -153,11 +213,15 @@ describe('modules/manager/azure-pipelines/extract', () => {
describe('extractPackageFile()', () => {
it('returns null for invalid azure pipelines files', () => {
- expect(extractPackageFile('}', azurePipelinesFilename)).toBeNull();
+ expect(
+ extractPackageFile('}', azurePipelinesFilename, { repository: 'repo' }),
+ ).toBeNull();
});
it('extracts dependencies', () => {
- const res = extractPackageFile(azurePipelines, azurePipelinesFilename);
+ const res = extractPackageFile(azurePipelines, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toMatchObject([
{
depName: 'user/repo',
@@ -180,7 +244,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
it('should return null when there is no dependency found', () => {
expect(
- extractPackageFile(azurePipelinesNoDependency, azurePipelinesFilename),
+ extractPackageFile(azurePipelinesNoDependency, azurePipelinesFilename, {
+ repository: 'repo',
+ }),
).toBeNull();
});
@@ -196,7 +262,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -219,7 +287,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -242,7 +312,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -264,7 +336,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -286,7 +360,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -308,7 +384,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -330,7 +408,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -352,7 +432,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -373,7 +455,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -392,7 +476,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -409,7 +495,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
inputs:
script: 'echo Hello World'
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res?.deps).toEqual([
{
depName: 'Bash',
@@ -424,7 +512,9 @@ describe('modules/manager/azure-pipelines/extract', () => {
steps:
- bash: 'echo Hello World';
`;
- const res = extractPackageFile(packageFile, azurePipelinesFilename);
+ const res = extractPackageFile(packageFile, azurePipelinesFilename, {
+ repository: 'repo',
+ });
expect(res).toBeNull();
});
});
diff --git a/lib/modules/manager/azure-pipelines/extract.ts b/lib/modules/manager/azure-pipelines/extract.ts
index 433d4b7c2f987f..7a814f0100f3f3 100644
--- a/lib/modules/manager/azure-pipelines/extract.ts
+++ b/lib/modules/manager/azure-pipelines/extract.ts
@@ -6,7 +6,11 @@ import { joinUrlParts } from '../../../util/url';
import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks';
import { GitTagsDatasource } from '../../datasource/git-tags';
import { getDep } from '../dockerfile/extract';
-import type { PackageDependency, PackageFileContent } from '../types';
+import type {
+ ExtractConfig,
+ PackageDependency,
+ PackageFileContent,
+} from '../types';
import {
AzurePipelines,
AzurePipelinesYaml,
@@ -23,22 +27,20 @@ const AzurePipelinesTaskRegex = regEx(/^(?<name>[^@]+)@(?<version>.*)$/);
export function extractRepository(
repository: Repository,
+ currentRepository?: string,
): PackageDependency | null {
let repositoryUrl = null;
+ let depName = repository.name;
+
if (repository.type === 'github') {
repositoryUrl = `https://github.com/${repository.name}.git`;
} else if (repository.type === 'git') {
- // "git" type indicates an AzureDevOps repository.
- // The repository URL is only deducible if we are running on AzureDevOps (so can use the endpoint)
- // and the name is of the form `Project/Repository`.
- // The name could just be the repository name, in which case AzureDevOps defaults to the
- // same project, which is not currently accessible here. It could be deduced later by exposing
- // the repository URL to managers.
- // https://docs.microsoft.com/en-us/azure/devops/pipelines/yaml-schema/resources-repositories-repository?view=azure-pipelines#types
const platform = GlobalConfig.get('platform');
const endpoint = GlobalConfig.get('endpoint');
+
if (platform === 'azure' && endpoint) {
+ // extract the project name if the repository from which the pipeline references templates contains the Azure DevOps project name
if (repository.name.includes('/')) {
const [projectName, repoName] = repository.name.split('/');
repositoryUrl = joinUrlParts(
@@ -47,9 +49,20 @@ export function extractRepository(
'_git',
encodeURIComponent(repoName),
);
+
+ // if the repository from which the pipeline references templates does not contain the Azure DevOps project name, get the project name from the repository containing the pipeline file being processed
+ } else if (currentRepository?.includes('/')) {
+ const projectName = currentRepository.split('/')[0];
+ depName = `${projectName}/${repository.name}`;
+ repositoryUrl = joinUrlParts(
+ endpoint,
+ encodeURIComponent(projectName),
+ '_git',
+ encodeURIComponent(repository.name),
+ );
} else {
logger.debug(
- 'Renovate cannot update repositories that do not include the project name',
+ 'Renovate cannot update Azure pipelines in git repositories when neither the current repository nor the target repository contains the Azure DevOps project name.',
);
}
}
@@ -67,7 +80,7 @@ export function extractRepository(
autoReplaceStringTemplate: 'refs/tags/{{newValue}}',
currentValue: repository.ref.replace('refs/tags/', ''),
datasource: GitTagsDatasource.id,
- depName: repository.name,
+ depName,
depType: 'gitTags',
packageName: repositoryUrl,
replaceString: repository.ref,
@@ -168,6 +181,7 @@ function extractJobs(jobs: Jobs | undefined): PackageDependency[] {
export function extractPackageFile(
content: string,
packageFile: string,
+ config: ExtractConfig,
): PackageFileContent | null {
logger.trace(`azurePipelines.extractPackageFile(${packageFile})`);
const deps: PackageDependency[] = [];
@@ -178,7 +192,7 @@ export function extractPackageFile(
}
for (const repository of coerceArray(pkg.resources?.repositories)) {
- const dep = extractRepository(repository);
+ const dep = extractRepository(repository, config.repository);
if (dep) {
deps.push(dep);
}
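To make the new fallback concrete, here is a hedged sketch that mirrors the test cases above; the endpoint and repository names are made up, and only the shape of the logic follows the hunk.

```ts
const endpoint = 'https://dev.azure.com/renovate-org'; // assumed GlobalConfig endpoint
const repository = { type: 'git', name: 'repo', ref: 'refs/tags/v1.0.0' };
const currentRepository = 'project/otherrepo'; // config.repository of the pipeline being processed

let depName = repository.name;
let repositoryUrl: string | null = null;

if (repository.name.includes('/')) {
  // the referenced repository already carries its project name
  const [projectName, repoName] = repository.name.split('/');
  repositoryUrl = `${endpoint}/${encodeURIComponent(projectName)}/_git/${encodeURIComponent(repoName)}`;
} else if (currentRepository?.includes('/')) {
  // new behaviour: borrow the project name from the repository being processed
  const projectName = currentRepository.split('/')[0];
  depName = `${projectName}/${repository.name}`;
  repositoryUrl = `${endpoint}/${encodeURIComponent(projectName)}/_git/${encodeURIComponent(repository.name)}`;
}

console.log({ depName, packageName: repositoryUrl });
// -> { depName: 'project/repo', packageName: 'https://dev.azure.com/renovate-org/project/_git/repo' }
```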
diff --git a/lib/modules/manager/azure-pipelines/readme.md b/lib/modules/manager/azure-pipelines/readme.md
index aee00bea3e63ae..3536e8181f2527 100644
--- a/lib/modules/manager/azure-pipelines/readme.md
+++ b/lib/modules/manager/azure-pipelines/readme.md
@@ -42,7 +42,7 @@ resources:
ref: refs/tags/v0.5.1
containers:
- container: linux
- image: ubuntu:16.04
+ image: ubuntu:24.04
- container: python
image: python:3.7@sha256:3870d35b962a943df72d948580fc66ceaaee1c4fbd205930f32e0f0760eb1077
diff --git a/lib/modules/manager/batect/readme.md b/lib/modules/manager/batect/readme.md
index 6c16968630fe09..5ba4e364834c6b 100644
--- a/lib/modules/manager/batect/readme.md
+++ b/lib/modules/manager/batect/readme.md
@@ -28,6 +28,6 @@ For example:
### Bundle versioning
-This manager assumes that any bundles referenced use tags for versioning, and that these tags use [SemVer](../../versioning.md#semantic-versioning).
+This manager assumes that any bundles referenced use tags for versioning, and that these tags use [SemVer](../../versioning/semver/index.md).
The implementation of SemVer is strict - versions must follow the `X.Y.Z` or `vX.Y.Z` format.
Versions that don't match this format (eg. `X.Y`) will be ignored.
diff --git a/lib/modules/manager/bazel/index.ts b/lib/modules/manager/bazel/index.ts
index 77b156c93b3469..5b12b8aafe65cf 100644
--- a/lib/modules/manager/bazel/index.ts
+++ b/lib/modules/manager/bazel/index.ts
@@ -9,7 +9,7 @@ import { extractPackageFile } from './extract';
export { extractPackageFile, updateArtifacts };
export const defaultConfig = {
- fileMatch: ['(^|/)WORKSPACE(|\\.bazel)$', '\\.bzl$'],
+ fileMatch: ['(^|/)WORKSPACE(|\\.bazel|\\.bzlmod)$', '\\.bzl$'],
};
export const categories: Category[] = ['bazel'];
diff --git a/lib/modules/manager/bazel/readme.md b/lib/modules/manager/bazel/readme.md
index 9f00e3f1d78476..4c36ae61b0bd4e 100644
--- a/lib/modules/manager/bazel/readme.md
+++ b/lib/modules/manager/bazel/readme.md
@@ -2,4 +2,4 @@ Bazel is quite unlike most other "package managers" that Renovate supports, whic
Instead, Bazel is a build tool so supports a multitude of languages/datasources.
Renovate does not support all possible Bazel references, although would like to, and feature requests are welcome.
-If you need to change the versioning format, read the [versioning](../../versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../versioning/index.md) documentation to learn more.
diff --git a/lib/modules/manager/bitbucket-pipelines/readme.md b/lib/modules/manager/bitbucket-pipelines/readme.md
index dbd5f08fc60c13..99e3c571924277 100644
--- a/lib/modules/manager/bitbucket-pipelines/readme.md
+++ b/lib/modules/manager/bitbucket-pipelines/readme.md
@@ -1,3 +1,3 @@
Extracts dependencies from Bitbucket Pipelines config files.
-If you need to change the versioning format, read the [versioning](../../versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../versioning/index.md) documentation to learn more.
diff --git a/lib/modules/manager/buildkite/readme.md b/lib/modules/manager/buildkite/readme.md
index 50b917a34d5c81..23ba19fe73ef6c 100644
--- a/lib/modules/manager/buildkite/readme.md
+++ b/lib/modules/manager/buildkite/readme.md
@@ -1,3 +1,3 @@
Used for updating Docker dependencies in Buildkite configuration files.
-If you need to change the versioning format, read the [versioning](../../versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../versioning/index.md) documentation to learn more.
diff --git a/lib/modules/manager/bundler/__fixtures__/Gemfile.sourceBlockWithGroups b/lib/modules/manager/bundler/__fixtures__/Gemfile.sourceBlockWithGroups
new file mode 100644
index 00000000000000..2b0dc5ec239fd4
--- /dev/null
+++ b/lib/modules/manager/bundler/__fixtures__/Gemfile.sourceBlockWithGroups
@@ -0,0 +1,12 @@
+source 'https://hub.tech.my.domain.de/artifactory/api/gems/my-gems-prod-local/' do
+ gem 'sfn_my_dep1', "~> 1"
+ gem 'sfn_my_dep2', "~> 1"
+
+ group :test, :development do
+ gem 'internal_test_gem', "~> 1"
+ end
+
+ group :production do
+ gem 'internal_production_gem', "~> 1"
+ end
+end
diff --git a/lib/modules/manager/bundler/extract.spec.ts b/lib/modules/manager/bundler/extract.spec.ts
index fe7916baeebcf1..cbee5d033f7c3e 100644
--- a/lib/modules/manager/bundler/extract.spec.ts
+++ b/lib/modules/manager/bundler/extract.spec.ts
@@ -26,6 +26,9 @@ const sourceBlockWithNewLinesGemfileLock = Fixtures.get(
const sourceBlockWithNewLinesGemfile = Fixtures.get(
'Gemfile.sourceBlockWithNewLines',
);
+const sourceBlockWithGroupsGemfile = Fixtures.get(
+ 'Gemfile.sourceBlockWithGroups',
+);
describe('modules/manager/bundler/extract', () => {
describe('extractPackageFile()', () => {
@@ -124,4 +127,18 @@ describe('modules/manager/bundler/extract', () => {
expect(res).toMatchSnapshot();
expect(res?.deps).toHaveLength(2);
});
+
+ it('parses source blocks with groups in Gemfile', async () => {
+ fs.readLocalFile.mockResolvedValueOnce(sourceBlockWithGroupsGemfile);
+ const res = await extractPackageFile(
+ sourceBlockWithGroupsGemfile,
+ 'Gemfile',
+ );
+ expect(res?.deps).toMatchObject([
+ { depName: 'internal_test_gem', currentValue: '"~> 1"' },
+ { depName: 'internal_production_gem', currentValue: '"~> 1"' },
+ { depName: 'sfn_my_dep1', currentValue: '"~> 1"' },
+ { depName: 'sfn_my_dep2', currentValue: '"~> 1"' },
+ ]);
+ });
});
diff --git a/lib/modules/manager/bundler/extract.ts b/lib/modules/manager/bundler/extract.ts
index 167dcaeccb9600..b204056b5db61f 100644
--- a/lib/modules/manager/bundler/extract.ts
+++ b/lib/modules/manager/bundler/extract.ts
@@ -16,12 +16,70 @@ export async function extractPackageFile(
content: string,
packageFile?: string,
): Promise<PackageFileContent | null> {
+ let lineNumber: number;
+ async function processGroupBlock(
+ line: string,
+ repositoryUrl?: string,
+ trimGroupLine: boolean = false,
+ ): Promise<void> {
+ const groupMatch = regEx(/^group\s+(.*?)\s+do/).exec(line);
+ if (groupMatch) {
+ const depTypes = groupMatch[1]
+ .split(',')
+ .map((group) => group.trim())
+ .map((group) => group.replace(regEx(/^:/), ''));
+
+ const groupLineNumber = lineNumber;
+ let groupContent = '';
+ let groupLine = '';
+
+ while (
+ lineNumber < lines.length &&
+ (trimGroupLine ? groupLine.trim() !== 'end' : groupLine !== 'end')
+ ) {
+ lineNumber += 1;
+ groupLine = lines[lineNumber];
+
+ // istanbul ignore if
+ if (!is.string(groupLine)) {
+ logger.debug(
+ { content, packageFile, type: 'groupLine' },
+ 'Bundler parsing error',
+ );
+ groupLine = 'end';
+ }
+ if (trimGroupLine ? groupLine.trim() !== 'end' : groupLine !== 'end') {
+ groupContent += formatContent(groupLine);
+ }
+ }
+
+ const groupRes = await extractPackageFile(groupContent);
+ if (groupRes) {
+ res.deps = res.deps.concat(
+ groupRes.deps.map((dep) => {
+ const depObject = {
+ ...dep,
+ depTypes,
+ managerData: {
+ lineNumber:
+ Number(dep.managerData?.lineNumber) + groupLineNumber + 1,
+ },
+ };
+ if (repositoryUrl) {
+ depObject.registryUrls = [repositoryUrl];
+ }
+ return depObject;
+ }),
+ );
+ }
+ }
+ }
const res: PackageFileContent = {
registryUrls: [],
deps: [],
};
const lines = content.split(newlineRegex);
- for (let lineNumber = 0; lineNumber < lines.length; lineNumber += 1) {
+ for (lineNumber = 0; lineNumber < lines.length; lineNumber += 1) {
const line = lines[lineNumber];
let sourceMatch: RegExpMatchArray | null = null;
for (const delimiter of delimiters) {
@@ -61,44 +119,9 @@ export async function extractPackageFile(
dep.datasource = RubyGemsDatasource.id;
res.deps.push(dep);
}
- const groupMatch = regEx(/^group\s+(.*?)\s+do/).exec(line);
- if (groupMatch) {
- const depTypes = groupMatch[1]
- .split(',')
- .map((group) => group.trim())
- .map((group) => group.replace(regEx(/^:/), ''));
- const groupLineNumber = lineNumber;
- let groupContent = '';
- let groupLine = '';
- while (lineNumber < lines.length && groupLine !== 'end') {
- lineNumber += 1;
- groupLine = lines[lineNumber];
- // istanbul ignore if
- if (!is.string(groupLine)) {
- logger.debug(
- { content, packageFile, type: 'groupLine' },
- 'Bundler parsing error',
- );
- groupLine = 'end';
- }
- if (groupLine !== 'end') {
- groupContent += formatContent(groupLine);
- }
- }
- const groupRes = await extractPackageFile(groupContent);
- if (groupRes) {
- res.deps = res.deps.concat(
- groupRes.deps.map((dep) => ({
- ...dep,
- depTypes,
- managerData: {
- lineNumber:
- Number(dep.managerData?.lineNumber) + groupLineNumber + 1,
- },
- })),
- );
- }
- }
+
+ await processGroupBlock(line);
+
for (const delimiter of delimiters) {
const sourceBlockMatch = regEx(
`^source\\s+${delimiter}(.*?)${delimiter}\\s+do`,
@@ -108,6 +131,7 @@ export async function extractPackageFile(
const sourceLineNumber = lineNumber;
let sourceContent = '';
let sourceLine = '';
+
while (lineNumber < lines.length && sourceLine.trim() !== 'end') {
lineNumber += 1;
sourceLine = lines[lineNumber];
@@ -119,11 +143,16 @@ export async function extractPackageFile(
);
sourceLine = 'end';
}
- if (sourceLine !== 'end') {
+
+ await processGroupBlock(sourceLine.trim(), repositoryUrl, true);
+
+ if (sourceLine.trim() !== 'end') {
sourceContent += formatContent(sourceLine);
}
}
+
const sourceRes = await extractPackageFile(sourceContent);
+
if (sourceRes) {
res.deps = res.deps.concat(
sourceRes.deps.map((dep) => ({
diff --git a/lib/modules/manager/bundler/update-locked.spec.ts b/lib/modules/manager/bundler/update-locked.spec.ts
index 37caaadac05207..a0566ef66b1765 100644
--- a/lib/modules/manager/bundler/update-locked.spec.ts
+++ b/lib/modules/manager/bundler/update-locked.spec.ts
@@ -53,7 +53,7 @@ describe('modules/manager/bundler/update-locked', () => {
expect(updateLockedDependency(config).status).toBe('unsupported');
});
- it('returns update-falied incase of errors', () => {
+ it('returns update-failed in case of errors', () => {
const config: UpdateLockedConfig = {
packageFile: 'Gemfile',
lockFile: 'Gemfile.lock',
diff --git a/lib/modules/manager/cargo/__fixtures__/Cargo.7.toml b/lib/modules/manager/cargo/__fixtures__/Cargo.7.toml
new file mode 100644
index 00000000000000..aa2274f4a8aabf
--- /dev/null
+++ b/lib/modules/manager/cargo/__fixtures__/Cargo.7.toml
@@ -0,0 +1,9 @@
+[package]
+name = "renovate-test"
+version = "0.1.0"
+authors = ["John Doe "]
+edition = "2018"
+
+[dependencies]
+tokio = "0.2"
+
diff --git a/lib/modules/manager/cargo/extract.spec.ts b/lib/modules/manager/cargo/extract.spec.ts
index cf80672e7cf76c..16d8c7b2b04ff2 100644
--- a/lib/modules/manager/cargo/extract.spec.ts
+++ b/lib/modules/manager/cargo/extract.spec.ts
@@ -23,6 +23,7 @@ const cargo4toml = Fixtures.get('Cargo.4.toml');
const cargo5toml = Fixtures.get('Cargo.5.toml');
const cargo6configtoml = Fixtures.get('cargo.6.config.toml');
const cargo6toml = Fixtures.get('Cargo.6.toml');
+const cargo7toml = Fixtures.get('Cargo.7.toml');
const lockfileUpdateCargotoml = Fixtures.get('lockfile-update/Cargo.toml');
@@ -167,6 +168,31 @@ replace-with = "private-crates"`,
]);
});
+ it('extracts overridden source registry indexes from .cargo/config.toml', async () => {
+ mockReadLocalFile({
+ '.cargo/config.toml': codeBlock`[source.crates-io-replacement]
+registry = "https://github.com/replacement/testregistry"
+
+[source.crates-io]
+replace-with = "crates-io-replacement"`,
+ });
+ const res = await extractPackageFile(cargo7toml, 'Cargo.toml', {
+ ...config,
+ });
+ expect(res?.deps).toEqual([
+ {
+ currentValue: '0.2',
+ datasource: 'crate',
+ depName: 'tokio',
+ depType: 'dependencies',
+ managerData: {
+ nestedVersion: false,
+ },
+ registryUrls: ['https://github.com/replacement/testregistry'],
+ },
+ ]);
+ });
+
it('extracts registries overridden to the default', async () => {
mockReadLocalFile({
'.cargo/config.toml': codeBlock`[source.mcorbin]
diff --git a/lib/modules/manager/cargo/extract.ts b/lib/modules/manager/cargo/extract.ts
index cd0d3f746fbe4e..76ea1bdd2de440 100644
--- a/lib/modules/manager/cargo/extract.ts
+++ b/lib/modules/manager/cargo/extract.ts
@@ -191,6 +191,14 @@ function resolveRegistryIndex(
);
}
+ const sourceRegistry = config.source?.[registryName]?.registry;
+ if (sourceRegistry) {
+ logger.debug(
+ `Replacing cargo source registry with ${sourceRegistry} for ${registryName}`,
+ );
+ return sourceRegistry;
+ }
+
const registryIndex = config.registries?.[registryName]?.index;
if (registryIndex) {
return registryIndex;
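A hedged sketch of the lookup order after this change, shaped to match the new test case; it is not a copy of `resolveRegistryIndex`, and the `replace-with` chaining shown here is an assumption.

```ts
interface CargoConfig {
  registries?: Record<string, { index?: string }>;
  source?: Record<string, { 'replace-with'?: string; registry?: string }>;
}

// Assumed order: follow a replace-with redirection, prefer a source `registry`
// URL, then fall back to a registry `index`.
function resolveIndex(registryName: string, config: CargoConfig): string | undefined {
  const replacement = config.source?.[registryName]?.['replace-with'];
  const name = replacement ?? registryName;
  return config.source?.[name]?.registry ?? config.registries?.[name]?.index;
}

const config: CargoConfig = {
  source: {
    'crates-io': { 'replace-with': 'crates-io-replacement' },
    'crates-io-replacement': { registry: 'https://github.com/replacement/testregistry' },
  },
};
console.log(resolveIndex('crates-io', config));
// -> https://github.com/replacement/testregistry
```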
diff --git a/lib/modules/manager/cargo/range.spec.ts b/lib/modules/manager/cargo/range.spec.ts
index 62c07b1389b2b3..d41d3e6cad3571 100644
--- a/lib/modules/manager/cargo/range.spec.ts
+++ b/lib/modules/manager/cargo/range.spec.ts
@@ -15,11 +15,11 @@ describe('modules/manager/cargo/range', () => {
expect(getRangeStrategy(config)).toBe('widen');
});
- it('defaults to bump', () => {
+ it('defaults to update-lockfile', () => {
const config: RangeConfig = {
rangeStrategy: 'auto',
currentValue: '1.0.0',
};
- expect(getRangeStrategy(config)).toBe('bump');
+ expect(getRangeStrategy(config)).toBe('update-lockfile');
});
});
diff --git a/lib/modules/manager/cargo/range.ts b/lib/modules/manager/cargo/range.ts
index a04628d55c127d..fbfe99da61a6f7 100644
--- a/lib/modules/manager/cargo/range.ts
+++ b/lib/modules/manager/cargo/range.ts
@@ -11,5 +11,5 @@ export function getRangeStrategy({
if (currentValue?.includes('<')) {
return 'widen';
}
- return 'bump';
+ return 'update-lockfile';
}
diff --git a/lib/modules/manager/cargo/readme.md b/lib/modules/manager/cargo/readme.md
index 46195044aebb2a..94baa4638720b7 100644
--- a/lib/modules/manager/cargo/readme.md
+++ b/lib/modules/manager/cargo/readme.md
@@ -3,7 +3,9 @@ Extracts dependencies from `Cargo.toml` files, and also updates `Cargo.lock` fil
When using the default rangeStrategy=auto:
- If a "less than" instruction is found (e.g. `<2`) then `rangeStrategy=widen` will be selected,
-- Otherwise, `rangeStrategy=bump` will be selected.
+- Otherwise, `rangeStrategy=update-lockfile` will be selected.
+
+The `update-lockfile` default means that most upgrades will update `Cargo.lock` files without the need to change the value in `Cargo.toml`.
### Private Modules Authentication
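Condensed into a sketch, the `auto` behaviour described above looks roughly like this; the early return for an explicit `rangeStrategy` is assumed from the unchanged part of `range.ts`.

```ts
function cargoAutoRangeStrategy(rangeStrategy: string, currentValue?: string): string {
  if (rangeStrategy !== 'auto') {
    return rangeStrategy; // explicit user setting wins (assumed unchanged)
  }
  if (currentValue?.includes('<')) {
    return 'widen'; // "less than" constraints keep the old behaviour
  }
  return 'update-lockfile'; // new default: refresh Cargo.lock, leave Cargo.toml ranges alone
}

console.log(cargoAutoRangeStrategy('auto', '1.0.0')); // -> 'update-lockfile'
console.log(cargoAutoRangeStrategy('auto', '<2'));    // -> 'widen'
```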
diff --git a/lib/modules/manager/cargo/types.ts b/lib/modules/manager/cargo/types.ts
index 68bfa4740432a8..b28c3ced50fa27 100644
--- a/lib/modules/manager/cargo/types.ts
+++ b/lib/modules/manager/cargo/types.ts
@@ -45,6 +45,7 @@ export interface CargoRegistry {
export interface CargoSource {
'replace-with'?: string;
+ registry?: string;
}
/**
diff --git a/lib/modules/manager/cargo/update-locked.spec.ts b/lib/modules/manager/cargo/update-locked.spec.ts
index 5c99da7acdaf0a..c3ba15d26b6a6b 100644
--- a/lib/modules/manager/cargo/update-locked.spec.ts
+++ b/lib/modules/manager/cargo/update-locked.spec.ts
@@ -53,7 +53,7 @@ describe('modules/manager/cargo/update-locked', () => {
expect(updateLockedDependency(config).status).toBe('unsupported');
});
- it('returns update-failed incase of errors', () => {
+ it('returns update-failed in case of errors', () => {
const config: UpdateLockedConfig = {
packageFile: 'Cargo.toml',
lockFile: 'Cargo.lock',
diff --git a/lib/modules/manager/circleci/__fixtures__/config2.yml b/lib/modules/manager/circleci/__fixtures__/config2.yml
index cf55b2aebc4d87..3f885f3e5c0a15 100644
--- a/lib/modules/manager/circleci/__fixtures__/config2.yml
+++ b/lib/modules/manager/circleci/__fixtures__/config2.yml
@@ -5,11 +5,11 @@ orbs:
release-workflows: hutson/library-release-workflows@4.1.0
# Comments help me understand my work.
# The next line is intentionally just whitespace!
-
+
no-version: abc/def
# Comments help me understand my work.
- volatile: zzz/zzz@volatile
+ volatile: zzz/zzz@volatile # Comments help me understand my work.
test_plan: &test_plan
steps:
diff --git a/lib/modules/manager/circleci/extract.ts b/lib/modules/manager/circleci/extract.ts
index fe1e6adf445e22..9d971de87724c0 100644
--- a/lib/modules/manager/circleci/extract.ts
+++ b/lib/modules/manager/circleci/extract.ts
@@ -29,7 +29,9 @@ export function extractPackageFile(
lineNumber += 1;
continue;
}
- const orbMatch = regEx(/^\s+([^:]+):\s(.+)$/).exec(orbLine);
+ const orbMatch = regEx(/^\s+([^:]+):\s(.+?)(?:\s*#.*)?$/).exec(
+ orbLine,
+ );
if (orbMatch) {
logger.trace('orbMatch');
foundOrbOrNoop = true;
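A small check of what the tightened regex captures when an orb line carries a trailing comment, as in the updated `config2.yml` fixture:

```ts
const orbLine = '  volatile: zzz/zzz@volatile # Comments help me understand my work.';
const orbMatch = /^\s+([^:]+):\s(.+?)(?:\s*#.*)?$/.exec(orbLine);
console.log(orbMatch?.[1], orbMatch?.[2]);
// -> 'volatile' 'zzz/zzz@volatile'  (the comment is no longer part of the orb value)
```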
diff --git a/lib/modules/manager/circleci/index.spec.ts b/lib/modules/manager/circleci/index.spec.ts
new file mode 100644
index 00000000000000..527ffb6aa50e78
--- /dev/null
+++ b/lib/modules/manager/circleci/index.spec.ts
@@ -0,0 +1,24 @@
+import { regexMatches } from '../../../../test/util';
+import { defaultConfig } from '.';
+
+describe('modules/manager/circleci/index', () => {
+ describe('file names match fileMatch', () => {
+ it.each`
+ path | expected
+ ${'.circleci/config.yml'} | ${true}
+ ${'.circleci/config.yaml'} | ${true}
+ ${'.circleci/foo.yaml'} | ${true}
+ ${'.circleci/foo.yml'} | ${true}
+ ${'.circleci/foo/config.yaml'} | ${true}
+ ${'.circleci/foo/bar.yml'} | ${true}
+ ${'foo/.circleci/bar.yaml'} | ${true}
+ ${'foo.yml'} | ${false}
+ ${'circleci/foo.yml'} | ${false}
+ ${'circleci/foo.yml'} | ${false}
+ ${'.circleci_foo/bar.yml'} | ${false}
+ ${'.circleci/foo.toml'} | ${false}
+ `('regexMatches("$path") === $expected', ({ path, expected }) => {
+ expect(regexMatches(path, defaultConfig.fileMatch)).toBe(expected);
+ });
+ });
+});
diff --git a/lib/modules/manager/circleci/index.ts b/lib/modules/manager/circleci/index.ts
index 3c2fb9b0532b23..353e21627b8795 100644
--- a/lib/modules/manager/circleci/index.ts
+++ b/lib/modules/manager/circleci/index.ts
@@ -10,7 +10,7 @@ export const displayName = 'CircleCI';
export const url = 'https://circleci.com/docs/configuration-reference';
export const defaultConfig = {
- fileMatch: ['(^|/)\\.circleci/config\\.ya?ml$'],
+ fileMatch: ['(^|/)\\.circleci/.+\\.ya?ml$'],
};
export const categories: Category[] = ['ci'];
diff --git a/lib/modules/manager/circleci/readme.md b/lib/modules/manager/circleci/readme.md
index 64fb2a71f02b44..2380045fd9daba 100644
--- a/lib/modules/manager/circleci/readme.md
+++ b/lib/modules/manager/circleci/readme.md
@@ -1,6 +1,6 @@
The `circleci` manager extracts both `docker` as well as `orb` datasources from CircleCI config files.
-If you need to change the versioning format, read the [versioning](../../versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../versioning/index.md) documentation to learn more.
### Private orbs
diff --git a/lib/modules/manager/cloudbuild/readme.md b/lib/modules/manager/cloudbuild/readme.md
index ed5bcd87c3499b..c8fb219f32159d 100644
--- a/lib/modules/manager/cloudbuild/readme.md
+++ b/lib/modules/manager/cloudbuild/readme.md
@@ -1,3 +1,3 @@
The `cloudbuild` manager extracts `docker` datasources from [Cloud Build config files](https://cloud.google.com/build/docs/configuring-builds/create-basic-configuration).
-If you need to change the versioning format, read the [versioning](../../versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../versioning/index.md) documentation to learn more.
diff --git a/lib/modules/manager/composer/utils.ts b/lib/modules/manager/composer/utils.ts
index 353f2b7d0365c2..89328f611ef24d 100644
--- a/lib/modules/manager/composer/utils.ts
+++ b/lib/modules/manager/composer/utils.ts
@@ -2,7 +2,7 @@
import { quote } from 'shlex';
import { GlobalConfig } from '../../../config/global';
import { logger } from '../../../logger';
-import type { HostRuleSearchResult } from '../../../types';
+import type { CombinedHostRule } from '../../../types';
import type { ToolConstraint } from '../../../util/exec/types';
import { coerceNumber } from '../../../util/number';
import { api, id as composerVersioningId } from '../../versioning/composer';
@@ -113,6 +113,6 @@ export function extractConstraints(
return res;
}
-export function isArtifactAuthEnabled(rule: HostRuleSearchResult): boolean {
+export function isArtifactAuthEnabled(rule: CombinedHostRule): boolean {
return !rule.artifactAuth || rule.artifactAuth.includes('composer');
}
diff --git a/lib/modules/manager/crossplane/readme.md b/lib/modules/manager/crossplane/readme.md
index 68b661596ef16b..f8b983a20a4f96 100644
--- a/lib/modules/manager/crossplane/readme.md
+++ b/lib/modules/manager/crossplane/readme.md
@@ -10,7 +10,7 @@ The `crossplane` manager supports these `depType`s:
You can use these `depType`'s to control which dependencies Renovate will upgrade.
-If you need to change the versioning format, read the [versioning](../../../modules/versioning.md) documentation to learn more.
+If you need to change the versioning format, read the [versioning](../../../modules/versioning/index.md) documentation to learn more.
Some configuration examples:
diff --git a/lib/modules/manager/custom/regex/__snapshots__/index.spec.ts.snap b/lib/modules/manager/custom/regex/__snapshots__/index.spec.ts.snap
index 5132942cd4da4f..25dd3663151b9f 100644
--- a/lib/modules/manager/custom/regex/__snapshots__/index.spec.ts.snap
+++ b/lib/modules/manager/custom/regex/__snapshots__/index.spec.ts.snap
@@ -198,7 +198,7 @@ exports[`modules/manager/custom/regex/index extracts registryUrl 1`] = `
{
"currentValue": "8.12.13",
"datasource": "helm",
- "depName": "prometheus-operator",
+ "packageName": "prometheus-operator",
"registryUrls": [
"https://charts.helm.sh/stable",
],
@@ -212,7 +212,7 @@ exports[`modules/manager/custom/regex/index extracts registryUrl 1`] = `
"matchStrings": [
"chart:
*repository: (?<registryUrl>.*?)
- *name: (?<depName>.*?)
+ *name: (?<packageName>.*?)
*version: (?