diff --git a/docs/usage/assets/images/portal-sign-in.png b/docs/usage/assets/images/portal-sign-in.png new file mode 100644 index 00000000000000..743a62a05cded9 Binary files /dev/null and b/docs/usage/assets/images/portal-sign-in.png differ diff --git a/docs/usage/bazel.md b/docs/usage/bazel.md index 4a95e19c55e2e1..3ef033f1685ecd 100644 --- a/docs/usage/bazel.md +++ b/docs/usage/bazel.md @@ -145,6 +145,19 @@ archive_override( Renovate ignores [`multiple_version_override`](https://bazel.build/rules/lib/globals/module#multiple_version_override). `multiple_version_override` does not affect the processing of version updates for a module. +### `git_repository` + +If Renovate finds a [`git_repository`](https://bazel.build/rules/lib/repo/git#git_repository), it evaluates the `commit` value at the specified `remote`. +`remote` is limited to GitHub repos: `https://github.com/<owner>/<repo>.git` + +```python +git_repository( + name = "rules_foo", + remote = "https://github.com/fooexample/rules_foo.git", + commit = "8c94e11c2b05b6f25ced5f23cd07d0cfd36edc1a", +) +``` + ## Legacy `WORKSPACE` files Renovate extracts dependencies from the following repository rules: @@ -160,7 +173,7 @@ Renovate extracts dependencies from the following repository rules: It also recognizes when these repository rule names are prefixed with an underscore. For example, `_http_archive` is treated the same as `http_archive`. 
-### `git_repository` +### `git_repository` (legacy) Renovate updates any `git_repository` declaration that has the following: diff --git a/docs/usage/examples/opentelemetry.md b/docs/usage/examples/opentelemetry.md index 7ed617294aba72..504a7e0ef92e90 100644 --- a/docs/usage/examples/opentelemetry.md +++ b/docs/usage/examples/opentelemetry.md @@ -14,7 +14,7 @@ name: renovate-otel-demo services: # Jaeger for storing traces jaeger: - image: jaegertracing/jaeger:2.1.0 + image: jaegertracing/jaeger:2.2.0 ports: - '16686:16686' # Web UI - '4317' # OTLP gRPC @@ -36,7 +36,7 @@ services: otel-collector: # Using the Contrib version to access the spanmetrics connector. # If you don't need the spanmetrics connector, you can use the standard version - image: otel/opentelemetry-collector-contrib:0.116.1 + image: otel/opentelemetry-collector-contrib:0.117.0 volumes: - ./otel-collector-config.yml:/etc/otelcol-contrib/config.yaml ports: diff --git a/docs/usage/mend-hosted/.pages b/docs/usage/mend-hosted/.pages index 01b1b47debd098..6b4053c88bf23d 100644 --- a/docs/usage/mend-hosted/.pages +++ b/docs/usage/mend-hosted/.pages @@ -1,5 +1,6 @@ title: Mend-hosted Apps nav: + - 'Renovate Plans': 'renovate-plans.md' - 'Configuration': 'hosted-apps-config.md' - 'Credentials': 'credentials.md' - 'Migrating Secrets': 'migrating-secrets.md' diff --git a/docs/usage/mend-hosted/renovate-plans.md b/docs/usage/mend-hosted/renovate-plans.md new file mode 100644 index 00000000000000..e946575399b5cb --- /dev/null +++ b/docs/usage/mend-hosted/renovate-plans.md @@ -0,0 +1,53 @@ +# Renovate Plans on Mend-Hosted Apps + +Mend provides cloud hosting services for running Renovate on repositories hosted on the following cloud platforms: + +- GitHub +- Bitbucket Cloud +- Azure DevOps + +Mend Cloud will regularly schedule Renovate jobs against all installed repositories. 
+It also listens to webhooks and enqueues a Renovate job when relevant changes occur in a repo, or when actions are triggered from the Renovate PRs or Dashboard issue. +There is a web UI with functionality to view and interact with installed repositories, their jobs and job logs. + +## Accessing Mend Cloud via the Web UI + +Users can access the cloud-hosted Renovate service via the Developer Portal at [https://developer.mend.io/](https://developer.mend.io/). +Developers can log in with OAuth credentials from their cloud-based Git repository. + +![Developer Portal sign-in screen](../assets/images/portal-sign-in.png) + +Features of the Developer Portal include: + +- Ability to install, uninstall and view installed repositories +- Trigger Renovate jobs to run on demand +- View logs for all Renovate jobs +- Configure settings that apply at the Org-level or Repo-level + +## Resources and Scheduling + +The plan assigned to each Org determines the resources, scheduling and concurrency of Renovate jobs. +Mend Cloud has free and paid Renovate plans. Details of the plans are shown in the table below. + +| | Community (Free) | Pioneer (Free) | OSS Select (Free) | Enterprise | +| ----------------------------- | ---------------- | -------------- | ----------------- | ------------ | +| Concurrent jobs per Org | 1 | 8 | 2 | 16 | +| Job scheduling (active repos) | Every 4 hours | Every 4 hours | Hourly | Hourly (\*1) | +| Job runner CPUs | 1 CPU | 1 CPU | 1 CPU | 2 CPU | +| Job runner Memory | 2Gb | 3.5Gb | 6Gb | 8Gb | +| Job runner Disk space | 15Gb | 15Gb | 25Gb | 40Gb | +| Job timeout | 30 minutes | 30 minutes | 60 minutes | 60 minutes | +| Merge Confidence Workflows | Not included | Not included | Included | Included | +| Mend.io Helpdesk Support | Not included | Not included | Not included | Included | + +(\*1) Bitbucket repositories on the Renovate Enterprise plan are scheduled to run every 4 hours, to avoid hitting rate limits on GitHub APIs. 
+ +### Plan descriptions + +**Community (Free)** - This plan is available for free for all repositories. + +**Pioneer (Free)** - This plan is available for a limited time for Orgs that were installed on Renovate Cloud before 2025. Users on this plan will be transitioned to other plans over time. + +**OSS Select (Free)** - This is a premium plan granted for free to selected OSS Orgs. If you would like your Org to be considered for the free OSS Select plan, create a “[Suggest an Idea](https://github.com/renovatebot/renovate/discussions/categories/suggest-an-idea)” item on the Renovate discussions board on GitHub. Acceptance is at the discretion of Mend.io. + +**Enterprise** - A supported, paid plan available for purchase through Mend.io. Contact Mend at [sales@mend.io](mailto:sales@mend.io) for purchase details. diff --git a/lib/data/monorepo.json b/lib/data/monorepo.json index d14e39d39de8f6..ebaf39de7f8100 100644 --- a/lib/data/monorepo.json +++ b/lib/data/monorepo.json @@ -505,7 +505,10 @@ "shiki": "https://github.com/shikijs/shiki", "shopify-app-bridge": "https://github.com/Shopify/app-bridge", "sitecore-jss": "https://github.com/Sitecore/jss", - "skiasharp": "https://github.com/mono/SkiaSharp", + "skiasharp": [ + "https://github.com/mono/SkiaSharp", + "https://go.microsoft.com/fwlink/?linkid=868515" + ], "slack-net": "https://github.com/soxtoby/SlackNet", "slf4j": "https://github.com/qos-ch/slf4j", "slim-message-bus": "https://github.com/zarusz/SlimMessageBus", diff --git a/lib/modules/datasource/azure-pipelines-tasks/__fixtures__/tasks.json b/lib/modules/datasource/azure-pipelines-tasks/__fixtures__/tasks.json new file mode 100644 index 00000000000000..df4a3ea7a7e5da --- /dev/null +++ b/lib/modules/datasource/azure-pipelines-tasks/__fixtures__/tasks.json @@ -0,0 +1,575 @@ +{ + "count": 3, + "value": [ + { + "visibility": [ + "Build", + "Release" + ], + "runsOn": [ + "Agent", + "DeploymentGroup" + ], + "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", + "name": 
"PowerShell", + "version": { + "major": 2, + "minor": 247, + "patch": 1, + "isTest": false + }, + "serverOwned": true, + "contentsUploaded": true, + "iconUrl": "https://dev.azure.com/test_organization/_apis/distributedtask/tasks/e213ff0f-5d5c-4791-802d-52ea3e7be1f1/2.247.1/icon", + "minimumAgentVersion": "2.115.0", + "friendlyName": "PowerShell", + "description": "Run a PowerShell script on Linux, macOS, or Windows", + "category": "Utility", + "helpMarkDown": "[Learn more about this task](https://go.microsoft.com/fwlink/?LinkID=613736)", + "helpUrl": "https://docs.microsoft.com/azure/devops/pipelines/tasks/utility/powershell", + "releaseNotes": "Script task consistency. Added support for macOS and Linux.", + "definitionType": "task", + "showEnvironmentVariables": true, + "author": "Microsoft Corporation", + "demands": [], + "groups": [ + { + "name": "preferenceVariables", + "displayName": "Preference Variables", + "isExpanded": false + }, + { + "name": "advanced", + "displayName": "Advanced", + "isExpanded": false + } + ], + "inputs": [ + { + "options": { + "filePath": "File Path", + "inline": "Inline" + }, + "name": "targetType", + "label": "Type", + "defaultValue": "filePath", + "type": "radio", + "helpMarkDown": "Target script type: File Path or Inline" + }, + { + "name": "filePath", + "label": "Script Path", + "defaultValue": "", + "required": true, + "type": "filePath", + "helpMarkDown": "Path of the script to execute. Must be a fully qualified path or relative to $(System.DefaultWorkingDirectory).", + "visibleRule": "targetType = filePath" + }, + { + "name": "arguments", + "label": "Arguments", + "defaultValue": "", + "type": "string", + "helpMarkDown": "Arguments passed to the PowerShell script. 
Either ordinal parameters or named parameters.", + "visibleRule": "targetType = filePath" + }, + { + "properties": { + "resizable": "true", + "rows": "10", + "maxLength": "20000" + }, + "name": "script", + "label": "Script", + "defaultValue": "# Write your PowerShell commands here.\n\nWrite-Host \"Hello World\"\n", + "required": true, + "type": "multiLine", + "helpMarkDown": "", + "visibleRule": "targetType = inline" + }, + { + "options": { + "default": "Default", + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "errorActionPreference", + "label": "ErrorActionPreference", + "defaultValue": "stop", + "type": "pickList", + "helpMarkDown": "When not `Default`, prepends the line `$ErrorActionPreference = 'VALUE'` at the top of your script.", + "groupName": "preferenceVariables" + }, + { + "options": { + "default": "Default", + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "warningPreference", + "label": "WarningPreference", + "defaultValue": "default", + "type": "pickList", + "helpMarkDown": "When not `Default`, prepends the line `$WarningPreference = 'VALUE'` at the top of your script.", + "groupName": "preferenceVariables" + }, + { + "options": { + "default": "Default", + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "informationPreference", + "label": "InformationPreference", + "defaultValue": "default", + "type": "pickList", + "helpMarkDown": "When not `Default`, prepends the line `$InformationPreference = 'VALUE'` at the top of your script.", + "groupName": "preferenceVariables" + }, + { + "options": { + "default": "Default", + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "verbosePreference", + "label": "VerbosePreference", + "defaultValue": "default", + "type": "pickList", + "helpMarkDown": "When not `Default`, prepends the line `$VerbosePreference = 'VALUE'` 
at the top of your script.", + "groupName": "preferenceVariables" + }, + { + "options": { + "default": "Default", + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "debugPreference", + "label": "DebugPreference", + "defaultValue": "default", + "type": "pickList", + "helpMarkDown": "When not `Default`, prepends the line `$DebugPreference = 'VALUE'` at the top of your script.", + "groupName": "preferenceVariables" + }, + { + "options": { + "default": "Default", + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "progressPreference", + "label": "ProgressPreference", + "defaultValue": "silentlyContinue", + "type": "pickList", + "helpMarkDown": "When not `Default`, prepends the line `$ProgressPreference = 'VALUE'` at the top of your script.", + "groupName": "preferenceVariables" + }, + { + "name": "failOnStderr", + "label": "Fail on Standard Error", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is true, this task will fail if any errors are written to the error pipeline, or if any data is written to the Standard Error stream. Otherwise the task will rely on the exit code to determine failure.", + "groupName": "advanced" + }, + { + "name": "showWarnings", + "label": "Show warnings as Azure DevOps warnings", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is true, and your script writes a warnings - they are shown as warnings also in pipeline logs", + "groupName": "advanced" + }, + { + "name": "ignoreLASTEXITCODE", + "label": "Ignore $LASTEXITCODE", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is false, the line `if ((Test-Path -LiteralPath variable:\\LASTEXITCODE)) { exit $LASTEXITCODE }` is appended to the end of your script. This will cause the last exit code from an external command to be propagated as the exit code of powershell. 
Otherwise the line is not appended to the end of your script.", + "groupName": "advanced" + }, + { + "name": "pwsh", + "label": "Use PowerShell Core", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is true, then on Windows the task will use pwsh.exe from your PATH instead of powershell.exe.", + "groupName": "advanced" + }, + { + "name": "workingDirectory", + "label": "Working Directory", + "defaultValue": "", + "type": "filePath", + "helpMarkDown": "Working directory where the script is run.", + "groupName": "advanced" + }, + { + "name": "runScriptInSeparateScope", + "label": "Run script in the separate scope", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "This input allows executing PowerShell scripts using '&' operator instead of the default '.'. If this input set to the true script will be executed in separate scope and globally scoped PowerShell variables won't be updated", + "groupName": "advanced" + } + ], + "satisfies": [], + "sourceDefinitions": [], + "dataSourceBindings": [], + "instanceNameFormat": "PowerShell Script", + "preJobExecution": {}, + "execution": { + "PowerShell3": { + "target": "powershell.ps1", + "platforms": [ + "windows" + ] + }, + "Node10": { + "target": "powershell.js", + "argumentFormat": "" + }, + "Node16": { + "target": "powershell.js", + "argumentFormat": "" + }, + "Node20_1": { + "target": "powershell.js", + "argumentFormat": "" + } + }, + "postJobExecution": {}, + "_buildConfigMapping": { + "Default": "2.247.0", + "Node20-225": "2.247.1" + } + }, + { + "visibility": [ + "Build", + "Release" + ], + "runsOn": [ + "Agent", + "DeploymentGroup" + ], + "id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1", + "name": "PowerShell", + "deprecated": true, + "version": { + "major": 1, + "minor": 2, + "patch": 3, + "isTest": false + }, + "serverOwned": true, + "contentsUploaded": true, + "iconUrl": 
"https://dev.azure.com/test_organization/_apis/distributedtask/tasks/e213ff0f-5d5c-4791-802d-52ea3e7be1f1/1.2.3/icon", + "minimumAgentVersion": "1.102", + "friendlyName": "PowerShell", + "description": "Run a PowerShell script", + "category": "Utility", + "helpMarkDown": "[More Information](https://go.microsoft.com/fwlink/?LinkID=613736)", + "definitionType": "task", + "author": "Microsoft Corporation", + "demands": [ + "DotNetFramework" + ], + "groups": [ + { + "name": "advanced", + "displayName": "Advanced", + "isExpanded": false + } + ], + "inputs": [ + { + "options": { + "inlineScript": "Inline Script", + "filePath": "File Path" + }, + "name": "scriptType", + "label": "Type", + "defaultValue": "filePath", + "required": true, + "type": "pickList", + "helpMarkDown": "Type of the script: File Path or Inline Script" + }, + { + "name": "scriptName", + "label": "Script Path", + "defaultValue": "", + "required": true, + "type": "filePath", + "helpMarkDown": "Path of the script to execute. Should be fully qualified path or relative to the default working directory.", + "visibleRule": "scriptType = filePath" + }, + { + "name": "arguments", + "label": "Arguments", + "defaultValue": "", + "type": "string", + "helpMarkDown": "Arguments passed to the PowerShell script. Either ordinal parameters or named parameters" + }, + { + "name": "workingFolder", + "label": "Working folder", + "defaultValue": "", + "type": "filePath", + "helpMarkDown": "Current working directory when script is run. Defaults to the folder where the script is located.", + "groupName": "advanced" + }, + { + "properties": { + "resizable": "true", + "rows": "10", + "maxLength": "500" + }, + "name": "inlineScript", + "label": "Inline Script", + "defaultValue": "# You can write your powershell scripts inline here. 
\n# You can also pass predefined and custom variables to this scripts using arguments\n\n Write-Host \"Hello World\"", + "required": true, + "type": "multiLine", + "helpMarkDown": "", + "visibleRule": "scriptType = inlineScript" + }, + { + "name": "failOnStandardError", + "label": "Fail on Standard Error", + "defaultValue": "true", + "type": "boolean", + "helpMarkDown": "If this is true, this task will fail if any errors are written to the error pipeline, or if any data is written to the Standard Error stream. Otherwise the task will rely solely on $LASTEXITCODE and the exit code to determine failure.", + "groupName": "advanced" + } + ], + "satisfies": [], + "sourceDefinitions": [], + "dataSourceBindings": [], + "instanceNameFormat": "PowerShell Script", + "preJobExecution": {}, + "execution": { + "PowerShellExe": { + "target": "$(scriptName)", + "argumentFormat": "$(arguments)", + "workingDirectory": "$(workingFolder)", + "inlineScript": "$(inlineScript)", + "scriptType": "$(scriptType)", + "failOnStandardError": "$(failOnStandardError)" + } + }, + "postJobExecution": {}, + "_buildConfigMapping": {} + }, + { + "visibility": [ + "Build", + "Release" + ], + "runsOn": [ + "Agent", + "DeploymentGroup" + ], + "id": "72a1931b-effb-4d2e-8fd8-f8472a07cb62", + "name": "AzurePowerShell", + "version": { + "major": 5, + "minor": 248, + "patch": 3, + "isTest": false + }, + "serverOwned": true, + "contentsUploaded": true, + "iconUrl": "https://dev.azure.com/test_organization/_apis/distributedtask/tasks/72a1931b-effb-4d2e-8fd8-f8472a07cb62/5.248.3/icon", + "minimumAgentVersion": "2.115.0", + "friendlyName": "Azure PowerShell", + "description": "Run a PowerShell script within an Azure environment", + "category": "Deploy", + "helpMarkDown": "[Learn more about this task](https://go.microsoft.com/fwlink/?LinkID=613749)", + "helpUrl": "https://aka.ms/azurepowershelltroubleshooting", + "releaseNotes": "Added support for Az Module and cross platform agents.", + "definitionType": 
"task", + "author": "Microsoft Corporation", + "demands": [], + "groups": [ + { + "name": "AzurePowerShellVersionOptions", + "displayName": "Azure PowerShell version options", + "isExpanded": true + }, + { + "name": "advanced", + "displayName": "Advanced", + "isExpanded": false + } + ], + "inputs": [ + { + "aliases": [ + "azureSubscription" + ], + "properties": { + "EndpointFilterRule": "ScopeLevel != AzureMLWorkspace" + }, + "name": "ConnectedServiceNameARM", + "label": "Azure Subscription", + "defaultValue": "", + "required": true, + "type": "connectedService:AzureRM", + "helpMarkDown": "Azure Resource Manager subscription to configure before running PowerShell" + }, + { + "options": { + "FilePath": "Script File Path", + "InlineScript": "Inline Script" + }, + "name": "ScriptType", + "label": "Script Type", + "defaultValue": "FilePath", + "type": "radio", + "helpMarkDown": "Type of the script: File Path or Inline Script" + }, + { + "name": "ScriptPath", + "label": "Script Path", + "defaultValue": "", + "type": "filePath", + "helpMarkDown": "Path of the script. Should be fully qualified path or relative to the default working directory.", + "visibleRule": "ScriptType = FilePath" + }, + { + "properties": { + "resizable": "true", + "rows": "10", + "maxLength": "5000" + }, + "name": "Inline", + "label": "Inline Script", + "defaultValue": "# You can write your azure powershell scripts inline here. \n# You can also pass predefined and custom variables to this script using arguments", + "type": "multiLine", + "helpMarkDown": "Enter the script to execute.", + "visibleRule": "ScriptType = InlineScript" + }, + { + "properties": { + "editorExtension": "ms.vss-services-azure.parameters-grid" + }, + "name": "ScriptArguments", + "label": "Script Arguments", + "defaultValue": "", + "type": "string", + "helpMarkDown": "Additional parameters to pass to PowerShell. 
Can be either ordinal or named parameters.", + "visibleRule": "ScriptType = FilePath" + }, + { + "options": { + "stop": "Stop", + "continue": "Continue", + "silentlyContinue": "SilentlyContinue" + }, + "name": "errorActionPreference", + "label": "ErrorActionPreference", + "defaultValue": "stop", + "type": "pickList", + "helpMarkDown": "Select the value of the ErrorActionPreference variable for executing the script." + }, + { + "name": "FailOnStandardError", + "label": "Fail on Standard Error", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is true, this task will fail if any errors are written to the error pipeline, or if any data is written to the Standard Error stream." + }, + { + "aliases": [ + "azurePowerShellVersion" + ], + "options": { + "LatestVersion": "Latest installed version", + "OtherVersion": "Specify other version" + }, + "name": "TargetAzurePs", + "label": "Azure PowerShell Version", + "defaultValue": "OtherVersion", + "type": "radio", + "helpMarkDown": "In case of hosted agents, the supported Azure PowerShell Version is: 1.0.0, 1.6.0, 2.3.2, 2.6.0, 3.1.0 (Hosted VS2017 Queue).\nTo pick the latest version available on the agent, select \"Latest installed version\".\n\nFor private agents you can specify preferred version of Azure PowerShell using \"Specify version\"", + "groupName": "AzurePowerShellVersionOptions" + }, + { + "aliases": [ + "preferredAzurePowerShellVersion" + ], + "name": "CustomTargetAzurePs", + "label": "Preferred Azure PowerShell Version", + "defaultValue": "", + "required": true, + "type": "string", + "helpMarkDown": "Preferred Azure PowerShell Version needs to be a proper semantic version eg. 1.2.3. Regex like 2.\\*,2.3.\\* is not supported. 
The Hosted VS2017 Pool currently supports Az module version: 1.0.0, 1.6.0, 2.3.2, 2.6.0, 3.1.0", + "visibleRule": "TargetAzurePs = OtherVersion", + "groupName": "AzurePowerShellVersionOptions" + }, + { + "name": "pwsh", + "label": "Use PowerShell Core", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is true, then on Windows the task will use pwsh.exe from your PATH instead of powershell.exe.", + "groupName": "advanced" + }, + { + "name": "validateScriptSignature", + "label": "Validate script signature", + "defaultValue": "false", + "type": "boolean", + "helpMarkDown": "If this is true, then the task will first check to make sure specified script is signed and valid before executing it.", + "visibleRule": "ScriptType = FilePath", + "groupName": "advanced" + }, + { + "name": "workingDirectory", + "label": "Working Directory", + "defaultValue": "", + "type": "filePath", + "helpMarkDown": "Working directory where the script is run.", + "groupName": "advanced" + } + ], + "satisfies": [], + "sourceDefinitions": [], + "dataSourceBindings": [], + "instanceNameFormat": "Azure PowerShell script: $(ScriptType)", + "preJobExecution": {}, + "execution": { + "PowerShell3": { + "target": "azurepowershell.ps1", + "platforms": [ + "windows" + ] + }, + "Node16": { + "target": "azurepowershell.js", + "argumentFormat": "" + }, + "Node10": { + "target": "azurepowershell.js", + "argumentFormat": "" + }, + "Node20_1": { + "target": "azurepowershell.js", + "argumentFormat": "" + } + }, + "postJobExecution": {}, + "_buildConfigMapping": { + "Default": "5.248.2", + "Node20_229_2": "5.248.3" + } + } + ] +} \ No newline at end of file diff --git a/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts b/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts index 94d15fcec1f583..94d73d131bfe46 100644 --- a/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts +++ b/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts @@ -1,5 +1,9 @@ import { 
getPkgReleases } from '..'; +import { Fixtures } from '../../../../test/fixtures'; import * as httpMock from '../../../../test/http-mock'; +import { GlobalConfig } from '../../../config/global'; +import * as hostRules from '../../../util/host-rules'; +import { AzurePipelinesTask } from './schema'; import { AzurePipelinesTasksDatasource } from '.'; const gitHubHost = 'https://raw.githubusercontent.com'; @@ -9,6 +13,11 @@ const marketplaceTasksPath = '/renovatebot/azure-devops-marketplace/main/azure-pipelines-marketplace-tasks.json'; describe('modules/datasource/azure-pipelines-tasks/index', () => { + beforeEach(() => { + GlobalConfig.reset(); + hostRules.clear(); + }); + it('returns null for unknown task', async () => { httpMock .scope(gitHubHost) @@ -64,4 +73,103 @@ describe('modules/datasource/azure-pipelines-tasks/index', () => { }), ).toEqual({ releases: [{ version: '0.171.0' }, { version: '0.198.0' }] }); }); + + it('returns organization task with single version', async () => { + GlobalConfig.set({ + platform: 'azure', + endpoint: 'https://my.custom.domain', + }); + + hostRules.add({ + hostType: AzurePipelinesTasksDatasource.id, + matchHost: 'my.custom.domain', + token: '123test', + }); + + httpMock + .scope('https://my.custom.domain') + .get('/_apis/distributedtask/tasks/') + .reply(200, Fixtures.get('tasks.json')); + + expect( + await getPkgReleases({ + datasource: AzurePipelinesTasksDatasource.id, + packageName: 'AzurePowerShell', + }), + ).toEqual({ releases: [{ version: '5.248.3' }] }); + }); + + it('returns organization task with multiple versions', async () => { + GlobalConfig.set({ + platform: 'azure', + endpoint: 'https://my.custom.domain', + }); + + hostRules.add({ + hostType: AzurePipelinesTasksDatasource.id, + matchHost: 'my.custom.domain', + token: '123test', + }); + + httpMock + .scope('https://my.custom.domain') + .get('/_apis/distributedtask/tasks/') + .reply(200, Fixtures.get('tasks.json')); + + expect( + await getPkgReleases({ + datasource: 
AzurePipelinesTasksDatasource.id, + packageName: 'PowerShell', + }), + ).toEqual({ + releases: [ + { isDeprecated: true, version: '1.2.3' }, + { isDeprecated: undefined, version: '2.247.1' }, + ], + }); + }); + + describe('compare semver', () => { + it.each` + a | exp + ${[]} | ${[]} + ${['']} | ${['']} + ${['', '']} | ${['', '']} + ${['1.0.0']} | ${['1.0.0']} + ${['1.0.1', '1.1.0', '1.0.0']} | ${['1.0.0', '1.0.1', '1.1.0']} + `('when versions is $a', ({ a, exp }) => { + const azureVersions = a.map((x: string) => { + const splitted = x.split('.'); + + const version = + splitted.length === 3 + ? { + major: Number(splitted[0]), + minor: Number(splitted[1]), + patch: Number(splitted[2]), + } + : null; + + return AzurePipelinesTask.parse({ + name: '', + deprecated: false, + version, + }); + }); + + const azureSortedVersions = azureVersions.sort( + AzurePipelinesTasksDatasource.compareSemanticVersions('version'), + ); + + expect( + azureSortedVersions.map((x: any) => { + const data = AzurePipelinesTask.parse(x); + + return data.version === null + ? 
'' + : `${data.version.major}.${data.version.minor}.${data.version.patch}`; + }), + ).toStrictEqual(exp); + }); + }); }); diff --git a/lib/modules/datasource/azure-pipelines-tasks/index.ts b/lib/modules/datasource/azure-pipelines-tasks/index.ts index db9bf125aff6ef..2605cd91a78845 100644 --- a/lib/modules/datasource/azure-pipelines-tasks/index.ts +++ b/lib/modules/datasource/azure-pipelines-tasks/index.ts @@ -1,7 +1,16 @@ +import type { TypeOf, ZodType } from 'zod'; +import { GlobalConfig } from '../../../config/global'; import { cache } from '../../../util/cache/package/decorator'; +import * as hostRules from '../../../util/host-rules'; +import type { HttpOptions } from '../../../util/http/types'; import { id as versioning } from '../../versioning/loose'; import { Datasource } from '../datasource'; import type { GetReleasesConfig, ReleaseResult } from '../types'; +import { + AzurePipelinesFallbackTasks, + AzurePipelinesJSON, + AzurePipelinesTaskVersion, +} from './schema'; const TASKS_URL_BASE = 'https://raw.githubusercontent.com/renovatebot/azure-devops-marketplace/main'; @@ -22,13 +31,58 @@ export class AzurePipelinesTasksDatasource extends Datasource { async getReleases({ packageName, }: GetReleasesConfig): Promise { - const versions = - (await this.getTasks(BUILT_IN_TASKS_URL))[packageName.toLowerCase()] ?? 
- (await this.getTasks(MARKETPLACE_TASKS_URL))[packageName.toLowerCase()]; + const platform = GlobalConfig.get('platform'); + const endpoint = GlobalConfig.get('endpoint'); + const { token } = hostRules.find({ + hostType: AzurePipelinesTasksDatasource.id, + url: endpoint, + }); - if (versions) { - const releases = versions.map((version) => ({ version })); - return { releases }; + if (platform === 'azure' && endpoint && token) { + const auth = Buffer.from(`renovate:${token}`).toString('base64'); + const opts: HttpOptions = { + headers: { authorization: `Basic ${auth}` }, + }; + const results = await this.getTasks( + `${endpoint}/_apis/distributedtask/tasks/`, + opts, + AzurePipelinesJSON, + ); + + const result: ReleaseResult = { releases: [] }; + + results.value + .filter((task) => task.name === packageName) + .sort(AzurePipelinesTasksDatasource.compareSemanticVersions('version')) + .forEach((task) => { + result.releases.push({ + version: `${task.version!.major}.${task.version!.minor}.${task.version!.patch}`, + isDeprecated: task.deprecated, + }); + }); + + return result; + } else { + const versions = + ( + await this.getTasks( + BUILT_IN_TASKS_URL, + {}, + AzurePipelinesFallbackTasks, + ) + )[packageName.toLowerCase()] ?? 
+ ( + await this.getTasks( + MARKETPLACE_TASKS_URL, + {}, + AzurePipelinesFallbackTasks, + ) + )[packageName.toLowerCase()]; + + if (versions) { + const releases = versions.map((version) => ({ version })); + return { releases }; + } } return null; @@ -39,8 +93,39 @@ export class AzurePipelinesTasksDatasource extends Datasource { key: (url: string) => url, ttlMinutes: 24 * 60, }) - async getTasks(url: string): Promise> { - const { body } = await this.http.getJson>(url); + async getTasks = ZodType>( + url: string, + opts: HttpOptions, + schema: Schema, + ): Promise> { + const { body } = await this.http.getJson(url, opts, schema); return body; } + + static compareSemanticVersions = (key: string) => (a: any, b: any) => { + const a1Version = AzurePipelinesTaskVersion.safeParse(a[key]).data; + const b1Version = AzurePipelinesTaskVersion.safeParse(b[key]).data; + + const a1 = + a1Version === undefined + ? '' + : `${a1Version.major}.${a1Version.minor}.${a1Version.patch}`; + const b1 = + b1Version === undefined + ? '' + : `${b1Version.major}.${b1Version.minor}.${b1Version.patch}`; + + const len = Math.min(a1.length, b1.length); + + for (let i = 0; i < len; i++) { + const a2 = +a1[i] || 0; + const b2 = +b1[i] || 0; + + if (a2 !== b2) { + return a2 > b2 ? 
1 : -1; + } + } + + return b1.length - a1.length; + }; } diff --git a/lib/modules/datasource/azure-pipelines-tasks/schema.ts b/lib/modules/datasource/azure-pipelines-tasks/schema.ts new file mode 100644 index 00000000000000..eb20bc97dd0a06 --- /dev/null +++ b/lib/modules/datasource/azure-pipelines-tasks/schema.ts @@ -0,0 +1,19 @@ +import { z } from 'zod'; + +export const AzurePipelinesTaskVersion = z.object({ + major: z.number(), + minor: z.number(), + patch: z.number(), +}); + +export const AzurePipelinesTask = z.object({ + name: z.string(), + deprecated: z.boolean().optional(), + version: AzurePipelinesTaskVersion.nullable(), +}); + +export const AzurePipelinesJSON = z.object({ + value: AzurePipelinesTask.array(), +}); + +export const AzurePipelinesFallbackTasks = z.record(z.string().array()); diff --git a/lib/modules/datasource/docker/index.spec.ts b/lib/modules/datasource/docker/index.spec.ts index 68096768f24baf..768dacc88d61b4 100644 --- a/lib/modules/datasource/docker/index.spec.ts +++ b/lib/modules/datasource/docker/index.spec.ts @@ -98,10 +98,13 @@ describe('modules/datasource/docker/index', () => { .reply(200, { token: 'some-token' }); hostRules.find.mockReturnValue({}); - const res = await getDigest({ - datasource: 'docker', - packageName: 'some-dep', - }); + const res = await getDigest( + { + datasource: 'docker', + packageName: 'some-dep', + }, + '', + ); expect(res).toBe('some-digest'); }); diff --git a/lib/modules/datasource/docker/index.ts b/lib/modules/datasource/docker/index.ts index b6082a6455ef31..3e4339d0fa2b25 100644 --- a/lib/modules/datasource/docker/index.ts +++ b/lib/modules/datasource/docker/index.ts @@ -831,7 +831,7 @@ export class DockerDatasource extends Datasource { // TODO: types (#22198) `getDigest(${registryHost}, ${dockerRepository}, ${newValue})`, ); - const newTag = newValue ?? 'latest'; + const newTag = is.nonEmptyString(newValue) ? 
newValue : 'latest'; let digest: string | null = null; try { let architecture: string | null | undefined = null; diff --git a/lib/modules/manager/bazel-module/extract.spec.ts b/lib/modules/manager/bazel-module/extract.spec.ts index 336d6172a3ba13..53e5ca47845b30 100644 --- a/lib/modules/manager/bazel-module/extract.spec.ts +++ b/lib/modules/manager/bazel-module/extract.spec.ts @@ -392,5 +392,31 @@ describe('modules/manager/bazel-module/extract', () => { }, ]); }); + + it('returns git_repository dependencies', async () => { + const input = codeBlock` + git_repository( + name = "rules_foo", + commit = "850cb49c8649e463b80ef7984e7c744279746170", + remote = "https://github.com/example/rules_foo.git", + ) + `; + const result = await extractPackageFile(input, 'MODULE.bazel'); + if (!result) { + throw new Error('Expected a result.'); + } + expect(result.deps).toHaveLength(1); + expect(result.deps).toEqual( + expect.arrayContaining([ + { + datasource: GithubTagsDatasource.id, + depType: 'git_repository', + depName: 'rules_foo', + currentDigest: '850cb49c8649e463b80ef7984e7c744279746170', + packageName: 'example/rules_foo', + }, + ]), + ); + }); }); }); diff --git a/lib/modules/manager/bazel-module/extract.ts b/lib/modules/manager/bazel-module/extract.ts index b62bcbccdd983e..8774057c32833f 100644 --- a/lib/modules/manager/bazel-module/extract.ts +++ b/lib/modules/manager/bazel-module/extract.ts @@ -8,7 +8,10 @@ import type { RecordFragment } from './fragments'; import { parse } from './parser'; import { RuleToMavenPackageDep, fillRegistryUrls } from './parser/maven'; import { RuleToDockerPackageDep } from './parser/oci'; -import { RuleToBazelModulePackageDep } from './rules'; +import { + GitRepositoryToPackageDep, + RuleToBazelModulePackageDep, +} from './rules'; import * as rules from './rules'; export async function extractPackageFile( @@ -18,9 +21,14 @@ export async function extractPackageFile( try { const records = parse(content); const pfc = await 
extractBazelPfc(records, packageFile); + const gitRepositoryDeps = extractGitRepositoryDeps(records); const mavenDeps = extractMavenDeps(records); const dockerDeps = LooseArray(RuleToDockerPackageDep).parse(records); + if (gitRepositoryDeps.length) { + pfc.deps.push(...gitRepositoryDeps); + } + if (mavenDeps.length) { pfc.deps.push(...mavenDeps); } @@ -57,6 +65,12 @@ async function extractBazelPfc( return pfc; } +function extractGitRepositoryDeps( + records: RecordFragment[], +): PackageDependency[] { + return LooseArray(GitRepositoryToPackageDep).parse(records); +} + function extractMavenDeps(records: RecordFragment[]): PackageDependency[] { return LooseArray(RuleToMavenPackageDep) .transform(fillRegistryUrls) diff --git a/lib/modules/manager/bazel-module/parser/index.spec.ts b/lib/modules/manager/bazel-module/parser/index.spec.ts index 26ce0ae5c45e37..f3783f87774f02 100644 --- a/lib/modules/manager/bazel-module/parser/index.spec.ts +++ b/lib/modules/manager/bazel-module/parser/index.spec.ts @@ -315,5 +315,37 @@ describe('modules/manager/bazel-module/parser/index', () => { ), ]); }); + + it('finds the git_repository', () => { + const input = codeBlock` + git_repository( + name = "rules_foo", + remote = "https://github.com/example/rules_foo.git", + commit = "6a2c2e22849b3e6b33d5ea9aa72222d4803a986a", + patches = ["//:rules_foo.patch"], + patch_strip = 1, + ) + `; + const res = parse(input); + expect(res).toEqual([ + fragments.record( + { + rule: fragments.string('git_repository'), + name: fragments.string('rules_foo'), + patches: fragments.array( + [fragments.string('//:rules_foo.patch')], + true, + ), + commit: fragments.string( + '6a2c2e22849b3e6b33d5ea9aa72222d4803a986a', + ), + remote: fragments.string( + 'https://github.com/example/rules_foo.git', + ), + }, + true, + ), + ]); + }); }); }); diff --git a/lib/modules/manager/bazel-module/parser/module.ts b/lib/modules/manager/bazel-module/parser/module.ts index 80371274c029e3..54371e9a7bd25d 100644 --- 
a/lib/modules/manager/bazel-module/parser/module.ts +++ b/lib/modules/manager/bazel-module/parser/module.ts @@ -9,6 +9,7 @@ const supportedRules = [ 'git_override', 'local_path_override', 'single_version_override', + 'git_repository', ]; const supportedRulesRegex = regEx(`^${supportedRules.join('|')}$`); diff --git a/lib/modules/manager/bazel-module/rules.spec.ts b/lib/modules/manager/bazel-module/rules.spec.ts index 048471c6504d39..c3a8b2a2207cef 100644 --- a/lib/modules/manager/bazel-module/rules.spec.ts +++ b/lib/modules/manager/bazel-module/rules.spec.ts @@ -10,6 +10,7 @@ import type { OverridePackageDep, } from './rules'; import { + GitRepositoryToPackageDep, RuleToBazelModulePackageDep, bazelModulePackageDepToPackageDependency, processModulePkgDeps, @@ -72,6 +73,19 @@ const singleVersionOverrideWithoutVersionAndRegistryPkgDep: BasePackageDep = { depName: 'rules_foo', skipReason: 'ignored', }; +const gitRepositoryForGithubPkgDep: BasePackageDep = { + datasource: GithubTagsDatasource.id, + depType: 'git_repository', + depName: 'rules_foo', + packageName: 'example/rules_foo', + currentDigest: '850cb49c8649e463b80ef7984e7c744279746170', +}; +const gitRepositoryForUnsupportedPkgDep: BasePackageDep = { + depType: 'git_repository', + depName: 'rules_foo', + currentDigest: '850cb49c8649e463b80ef7984e7c744279746170', + skipReason: 'unsupported-datasource', +}; describe('modules/manager/bazel-module/rules', () => { describe('RuleToBazelModulePackageDep', () => { @@ -129,6 +143,30 @@ describe('modules/manager/bazel-module/rules', () => { }); }); + describe('GitRepositoryToPackageDep', () => { + const gitRepositoryWithGihubHost = fragments.record({ + rule: fragments.string('git_repository'), + name: fragments.string('rules_foo'), + remote: fragments.string('https://github.com/example/rules_foo.git'), + commit: fragments.string('850cb49c8649e463b80ef7984e7c744279746170'), + }); + const gitRepositoryWithUnsupportedHost = fragments.record({ + rule: 
fragments.string('git_repository'), + name: fragments.string('rules_foo'), + remote: fragments.string('https://nobuenos.com/example/rules_foo.git'), + commit: fragments.string('850cb49c8649e463b80ef7984e7c744279746170'), + }); + + it.each` + msg | a | exp + ${'git_repository, GitHub host'} | ${gitRepositoryWithGihubHost} | ${gitRepositoryForGithubPkgDep} + ${'git_repository, unsupported host'} | ${gitRepositoryWithUnsupportedHost} | ${gitRepositoryForUnsupportedPkgDep} + `('.parse() with $msg', ({ a, exp }) => { + const pkgDep = GitRepositoryToPackageDep.parse(a); + expect(pkgDep).toEqual(exp); + }); + }); + describe('.toPackageDependencies()', () => { const expectedBazelDepNoOverrides: PackageDependency[] = [bazelDepPkgDep]; const expectedBazelDepAndGitOverride: PackageDependency[] = [ diff --git a/lib/modules/manager/bazel-module/rules.ts b/lib/modules/manager/bazel-module/rules.ts index fda2cf253c0fd0..8f1bb2e626d77a 100644 --- a/lib/modules/manager/bazel-module/rules.ts +++ b/lib/modules/manager/bazel-module/rules.ts @@ -242,3 +242,28 @@ export function toPackageDependencies( ): PackageDependency[] { return collectByModule(packageDeps).map(processModulePkgDeps).flat(); } + +export const GitRepositoryToPackageDep = RecordFragmentSchema.extend({ + children: z.object({ + rule: StringFragmentSchema.extend({ + value: z.literal('git_repository'), + }), + name: StringFragmentSchema, + remote: StringFragmentSchema, + commit: StringFragmentSchema, + }), +}).transform(({ children: { rule, name, remote, commit } }): BasePackageDep => { + const gitRepo: BasePackageDep = { + depType: rule.value, + depName: name.value, + currentDigest: commit.value, + }; + const ghPackageName = githubPackageName(remote.value); + if (is.nonEmptyString(ghPackageName)) { + gitRepo.datasource = GithubTagsDatasource.id; + gitRepo.packageName = ghPackageName; + } else { + gitRepo.skipReason = 'unsupported-datasource'; + } + return gitRepo; +}); diff --git 
a/lib/modules/manager/bundler/artifacts.spec.ts b/lib/modules/manager/bundler/artifacts.spec.ts index 497d293a06b552..ad0ee829c84efb 100644 --- a/lib/modules/manager/bundler/artifacts.spec.ts +++ b/lib/modules/manager/bundler/artifacts.spec.ts @@ -435,217 +435,6 @@ describe('modules/manager/bundler/artifacts', () => { datasource.getPkgReleases.mockResolvedValueOnce({ releases: [{ version: '1.17.2' }, { version: '2.3.5' }], }); - bundlerHostRules.findAllAuthenticatable.mockReturnValue([ - { - hostType: 'bundler', - matchHost: 'gems.private.com', - resolvedHost: 'gems.private.com', - username: 'some-user', - password: 'some-password', - }, - ]); - bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue( - 'some-user:some-password', - ); - const execSnapshots = mockExecAll(); - git.getRepoStatus.mockResolvedValueOnce( - partial({ - modified: ['Gemfile.lock'], - }), - ); - fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock'); - expect( - await updateArtifacts({ - packageFileName: 'Gemfile', - updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }], - newPackageFileContent: 'Updated Gemfile content', - config, - }), - ).toEqual([updatedGemfileLock]); - expect(execSnapshots).toMatchObject([ - { cmd: 'docker pull ghcr.io/containerbase/sidecar' }, - { cmd: 'docker ps --filter name=renovate_sidecar -aq' }, - { - cmd: - 'docker run --rm --name=renovate_sidecar --label=renovate_child ' + - '-v "/tmp/github/some/repo":"/tmp/github/some/repo" ' + - '-v "/tmp/cache":"/tmp/cache" ' + - '-e BUNDLE_GEMS__PRIVATE__COM ' + - '-e GEM_HOME ' + - '-e CONTAINERBASE_CACHE_DIR ' + - '-w "/tmp/github/some/repo" ' + - 'ghcr.io/containerbase/sidecar' + - ' bash -l -c "' + - 'install-tool ruby 1.2.0' + - ' && ' + - 'install-tool bundler 2.3.5' + - ' && ' + - 'ruby --version' + - ' && ' + - 'bundler lock --update foo bar' + - '"', - }, - ]); - }); - - it('injects bundler host configuration as command with bundler < 2', async () => { - GlobalConfig.set({ ...adminConfig, 
binarySource: 'docker' }); - fs.readLocalFile.mockResolvedValueOnce('Current Gemfile.lock'); - fs.readLocalFile.mockResolvedValueOnce('1.2.0'); - // ruby - datasource.getPkgReleases.mockResolvedValueOnce({ - releases: [ - { version: '1.0.0' }, - { version: '1.2.0' }, - { version: '1.3.0' }, - ], - }); - bundlerHostRules.findAllAuthenticatable.mockReturnValue([ - { - hostType: 'bundler', - matchHost: 'gems-private.com', - resolvedHost: 'gems-private.com', - username: 'some-user', - password: 'some-password', - }, - ]); - bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue( - 'some-user:some-password', - ); - const execSnapshots = mockExecAll(); - git.getRepoStatus.mockResolvedValueOnce( - partial({ - modified: ['Gemfile.lock'], - }), - ); - fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock'); - expect( - await updateArtifacts({ - packageFileName: 'Gemfile', - updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }], - newPackageFileContent: 'Updated Gemfile content', - config: { - ...config, - constraints: { - bundler: '1.2', - }, - }, - }), - ).toEqual([updatedGemfileLock]); - expect(execSnapshots).toMatchObject([ - { cmd: 'docker pull ghcr.io/containerbase/sidecar' }, - { cmd: 'docker ps --filter name=renovate_sidecar -aq' }, - { - cmd: - 'docker run --rm --name=renovate_sidecar --label=renovate_child ' + - '-v "/tmp/github/some/repo":"/tmp/github/some/repo" ' + - '-v "/tmp/cache":"/tmp/cache" ' + - '-e GEM_HOME ' + - '-e CONTAINERBASE_CACHE_DIR ' + - '-w "/tmp/github/some/repo" ' + - 'ghcr.io/containerbase/sidecar' + - ' bash -l -c "' + - 'install-tool ruby 1.2.0' + - ' && ' + - 'install-tool bundler 1.2' + - ' && ' + - 'ruby --version' + - ' && ' + - 'bundler config --local gems-private.com some-user:some-password' + - ' && ' + - 'bundler lock --update foo bar' + - '"', - }, - ]); - }); - - it('injects bundler host configuration as command with bundler >= 2', async () => { - GlobalConfig.set({ ...adminConfig, binarySource: 'docker' }); - 
fs.readLocalFile.mockResolvedValueOnce('Current Gemfile.lock'); - fs.readLocalFile.mockResolvedValueOnce('1.2.0'); - // ruby - datasource.getPkgReleases.mockResolvedValueOnce({ - releases: [ - { version: '1.0.0' }, - { version: '1.2.0' }, - { version: '1.3.0' }, - ], - }); - bundlerHostRules.findAllAuthenticatable.mockReturnValue([ - { - hostType: 'bundler', - matchHost: 'gems-private.com', - resolvedHost: 'gems-private.com', - username: 'some-user', - password: 'some-password', - }, - ]); - bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue( - 'some-user:some-password', - ); - const execSnapshots = mockExecAll(); - git.getRepoStatus.mockResolvedValueOnce( - partial({ - modified: ['Gemfile.lock'], - }), - ); - fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock'); - expect( - await updateArtifacts({ - packageFileName: 'Gemfile', - updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }], - newPackageFileContent: 'Updated Gemfile content', - config: { - ...config, - constraints: { - bundler: '2.1', - }, - }, - }), - ).toEqual([updatedGemfileLock]); - expect(execSnapshots).toMatchObject([ - { cmd: 'docker pull ghcr.io/containerbase/sidecar' }, - { cmd: 'docker ps --filter name=renovate_sidecar -aq' }, - { - cmd: - 'docker run --rm --name=renovate_sidecar --label=renovate_child ' + - '-v "/tmp/github/some/repo":"/tmp/github/some/repo" ' + - '-v "/tmp/cache":"/tmp/cache" ' + - '-e GEM_HOME ' + - '-e CONTAINERBASE_CACHE_DIR ' + - '-w "/tmp/github/some/repo" ' + - 'ghcr.io/containerbase/sidecar' + - ' bash -l -c "' + - 'install-tool ruby 1.2.0' + - ' && ' + - 'install-tool bundler 2.1' + - ' && ' + - 'ruby --version' + - ' && ' + - 'bundler config set --local gems-private.com some-user:some-password' + - ' && ' + - 'bundler lock --update foo bar' + - '"', - }, - ]); - }); - - it('injects bundler host configuration as command with bundler == latest', async () => { - GlobalConfig.set({ ...adminConfig, binarySource: 'docker' }); - 
fs.readLocalFile.mockResolvedValueOnce('Current Gemfile.lock'); - fs.readLocalFile.mockResolvedValueOnce('1.2.0'); - // ruby - datasource.getPkgReleases.mockResolvedValueOnce({ - releases: [ - { version: '1.0.0' }, - { version: '1.2.0' }, - { version: '1.3.0' }, - ], - }); - // bundler - datasource.getPkgReleases.mockResolvedValueOnce({ - releases: [{ version: '1.17.2' }, { version: '2.3.5' }], - }); bundlerHostRules.findAllAuthenticatable.mockReturnValue([ { hostType: 'bundler', @@ -681,6 +470,7 @@ describe('modules/manager/bundler/artifacts', () => { 'docker run --rm --name=renovate_sidecar --label=renovate_child ' + '-v "/tmp/github/some/repo":"/tmp/github/some/repo" ' + '-v "/tmp/cache":"/tmp/cache" ' + + '-e BUNDLE_GEMS___PRIVATE__COM ' + '-e GEM_HOME ' + '-e CONTAINERBASE_CACHE_DIR ' + '-w "/tmp/github/some/repo" ' + @@ -688,12 +478,10 @@ describe('modules/manager/bundler/artifacts', () => { ' bash -l -c "' + 'install-tool ruby 1.2.0' + ' && ' + - 'install-tool bundler 1.3.0' + + 'install-tool bundler 2.3.5' + ' && ' + 'ruby --version' + ' && ' + - 'bundler config set --local gems-private.com some-user:some-password' + - ' && ' + 'bundler lock --update foo bar' + '"', }, diff --git a/lib/modules/manager/bundler/artifacts.ts b/lib/modules/manager/bundler/artifacts.ts index d1dc827f704eb5..071bd1a652c2ce 100644 --- a/lib/modules/manager/bundler/artifacts.ts +++ b/lib/modules/manager/bundler/artifacts.ts @@ -1,4 +1,3 @@ -import { lt } from '@renovatebot/ruby-semver'; import is from '@sindresorhus/is'; import { quote } from 'shlex'; import { @@ -17,7 +16,6 @@ import { } from '../../../util/fs'; import { getRepoStatus } from '../../../util/git'; import { newlineRegex, regEx } from '../../../util/regex'; -import { isValid } from '../../versioning/ruby'; import type { UpdateArtifact, UpdateArtifactsResult } from '../types'; import { getBundlerConstraint, @@ -32,14 +30,17 @@ import { const hostConfigVariablePrefix = 'BUNDLE_'; function 
buildBundleHostVariable(hostRule: HostRule): Record { - if (!hostRule.resolvedHost || hostRule.resolvedHost.includes('-')) { + // istanbul ignore if: doesn't happen in practice + if (!hostRule.resolvedHost) { return {}; } const varName = hostConfigVariablePrefix.concat( hostRule.resolvedHost + .toUpperCase() .split('.') - .map((term) => term.toUpperCase()) - .join('__'), + .join('__') + .split('-') + .join('___'), ); return { [varName]: `${getAuthenticationHeaderValue(hostRule)}`, @@ -149,47 +150,12 @@ export async function updateArtifacts( {} as Record, ); - // Detect hosts with a hyphen '-' in the url. - // Those cannot be added with environment variables but need to be added - // with the bundler config - const bundlerHostRulesAuthCommands: string[] = bundlerHostRules.reduce( - (authCommands: string[], hostRule) => { - if (hostRule.resolvedHost?.includes('-')) { - // TODO: fix me, hostrules can missing all auth - const creds = getAuthenticationHeaderValue(hostRule); - authCommands.push(`${quote(hostRule.resolvedHost)} ${quote(creds)}`); - } - return authCommands; - }, - [], - ); - const bundler = getBundlerConstraint( updateArtifact, existingLockFileContent, ); const preCommands = ['ruby --version']; - // Bundler < 2 has a different config option syntax than >= 2 - if ( - bundlerHostRulesAuthCommands && - bundler && - isValid(bundler) && - lt(bundler, '2') - ) { - preCommands.push( - ...bundlerHostRulesAuthCommands.map( - (authCommand) => `bundler config --local ${authCommand}`, - ), - ); - } else if (bundlerHostRulesAuthCommands) { - preCommands.push( - ...bundlerHostRulesAuthCommands.map( - (authCommand) => `bundler config set --local ${authCommand}`, - ), - ); - } - const execOptions: ExecOptions = { cwdFile: lockFileName, userConfiguredEnv: config.env, diff --git a/lib/modules/manager/flux/common.ts b/lib/modules/manager/flux/common.ts index 8fff55a1cf3e42..bafa6285135b0c 100644 --- a/lib/modules/manager/flux/common.ts +++ 
b/lib/modules/manager/flux/common.ts @@ -1,4 +1,6 @@ import { regEx } from '../../../util/regex'; +import type { HelmRepository } from './schema'; +import type { FluxManifest } from './types'; export const systemManifestFileNameRegex = '(?:^|/)gotk-components\\.ya?ml$'; @@ -8,3 +10,19 @@ export const systemManifestHeaderRegex = export function isSystemManifest(file: string): boolean { return regEx(systemManifestFileNameRegex).test(file); } + +export function collectHelmRepos(manifests: FluxManifest[]): HelmRepository[] { + const helmRepositories: HelmRepository[] = []; + + for (const manifest of manifests) { + if (manifest.kind === 'resource') { + for (const resource of manifest.resources) { + if (resource.kind === 'HelmRepository') { + helmRepositories.push(resource); + } + } + } + } + + return helmRepositories; +} diff --git a/lib/modules/manager/flux/extract.ts b/lib/modules/manager/flux/extract.ts index a592183c303b69..98e10778b09785 100644 --- a/lib/modules/manager/flux/extract.ts +++ b/lib/modules/manager/flux/extract.ts @@ -21,7 +21,11 @@ import type { PackageFile, PackageFileContent, } from '../types'; -import { isSystemManifest, systemManifestHeaderRegex } from './common'; +import { + collectHelmRepos, + isSystemManifest, + systemManifestHeaderRegex, +} from './common'; import { FluxResource, type HelmRepository } from './schema'; import type { FluxManagerData, @@ -102,6 +106,39 @@ function resolveGitRepositoryPerSourceTag( } } +function resolveHelmRepository( + dep: PackageDependency, + matchingRepositories: HelmRepository[], + registryAliases: Record | undefined, +): void { + if (matchingRepositories.length) { + dep.registryUrls = matchingRepositories + .map((repo) => { + if (repo.spec.type === 'oci' || isOCIRegistry(repo.spec.url)) { + // Change datasource to Docker + dep.datasource = DockerDatasource.id; + // Ensure the URL is a valid OCI path + dep.packageName = getDep( + `${removeOCIPrefix(repo.spec.url)}/${dep.depName}`, + false, + registryAliases, 
+ ).depName; + return null; + } else { + return repo.spec.url; + } + }) + .filter(is.string); + + // if registryUrls is empty, delete it from dep + if (!dep.registryUrls?.length) { + delete dep.registryUrls; + } + } else { + dep.skipReason = 'unknown-registry'; + } +} + function resolveSystemManifest( manifest: SystemFluxManifest, ): PackageDependency[] { @@ -126,7 +163,8 @@ function resolveResourceManifest( for (const resource of manifest.resources) { switch (resource.kind) { case 'HelmRelease': { - const depName = resource.spec.chart.spec.chart; + const chartSpec = resource.spec.chart.spec; + const depName = chartSpec.chart; const dep: PackageDependency = { depName, currentValue: resource.spec.chart.spec.version, @@ -142,40 +180,12 @@ function resolveResourceManifest( const matchingRepositories = helmRepositories.filter( (rep) => - rep.kind === resource.spec.chart.spec.sourceRef?.kind && - rep.metadata.name === resource.spec.chart.spec.sourceRef.name && + rep.kind === chartSpec.sourceRef?.kind && + rep.metadata.name === chartSpec.sourceRef.name && rep.metadata.namespace === - (resource.spec.chart.spec.sourceRef.namespace ?? - resource.metadata?.namespace), + (chartSpec.sourceRef.namespace ?? 
resource.metadata?.namespace), ); - if (matchingRepositories.length) { - dep.registryUrls = matchingRepositories - .map((repo) => { - if (repo.spec.type === 'oci' || isOCIRegistry(repo.spec.url)) { - // Change datasource to Docker - dep.datasource = DockerDatasource.id; - // Ensure the URL is a valid OCI path - dep.packageName = getDep( - `${removeOCIPrefix(repo.spec.url)}/${ - resource.spec.chart.spec.chart - }`, - false, - registryAliases, - ).depName; - return null; - } else { - return repo.spec.url; - } - }) - .filter(is.string); - - // if registryUrls is empty, delete it from dep - if (!dep.registryUrls?.length) { - delete dep.registryUrls; - } - } else { - dep.skipReason = 'unknown-registry'; - } + resolveHelmRepository(dep, matchingRepositories, registryAliases); deps.push(dep); break; } @@ -252,14 +262,7 @@ export function extractPackageFile( if (!manifest) { return null; } - const helmRepositories: HelmRepository[] = []; - if (manifest.kind === 'resource') { - for (const resource of manifest.resources) { - if (resource.kind === 'HelmRepository') { - helmRepositories.push(resource); - } - } - } + const helmRepositories = collectHelmRepos([manifest]); let deps: PackageDependency[] | null = null; switch (manifest.kind) { case 'system': @@ -293,16 +296,7 @@ export async function extractAllPackageFiles( } } - const helmRepositories: HelmRepository[] = []; - for (const manifest of manifests) { - if (manifest.kind === 'resource') { - for (const resource of manifest.resources) { - if (resource.kind === 'HelmRepository') { - helmRepositories.push(resource); - } - } - } - } + const helmRepositories = collectHelmRepos(manifests); for (const manifest of manifests) { let deps: PackageDependency[] | null = null; diff --git a/lib/modules/manager/github-actions/extract.spec.ts b/lib/modules/manager/github-actions/extract.spec.ts index 75479bef617333..f3d499e10adda7 100644 --- a/lib/modules/manager/github-actions/extract.spec.ts +++ 
b/lib/modules/manager/github-actions/extract.spec.ts @@ -467,7 +467,7 @@ describe('modules/manager/github-actions/extract', () => { build: steps: - name: "test1" - uses: https://github.com/actions/setup-node@56337c425554a6be30cdef71bf441f15be286854 # tag=v3.1.1 + uses: https://github.com/actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # tag=v4.2.0 - name: "test2" uses: https://code.forgejo.org/actions/setup-node@56337c425554a6be30cdef71bf441f15be286854 # v3.1.1 - name: "test3" @@ -479,14 +479,18 @@ describe('modules/manager/github-actions/extract', () => { expect(res).toMatchObject({ deps: [ { - currentDigest: '56337c425554a6be30cdef71bf441f15be286854', - currentValue: 'v3.1.1', + depName: 'https://github.com/actions/cache', + packageName: 'actions/cache', + currentDigest: '1bd1e32a3bdc45362d1e726936510720a7c30a57', + currentValue: 'v4.2.0', replaceString: - 'https://github.com/actions/setup-node@56337c425554a6be30cdef71bf441f15be286854 # tag=v3.1.1', + 'https://github.com/actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # tag=v4.2.0', datasource: 'github-tags', registryUrls: ['https://github.com/'], }, { + depName: 'https://code.forgejo.org/actions/setup-node', + packageName: 'actions/setup-node', currentDigest: '56337c425554a6be30cdef71bf441f15be286854', currentValue: 'v3.1.1', replaceString: diff --git a/lib/modules/manager/github-actions/extract.ts b/lib/modules/manager/github-actions/extract.ts index c402dbcbc58ef9..b797f8fc1e9f2c 100644 --- a/lib/modules/manager/github-actions/extract.ts +++ b/lib/modules/manager/github-actions/extract.ts @@ -19,7 +19,7 @@ import type { Workflow } from './types'; const dockerActionRe = regEx(/^\s+uses\s*: ['"]?docker:\/\/([^'"]+)\s*$/); const actionRe = regEx( - /^\s+-?\s+?uses\s*: 
(?['"]?(?https:\/\/[.\w-]+\/)?(?[\w-]+\/[.\w-]+)(?\/.*)?@(?[^\s'"]+)['"]?(?:(?\s+)#\s*(((?:renovate\s*:\s*)?(?:pin\s+|tag\s*=\s*)?|(?:ratchet:[\w-]+\/[.\w-]+)?)@?(?([\w-]*-)?v?\d+(?:\.\d+(?:\.\d+)?)?)|(?:ratchet:exclude)))?)/, + /^\s+-?\s+?uses\s*: (?['"]?(?(?https:\/\/[.\w-]+\/)?(?[\w-]+\/[.\w-]+))(?\/.*)?@(?[^\s'"]+)['"]?(?:(?\s+)#\s*(((?:renovate\s*:\s*)?(?:pin\s+|tag\s*=\s*)?|(?:ratchet:[\w-]+\/[.\w-]+)?)@?(?([\w-]*-)?v?\d+(?:\.\d+(?:\.\d+)?)?)|(?:ratchet:exclude)))?)/, ); // SHA1 or SHA256, see https://github.blog/2020-10-19-git-2-29-released/ @@ -74,6 +74,7 @@ function extractWithRegex( if (tagMatch?.groups) { const { depName, + packageName, currentValue, path = '', tag, @@ -90,12 +91,13 @@ function extractWithRegex( } const dep: PackageDependency = { depName, + ...(packageName !== depName && { packageName }), commitMessageTopic: '{{{depName}}} action', datasource: GithubTagsDatasource.id, versioning: dockerVersioning.id, depType: 'action', replaceString, - autoReplaceStringTemplate: `${quotes}${registryUrl}{{depName}}${path}@{{#if newDigest}}{{newDigest}}${quotes}{{#if newValue}}${commentWhiteSpaces}# {{newValue}}{{/if}}{{/if}}{{#unless newDigest}}{{newValue}}${quotes}{{/unless}}`, + autoReplaceStringTemplate: `${quotes}{{depName}}${path}@{{#if newDigest}}{{newDigest}}${quotes}{{#if newValue}}${commentWhiteSpaces}# {{newValue}}{{/if}}{{/if}}{{#unless newDigest}}{{newValue}}${quotes}{{/unless}}`, ...(registryUrl ? 
detectDatasource(registryUrl) : customRegistryUrlsPackageDependency), diff --git a/lib/modules/manager/gradle/parser.spec.ts b/lib/modules/manager/gradle/parser.spec.ts index 3951184f435de5..6e835a56d6be00 100644 --- a/lib/modules/manager/gradle/parser.spec.ts +++ b/lib/modules/manager/gradle/parser.spec.ts @@ -770,6 +770,17 @@ describe('modules/manager/gradle/parser', () => { const { deps } = parseGradle(input); expect(deps).toMatchObject([output].filter(is.truthy)); }); + + it('handles 3 independent dependencies mismatched as groupId, artifactId, version', () => { + const { deps } = parseGradle( + 'someConfig("foo:bar:1.2.3", "foo:baz:4.5.6", "foo:qux:7.8.9")', + ); + expect(deps).toMatchObject([ + { depName: 'foo:bar', currentValue: '1.2.3' }, + { depName: 'foo:baz', currentValue: '4.5.6' }, + { depName: 'foo:qux', currentValue: '7.8.9' }, + ]); + }); }); describe('calculations', () => { diff --git a/lib/modules/manager/gradle/parser/handlers.ts b/lib/modules/manager/gradle/parser/handlers.ts index 229671b249eaed..4cb6fe719b1118 100644 --- a/lib/modules/manager/gradle/parser/handlers.ts +++ b/lib/modules/manager/gradle/parser/handlers.ts @@ -6,7 +6,7 @@ import { regEx } from '../../../../util/regex'; import type { PackageDependency } from '../../types'; import type { parseGradle as parseGradleCallback } from '../parser'; import type { Ctx, GradleManagerData } from '../types'; -import { parseDependencyString } from '../utils'; +import { isDependencyString, parseDependencyString } from '../utils'; import { GRADLE_PLUGINS, REGISTRY_URLS, @@ -169,6 +169,22 @@ export function handleLongFormDep(ctx: Ctx): Ctx { return ctx; } + // Special handling: 3 independent dependencies mismatched as groupId, artifactId, version + if ( + isDependencyString(groupId) && + isDependencyString(artifactId) && + isDependencyString(version) + ) { + ctx.tokenMap.templateStringTokens = groupIdTokens; + handleDepString(ctx); + ctx.tokenMap.templateStringTokens = artifactIdTokens; + 
handleDepString(ctx); + ctx.tokenMap.templateStringTokens = versionTokens; + handleDepString(ctx); + + return ctx; + } + const dep = parseDependencyString([groupId, artifactId, version].join(':')); if (!dep) { return ctx; diff --git a/lib/modules/manager/nuget/__fixtures__/sample.csproj b/lib/modules/manager/nuget/__fixtures__/sample.csproj index 37a002bbce8089..eae6a29972ccc2 100644 --- a/lib/modules/manager/nuget/__fixtures__/sample.csproj +++ b/lib/modules/manager/nuget/__fixtures__/sample.csproj @@ -10,6 +10,7 @@ + diff --git a/lib/modules/manager/nuget/__snapshots__/extract.spec.ts.snap b/lib/modules/manager/nuget/__snapshots__/extract.spec.ts.snap index 55c20db0dc876c..141706ed5a1658 100644 --- a/lib/modules/manager/nuget/__snapshots__/extract.spec.ts.snap +++ b/lib/modules/manager/nuget/__snapshots__/extract.spec.ts.snap @@ -14,6 +14,43 @@ exports[`modules/manager/nuget/extract extractPackageFile() extracts all depende "depName": "Microsoft.VisualStudio.Web.CodeGeneration.Tools", "depType": "nuget", }, + { + "currentValue": "undefined", + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable2", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable1", + "depType": "nuget", + "skipReason": "invalid-version", + }, { "currentValue": "1.2.3", "datasource": "nuget", @@ -115,6 +152,36 @@ exports[`modules/manager/nuget/extract extractPackageFile() extracts all depende "depName": "Microsoft.VisualStudio.Web.CodeGeneration.Tools", 
"depType": "nuget", }, + { + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable3", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable2", + "depType": "nuget", + "skipReason": "invalid-version", + }, + { + "datasource": "nuget", + "depName": "NotUpdatable1", + "depType": "nuget", + "skipReason": "invalid-version", + }, { "currentValue": "1.2.3", "datasource": "nuget", diff --git a/lib/modules/manager/nuget/extract.spec.ts b/lib/modules/manager/nuget/extract.spec.ts index 8b608fc92c3cf8..592ff57f668989 100644 --- a/lib/modules/manager/nuget/extract.spec.ts +++ b/lib/modules/manager/nuget/extract.spec.ts @@ -57,7 +57,7 @@ describe('modules/manager/nuget/extract', () => { const sample = Fixtures.get(packageFile); const res = await extractPackageFile(sample, packageFile, config); expect(res?.deps).toMatchSnapshot(); - expect(res?.deps).toHaveLength(17); + expect(res?.deps).toHaveLength(23); }); it('extracts msbuild sdk from the Sdk attr of Project element', async () => { @@ -157,7 +157,7 @@ describe('modules/manager/nuget/extract', () => { const sample = Fixtures.get(packageFile); const res = await extractPackageFile(sample, packageFile, config); expect(res?.deps).toMatchSnapshot(); - expect(res?.deps).toHaveLength(17); + expect(res?.deps).toHaveLength(22); }); it('extracts ContainerBaseImage', async () => { diff --git a/lib/modules/manager/nuget/extract.ts b/lib/modules/manager/nuget/extract.ts index 19961664e499a6..3b405f684335b4 100644 --- a/lib/modules/manager/nuget/extract.ts +++ b/lib/modules/manager/nuget/extract.ts @@ -28,7 +28,7 @@ import { applyRegistries, findVersion, getConfiguredRegistries } from './util'; * so we don't include it in the extracting regexp 
*/ const checkVersion = regEx( - `^\\s*(?:[[])?(?:(?[^"(,[\\]]+)\\s*(?:,\\s*[)\\]]|])?)\\s*$`, + /^\s*(?:[[])?(?:(?[^"(,[\]]+)\s*(?:,\s*[)\]]|])?)\s*$/, ); const elemNames = new Set([ 'PackageReference', @@ -58,23 +58,38 @@ function extractDepsFromXml(xmlNode: XmlDocument): NugetPackageDependency[] { if (elemNames.has(name)) { const depName = attr?.Include || attr?.Update; - const version = + if (!depName) { + continue; + } + + const dep: NugetPackageDependency = { + datasource: NugetDatasource.id, + depType: 'nuget', + depName, + }; + + let currentValue: string | undefined = attr?.Version ?? attr?.version ?? child.valueWithPath('Version') ?? attr?.VersionOverride ?? child.valueWithPath('VersionOverride'); - const currentValue = is.nonEmptyStringAndNotWhitespace(version) - ? checkVersion.exec(version)?.groups?.currentValue?.trim() - : undefined; - if (depName && currentValue) { - results.push({ - datasource: NugetDatasource.id, - depType: 'nuget', - depName, - currentValue, - }); + + if (!is.nonEmptyStringAndNotWhitespace(currentValue)) { + dep.skipReason = 'invalid-version'; } + + currentValue = checkVersion + .exec(currentValue) + ?.groups?.currentValue?.trim(); + + if (currentValue) { + dep.currentValue = currentValue; + } else { + dep.skipReason = 'invalid-version'; + } + + results.push(dep); } else if (name === 'Sdk') { const depName = attr?.Name; const version = attr?.Version; diff --git a/lib/modules/manager/poetry/__fixtures__/pyproject.10.toml b/lib/modules/manager/poetry/__fixtures__/pyproject.10.toml index 65c5e06ba8d947..44e0eb41e200a9 100644 --- a/lib/modules/manager/poetry/__fixtures__/pyproject.10.toml +++ b/lib/modules/manager/poetry/__fixtures__/pyproject.10.toml @@ -16,6 +16,10 @@ url = "last.url" [[tool.poetry.source]] name = "five" +[[tool.poetry.source]] +name = "invalid-url" +url = "invalid-url" + [build-system] requires = ["poetry_core>=1.0", "wheel"] build-backend = "poetry.masonry.api" diff --git 
a/lib/modules/manager/poetry/artifacts.spec.ts b/lib/modules/manager/poetry/artifacts.spec.ts index 0ea60e8bb8a633..f7db72a8c48838 100644 --- a/lib/modules/manager/poetry/artifacts.spec.ts +++ b/lib/modules/manager/poetry/artifacts.spec.ts @@ -1,4 +1,5 @@ import { codeBlock } from 'common-tags'; +import { GoogleAuth as _googleAuth } from 'google-auth-library'; import { mockDeep } from 'jest-mock-extended'; import { join } from 'upath'; import { envMock, mockExecAll } from '../../../../test/exec-util'; @@ -15,16 +16,26 @@ import { updateArtifacts } from '.'; const pyproject1toml = Fixtures.get('pyproject.1.toml'); const pyproject10toml = Fixtures.get('pyproject.10.toml'); +const pyproject13toml = `[[tool.poetry.source]] +name = "some-gar-repo" +url = "https://someregion-python.pkg.dev/some-project/some-repo/simple/" + +[build-system] +requires = ["poetry_core>=1.0", "wheel"] +build-backend = "poetry.masonry.api" +`; jest.mock('../../../util/exec/env'); jest.mock('../../../util/fs'); jest.mock('../../datasource', () => mockDeep()); jest.mock('../../../util/host-rules', () => mockDeep()); +jest.mock('google-auth-library'); process.env.CONTAINERBASE = 'true'; const datasource = mocked(_datasource); const hostRules = mocked(_hostRules); +const googleAuth = mocked(_googleAuth); const adminConfig: RepoGlobalConfig = { localDir: join('/tmp/github/some/repo'), @@ -198,7 +209,99 @@ describe('modules/manager/poetry/artifacts', () => { }, }, ]); - expect(hostRules.find.mock.calls).toHaveLength(5); + expect(hostRules.find.mock.calls).toHaveLength(7); + expect(execSnapshots).toMatchObject([ + { + cmd: 'poetry update --lock --no-interaction dep1', + options: { + env: { + POETRY_HTTP_BASIC_ONE_PASSWORD: 'passwordOne', + POETRY_HTTP_BASIC_ONE_USERNAME: 'usernameOne', + POETRY_HTTP_BASIC_TWO_USERNAME: 'usernameTwo', + POETRY_HTTP_BASIC_FOUR_OH_FOUR_PASSWORD: 'passwordFour', + }, + }, + }, + ]); + }); + + it('passes Google Artifact Registry credentials environment vars', async () => 
{ + // poetry.lock + fs.getSiblingFileName.mockReturnValueOnce('poetry.lock'); + fs.readLocalFile.mockResolvedValueOnce(null); + // pyproject.lock + fs.getSiblingFileName.mockReturnValueOnce('pyproject.lock'); + fs.readLocalFile.mockResolvedValueOnce('[metadata]\n'); + const execSnapshots = mockExecAll(); + fs.readLocalFile.mockResolvedValueOnce('New poetry.lock'); + googleAuth.mockImplementationOnce( + jest.fn().mockImplementationOnce(() => ({ + getAccessToken: jest.fn().mockResolvedValue('some-token'), + })), + ); + const updatedDeps = [{ depName: 'dep1' }]; + expect( + await updateArtifacts({ + packageFileName: 'pyproject.toml', + updatedDeps, + newPackageFileContent: pyproject13toml, + config, + }), + ).toEqual([ + { + file: { + type: 'addition', + path: 'pyproject.lock', + contents: 'New poetry.lock', + }, + }, + ]); + expect(hostRules.find.mock.calls).toHaveLength(3); + expect(execSnapshots).toMatchObject([ + { + cmd: 'poetry update --lock --no-interaction dep1', + options: { + env: { + POETRY_HTTP_BASIC_SOME_GAR_REPO_USERNAME: 'oauth2accesstoken', + POETRY_HTTP_BASIC_SOME_GAR_REPO_PASSWORD: 'some-token', + }, + }, + }, + ]); + }); + + it('continues if Google auth is not configured', async () => { + // poetry.lock + fs.getSiblingFileName.mockReturnValueOnce('poetry.lock'); + fs.readLocalFile.mockResolvedValueOnce(null); + // pyproject.lock + fs.getSiblingFileName.mockReturnValueOnce('pyproject.lock'); + fs.readLocalFile.mockResolvedValueOnce('[metadata]\n'); + const execSnapshots = mockExecAll(); + fs.readLocalFile.mockResolvedValueOnce('New poetry.lock'); + googleAuth.mockImplementation( + jest.fn().mockImplementation(() => ({ + getAccessToken: jest.fn().mockResolvedValue(undefined), + })), + ); + const updatedDeps = [{ depName: 'dep1' }]; + expect( + await updateArtifacts({ + packageFileName: 'pyproject.toml', + updatedDeps, + newPackageFileContent: pyproject13toml, + config, + }), + ).toEqual([ + { + file: { + type: 'addition', + path: 'pyproject.lock', + 
contents: 'New poetry.lock', + }, + }, + ]); + expect(hostRules.find.mock.calls).toHaveLength(3); expect(execSnapshots).toMatchObject([ { cmd: 'poetry update --lock --no-interaction dep1' }, ]); diff --git a/lib/modules/manager/poetry/artifacts.ts b/lib/modules/manager/poetry/artifacts.ts index d5582f619c5910..ec77248288828c 100644 --- a/lib/modules/manager/poetry/artifacts.ts +++ b/lib/modules/manager/poetry/artifacts.ts @@ -17,7 +17,9 @@ import { find } from '../../../util/host-rules'; import { regEx } from '../../../util/regex'; import { Result } from '../../../util/result'; import { parse as parseToml } from '../../../util/toml'; +import { parseUrl } from '../../../util/url'; import { PypiDatasource } from '../../datasource/pypi'; +import { getGoogleAuthTokenRaw } from '../../datasource/util'; import type { UpdateArtifact, UpdateArtifactsResult } from '../types'; import { Lockfile, PoetrySchemaToml } from './schema'; import type { PoetryFile, PoetrySource } from './types'; @@ -101,7 +103,7 @@ function getPoetrySources(content: string, fileName: string): PoetrySource[] { return []; } if (!pyprojectFile.tool?.poetry) { - logger.debug(`{$fileName} contains no poetry section`); + logger.debug(`${fileName} contains no poetry section`); return []; } @@ -115,20 +117,42 @@ function getPoetrySources(content: string, fileName: string): PoetrySource[] { return sourceArray; } -function getMatchingHostRule(url: string | undefined): HostRule { +async function getMatchingHostRule(url: string | undefined): Promise { const scopedMatch = find({ hostType: PypiDatasource.id, url }); - return is.nonEmptyObject(scopedMatch) ? scopedMatch : find({ url }); + const hostRule = is.nonEmptyObject(scopedMatch) ? 
scopedMatch : find({ url }); + if (hostRule) { + return hostRule; + } + + const parsedUrl = parseUrl(url); + if (!parsedUrl) { + logger.once.debug(`Failed to parse URL ${url}`); + return {}; + } + + if (parsedUrl.hostname.endsWith('.pkg.dev')) { + const accessToken = await getGoogleAuthTokenRaw(); + if (accessToken) { + return { + username: 'oauth2accesstoken', + password: accessToken, + }; + } + logger.once.debug(`Could not get Google access token (url=${url})`); + } + + return {}; } -function getSourceCredentialVars( +async function getSourceCredentialVars( pyprojectContent: string, packageFileName: string, -): NodeJS.ProcessEnv { +): Promise { const poetrySources = getPoetrySources(pyprojectContent, packageFileName); const envVars: NodeJS.ProcessEnv = {}; for (const source of poetrySources) { - const matchingHostRule = getMatchingHostRule(source.url); + const matchingHostRule = await getMatchingHostRule(source.url); const formattedSourceName = source.name .replace(regEx(/(\.|-)+/g), '_') .toUpperCase(); @@ -192,7 +216,10 @@ export async function updateArtifacts({ config.constraints?.poetry ?? 
getPoetryRequirement(newPackageFileContent, existingLockFileContent); const extraEnv = { - ...getSourceCredentialVars(newPackageFileContent, packageFileName), + ...(await getSourceCredentialVars( + newPackageFileContent, + packageFileName, + )), ...getGitEnvironmentVariables(['poetry']), PIP_CACHE_DIR: await ensureCacheDir('pip'), }; diff --git a/lib/modules/manager/pre-commit/__fixtures__/complex.pre-commit-config.yaml b/lib/modules/manager/pre-commit/__fixtures__/complex.pre-commit-config.yaml index 1d569aaa676f17..1b1eab5ea00dfd 100644 --- a/lib/modules/manager/pre-commit/__fixtures__/complex.pre-commit-config.yaml +++ b/lib/modules/manager/pre-commit/__fixtures__/complex.pre-commit-config.yaml @@ -13,11 +13,18 @@ repos: rev: 19.3b0 hooks: - id: black + language: python + additional_dependencies: + - "request==1.1.1" + - "" # broken pypi package - repo: https://gitlab.com/psf/black # should also detect gitlab rev: 19.3b0 hooks: - id: black + # missing language, not extracted + additional_dependencies: + - "urllib==24.9.0" - repo: http://gitlab.com/psf/black # should also detect http rev: 19.3b0 @@ -48,3 +55,7 @@ repos: - repo: some_invalid_url # case with invlalid url. 
rev: v1.0.0 + + # pre-commit meta hooks + - repo: meta + hooks: [] diff --git a/lib/modules/manager/pre-commit/__snapshots__/extract.spec.ts.snap b/lib/modules/manager/pre-commit/__snapshots__/extract.spec.ts.snap index af3eb630748fd7..7dea84085efd04 100644 --- a/lib/modules/manager/pre-commit/__snapshots__/extract.spec.ts.snap +++ b/lib/modules/manager/pre-commit/__snapshots__/extract.spec.ts.snap @@ -10,6 +10,14 @@ exports[`modules/manager/pre-commit/extract extractPackageFile() extracts from c "depType": "repository", "packageName": "pre-commit/pre-commit-hooks", }, + { + "currentValue": "==1.1.1", + "currentVersion": "1.1.1", + "datasource": "pypi", + "depName": "request", + "depType": "pre-commit-python", + "packageName": "request", + }, { "currentValue": "19.3b0", "datasource": "github-tags", diff --git a/lib/modules/manager/pre-commit/extract.spec.ts b/lib/modules/manager/pre-commit/extract.spec.ts index 1bdfd58d2757a7..5eaf7426a522a1 100644 --- a/lib/modules/manager/pre-commit/extract.spec.ts +++ b/lib/modules/manager/pre-commit/extract.spec.ts @@ -2,6 +2,7 @@ import { mockDeep } from 'jest-mock-extended'; import { Fixtures } from '../../../../test/fixtures'; import { mocked } from '../../../../test/util'; import * as _hostRules from '../../../util/host-rules'; +import { PypiDatasource } from '../../datasource/pypi'; import { extractPackageFile } from '.'; jest.mock('../../../util/host-rules', () => mockDeep()); @@ -81,6 +82,14 @@ describe('modules/manager/pre-commit/extract', () => { expect(result).toMatchSnapshot({ deps: [ { depName: 'pre-commit/pre-commit-hooks', currentValue: 'v3.3.0' }, + { + currentValue: '==1.1.1', + currentVersion: '1.1.1', + datasource: PypiDatasource.id, + depName: 'request', + depType: 'pre-commit-python', + packageName: 'request', + }, { depName: 'psf/black', currentValue: '19.3b0' }, { depName: 'psf/black', currentValue: '19.3b0' }, { depName: 'psf/black', currentValue: '19.3b0' }, diff --git 
a/lib/modules/manager/pre-commit/extract.ts b/lib/modules/manager/pre-commit/extract.ts index 8dc89e4e70a269..a127861cd65c24 100644 --- a/lib/modules/manager/pre-commit/extract.ts +++ b/lib/modules/manager/pre-commit/extract.ts @@ -7,6 +7,7 @@ import { regEx } from '../../../util/regex'; import { parseSingleYaml } from '../../../util/yaml'; import { GithubTagsDatasource } from '../../datasource/github-tags'; import { GitlabTagsDatasource } from '../../datasource/gitlab-tags'; +import { pep508ToPackageDependency } from '../pep621/utils'; import type { PackageDependency, PackageFileContent } from '../types'; import { matchesPrecommitConfigHeuristic, @@ -137,6 +138,23 @@ function findDependencies(precommitFile: PreCommitConfig): PackageDependency[] { } const packageDependencies: PackageDependency[] = []; precommitFile.repos.forEach((item) => { + // meta hooks is defined from pre-commit and doesn't support `additional_dependencies` + if (item.repo !== 'meta') { + item.hooks?.forEach((hook) => { + // normally language are not defined in yaml + // only support it when it's explicitly defined. + // this avoid to parse hooks from pre-commit-hooks.yaml from git repo + if (hook.language === 'python') { + hook.additional_dependencies?.map((req) => { + const dep = pep508ToPackageDependency('pre-commit-python', req); + if (dep) { + packageDependencies.push(dep); + } + }); + } + }); + } + if (matchesPrecommitDependencyHeuristic(item)) { logger.trace(item, 'Matched pre-commit dependency spec'); const repository = String(item.repo); diff --git a/lib/modules/manager/pre-commit/readme.md b/lib/modules/manager/pre-commit/readme.md index 9e91c6cf95cc7b..77949b696099d0 100644 --- a/lib/modules/manager/pre-commit/readme.md +++ b/lib/modules/manager/pre-commit/readme.md @@ -26,3 +26,33 @@ To enable the `pre-commit` manager, add the following config: ``` Alternatively, add `:enablePreCommit` to your `extends` array. 
+ +### Additional Dependencies + +renovate has partial support for `additional_dependencies`, currently python only. + +for python hooks, you will need to **explicitly add language** to your hooks with `additional_dependencies` +to let renovatebot know what kind of dependencies they are. + +For example, this work for `request`: + +```yaml +- repo: https://github.com/psf/black + rev: 19.3b0 + hooks: + - id: black + language: python + additional_dependencies: + - 'request==1.1.1' +``` + +this won't work: + +```yaml +- repo: https://github.com/psf/black + rev: 19.3b0 + hooks: + - id: black + additional_dependencies: + - 'request==1.1.1' +``` diff --git a/lib/modules/manager/pre-commit/types.ts b/lib/modules/manager/pre-commit/types.ts index dadcf068e61e97..3d3b3cf2e12abc 100644 --- a/lib/modules/manager/pre-commit/types.ts +++ b/lib/modules/manager/pre-commit/types.ts @@ -2,7 +2,13 @@ export interface PreCommitConfig { repos: PreCommitDependency[]; } +export interface PreCommitHook { + language?: string; + additional_dependencies?: Array; +} + export interface PreCommitDependency { repo: string; + hooks?: Array; rev: string; } diff --git a/lib/modules/platform/github/index.spec.ts b/lib/modules/platform/github/index.spec.ts index eb133f6ecae99b..9eaf850b78dce7 100644 --- a/lib/modules/platform/github/index.spec.ts +++ b/lib/modules/platform/github/index.spec.ts @@ -1531,6 +1531,57 @@ describe('modules/platform/github/index', () => { }); }); + describe('getIssue()', () => { + it('returns null if issues disabled', async () => { + const scope = httpMock.scope(githubApiHost); + initRepoMock(scope, 'some/repo', { hasIssuesEnabled: false }); + await github.initRepo({ repository: 'some/repo' }); + const res = await github.getIssue(1); + expect(res).toBeNull(); + }); + + it('returns issue', async () => { + const scope = httpMock.scope(githubApiHost); + initRepoMock(scope, 'some/repo'); + const issue = { + number: 1, + state: 'open', + title: 'title-1', + body: 'body-1', + }; 
+ scope + .get('/repos/some/repo/issues/1') + .reply(200, { ...issue, updated_at: '2022-01-01T00:00:00Z' }); + await github.initRepo({ repository: 'some/repo' }); + const res = await github.getIssue(1); + expect(res).toMatchObject({ + ...issue, + lastModified: '2022-01-01T00:00:00Z', + }); + }); + + it('returns null if issue not found', async () => { + const scope = httpMock.scope(githubApiHost); + initRepoMock(scope, 'some/repo'); + scope.get('/repos/some/repo/issues/1').reply(404); + await github.initRepo({ repository: 'some/repo' }); + const res = await github.getIssue(1); + expect(res).toBeNull(); + }); + + it('logs debug message if issue deleted', async () => { + const scope = httpMock.scope(githubApiHost); + initRepoMock(scope, 'some/repo'); + scope.get('/repos/some/repo/issues/1').reply(410); + await github.initRepo({ repository: 'some/repo' }); + const res = await github.getIssue(1); + expect(res).toBeNull(); + expect(logger.logger.debug).toHaveBeenCalledWith( + 'Issue #1 has been deleted', + ); + }); + }); + describe('findIssue()', () => { it('returns null if no issue', async () => { httpMock diff --git a/lib/modules/platform/github/index.ts b/lib/modules/platform/github/index.ts index 93488565ba630a..8551653ce0dac3 100644 --- a/lib/modules/platform/github/index.ts +++ b/lib/modules/platform/github/index.ts @@ -1231,7 +1231,6 @@ export async function getIssueList(): Promise { } export async function getIssue(number: number): Promise { - // istanbul ignore if if (config.hasIssuesEnabled === false) { return null; } @@ -1246,8 +1245,12 @@ export async function getIssue(number: number): Promise { ); GithubIssueCache.updateIssue(issue); return issue; - } catch (err) /* istanbul ignore next */ { + } catch (err) { logger.debug({ err, number }, 'Error getting issue'); + if (err.response?.statusCode === 410) { + logger.debug(`Issue #${number} has been deleted`); + GithubIssueCache.deleteIssue(number); + } return null; } } diff --git 
a/lib/modules/platform/github/issue.spec.ts b/lib/modules/platform/github/issue.spec.ts index 749364ddf5ad4e..8a9e9bd1cb82a4 100644 --- a/lib/modules/platform/github/issue.spec.ts +++ b/lib/modules/platform/github/issue.spec.ts @@ -159,6 +159,32 @@ describe('modules/platform/github/issue', () => { }); }); + it('removes particular issue from the cache', () => { + cache.platform = { + github: { + issuesCache: { + '1': { + number: 1, + body: 'body-1', + state: 'open', + title: 'title-1', + lastModified: '2020-01-01T00:00:00.000Z', + }, + }, + }, + }; + + GithubIssueCache.deleteIssue(1); + + expect(cache).toEqual({ + platform: { + github: { + issuesCache: {}, + }, + }, + }); + }); + it('reconciles cache', () => { cache.platform = { github: { diff --git a/lib/modules/platform/github/issue.ts b/lib/modules/platform/github/issue.ts index 1cc85151777233..11a5d050dceceb 100644 --- a/lib/modules/platform/github/issue.ts +++ b/lib/modules/platform/github/issue.ts @@ -85,6 +85,13 @@ export class GithubIssueCache { } } + static deleteIssue(number: number): void { + const cacheData = this.data; + if (cacheData) { + delete cacheData[number]; + } + } + /** * At the moment of repo initialization, repository cache is not available. * What we can do is to store issues for later reconciliation. 
diff --git a/lib/modules/platform/gitlab/index.ts b/lib/modules/platform/gitlab/index.ts index 2460b206d92de0..1010f448cdc7f1 100644 --- a/lib/modules/platform/gitlab/index.ts +++ b/lib/modules/platform/gitlab/index.ts @@ -1361,9 +1361,12 @@ export async function ensureComment({ if (topic) { logger.debug(`Ensuring comment "${massagedTopic!}" in #${number}`); body = `### ${topic}\n\n${sanitizedContent}`; - body = body - .replace(regEx(/Pull Request/g), 'Merge Request') - .replace(regEx(/PR/g), 'MR'); + body = smartTruncate( + body + .replace(regEx(/Pull Request/g), 'Merge Request') + .replace(regEx(/PR/g), 'MR'), + maxBodyLength(), + ); comments.forEach((comment: { body: string; id: number }) => { if (comment.body.startsWith(`### ${massagedTopic!}\n\n`)) { commentId = comment.id; @@ -1372,7 +1375,7 @@ export async function ensureComment({ }); } else { logger.debug(`Ensuring content-only comment in #${number}`); - body = `${sanitizedContent}`; + body = smartTruncate(`${sanitizedContent}`, maxBodyLength()); comments.forEach((comment: { body: string; id: number }) => { if (comment.body === body) { commentId = comment.id; diff --git a/lib/workers/repository/finalize/index.ts b/lib/workers/repository/finalize/index.ts index e530834c7ef705..88782016965f0b 100644 --- a/lib/workers/repository/finalize/index.ts +++ b/lib/workers/repository/finalize/index.ts @@ -4,7 +4,7 @@ import { platform } from '../../../modules/platform'; import * as repositoryCache from '../../../util/cache/repository'; import { clearRenovateRefs } from '../../../util/git'; import { PackageFiles } from '../package-files'; -import { validateReconfigureBranch } from '../reconfigure'; +import { checkReconfigureBranch } from '../reconfigure'; import { pruneStaleBranches } from './prune'; import { runBranchSummary, @@ -16,7 +16,7 @@ export async function finalizeRepo( config: RenovateConfig, branchList: string[], ): Promise { - await validateReconfigureBranch(config); + await checkReconfigureBranch(config); 
await repositoryCache.saveCache(); await pruneStaleBranches(config, branchList); await ensureIssuesClosing(); diff --git a/lib/workers/repository/finalize/prune.ts b/lib/workers/repository/finalize/prune.ts index 7d30c01f97a554..918751344bbe98 100644 --- a/lib/workers/repository/finalize/prune.ts +++ b/lib/workers/repository/finalize/prune.ts @@ -9,7 +9,7 @@ import { scm } from '../../../modules/platform/scm'; import { getBranchList, setUserRepoConfig } from '../../../util/git'; import { escapeRegExp, regEx } from '../../../util/regex'; import { uniqueStrings } from '../../../util/string'; -import { getReconfigureBranchName } from '../reconfigure'; +import { getReconfigureBranchName } from '../reconfigure/utils'; async function cleanUpBranches( config: RenovateConfig, diff --git a/lib/workers/repository/reconfigure/index.spec.ts b/lib/workers/repository/reconfigure/index.spec.ts index d7b9e5a97dac5e..52aff264f7ebe1 100644 --- a/lib/workers/repository/reconfigure/index.spec.ts +++ b/lib/workers/repository/reconfigure/index.spec.ts @@ -1,242 +1,42 @@ -import { mock } from 'jest-mock-extended'; import type { RenovateConfig } from '../../../../test/util'; -import { fs, git, mocked, partial, platform, scm } from '../../../../test/util'; +import { logger, mocked, scm } from '../../../../test/util'; import { GlobalConfig } from '../../../config/global'; -import { logger } from '../../../logger'; -import type { Pr } from '../../../modules/platform/types'; -import * as _cache from '../../../util/cache/repository'; -import type { LongCommitSha } from '../../../util/git/types'; -import * as _merge from '../init/merge'; -import { validateReconfigureBranch } from '.'; +import * as _validate from './validate'; +import { checkReconfigureBranch } from '.'; -jest.mock('../../../util/cache/repository'); -jest.mock('../../../util/fs'); -jest.mock('../../../util/git'); -jest.mock('../init/merge'); +jest.mock('./validate'); -const cache = mocked(_cache); -const merge = mocked(_merge); 
+const validate = mocked(_validate); describe('workers/repository/reconfigure/index', () => { const config: RenovateConfig = { branchPrefix: 'prefix/', baseBranch: 'base', - statusCheckNames: partial({ - configValidation: 'renovate/config-validation', - }), }; beforeEach(() => { - config.repository = 'some/repo'; - merge.detectConfigFile.mockResolvedValue('renovate.json'); - scm.branchExists.mockResolvedValue(true); - cache.getCache.mockReturnValue({}); - git.getBranchCommit.mockReturnValue('sha' as LongCommitSha); - fs.readLocalFile.mockResolvedValue(null); - platform.getBranchStatusCheck.mockResolvedValue(null); GlobalConfig.reset(); + scm.branchExists.mockResolvedValue(true); + validate.validateReconfigureBranch.mockResolvedValue(undefined); }); it('no effect when running with platform=local', async () => { GlobalConfig.set({ platform: 'local' }); - await validateReconfigureBranch(config); - expect(logger.debug).toHaveBeenCalledWith( + await checkReconfigureBranch(config); + expect(logger.logger.debug).toHaveBeenCalledWith( 'Not attempting to reconfigure when running with local platform', ); }); it('no effect on repo with no reconfigure branch', async () => { scm.branchExists.mockResolvedValueOnce(false); - await validateReconfigureBranch(config); - expect(logger.debug).toHaveBeenCalledWith('No reconfigure branch found'); - }); - - it('logs error if config file search fails', async () => { - const err = new Error(); - merge.detectConfigFile.mockRejectedValueOnce(err as never); - await validateReconfigureBranch(config); - expect(logger.error).toHaveBeenCalledWith( - { err }, - 'Error while searching for config file in reconfigure branch', - ); - }); - - it('throws error if config file not found in reconfigure branch', async () => { - merge.detectConfigFile.mockResolvedValue(null); - await validateReconfigureBranch(config); - expect(logger.warn).toHaveBeenCalledWith( - 'No config file found in reconfigure branch', - ); - }); - - it('logs error if config file is 
unreadable', async () => { - const err = new Error(); - fs.readLocalFile.mockRejectedValueOnce(err as never); - await validateReconfigureBranch(config); - expect(logger.error).toHaveBeenCalledWith( - { err }, - 'Error while reading config file', - ); - }); - - it('throws error if config file is empty', async () => { - await validateReconfigureBranch(config); - expect(logger.warn).toHaveBeenCalledWith('Empty or invalid config file'); - }); - - it('throws error if config file content is invalid', async () => { - fs.readLocalFile.mockResolvedValueOnce(` - { - "name": - } - `); - await validateReconfigureBranch(config); - expect(logger.error).toHaveBeenCalledWith( - { err: expect.any(Object) }, - 'Error while parsing config file', - ); - expect(platform.setBranchStatus).toHaveBeenCalledWith({ - branchName: 'prefix/reconfigure', - context: 'renovate/config-validation', - description: 'Validation Failed - Unparsable config file', - state: 'red', - }); - }); - - it('handles failed validation', async () => { - fs.readLocalFile.mockResolvedValueOnce(` - { - "enabledManagers": ["docker"] - } - `); - await validateReconfigureBranch(config); - expect(logger.debug).toHaveBeenCalledWith( - { errors: expect.any(String) }, - 'Validation Errors', - ); - expect(platform.setBranchStatus).toHaveBeenCalledWith({ - branchName: 'prefix/reconfigure', - context: 'renovate/config-validation', - description: 'Validation Failed', - state: 'red', - }); - }); - - it('adds comment if reconfigure PR exists', async () => { - fs.readLocalFile.mockResolvedValueOnce(` - { - "enabledManagers": ["docker"] - } - `); - platform.findPr.mockResolvedValueOnce(mock({ number: 1 })); - await validateReconfigureBranch(config); - expect(logger.debug).toHaveBeenCalledWith( - { errors: expect.any(String) }, - 'Validation Errors', - ); - expect(platform.setBranchStatus).toHaveBeenCalled(); - expect(platform.ensureComment).toHaveBeenCalled(); - }); - - it('handles successful validation', async () => { - const pJson 
= ` - { - "renovate": { - "enabledManagers": ["npm"] - } - } - `; - merge.detectConfigFile.mockResolvedValue('package.json'); - fs.readLocalFile.mockResolvedValueOnce(pJson).mockResolvedValueOnce(pJson); - await validateReconfigureBranch(config); - expect(platform.setBranchStatus).toHaveBeenCalledWith({ - branchName: 'prefix/reconfigure', - context: 'renovate/config-validation', - description: 'Validation Successful', - state: 'green', - }); - }); - - it('skips adding status check if statusCheckNames.configValidation is null', async () => { - cache.getCache.mockReturnValueOnce({ - reconfigureBranchCache: { - reconfigureBranchSha: 'new-sha', - isConfigValid: false, - }, - }); - - await validateReconfigureBranch({ - ...config, - statusCheckNames: partial({ - configValidation: null, - }), - }); - expect(logger.debug).toHaveBeenCalledWith( - 'Status check is null or an empty string, skipping status check addition.', - ); - expect(platform.setBranchStatus).not.toHaveBeenCalled(); - }); - - it('skips adding status check if statusCheckNames.configValidation is empty string', async () => { - cache.getCache.mockReturnValueOnce({ - reconfigureBranchCache: { - reconfigureBranchSha: 'new-sha', - isConfigValid: false, - }, - }); - - await validateReconfigureBranch({ - ...config, - statusCheckNames: partial({ - configValidation: '', - }), - }); - expect(logger.debug).toHaveBeenCalledWith( - 'Status check is null or an empty string, skipping status check addition.', - ); - expect(platform.setBranchStatus).not.toHaveBeenCalled(); - }); - - it('skips validation if cache is valid', async () => { - cache.getCache.mockReturnValueOnce({ - reconfigureBranchCache: { - reconfigureBranchSha: 'sha', - isConfigValid: false, - }, - }); - await validateReconfigureBranch(config); - expect(logger.debug).toHaveBeenCalledWith( - 'Skipping validation check as branch sha is unchanged', - ); - }); - - it('skips validation if status check present', async () => { - cache.getCache.mockReturnValueOnce({ 
- reconfigureBranchCache: { - reconfigureBranchSha: 'new_sha', - isConfigValid: false, - }, - }); - platform.getBranchStatusCheck.mockResolvedValueOnce('green'); - await validateReconfigureBranch(config); - expect(logger.debug).toHaveBeenCalledWith( - 'Skipping validation check because status check already exists.', + await checkReconfigureBranch(config); + expect(logger.logger.debug).toHaveBeenCalledWith( + 'No reconfigure branch found', ); }); - it('handles non-default config file', async () => { - merge.detectConfigFile.mockResolvedValue('.renovaterc'); - fs.readLocalFile.mockResolvedValueOnce(` - { - "enabledManagers": ["npm",] - } - `); - await validateReconfigureBranch(config); - expect(platform.setBranchStatus).toHaveBeenCalledWith({ - branchName: 'prefix/reconfigure', - context: 'renovate/config-validation', - description: 'Validation Successful', - state: 'green', - }); + it('validates reconfigure branch', async () => { + await expect(checkReconfigureBranch(config)).toResolve(); }); }); diff --git a/lib/workers/repository/reconfigure/index.ts b/lib/workers/repository/reconfigure/index.ts index abdb1d014649c0..5977918c3b214b 100644 --- a/lib/workers/repository/reconfigure/index.ts +++ b/lib/workers/repository/reconfigure/index.ts @@ -1,49 +1,15 @@ -import is from '@sindresorhus/is'; -import JSON5 from 'json5'; import { GlobalConfig } from '../../../config/global'; import type { RenovateConfig } from '../../../config/types'; -import { validateConfig } from '../../../config/validation'; import { logger } from '../../../logger'; -import { platform } from '../../../modules/platform'; -import { ensureComment } from '../../../modules/platform/comment'; import { scm } from '../../../modules/platform/scm'; -import type { BranchStatus } from '../../../types'; -import { getCache } from '../../../util/cache/repository'; -import { readLocalFile } from '../../../util/fs'; -import { getBranchCommit } from '../../../util/git'; -import { regEx } from '../../../util/regex'; 
-import { detectConfigFile } from '../init/merge'; -import { - deleteReconfigureBranchCache, - setReconfigureBranchCache, -} from './reconfigure-cache'; +import { deleteReconfigureBranchCache } from './reconfigure-cache'; +import { getReconfigureBranchName } from './utils'; +import { validateReconfigureBranch } from './validate'; -async function setBranchStatus( - branchName: string, - description: string, - state: BranchStatus, - context?: string | null, -): Promise { - if (!is.nonEmptyString(context)) { - // already logged this case when validating the status check - return; - } - - await platform.setBranchStatus({ - branchName, - context, - description, - state, - }); -} - -export function getReconfigureBranchName(prefix: string): string { - return `${prefix}reconfigure`; -} -export async function validateReconfigureBranch( +export async function checkReconfigureBranch( config: RenovateConfig, ): Promise { - logger.debug('validateReconfigureBranch()'); + logger.debug('checkReconfigureBranch()'); if (GlobalConfig.get('platform') === 'local') { logger.debug( 'Not attempting to reconfigure when running with local platform', @@ -51,10 +17,8 @@ export async function validateReconfigureBranch( return; } - const context = config.statusCheckNames?.configValidation; - - const branchName = getReconfigureBranchName(config.branchPrefix!); - const branchExists = await scm.branchExists(branchName); + const reconfigureBranch = getReconfigureBranchName(config.branchPrefix!); + const branchExists = await scm.branchExists(reconfigureBranch); // this is something the user initiates, so skip if no branch exists if (!branchExists) { @@ -63,141 +27,5 @@ export async function validateReconfigureBranch( return; } - // look for config file - // 1. check reconfigure branch cache and use the configFileName if it exists - // 2. 
checkout reconfigure branch and look for the config file, don't assume default configFileName - const branchSha = getBranchCommit(branchName)!; - const cache = getCache(); - let configFileName: string | null = null; - const reconfigureCache = cache.reconfigureBranchCache; - // only use valid cached information - if (reconfigureCache?.reconfigureBranchSha === branchSha) { - logger.debug('Skipping validation check as branch sha is unchanged'); - return; - } - - if (context) { - const validationStatus = await platform.getBranchStatusCheck( - branchName, - context, - ); - - // if old status check is present skip validation - if (is.nonEmptyString(validationStatus)) { - logger.debug( - 'Skipping validation check because status check already exists.', - ); - return; - } - } else { - logger.debug( - 'Status check is null or an empty string, skipping status check addition.', - ); - } - - try { - await scm.checkoutBranch(branchName); - configFileName = await detectConfigFile(); - } catch (err) { - logger.error( - { err }, - 'Error while searching for config file in reconfigure branch', - ); - } - - if (!is.nonEmptyString(configFileName)) { - logger.warn('No config file found in reconfigure branch'); - await setBranchStatus( - branchName, - 'Validation Failed - No config file found', - 'red', - context, - ); - setReconfigureBranchCache(branchSha, false); - await scm.checkoutBranch(config.defaultBranch!); - return; - } - - let configFileRaw: string | null = null; - try { - configFileRaw = await readLocalFile(configFileName, 'utf8'); - } catch (err) { - logger.error({ err }, 'Error while reading config file'); - } - - if (!is.nonEmptyString(configFileRaw)) { - logger.warn('Empty or invalid config file'); - await setBranchStatus( - branchName, - 'Validation Failed - Empty/Invalid config file', - 'red', - context, - ); - setReconfigureBranchCache(branchSha, false); - await scm.checkoutBranch(config.baseBranch!); - return; - } - - let configFileParsed: any; - try { - 
configFileParsed = JSON5.parse(configFileRaw); - // no need to confirm renovate field in package.json we already do it in `detectConfigFile()` - if (configFileName === 'package.json') { - configFileParsed = configFileParsed.renovate; - } - } catch (err) { - logger.error({ err }, 'Error while parsing config file'); - await setBranchStatus( - branchName, - 'Validation Failed - Unparsable config file', - 'red', - context, - ); - setReconfigureBranchCache(branchSha, false); - await scm.checkoutBranch(config.baseBranch!); - return; - } - - // perform validation and provide a passing or failing check run based on result - const validationResult = await validateConfig('repo', configFileParsed); - - // failing check - if (validationResult.errors.length > 0) { - logger.debug( - { errors: validationResult.errors.map((err) => err.message).join(', ') }, - 'Validation Errors', - ); - - // add comment to reconfigure PR if it exists - const branchPr = await platform.findPr({ - branchName, - state: 'open', - includeOtherAuthors: true, - }); - if (branchPr) { - let body = `There is an error with this repository's Renovate configuration that needs to be fixed.\n\n`; - body += `Location: \`${configFileName}\`\n`; - body += `Message: \`${validationResult.errors - .map((e) => e.message) - .join(', ') - .replace(regEx(/`/g), "'")}\`\n`; - - await ensureComment({ - number: branchPr.number, - topic: 'Action Required: Fix Renovate Configuration', - content: body, - }); - } - - await setBranchStatus(branchName, 'Validation Failed', 'red', context); - setReconfigureBranchCache(branchSha, false); - await scm.checkoutBranch(config.baseBranch!); - return; - } - - // passing check - await setBranchStatus(branchName, 'Validation Successful', 'green', context); - - setReconfigureBranchCache(branchSha, true); - await scm.checkoutBranch(config.baseBranch!); + await validateReconfigureBranch(config); } diff --git a/lib/workers/repository/reconfigure/utils.ts 
b/lib/workers/repository/reconfigure/utils.ts new file mode 100644 index 00000000000000..e5208d6a107c68 --- /dev/null +++ b/lib/workers/repository/reconfigure/utils.ts @@ -0,0 +1,3 @@ +export function getReconfigureBranchName(prefix: string): string { + return `${prefix}reconfigure`; +} diff --git a/lib/workers/repository/reconfigure/validate.spec.ts b/lib/workers/repository/reconfigure/validate.spec.ts new file mode 100644 index 00000000000000..730bf75e378edd --- /dev/null +++ b/lib/workers/repository/reconfigure/validate.spec.ts @@ -0,0 +1,228 @@ +import { mock } from 'jest-mock-extended'; +import type { RenovateConfig } from '../../../../test/util'; +import { fs, git, mocked, partial, platform, scm } from '../../../../test/util'; +import { GlobalConfig } from '../../../config/global'; +import { logger } from '../../../logger'; +import type { Pr } from '../../../modules/platform/types'; +import * as _cache from '../../../util/cache/repository'; +import type { LongCommitSha } from '../../../util/git/types'; +import * as _merge from '../init/merge'; +import { validateReconfigureBranch } from './validate'; + +jest.mock('../../../util/cache/repository'); +jest.mock('../../../util/fs'); +jest.mock('../../../util/git'); +jest.mock('../init/merge'); + +const cache = mocked(_cache); +const merge = mocked(_merge); + +describe('workers/repository/reconfigure/validate', () => { + const config: RenovateConfig = { + branchPrefix: 'prefix/', + baseBranch: 'base', + statusCheckNames: partial({ + configValidation: 'renovate/config-validation', + }), + }; + + beforeEach(() => { + config.repository = 'some/repo'; + merge.detectConfigFile.mockResolvedValue('renovate.json'); + scm.branchExists.mockResolvedValue(true); + cache.getCache.mockReturnValue({}); + git.getBranchCommit.mockReturnValue('sha' as LongCommitSha); + fs.readLocalFile.mockResolvedValue(null); + platform.getBranchStatusCheck.mockResolvedValue(null); + GlobalConfig.reset(); + }); + + it('logs error if config file 
search fails', async () => { + const err = new Error(); + merge.detectConfigFile.mockRejectedValueOnce(err as never); + await validateReconfigureBranch(config); + expect(logger.error).toHaveBeenCalledWith( + { err }, + 'Error while searching for config file in reconfigure branch', + ); + }); + + it('throws error if config file not found in reconfigure branch', async () => { + merge.detectConfigFile.mockResolvedValue(null); + await validateReconfigureBranch(config); + expect(logger.warn).toHaveBeenCalledWith( + 'No config file found in reconfigure branch', + ); + }); + + it('logs error if config file is unreadable', async () => { + const err = new Error(); + fs.readLocalFile.mockRejectedValueOnce(err as never); + await validateReconfigureBranch(config); + expect(logger.error).toHaveBeenCalledWith( + { err }, + 'Error while reading config file', + ); + }); + + it('throws error if config file is empty', async () => { + await validateReconfigureBranch(config); + expect(logger.warn).toHaveBeenCalledWith('Empty or invalid config file'); + }); + + it('throws error if config file content is invalid', async () => { + fs.readLocalFile.mockResolvedValueOnce(` + { + "name": + } + `); + await validateReconfigureBranch(config); + expect(logger.error).toHaveBeenCalledWith( + { err: expect.any(Object) }, + 'Error while parsing config file', + ); + expect(platform.setBranchStatus).toHaveBeenCalledWith({ + branchName: 'prefix/reconfigure', + context: 'renovate/config-validation', + description: 'Validation Failed - Unparsable config file', + state: 'red', + }); + }); + + it('handles failed validation', async () => { + fs.readLocalFile.mockResolvedValueOnce(` + { + "enabledManagers": ["docker"] + } + `); + await validateReconfigureBranch(config); + expect(logger.debug).toHaveBeenCalledWith( + { errors: expect.any(String) }, + 'Validation Errors', + ); + expect(platform.setBranchStatus).toHaveBeenCalledWith({ + branchName: 'prefix/reconfigure', + context: 'renovate/config-validation', 
+ description: 'Validation Failed', + state: 'red', + }); + }); + + it('adds comment if reconfigure PR exists', async () => { + fs.readLocalFile.mockResolvedValueOnce(` + { + "enabledManagers": ["docker"] + } + `); + platform.findPr.mockResolvedValueOnce(mock({ number: 1 })); + await validateReconfigureBranch(config); + expect(logger.debug).toHaveBeenCalledWith( + { errors: expect.any(String) }, + 'Validation Errors', + ); + expect(platform.setBranchStatus).toHaveBeenCalled(); + expect(platform.ensureComment).toHaveBeenCalled(); + }); + + it('handles successful validation', async () => { + const pJson = ` + { + "renovate": { + "enabledManagers": ["npm"] + } + } + `; + merge.detectConfigFile.mockResolvedValue('package.json'); + fs.readLocalFile.mockResolvedValueOnce(pJson).mockResolvedValueOnce(pJson); + await validateReconfigureBranch(config); + expect(platform.setBranchStatus).toHaveBeenCalledWith({ + branchName: 'prefix/reconfigure', + context: 'renovate/config-validation', + description: 'Validation Successful', + state: 'green', + }); + }); + + it('skips adding status check if statusCheckNames.configValidation is null', async () => { + cache.getCache.mockReturnValueOnce({ + reconfigureBranchCache: { + reconfigureBranchSha: 'new-sha', + isConfigValid: false, + }, + }); + + await validateReconfigureBranch({ + ...config, + statusCheckNames: partial({ + configValidation: null, + }), + }); + expect(logger.debug).toHaveBeenCalledWith( + 'Status check is null or an empty string, skipping status check addition.', + ); + expect(platform.setBranchStatus).not.toHaveBeenCalled(); + }); + + it('skips adding status check if statusCheckNames.configValidation is empty string', async () => { + cache.getCache.mockReturnValueOnce({ + reconfigureBranchCache: { + reconfigureBranchSha: 'new-sha', + isConfigValid: false, + }, + }); + + await validateReconfigureBranch({ + ...config, + statusCheckNames: partial({ + configValidation: '', + }), + }); + 
expect(logger.debug).toHaveBeenCalledWith( + 'Status check is null or an empty string, skipping status check addition.', + ); + expect(platform.setBranchStatus).not.toHaveBeenCalled(); + }); + + it('skips validation if cache is valid', async () => { + cache.getCache.mockReturnValueOnce({ + reconfigureBranchCache: { + reconfigureBranchSha: 'sha', + isConfigValid: false, + }, + }); + await validateReconfigureBranch(config); + expect(logger.debug).toHaveBeenCalledWith( + 'Skipping validation check as branch sha is unchanged', + ); + }); + + it('skips validation if status check present', async () => { + cache.getCache.mockReturnValueOnce({ + reconfigureBranchCache: { + reconfigureBranchSha: 'new_sha', + isConfigValid: false, + }, + }); + platform.getBranchStatusCheck.mockResolvedValueOnce('green'); + await validateReconfigureBranch(config); + expect(logger.debug).toHaveBeenCalledWith( + 'Skipping validation check because status check already exists.', + ); + }); + + it('handles non-default config file', async () => { + merge.detectConfigFile.mockResolvedValue('.renovaterc'); + fs.readLocalFile.mockResolvedValueOnce(` + { + "enabledManagers": ["npm",] + } + `); + await validateReconfigureBranch(config); + expect(platform.setBranchStatus).toHaveBeenCalledWith({ + branchName: 'prefix/reconfigure', + context: 'renovate/config-validation', + description: 'Validation Successful', + state: 'green', + }); + }); +}); diff --git a/lib/workers/repository/reconfigure/validate.ts b/lib/workers/repository/reconfigure/validate.ts new file mode 100644 index 00000000000000..ca1b6a68d668c4 --- /dev/null +++ b/lib/workers/repository/reconfigure/validate.ts @@ -0,0 +1,184 @@ +import is from '@sindresorhus/is'; +import JSON5 from 'json5'; +import type { RenovateConfig } from '../../../config/types'; +import { validateConfig } from '../../../config/validation'; +import { logger } from '../../../logger'; +import { platform } from '../../../modules/platform'; +import { ensureComment } from 
'../../../modules/platform/comment'; +import { scm } from '../../../modules/platform/scm'; +import type { BranchStatus } from '../../../types'; +import { getCache } from '../../../util/cache/repository'; +import { readLocalFile } from '../../../util/fs'; +import { getBranchCommit } from '../../../util/git'; +import { regEx } from '../../../util/regex'; +import { detectConfigFile } from '../init/merge'; +import { setReconfigureBranchCache } from './reconfigure-cache'; +import { getReconfigureBranchName } from './utils'; + +async function setBranchStatus( + branchName: string, + description: string, + state: BranchStatus, + context?: string | null, +): Promise { + if (!is.nonEmptyString(context)) { + // already logged this case when validating the status check + return; + } + + await platform.setBranchStatus({ + branchName, + context, + description, + state, + }); +} + +export async function validateReconfigureBranch( + config: RenovateConfig, +): Promise { + logger.debug('validateReconfigureBranch()'); + + const context = config.statusCheckNames?.configValidation; + const branchName = getReconfigureBranchName(config.branchPrefix!); + + // look for config file + // 1. check reconfigure branch cache and use the configFileName if it exists + // 2. 
checkout reconfigure branch and look for the config file, don't assume default configFileName + const branchSha = getBranchCommit(branchName)!; + const cache = getCache(); + let configFileName: string | null = null; + const reconfigureCache = cache.reconfigureBranchCache; + // only use valid cached information + if (reconfigureCache?.reconfigureBranchSha === branchSha) { + logger.debug('Skipping validation check as branch sha is unchanged'); + return; + } + + if (context) { + const validationStatus = await platform.getBranchStatusCheck( + branchName, + context, + ); + + // if old status check is present skip validation + if (is.nonEmptyString(validationStatus)) { + logger.debug( + 'Skipping validation check because status check already exists.', + ); + return; + } + } else { + logger.debug( + 'Status check is null or an empty string, skipping status check addition.', + ); + } + + try { + await scm.checkoutBranch(branchName); + configFileName = await detectConfigFile(); + } catch (err) { + logger.error( + { err }, + 'Error while searching for config file in reconfigure branch', + ); + } + + if (!is.nonEmptyString(configFileName)) { + logger.warn('No config file found in reconfigure branch'); + await setBranchStatus( + branchName, + 'Validation Failed - No config file found', + 'red', + context, + ); + setReconfigureBranchCache(branchSha, false); + await scm.checkoutBranch(config.defaultBranch!); + return; + } + + let configFileRaw: string | null = null; + try { + configFileRaw = await readLocalFile(configFileName, 'utf8'); + } catch (err) { + logger.error({ err }, 'Error while reading config file'); + } + + if (!is.nonEmptyString(configFileRaw)) { + logger.warn('Empty or invalid config file'); + await setBranchStatus( + branchName, + 'Validation Failed - Empty/Invalid config file', + 'red', + context, + ); + setReconfigureBranchCache(branchSha, false); + await scm.checkoutBranch(config.baseBranch!); + return; + } + + let configFileParsed: any; + try { + 
configFileParsed = JSON5.parse(configFileRaw); + // no need to confirm renovate field in package.json we already do it in `detectConfigFile()` + if (configFileName === 'package.json') { + configFileParsed = configFileParsed.renovate; + } + } catch (err) { + logger.error({ err }, 'Error while parsing config file'); + await setBranchStatus( + branchName, + 'Validation Failed - Unparsable config file', + 'red', + context, + ); + setReconfigureBranchCache(branchSha, false); + await scm.checkoutBranch(config.baseBranch!); + return; + } + + // perform validation and provide a passing or failing check based on result + const validationResult = await validateConfig('repo', configFileParsed); + + // failing check + if (validationResult.errors.length > 0) { + logger.debug( + { errors: validationResult.errors.map((err) => err.message).join(', ') }, + 'Validation Errors', + ); + + const reconfigurePr = await platform.findPr({ + branchName, + state: 'open', + includeOtherAuthors: true, + }); + + // add comment to reconfigure PR if it exists + if (reconfigurePr) { + let body = `There is an error with this repository's Renovate configuration that needs to be fixed.\n\n`; + body += `Location: \`${configFileName}\`\n`; + body += `Message: \`${validationResult.errors + .map((e) => e.message) + .join(', ') + .replace(regEx(/`/g), "'")}\`\n`; + + await ensureComment({ + number: reconfigurePr.number, + topic: 'Action Required: Fix Renovate Configuration', + content: body, + }); + } + + await setBranchStatus(branchName, 'Validation Failed', 'red', context); + setReconfigureBranchCache(branchSha, false); + await scm.checkoutBranch(config.baseBranch!); + return; + } + + // passing check + await setBranchStatus(branchName, 'Validation Successful', 'green', context); + + setReconfigureBranchCache(branchSha, true); + await scm.checkoutBranch(config.baseBranch!); + return; +} diff --git a/package.json b/package.json index 43b7f33c94a0a1..cd3329c1cfab47 100644 --- a/package.json +++ 
b/package.json @@ -299,7 +299,7 @@ "@types/mdast": "3.0.15", "@types/moo": "0.5.9", "@types/ms": "0.7.34", - "@types/node": "20.17.10", + "@types/node": "20.17.11", "@types/parse-link-header": "2.0.3", "@types/punycode": "2.1.4", "@types/semver": "7.5.8", @@ -336,7 +336,7 @@ "jest-mock-extended": "3.0.7", "jest-snapshot": "29.7.0", "markdownlint-cli2": "0.17.1", - "memfs": "4.15.2", + "memfs": "4.15.3", "nock": "13.5.6", "npm-run-all2": "7.0.2", "nyc": "17.1.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fa8c72d1581062..5093e0b950c499 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -470,8 +470,8 @@ importers: specifier: 0.7.34 version: 0.7.34 '@types/node': - specifier: 20.17.10 - version: 20.17.10 + specifier: 20.17.11 + version: 20.17.11 '@types/parse-link-header': specifier: 2.0.3 version: 2.0.3 @@ -540,7 +540,7 @@ importers: version: 2.31.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint-import-resolver-typescript@3.7.0)(eslint@8.57.1) eslint-plugin-jest: specifier: 28.10.0 - version: 28.10.0(@typescript-eslint/eslint-plugin@8.19.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)))(typescript@5.7.2) + version: 28.10.0(@typescript-eslint/eslint-plugin@8.19.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)))(typescript@5.7.2) eslint-plugin-jest-formatting: specifier: 3.1.0 version: 3.1.0(eslint@8.57.1) @@ -564,16 +564,16 @@ importers: version: 9.1.7 jest: specifier: 29.7.0 - version: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + version: 
29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) jest-extended: specifier: 4.0.2 - version: 4.0.2(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2))) + version: 4.0.2(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2))) jest-mock: specifier: 29.7.0 version: 29.7.0 jest-mock-extended: specifier: 3.0.7 - version: 3.0.7(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)))(typescript@5.7.2) + version: 3.0.7(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)))(typescript@5.7.2) jest-snapshot: specifier: 29.7.0 version: 29.7.0 @@ -581,8 +581,8 @@ importers: specifier: 0.17.1 version: 0.17.1 memfs: - specifier: 4.15.2 - version: 4.15.2 + specifier: 4.15.3 + version: 4.15.3 nock: specifier: 13.5.6 version: 13.5.6 @@ -609,10 +609,10 @@ importers: version: 3.0.3 ts-jest: specifier: 29.2.5 - version: 29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)))(typescript@5.7.2) + version: 29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)))(typescript@5.7.2) ts-node: specifier: 10.9.2 - version: 10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2) + version: 10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2) type-fest: specifier: 4.31.0 version: 4.31.0 @@ -2121,8 +2121,8 @@ packages: '@types/ms@0.7.34': resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} - '@types/node@20.17.10': - 
resolution: {integrity: sha512-/jrvh5h6NXhEauFFexRin69nA0uHJ5gwk4iDivp/DeoEua3uwCUto6PC86IpRITBOs4+6i2I56K5x5b6WYGXHA==} + '@types/node@20.17.11': + resolution: {integrity: sha512-Ept5glCK35R8yeyIeYlRIZtX6SLRyqMhOFTgj5SOkMpLTdw3SEHI9fHx60xaUZ+V1aJxQJODE+7/j5ocZydYTg==} '@types/normalize-package-data@2.4.4': resolution: {integrity: sha512-37i+OaWTh9qeK4LSHPsyRC7NahnGotNuZvjLSgcPzblpHB3rrCJxAOgI5gCdKm7coonsaX1Of0ILiTcnZjbfxA==} @@ -4555,8 +4555,8 @@ packages: mdurl@2.0.0: resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} - memfs@4.15.2: - resolution: {integrity: sha512-n8/qP8AT6CtY6kxCPYgYVusT5rS6axaT66dD3tYi2lm+l1iMH7YYpmW8H/qL5bfV4YvInCCgUDAWIRvrNS7kbQ==} + memfs@4.15.3: + resolution: {integrity: sha512-vR/g1SgqvKJgAyYla+06G4p/EOcEmwhYuVb1yc1ixcKf8o/sh7Zngv63957ZSNd1xrZJoinmNyDf2LzuP8WJXw==} engines: {node: '>= 4.0.0'} memorystream@0.3.1: @@ -7496,27 +7496,27 @@ snapshots: '@jest/console@29.7.0': dependencies: '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 chalk: 4.1.2 jest-message-util: 29.7.0 jest-util: 29.7.0 slash: 3.0.0 - '@jest/core@29.7.0(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2))': + '@jest/core@29.7.0(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2))': dependencies: '@jest/console': 29.7.0 '@jest/reporters': 29.7.0 '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 ansi-escapes: 4.3.2 chalk: 4.1.2 ci-info: 3.9.0 exit: 0.1.2 graceful-fs: 4.2.11 jest-changed-files: 29.7.0 - jest-config: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest-config: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) jest-haste-map: 29.7.0 jest-message-util: 29.7.0 jest-regex-util: 29.6.3 @@ -7541,7 +7541,7 @@ snapshots: dependencies: 
'@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 jest-mock: 29.7.0 '@jest/expect-utils@29.4.1': @@ -7563,7 +7563,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@sinonjs/fake-timers': 10.3.0 - '@types/node': 20.17.10 + '@types/node': 20.17.11 jest-message-util: 29.7.0 jest-mock: 29.7.0 jest-util: 29.7.0 @@ -7585,7 +7585,7 @@ snapshots: '@jest/transform': 29.7.0 '@jest/types': 29.6.3 '@jridgewell/trace-mapping': 0.3.25 - '@types/node': 20.17.10 + '@types/node': 20.17.11 chalk: 4.1.2 collect-v8-coverage: 1.0.2 exit: 0.1.2 @@ -7655,7 +7655,7 @@ snapshots: '@jest/schemas': 29.6.3 '@types/istanbul-lib-coverage': 2.0.6 '@types/istanbul-reports': 3.0.4 - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/yargs': 17.0.33 chalk: 4.1.2 @@ -8703,7 +8703,7 @@ snapshots: '@types/aws4@1.11.6': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/babel__core@7.20.5': dependencies: @@ -8728,27 +8728,27 @@ snapshots: '@types/better-sqlite3@7.6.12': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/breejs__later@4.1.5': {} '@types/bunyan@1.8.11': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/bunyan@1.8.9': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/cacache@17.0.2': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/cacheable-request@6.0.3': dependencies: '@types/http-cache-semantics': 4.0.4 '@types/keyv': 3.1.4 - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/responselike': 1.0.3 '@types/callsite@1.0.34': {} @@ -8779,7 +8779,7 @@ snapshots: '@types/fs-extra@11.0.4': dependencies: '@types/jsonfile': 6.1.4 - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/git-url-parse@9.0.3': {} @@ -8789,7 +8789,7 @@ snapshots: '@types/graceful-fs@4.1.9': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/http-cache-semantics@4.0.4': {} @@ -8815,13 +8815,13 @@ 
snapshots: '@types/jsonfile@6.1.4': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/katex@0.16.7': {} '@types/keyv@3.1.4': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/linkify-it@5.0.0': {} @@ -8840,7 +8840,7 @@ snapshots: '@types/marshal@0.5.3': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/mdast@3.0.15': dependencies: @@ -8856,7 +8856,7 @@ snapshots: '@types/ms@0.7.34': {} - '@types/node@20.17.10': + '@types/node@20.17.11': dependencies: undici-types: 6.19.8 @@ -8870,7 +8870,7 @@ snapshots: '@types/responselike@1.0.3': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 '@types/semver-stable@3.0.2': {} @@ -8890,7 +8890,7 @@ snapshots: '@types/tar@6.1.13': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 minipass: 4.2.8 '@types/tmp@0.2.6': {} @@ -8915,7 +8915,7 @@ snapshots: '@types/yauzl@2.10.3': dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 optional: true '@typescript-eslint/eslint-plugin@8.19.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(typescript@5.7.2)': @@ -9680,13 +9680,13 @@ snapshots: optionalDependencies: typescript: 5.7.2 - create-jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)): + create-jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)): dependencies: '@jest/types': 29.6.3 chalk: 4.1.2 exit: 0.1.2 graceful-fs: 4.2.11 - jest-config: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest-config: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) jest-util: 29.7.0 prompts: 2.4.2 transitivePeerDependencies: @@ -10109,13 +10109,13 @@ snapshots: dependencies: eslint: 8.57.1 - 
eslint-plugin-jest@28.10.0(@typescript-eslint/eslint-plugin@8.19.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)))(typescript@5.7.2): + eslint-plugin-jest@28.10.0(@typescript-eslint/eslint-plugin@8.19.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)))(typescript@5.7.2): dependencies: '@typescript-eslint/utils': 8.19.0(eslint@8.57.1)(typescript@5.7.2) eslint: 8.57.1 optionalDependencies: '@typescript-eslint/eslint-plugin': 8.19.0(@typescript-eslint/parser@8.19.0(eslint@8.57.1)(typescript@5.7.2))(eslint@8.57.1)(typescript@5.7.2) - jest: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) transitivePeerDependencies: - supports-color - typescript @@ -11146,7 +11146,7 @@ snapshots: '@jest/expect': 29.7.0 '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 chalk: 4.1.2 co: 4.6.0 dedent: 1.5.3 @@ -11166,16 +11166,16 @@ snapshots: - babel-plugin-macros - supports-color - jest-cli@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)): + jest-cli@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)): dependencies: - '@jest/core': 29.7.0(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + '@jest/core': 29.7.0(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 chalk: 4.1.2 - create-jest: 
29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + create-jest: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) exit: 0.1.2 import-local: 3.2.0 - jest-config: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest-config: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) jest-util: 29.7.0 jest-validate: 29.7.0 yargs: 17.7.2 @@ -11185,7 +11185,7 @@ snapshots: - supports-color - ts-node - jest-config@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)): + jest-config@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)): dependencies: '@babel/core': 7.26.0 '@jest/test-sequencer': 29.7.0 @@ -11210,8 +11210,8 @@ snapshots: slash: 3.0.0 strip-json-comments: 3.1.1 optionalDependencies: - '@types/node': 20.17.10 - ts-node: 10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2) + '@types/node': 20.17.11 + ts-node: 10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2) transitivePeerDependencies: - babel-plugin-macros - supports-color @@ -11240,16 +11240,16 @@ snapshots: '@jest/environment': 29.7.0 '@jest/fake-timers': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 jest-mock: 29.7.0 jest-util: 29.7.0 - jest-extended@4.0.2(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2))): + jest-extended@4.0.2(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2))): dependencies: jest-diff: 29.7.0 jest-get-type: 29.6.3 optionalDependencies: - jest: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest: 
29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) jest-get-type@29.6.3: {} @@ -11257,7 +11257,7 @@ snapshots: dependencies: '@jest/types': 29.6.3 '@types/graceful-fs': 4.1.9 - '@types/node': 20.17.10 + '@types/node': 20.17.11 anymatch: 3.1.3 fb-watchman: 2.0.2 graceful-fs: 4.2.11 @@ -11300,16 +11300,16 @@ snapshots: slash: 3.0.0 stack-utils: 2.0.6 - jest-mock-extended@3.0.7(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)))(typescript@5.7.2): + jest-mock-extended@3.0.7(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)))(typescript@5.7.2): dependencies: - jest: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) ts-essentials: 10.0.4(typescript@5.7.2) typescript: 5.7.2 jest-mock@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 jest-util: 29.7.0 jest-pnp-resolver@1.2.3(jest-resolve@29.7.0): @@ -11344,7 +11344,7 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 chalk: 4.1.2 emittery: 0.13.1 graceful-fs: 4.2.11 @@ -11372,7 +11372,7 @@ snapshots: '@jest/test-result': 29.7.0 '@jest/transform': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 chalk: 4.1.2 cjs-module-lexer: 1.4.1 collect-v8-coverage: 1.0.2 @@ -11418,7 +11418,7 @@ snapshots: jest-util@29.7.0: dependencies: '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 chalk: 4.1.2 ci-info: 3.9.0 graceful-fs: 4.2.11 @@ -11437,7 +11437,7 @@ snapshots: dependencies: '@jest/test-result': 29.7.0 '@jest/types': 29.6.3 - '@types/node': 20.17.10 + '@types/node': 20.17.11 ansi-escapes: 4.3.2 chalk: 4.1.2 emittery: 
0.13.1 @@ -11446,17 +11446,17 @@ snapshots: jest-worker@29.7.0: dependencies: - '@types/node': 20.17.10 + '@types/node': 20.17.11 jest-util: 29.7.0 merge-stream: 2.0.0 supports-color: 8.1.1 - jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)): + jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)): dependencies: - '@jest/core': 29.7.0(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + '@jest/core': 29.7.0(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) '@jest/types': 29.6.3 import-local: 3.2.0 - jest-cli: 29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest-cli: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) transitivePeerDependencies: - '@types/node' - babel-plugin-macros @@ -11769,7 +11769,7 @@ snapshots: mdurl@2.0.0: {} - memfs@4.15.2: + memfs@4.15.3: dependencies: '@jsonjoy.com/json-pack': 1.1.1(tslib@2.8.1) '@jsonjoy.com/util': 1.5.0(tslib@2.8.1) @@ -12619,7 +12619,7 @@ snapshots: '@protobufjs/path': 1.1.2 '@protobufjs/pool': 1.1.0 '@protobufjs/utf8': 1.1.0 - '@types/node': 20.17.10 + '@types/node': 20.17.11 long: 5.2.3 protocols@2.0.1: {} @@ -13382,12 +13382,12 @@ snapshots: optionalDependencies: typescript: 5.7.2 - ts-jest@29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(jest@29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)))(typescript@5.7.2): + ts-jest@29.2.5(@babel/core@7.26.0)(@jest/transform@29.7.0)(@jest/types@29.6.3)(babel-jest@29.7.0(@babel/core@7.26.0))(jest@29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)))(typescript@5.7.2): dependencies: bs-logger: 0.2.6 ejs: 3.1.10 fast-json-stable-stringify: 2.1.0 - jest: 
29.7.0(@types/node@20.17.10)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2)) + jest: 29.7.0(@types/node@20.17.11)(ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2)) jest-util: 29.7.0 json5: 2.2.3 lodash.memoize: 4.1.2 @@ -13401,14 +13401,14 @@ snapshots: '@jest/types': 29.6.3 babel-jest: 29.7.0(@babel/core@7.26.0) - ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.10)(typescript@5.7.2): + ts-node@10.9.2(@swc/core@1.10.4)(@types/node@20.17.11)(typescript@5.7.2): dependencies: '@cspotcode/source-map-support': 0.8.1 '@tsconfig/node10': 1.0.11 '@tsconfig/node12': 1.0.11 '@tsconfig/node14': 1.0.3 '@tsconfig/node16': 1.0.4 - '@types/node': 20.17.10 + '@types/node': 20.17.11 acorn: 8.14.0 acorn-walk: 8.3.4 arg: 4.1.3 diff --git a/tools/docker/Dockerfile b/tools/docker/Dockerfile index 520e887c3d55cd..1f0a6900ef34c8 100644 --- a/tools/docker/Dockerfile +++ b/tools/docker/Dockerfile @@ -5,19 +5,19 @@ ARG BASE_IMAGE_TYPE=slim # -------------------------------------- # slim image # -------------------------------------- -FROM ghcr.io/renovatebot/base-image:9.28.1@sha256:d012a79a5f3dc6e6067c46016405064b30fbaaac954597318a7a2122ef807444 AS slim-base +FROM ghcr.io/renovatebot/base-image:9.29.1@sha256:db4b70c00fb197babca9dd92be612bef044d7a35d933d19c668864f84b52d1f8 AS slim-base # -------------------------------------- # full image # -------------------------------------- -FROM ghcr.io/renovatebot/base-image:9.28.1-full@sha256:422a843cbf6c1a3730fab9e89877bf04c49d329501a5b998488078cc6153fc03 AS full-base +FROM ghcr.io/renovatebot/base-image:9.29.1-full@sha256:4880c7aae10ed892d49c6c5573418014605ce2824c978dbcc04382a2c26bb0df AS full-base ENV RENOVATE_BINARY_SOURCE=global # -------------------------------------- # build image # -------------------------------------- -FROM --platform=$BUILDPLATFORM ghcr.io/renovatebot/base-image:9.28.1@sha256:d012a79a5f3dc6e6067c46016405064b30fbaaac954597318a7a2122ef807444 AS build +FROM 
--platform=$BUILDPLATFORM ghcr.io/renovatebot/base-image:9.29.1@sha256:db4b70c00fb197babca9dd92be612bef044d7a35d933d19c668864f84b52d1f8 AS build # We want a specific node version here # renovate: datasource=node-version