Compare commits

...

21 commits

Author SHA1 Message Date
Will Brennan
3083a0bc94
Merge 234eff76c1 into adede1d309 2025-01-09 07:24:01 +11:00
renovate[bot]
adede1d309
chore(deps): update otel/opentelemetry-collector-contrib docker tag to v0.117.0 (#33483)
Some checks are pending
Build / setup (push) Waiting to run
Build / setup-build (push) Waiting to run
Build / prefetch (push) Blocked by required conditions
Build / lint-eslint (push) Blocked by required conditions
Build / lint-prettier (push) Blocked by required conditions
Build / lint-docs (push) Blocked by required conditions
Build / lint-other (push) Blocked by required conditions
Build / (push) Blocked by required conditions
Build / codecov (push) Blocked by required conditions
Build / coverage-threshold (push) Blocked by required conditions
Build / test-success (push) Blocked by required conditions
Build / build (push) Blocked by required conditions
Build / build-docs (push) Blocked by required conditions
Build / test-e2e (push) Blocked by required conditions
Build / release (push) Blocked by required conditions
Code scanning / CodeQL-Build (push) Waiting to run
Scorecard supply-chain security / Scorecard analysis (push) Waiting to run
whitesource-scan / WS_SCAN (push) Waiting to run
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-08 19:16:59 +00:00
renovate[bot]
2eca39ad90
chore(deps): update dependency memfs to v4.15.3 (#33482)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-08 19:11:29 +00:00
renovate[bot]
88e2336945
fix(deps): update ghcr.io/renovatebot/base-image docker tag to v9.29.1 (#33480)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2025-01-08 18:10:27 +00:00
ssams
766d0c37cf
refactor(manager/flux): extract helm repo handling to helper functions (#33462) 2025-01-08 15:17:04 +00:00
Borja Domínguez
8683eeb7ad
feat(datasource/azure-pipelines-tasks): Azure DevOps API based datasource (#32966) 2025-01-08 16:04:34 +01:00
Mark Reuter
f97189c600
feat(bazel-module): Support git_repository (#33415)
Some checks are pending
Build / setup (push) Waiting to run
Build / setup-build (push) Waiting to run
Build / prefetch (push) Blocked by required conditions
Build / lint-eslint (push) Blocked by required conditions
Build / lint-prettier (push) Blocked by required conditions
Build / lint-docs (push) Blocked by required conditions
Build / lint-other (push) Blocked by required conditions
Build / (push) Blocked by required conditions
Build / codecov (push) Blocked by required conditions
Build / coverage-threshold (push) Blocked by required conditions
Build / test-success (push) Blocked by required conditions
Build / build (push) Blocked by required conditions
Build / build-docs (push) Blocked by required conditions
Build / test-e2e (push) Blocked by required conditions
Build / release (push) Blocked by required conditions
Code scanning / CodeQL-Build (push) Waiting to run
Scorecard supply-chain security / Scorecard analysis (push) Waiting to run
whitesource-scan / WS_SCAN (push) Waiting to run
2025-01-08 12:34:19 +00:00
Trim21
59455c0512
feat(pre-commit): support python additional_dependencies (#33417) 2025-01-08 12:30:31 +00:00
Maxime Brunet
147b620187
feat(poetry): support GCloud credentials for Google Artifact Registry when locking (#32586)
Co-authored-by: Rhys Arkins <rhys@arkins.net>
2025-01-08 12:28:11 +00:00
RahulGautamSingh
39fb207a83
refactor(workers/reconfigure): update code structure (#33340) 2025-01-08 12:18:30 +00:00
RahulGautamSingh
db31a1634c
fix(gitlab): truncate comment (#33348) 2025-01-08 12:07:30 +00:00
RahulGautamSingh
5282f7c080
fix(github): remove deleted issue from issues cache (#33349) 2025-01-08 11:42:12 +00:00
Will Brennan
234eff76c1 Add suggested changes 2025-01-08 21:53:02 +11:00
Will Brennan
2744bb079c address PR comments 2025-01-08 21:06:43 +11:00
Will Brennan
c75bba03f9 chore(datasource): address PR comments 2025-01-08 13:38:21 +11:00
Will Brennan
8cca687f66
Merge branch 'main' into devbox-datasource 2025-01-06 20:49:01 +11:00
Will Brennan
03d90cb3e2 remove all fixtures to appease the tests 2025-01-06 17:01:30 +11:00
Will Brennan
d5c59f8a76 Fix test 2025-01-06 16:38:59 +11:00
Will Brennan
45a6a36c54 Remove new fixture file 2025-01-06 15:51:13 +11:00
Will Brennan
5197a2e2b4 fix(datasource): code coverage 2025-01-06 10:20:15 +11:00
Will Brennan
f0c5caca6b feat(datasource): add devbox datasource module 2025-01-06 09:36:01 +11:00
43 changed files with 2024 additions and 483 deletions

View file

@ -145,6 +145,19 @@ archive_override(
Renovate ignores [`multiple_version_override`](https://bazel.build/rules/lib/globals/module#multiple_version_override).
`multiple_version_override` does not affect the processing of version updates for a module.
### `git_repository`
If Renovate finds a [`git_repository`](https://bazel.build/rules/lib/repo/git#git_repository), it evaluates the `commit` value at the specified `remote`.
`remote` is limited to GitHub repositories: `https://github.com/<owner>/<repo>.git`
```python
git_repository(
name = "rules_foo",
remote = "https://github.com/fooexample/rules_foo.git",
commit = "8c94e11c2b05b6f25ced5f23cd07d0cfd36edc1a",
)
```
## Legacy `WORKSPACE` files
Renovate extracts dependencies from the following repository rules:
@ -160,7 +173,7 @@ Renovate extracts dependencies from the following repository rules:
It also recognizes when these repository rule names are prefixed with an underscore.
For example, `_http_archive` is treated the same as `http_archive`.
### `git_repository`
### `git_repository` (legacy)
Renovate updates any `git_repository` declaration that has the following:

View file

@ -36,7 +36,7 @@ services:
otel-collector:
# Using the Contrib version to access the spanmetrics connector.
# If you don't need the spanmetrics connector, you can use the standard version
image: otel/opentelemetry-collector-contrib:0.116.1
image: otel/opentelemetry-collector-contrib:0.117.0
volumes:
- ./otel-collector-config.yml:/etc/otelcol-contrib/config.yaml
ports:

View file

@ -17,6 +17,7 @@ import { DartDatasource } from './dart';
import { DartVersionDatasource } from './dart-version';
import { DebDatasource } from './deb';
import { DenoDatasource } from './deno';
import { DevboxDatasource } from './devbox';
import { DockerDatasource } from './docker';
import { DotnetVersionDatasource } from './dotnet-version';
import { EndoflifeDateDatasource } from './endoflife-date';
@ -88,6 +89,7 @@ api.set(DartDatasource.id, new DartDatasource());
api.set(DartVersionDatasource.id, new DartVersionDatasource());
api.set(DebDatasource.id, new DebDatasource());
api.set(DenoDatasource.id, new DenoDatasource());
api.set(DevboxDatasource.id, new DevboxDatasource());
api.set(DockerDatasource.id, new DockerDatasource());
api.set(DotnetVersionDatasource.id, new DotnetVersionDatasource());
api.set(EndoflifeDateDatasource.id, new EndoflifeDateDatasource());

View file

@ -0,0 +1,575 @@
{
"count": 3,
"value": [
{
"visibility": [
"Build",
"Release"
],
"runsOn": [
"Agent",
"DeploymentGroup"
],
"id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1",
"name": "PowerShell",
"version": {
"major": 2,
"minor": 247,
"patch": 1,
"isTest": false
},
"serverOwned": true,
"contentsUploaded": true,
"iconUrl": "https://dev.azure.com/test_organization/_apis/distributedtask/tasks/e213ff0f-5d5c-4791-802d-52ea3e7be1f1/2.247.1/icon",
"minimumAgentVersion": "2.115.0",
"friendlyName": "PowerShell",
"description": "Run a PowerShell script on Linux, macOS, or Windows",
"category": "Utility",
"helpMarkDown": "[Learn more about this task](https://go.microsoft.com/fwlink/?LinkID=613736)",
"helpUrl": "https://docs.microsoft.com/azure/devops/pipelines/tasks/utility/powershell",
"releaseNotes": "Script task consistency. Added support for macOS and Linux.",
"definitionType": "task",
"showEnvironmentVariables": true,
"author": "Microsoft Corporation",
"demands": [],
"groups": [
{
"name": "preferenceVariables",
"displayName": "Preference Variables",
"isExpanded": false
},
{
"name": "advanced",
"displayName": "Advanced",
"isExpanded": false
}
],
"inputs": [
{
"options": {
"filePath": "File Path",
"inline": "Inline"
},
"name": "targetType",
"label": "Type",
"defaultValue": "filePath",
"type": "radio",
"helpMarkDown": "Target script type: File Path or Inline"
},
{
"name": "filePath",
"label": "Script Path",
"defaultValue": "",
"required": true,
"type": "filePath",
"helpMarkDown": "Path of the script to execute. Must be a fully qualified path or relative to $(System.DefaultWorkingDirectory).",
"visibleRule": "targetType = filePath"
},
{
"name": "arguments",
"label": "Arguments",
"defaultValue": "",
"type": "string",
"helpMarkDown": "Arguments passed to the PowerShell script. Either ordinal parameters or named parameters.",
"visibleRule": "targetType = filePath"
},
{
"properties": {
"resizable": "true",
"rows": "10",
"maxLength": "20000"
},
"name": "script",
"label": "Script",
"defaultValue": "# Write your PowerShell commands here.\n\nWrite-Host \"Hello World\"\n",
"required": true,
"type": "multiLine",
"helpMarkDown": "",
"visibleRule": "targetType = inline"
},
{
"options": {
"default": "Default",
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "errorActionPreference",
"label": "ErrorActionPreference",
"defaultValue": "stop",
"type": "pickList",
"helpMarkDown": "When not `Default`, prepends the line `$ErrorActionPreference = 'VALUE'` at the top of your script.",
"groupName": "preferenceVariables"
},
{
"options": {
"default": "Default",
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "warningPreference",
"label": "WarningPreference",
"defaultValue": "default",
"type": "pickList",
"helpMarkDown": "When not `Default`, prepends the line `$WarningPreference = 'VALUE'` at the top of your script.",
"groupName": "preferenceVariables"
},
{
"options": {
"default": "Default",
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "informationPreference",
"label": "InformationPreference",
"defaultValue": "default",
"type": "pickList",
"helpMarkDown": "When not `Default`, prepends the line `$InformationPreference = 'VALUE'` at the top of your script.",
"groupName": "preferenceVariables"
},
{
"options": {
"default": "Default",
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "verbosePreference",
"label": "VerbosePreference",
"defaultValue": "default",
"type": "pickList",
"helpMarkDown": "When not `Default`, prepends the line `$VerbosePreference = 'VALUE'` at the top of your script.",
"groupName": "preferenceVariables"
},
{
"options": {
"default": "Default",
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "debugPreference",
"label": "DebugPreference",
"defaultValue": "default",
"type": "pickList",
"helpMarkDown": "When not `Default`, prepends the line `$DebugPreference = 'VALUE'` at the top of your script.",
"groupName": "preferenceVariables"
},
{
"options": {
"default": "Default",
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "progressPreference",
"label": "ProgressPreference",
"defaultValue": "silentlyContinue",
"type": "pickList",
"helpMarkDown": "When not `Default`, prepends the line `$ProgressPreference = 'VALUE'` at the top of your script.",
"groupName": "preferenceVariables"
},
{
"name": "failOnStderr",
"label": "Fail on Standard Error",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is true, this task will fail if any errors are written to the error pipeline, or if any data is written to the Standard Error stream. Otherwise the task will rely on the exit code to determine failure.",
"groupName": "advanced"
},
{
"name": "showWarnings",
"label": "Show warnings as Azure DevOps warnings",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is true, and your script writes a warnings - they are shown as warnings also in pipeline logs",
"groupName": "advanced"
},
{
"name": "ignoreLASTEXITCODE",
"label": "Ignore $LASTEXITCODE",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is false, the line `if ((Test-Path -LiteralPath variable:\\LASTEXITCODE)) { exit $LASTEXITCODE }` is appended to the end of your script. This will cause the last exit code from an external command to be propagated as the exit code of powershell. Otherwise the line is not appended to the end of your script.",
"groupName": "advanced"
},
{
"name": "pwsh",
"label": "Use PowerShell Core",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is true, then on Windows the task will use pwsh.exe from your PATH instead of powershell.exe.",
"groupName": "advanced"
},
{
"name": "workingDirectory",
"label": "Working Directory",
"defaultValue": "",
"type": "filePath",
"helpMarkDown": "Working directory where the script is run.",
"groupName": "advanced"
},
{
"name": "runScriptInSeparateScope",
"label": "Run script in the separate scope",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "This input allows executing PowerShell scripts using '&' operator instead of the default '.'. If this input set to the true script will be executed in separate scope and globally scoped PowerShell variables won't be updated",
"groupName": "advanced"
}
],
"satisfies": [],
"sourceDefinitions": [],
"dataSourceBindings": [],
"instanceNameFormat": "PowerShell Script",
"preJobExecution": {},
"execution": {
"PowerShell3": {
"target": "powershell.ps1",
"platforms": [
"windows"
]
},
"Node10": {
"target": "powershell.js",
"argumentFormat": ""
},
"Node16": {
"target": "powershell.js",
"argumentFormat": ""
},
"Node20_1": {
"target": "powershell.js",
"argumentFormat": ""
}
},
"postJobExecution": {},
"_buildConfigMapping": {
"Default": "2.247.0",
"Node20-225": "2.247.1"
}
},
{
"visibility": [
"Build",
"Release"
],
"runsOn": [
"Agent",
"DeploymentGroup"
],
"id": "e213ff0f-5d5c-4791-802d-52ea3e7be1f1",
"name": "PowerShell",
"deprecated": true,
"version": {
"major": 1,
"minor": 2,
"patch": 3,
"isTest": false
},
"serverOwned": true,
"contentsUploaded": true,
"iconUrl": "https://dev.azure.com/test_organization/_apis/distributedtask/tasks/e213ff0f-5d5c-4791-802d-52ea3e7be1f1/1.2.3/icon",
"minimumAgentVersion": "1.102",
"friendlyName": "PowerShell",
"description": "Run a PowerShell script",
"category": "Utility",
"helpMarkDown": "[More Information](https://go.microsoft.com/fwlink/?LinkID=613736)",
"definitionType": "task",
"author": "Microsoft Corporation",
"demands": [
"DotNetFramework"
],
"groups": [
{
"name": "advanced",
"displayName": "Advanced",
"isExpanded": false
}
],
"inputs": [
{
"options": {
"inlineScript": "Inline Script",
"filePath": "File Path"
},
"name": "scriptType",
"label": "Type",
"defaultValue": "filePath",
"required": true,
"type": "pickList",
"helpMarkDown": "Type of the script: File Path or Inline Script"
},
{
"name": "scriptName",
"label": "Script Path",
"defaultValue": "",
"required": true,
"type": "filePath",
"helpMarkDown": "Path of the script to execute. Should be fully qualified path or relative to the default working directory.",
"visibleRule": "scriptType = filePath"
},
{
"name": "arguments",
"label": "Arguments",
"defaultValue": "",
"type": "string",
"helpMarkDown": "Arguments passed to the PowerShell script. Either ordinal parameters or named parameters"
},
{
"name": "workingFolder",
"label": "Working folder",
"defaultValue": "",
"type": "filePath",
"helpMarkDown": "Current working directory when script is run. Defaults to the folder where the script is located.",
"groupName": "advanced"
},
{
"properties": {
"resizable": "true",
"rows": "10",
"maxLength": "500"
},
"name": "inlineScript",
"label": "Inline Script",
"defaultValue": "# You can write your powershell scripts inline here. \n# You can also pass predefined and custom variables to this scripts using arguments\n\n Write-Host \"Hello World\"",
"required": true,
"type": "multiLine",
"helpMarkDown": "",
"visibleRule": "scriptType = inlineScript"
},
{
"name": "failOnStandardError",
"label": "Fail on Standard Error",
"defaultValue": "true",
"type": "boolean",
"helpMarkDown": "If this is true, this task will fail if any errors are written to the error pipeline, or if any data is written to the Standard Error stream. Otherwise the task will rely solely on $LASTEXITCODE and the exit code to determine failure.",
"groupName": "advanced"
}
],
"satisfies": [],
"sourceDefinitions": [],
"dataSourceBindings": [],
"instanceNameFormat": "PowerShell Script",
"preJobExecution": {},
"execution": {
"PowerShellExe": {
"target": "$(scriptName)",
"argumentFormat": "$(arguments)",
"workingDirectory": "$(workingFolder)",
"inlineScript": "$(inlineScript)",
"scriptType": "$(scriptType)",
"failOnStandardError": "$(failOnStandardError)"
}
},
"postJobExecution": {},
"_buildConfigMapping": {}
},
{
"visibility": [
"Build",
"Release"
],
"runsOn": [
"Agent",
"DeploymentGroup"
],
"id": "72a1931b-effb-4d2e-8fd8-f8472a07cb62",
"name": "AzurePowerShell",
"version": {
"major": 5,
"minor": 248,
"patch": 3,
"isTest": false
},
"serverOwned": true,
"contentsUploaded": true,
"iconUrl": "https://dev.azure.com/test_organization/_apis/distributedtask/tasks/72a1931b-effb-4d2e-8fd8-f8472a07cb62/5.248.3/icon",
"minimumAgentVersion": "2.115.0",
"friendlyName": "Azure PowerShell",
"description": "Run a PowerShell script within an Azure environment",
"category": "Deploy",
"helpMarkDown": "[Learn more about this task](https://go.microsoft.com/fwlink/?LinkID=613749)",
"helpUrl": "https://aka.ms/azurepowershelltroubleshooting",
"releaseNotes": "Added support for Az Module and cross platform agents.",
"definitionType": "task",
"author": "Microsoft Corporation",
"demands": [],
"groups": [
{
"name": "AzurePowerShellVersionOptions",
"displayName": "Azure PowerShell version options",
"isExpanded": true
},
{
"name": "advanced",
"displayName": "Advanced",
"isExpanded": false
}
],
"inputs": [
{
"aliases": [
"azureSubscription"
],
"properties": {
"EndpointFilterRule": "ScopeLevel != AzureMLWorkspace"
},
"name": "ConnectedServiceNameARM",
"label": "Azure Subscription",
"defaultValue": "",
"required": true,
"type": "connectedService:AzureRM",
"helpMarkDown": "Azure Resource Manager subscription to configure before running PowerShell"
},
{
"options": {
"FilePath": "Script File Path",
"InlineScript": "Inline Script"
},
"name": "ScriptType",
"label": "Script Type",
"defaultValue": "FilePath",
"type": "radio",
"helpMarkDown": "Type of the script: File Path or Inline Script"
},
{
"name": "ScriptPath",
"label": "Script Path",
"defaultValue": "",
"type": "filePath",
"helpMarkDown": "Path of the script. Should be fully qualified path or relative to the default working directory.",
"visibleRule": "ScriptType = FilePath"
},
{
"properties": {
"resizable": "true",
"rows": "10",
"maxLength": "5000"
},
"name": "Inline",
"label": "Inline Script",
"defaultValue": "# You can write your azure powershell scripts inline here. \n# You can also pass predefined and custom variables to this script using arguments",
"type": "multiLine",
"helpMarkDown": "Enter the script to execute.",
"visibleRule": "ScriptType = InlineScript"
},
{
"properties": {
"editorExtension": "ms.vss-services-azure.parameters-grid"
},
"name": "ScriptArguments",
"label": "Script Arguments",
"defaultValue": "",
"type": "string",
"helpMarkDown": "Additional parameters to pass to PowerShell. Can be either ordinal or named parameters.",
"visibleRule": "ScriptType = FilePath"
},
{
"options": {
"stop": "Stop",
"continue": "Continue",
"silentlyContinue": "SilentlyContinue"
},
"name": "errorActionPreference",
"label": "ErrorActionPreference",
"defaultValue": "stop",
"type": "pickList",
"helpMarkDown": "Select the value of the ErrorActionPreference variable for executing the script."
},
{
"name": "FailOnStandardError",
"label": "Fail on Standard Error",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is true, this task will fail if any errors are written to the error pipeline, or if any data is written to the Standard Error stream."
},
{
"aliases": [
"azurePowerShellVersion"
],
"options": {
"LatestVersion": "Latest installed version",
"OtherVersion": "Specify other version"
},
"name": "TargetAzurePs",
"label": "Azure PowerShell Version",
"defaultValue": "OtherVersion",
"type": "radio",
"helpMarkDown": "In case of hosted agents, the supported Azure PowerShell Version is: 1.0.0, 1.6.0, 2.3.2, 2.6.0, 3.1.0 (Hosted VS2017 Queue).\nTo pick the latest version available on the agent, select \"Latest installed version\".\n\nFor private agents you can specify preferred version of Azure PowerShell using \"Specify version\"",
"groupName": "AzurePowerShellVersionOptions"
},
{
"aliases": [
"preferredAzurePowerShellVersion"
],
"name": "CustomTargetAzurePs",
"label": "Preferred Azure PowerShell Version",
"defaultValue": "",
"required": true,
"type": "string",
"helpMarkDown": "Preferred Azure PowerShell Version needs to be a proper semantic version eg. 1.2.3. Regex like 2.\\*,2.3.\\* is not supported. The Hosted VS2017 Pool currently supports Az module version: 1.0.0, 1.6.0, 2.3.2, 2.6.0, 3.1.0",
"visibleRule": "TargetAzurePs = OtherVersion",
"groupName": "AzurePowerShellVersionOptions"
},
{
"name": "pwsh",
"label": "Use PowerShell Core",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is true, then on Windows the task will use pwsh.exe from your PATH instead of powershell.exe.",
"groupName": "advanced"
},
{
"name": "validateScriptSignature",
"label": "Validate script signature",
"defaultValue": "false",
"type": "boolean",
"helpMarkDown": "If this is true, then the task will first check to make sure specified script is signed and valid before executing it.",
"visibleRule": "ScriptType = FilePath",
"groupName": "advanced"
},
{
"name": "workingDirectory",
"label": "Working Directory",
"defaultValue": "",
"type": "filePath",
"helpMarkDown": "Working directory where the script is run.",
"groupName": "advanced"
}
],
"satisfies": [],
"sourceDefinitions": [],
"dataSourceBindings": [],
"instanceNameFormat": "Azure PowerShell script: $(ScriptType)",
"preJobExecution": {},
"execution": {
"PowerShell3": {
"target": "azurepowershell.ps1",
"platforms": [
"windows"
]
},
"Node16": {
"target": "azurepowershell.js",
"argumentFormat": ""
},
"Node10": {
"target": "azurepowershell.js",
"argumentFormat": ""
},
"Node20_1": {
"target": "azurepowershell.js",
"argumentFormat": ""
}
},
"postJobExecution": {},
"_buildConfigMapping": {
"Default": "5.248.2",
"Node20_229_2": "5.248.3"
}
}
]
}

View file

@ -1,5 +1,9 @@
import { getPkgReleases } from '..';
import { Fixtures } from '../../../../test/fixtures';
import * as httpMock from '../../../../test/http-mock';
import { GlobalConfig } from '../../../config/global';
import * as hostRules from '../../../util/host-rules';
import { AzurePipelinesTask } from './schema';
import { AzurePipelinesTasksDatasource } from '.';
const gitHubHost = 'https://raw.githubusercontent.com';
@ -9,6 +13,11 @@ const marketplaceTasksPath =
'/renovatebot/azure-devops-marketplace/main/azure-pipelines-marketplace-tasks.json';
describe('modules/datasource/azure-pipelines-tasks/index', () => {
beforeEach(() => {
GlobalConfig.reset();
hostRules.clear();
});
it('returns null for unknown task', async () => {
httpMock
.scope(gitHubHost)
@ -64,4 +73,103 @@ describe('modules/datasource/azure-pipelines-tasks/index', () => {
}),
).toEqual({ releases: [{ version: '0.171.0' }, { version: '0.198.0' }] });
});
it('returns organization task with single version', async () => {
GlobalConfig.set({
platform: 'azure',
endpoint: 'https://my.custom.domain',
});
hostRules.add({
hostType: AzurePipelinesTasksDatasource.id,
matchHost: 'my.custom.domain',
token: '123test',
});
httpMock
.scope('https://my.custom.domain')
.get('/_apis/distributedtask/tasks/')
.reply(200, Fixtures.get('tasks.json'));
expect(
await getPkgReleases({
datasource: AzurePipelinesTasksDatasource.id,
packageName: 'AzurePowerShell',
}),
).toEqual({ releases: [{ version: '5.248.3' }] });
});
it('returns organization task with multiple versions', async () => {
GlobalConfig.set({
platform: 'azure',
endpoint: 'https://my.custom.domain',
});
hostRules.add({
hostType: AzurePipelinesTasksDatasource.id,
matchHost: 'my.custom.domain',
token: '123test',
});
httpMock
.scope('https://my.custom.domain')
.get('/_apis/distributedtask/tasks/')
.reply(200, Fixtures.get('tasks.json'));
expect(
await getPkgReleases({
datasource: AzurePipelinesTasksDatasource.id,
packageName: 'PowerShell',
}),
).toEqual({
releases: [
{ isDeprecated: true, version: '1.2.3' },
{ isDeprecated: undefined, version: '2.247.1' },
],
});
});
describe('compare semver', () => {
it.each`
a | exp
${[]} | ${[]}
${['']} | ${['']}
${['', '']} | ${['', '']}
${['1.0.0']} | ${['1.0.0']}
${['1.0.1', '1.1.0', '1.0.0']} | ${['1.0.0', '1.0.1', '1.1.0']}
`('when versions is $a', ({ a, exp }) => {
const azureVersions = a.map((x: string) => {
const splitted = x.split('.');
const version =
splitted.length === 3
? {
major: Number(splitted[0]),
minor: Number(splitted[1]),
patch: Number(splitted[2]),
}
: null;
return AzurePipelinesTask.parse({
name: '',
deprecated: false,
version,
});
});
const azureSortedVersions = azureVersions.sort(
AzurePipelinesTasksDatasource.compareSemanticVersions('version'),
);
expect(
azureSortedVersions.map((x: any) => {
const data = AzurePipelinesTask.parse(x);
return data.version === null
? ''
: `${data.version.major}.${data.version.minor}.${data.version.patch}`;
}),
).toStrictEqual(exp);
});
});
});

View file

@ -1,7 +1,16 @@
import type { TypeOf, ZodType } from 'zod';
import { GlobalConfig } from '../../../config/global';
import { cache } from '../../../util/cache/package/decorator';
import * as hostRules from '../../../util/host-rules';
import type { HttpOptions } from '../../../util/http/types';
import { id as versioning } from '../../versioning/loose';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, ReleaseResult } from '../types';
import {
AzurePipelinesFallbackTasks,
AzurePipelinesJSON,
AzurePipelinesTaskVersion,
} from './schema';
const TASKS_URL_BASE =
'https://raw.githubusercontent.com/renovatebot/azure-devops-marketplace/main';
@ -22,13 +31,58 @@ export class AzurePipelinesTasksDatasource extends Datasource {
async getReleases({
packageName,
}: GetReleasesConfig): Promise<ReleaseResult | null> {
const versions =
(await this.getTasks(BUILT_IN_TASKS_URL))[packageName.toLowerCase()] ??
(await this.getTasks(MARKETPLACE_TASKS_URL))[packageName.toLowerCase()];
const platform = GlobalConfig.get('platform');
const endpoint = GlobalConfig.get('endpoint');
const { token } = hostRules.find({
hostType: AzurePipelinesTasksDatasource.id,
url: endpoint,
});
if (versions) {
const releases = versions.map((version) => ({ version }));
return { releases };
if (platform === 'azure' && endpoint && token) {
const auth = Buffer.from(`renovate:${token}`).toString('base64');
const opts: HttpOptions = {
headers: { authorization: `Basic ${auth}` },
};
const results = await this.getTasks(
`${endpoint}/_apis/distributedtask/tasks/`,
opts,
AzurePipelinesJSON,
);
const result: ReleaseResult = { releases: [] };
results.value
.filter((task) => task.name === packageName)
.sort(AzurePipelinesTasksDatasource.compareSemanticVersions('version'))
.forEach((task) => {
result.releases.push({
version: `${task.version!.major}.${task.version!.minor}.${task.version!.patch}`,
isDeprecated: task.deprecated,
});
});
return result;
} else {
const versions =
(
await this.getTasks(
BUILT_IN_TASKS_URL,
{},
AzurePipelinesFallbackTasks,
)
)[packageName.toLowerCase()] ??
(
await this.getTasks(
MARKETPLACE_TASKS_URL,
{},
AzurePipelinesFallbackTasks,
)
)[packageName.toLowerCase()];
if (versions) {
const releases = versions.map((version) => ({ version }));
return { releases };
}
}
return null;
@ -39,8 +93,39 @@ export class AzurePipelinesTasksDatasource extends Datasource {
key: (url: string) => url,
ttlMinutes: 24 * 60,
})
async getTasks(url: string): Promise<Record<string, string[]>> {
const { body } = await this.http.getJson<Record<string, string[]>>(url);
async getTasks<ResT, Schema extends ZodType<ResT> = ZodType<ResT>>(
url: string,
opts: HttpOptions,
schema: Schema,
): Promise<TypeOf<Schema>> {
const { body } = await this.http.getJson(url, opts, schema);
return body;
}
/**
 * Creates a comparator that orders objects ascending by the semantic
 * version stored under `key` (e.g. 1.0.0 < 1.0.1 < 1.1.0).
 *
 * Entries whose version fails to parse (null/undefined) sort after all
 * parseable entries, preserving the original ordering contract.
 *
 * Fix: the previous implementation rendered each version to a string and
 * compared it character-by-character (`+a1[i] || 0`), which treated `'.'`
 * as 0 and mis-ordered multi-digit components — e.g. it considered
 * `1.10.0` to be less than `1.9.0`. Components are now compared
 * numerically, major → minor → patch.
 */
static compareSemanticVersions = (key: string) => (a: any, b: any) => {
  const aVersion = AzurePipelinesTaskVersion.safeParse(a[key]).data;
  const bVersion = AzurePipelinesTaskVersion.safeParse(b[key]).data;
  // Unparseable versions sort last, equal to each other.
  if (aVersion === undefined || bVersion === undefined) {
    if (aVersion === undefined && bVersion === undefined) {
      return 0;
    }
    return aVersion === undefined ? 1 : -1;
  }
  // Numeric component-wise comparison; `||` falls through on ties.
  return (
    aVersion.major - bVersion.major ||
    aVersion.minor - bVersion.minor ||
    aVersion.patch - bVersion.patch
  );
};
}

View file

@ -0,0 +1,19 @@
import { z } from 'zod';
// Semantic-version components of a task as returned by the Azure DevOps
// distributed-task API (e.g. { major: 2, minor: 247, patch: 1 }).
export const AzurePipelinesTaskVersion = z.object({
major: z.number(),
minor: z.number(),
patch: z.number(),
});
// A single task entry from the API. `version` is nullable because the API
// may return entries without a parseable version.
export const AzurePipelinesTask = z.object({
name: z.string(),
deprecated: z.boolean().optional(),
version: AzurePipelinesTaskVersion.nullable(),
});
// Top-level response shape of `GET {endpoint}/_apis/distributedtask/tasks/`.
export const AzurePipelinesJSON = z.object({
value: AzurePipelinesTask.array(),
});
// Fallback JSON shape used by the renovatebot/azure-devops-marketplace
// mirror: a map of lower-cased task name -> list of version strings.
export const AzurePipelinesFallbackTasks = z.record(z.string().array());

View file

@ -0,0 +1,3 @@
// Base URL of the Devbox package search API (v2).
export const defaultRegistryUrl = 'https://search.devbox.sh/v2/';
// Datasource identifier; used as `DevboxDatasource.id` when registering.
export const datasource = 'devbox';

View file

@ -0,0 +1,159 @@
import { getPkgReleases } from '..';
import * as httpMock from '../../../../test/http-mock';
import { EXTERNAL_HOST_ERROR } from '../../../constants/error-messages';
import { datasource, defaultRegistryUrl } from './common';
const packageName = 'nodejs';
function getPath(packageName: string): string {
return `/pkg?name=${encodeURIComponent(packageName)}`;
}
const sampleReleases = [
{
version: '22.2.0',
last_updated: '2024-05-22T06:18:38Z',
},
{
version: '22.0.0',
last_updated: '2024-05-12T16:19:40Z',
},
{
version: '21.7.3',
last_updated: '2024-04-19T21:36:04Z',
},
];
describe('modules/datasource/devbox/index', () => {
describe('getReleases', () => {
it('throws for error', async () => {
httpMock
.scope(defaultRegistryUrl)
.get(getPath(packageName))
.replyWithError('error');
await expect(
getPkgReleases({
datasource,
packageName,
}),
).rejects.toThrow(EXTERNAL_HOST_ERROR);
});
});
it('returns null for 404', async () => {
httpMock.scope(defaultRegistryUrl).get(getPath(packageName)).reply(404);
expect(
await getPkgReleases({
datasource,
packageName,
}),
).toBeNull();
});
it('returns null for empty result', async () => {
httpMock.scope(defaultRegistryUrl).get(getPath(packageName)).reply(200, {});
expect(
await getPkgReleases({
datasource,
packageName,
}),
).toBeNull();
});
it('returns null for empty 200 OK', async () => {
httpMock
.scope(defaultRegistryUrl)
.get(getPath(packageName))
.reply(200, { versions: [] });
expect(
await getPkgReleases({
datasource,
packageName,
}),
).toBeNull();
});
it('throws for 5xx', async () => {
httpMock.scope(defaultRegistryUrl).get(getPath(packageName)).reply(502);
await expect(
getPkgReleases({
datasource,
packageName,
}),
).rejects.toThrow(EXTERNAL_HOST_ERROR);
});
it('processes real data', async () => {
httpMock.scope(defaultRegistryUrl).get(getPath(packageName)).reply(200, {
name: 'nodejs',
summary: 'Event-driven I/O framework for the V8 JavaScript engine',
homepage_url: 'https://nodejs.org',
license: 'MIT',
releases: sampleReleases,
});
const res = await getPkgReleases({
datasource,
packageName,
});
expect(res).toEqual({
homepage: 'https://nodejs.org',
registryUrl: 'https://search.devbox.sh/v2',
releases: [
{
version: '21.7.3',
releaseTimestamp: '2024-04-19T21:36:04.000Z',
},
{
version: '22.0.0',
releaseTimestamp: '2024-05-12T16:19:40.000Z',
},
{
version: '22.2.0',
releaseTimestamp: '2024-05-22T06:18:38.000Z',
},
],
});
});
it('processes empty data', async () => {
httpMock.scope(defaultRegistryUrl).get(getPath(packageName)).reply(200, {
name: 'nodejs',
summary: 'Event-driven I/O framework for the V8 JavaScript engine',
homepage_url: 'https://nodejs.org',
license: 'MIT',
releases: [],
});
const res = await getPkgReleases({
datasource,
packageName,
});
expect(res).toBeNull();
});
it('returns null when no body is returned', async () => {
httpMock
.scope(defaultRegistryUrl)
.get(getPath(packageName))
.reply(200, undefined);
const res = await getPkgReleases({
datasource,
packageName,
});
expect(res).toBeNull();
});
it('falls back to a default homepage_url', async () => {
httpMock.scope(defaultRegistryUrl).get(getPath(packageName)).reply(200, {
name: 'nodejs',
summary: 'Event-driven I/O framework for the V8 JavaScript engine',
homepage_url: undefined,
license: 'MIT',
releases: sampleReleases,
});
const res = await getPkgReleases({
datasource,
packageName,
});
expect(res?.homepage).toBe('https://www.nixhub.io/');
});
});

View file

@ -0,0 +1,57 @@
import { logger } from '../../../logger';
import { ExternalHostError } from '../../../types/errors/external-host-error';
import { HttpError } from '../../../util/http';
import { joinUrlParts } from '../../../util/url';
import * as devboxVersioning from '../../versioning/devbox';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, ReleaseResult } from '../types';
import { datasource, defaultRegistryUrl } from './common';
import { DevboxResponse } from './schema';
export class DevboxDatasource extends Datasource {
  static readonly id = datasource;

  constructor() {
    super(datasource);
  }

  override readonly customRegistrySupport = true;

  override readonly releaseTimestampSupport = true;

  override readonly registryStrategy = 'first';

  override readonly defaultVersioning = devboxVersioning.id;

  override readonly defaultRegistryUrls = [defaultRegistryUrl];

  /**
   * Fetch releases for a package from the devbox search API.
   *
   * Non-404 HTTP errors are surfaced as ExternalHostError; everything else
   * (including 404) is routed through the generic datasource error handler.
   * Returns null when the response contains no releases.
   */
  async getReleases({
    registryUrl,
    packageName,
  }: GetReleasesConfig): Promise<ReleaseResult | null> {
    logger.trace({ registryUrl, packageName }, 'fetching devbox release');

    const devboxPkgUrl = joinUrlParts(
      registryUrl!,
      `/pkg?name=${encodeURIComponent(packageName)}`,
    );

    const result: ReleaseResult = { releases: [] };
    try {
      const { body } = await this.http.getJson(devboxPkgUrl, DevboxResponse);
      result.releases = body.releases;
      result.homepage = body.homepage;
    } catch (err) {
      // istanbul ignore else: not testable with nock
      if (err instanceof HttpError && err.response?.statusCode !== 404) {
        throw new ExternalHostError(err);
      }
      this.handleGenericErrors(err);
    }

    return result.releases.length ? result : null;
  }
}

View file

@ -0,0 +1,23 @@
import { z } from 'zod';
// One release entry as returned by the devbox search API.
// `last_updated` is the raw timestamp string from the API.
export const DevboxRelease = z.object({
version: z.string(),
last_updated: z.string(),
});
// Package payload from the devbox search API, reshaped into the fields the
// datasource consumes. A missing/invalid homepage_url falls back to nixhub.
export const DevboxResponse = z
  .object({
    name: z.string(),
    summary: z.string().optional(),
    homepage_url: z.string().catch('https://www.nixhub.io/'),
    license: z.string().optional(),
    releases: DevboxRelease.array(),
  })
  .transform(({ name, homepage_url, releases }) => ({
    name,
    homepage: homepage_url,
    releases: releases.map(({ version, last_updated }) => ({
      version,
      releaseTimestamp: last_updated,
    })),
  }));

View file

@ -392,5 +392,31 @@ describe('modules/manager/bazel-module/extract', () => {
},
]);
});
// A git_repository rule with a GitHub remote should be extracted as a single
// commit-pinned dependency on the github-tags datasource.
it('returns git_repository dependencies', async () => {
const input = codeBlock`
git_repository(
name = "rules_foo",
commit = "850cb49c8649e463b80ef7984e7c744279746170",
remote = "https://github.com/example/rules_foo.git",
)
`;
const result = await extractPackageFile(input, 'MODULE.bazel');
// Narrow the nullable result before asserting on its deps.
if (!result) {
throw new Error('Expected a result.');
}
expect(result.deps).toHaveLength(1);
expect(result.deps).toEqual(
expect.arrayContaining([
{
datasource: GithubTagsDatasource.id,
depType: 'git_repository',
depName: 'rules_foo',
currentDigest: '850cb49c8649e463b80ef7984e7c744279746170',
packageName: 'example/rules_foo',
},
]),
);
});
});
});

View file

@ -8,7 +8,10 @@ import type { RecordFragment } from './fragments';
import { parse } from './parser';
import { RuleToMavenPackageDep, fillRegistryUrls } from './parser/maven';
import { RuleToDockerPackageDep } from './parser/oci';
import { RuleToBazelModulePackageDep } from './rules';
import {
GitRepositoryToPackageDep,
RuleToBazelModulePackageDep,
} from './rules';
import * as rules from './rules';
export async function extractPackageFile(
@ -18,9 +21,14 @@ export async function extractPackageFile(
try {
const records = parse(content);
const pfc = await extractBazelPfc(records, packageFile);
const gitRepositoryDeps = extractGitRepositoryDeps(records);
const mavenDeps = extractMavenDeps(records);
const dockerDeps = LooseArray(RuleToDockerPackageDep).parse(records);
if (gitRepositoryDeps.length) {
pfc.deps.push(...gitRepositoryDeps);
}
if (mavenDeps.length) {
pfc.deps.push(...mavenDeps);
}
@ -57,6 +65,12 @@ async function extractBazelPfc(
return pfc;
}
function extractGitRepositoryDeps(
  records: RecordFragment[],
): PackageDependency[] {
  // Dependencies declared via git_repository rules.
  const schema = LooseArray(GitRepositoryToPackageDep);
  return schema.parse(records);
}
function extractMavenDeps(records: RecordFragment[]): PackageDependency[] {
return LooseArray(RuleToMavenPackageDep)
.transform(fillRegistryUrls)

View file

@ -315,5 +315,37 @@ describe('modules/manager/bazel-module/parser/index', () => {
),
]);
});
// The parser should record a git_repository rule with its string and array
// attributes. Note: patch_strip does not appear in the expected record.
it('finds the git_repository', () => {
const input = codeBlock`
git_repository(
name = "rules_foo",
remote = "https://github.com/example/rules_foo.git",
commit = "6a2c2e22849b3e6b33d5ea9aa72222d4803a986a",
patches = ["//:rules_foo.patch"],
patch_strip = 1,
)
`;
const res = parse(input);
expect(res).toEqual([
fragments.record(
{
rule: fragments.string('git_repository'),
name: fragments.string('rules_foo'),
patches: fragments.array(
[fragments.string('//:rules_foo.patch')],
true,
),
commit: fragments.string(
'6a2c2e22849b3e6b33d5ea9aa72222d4803a986a',
),
remote: fragments.string(
'https://github.com/example/rules_foo.git',
),
},
true,
),
]);
});
});
});

View file

@ -9,6 +9,7 @@ const supportedRules = [
'git_override',
'local_path_override',
'single_version_override',
'git_repository',
];
const supportedRulesRegex = regEx(`^${supportedRules.join('|')}$`);

View file

@ -10,6 +10,7 @@ import type {
OverridePackageDep,
} from './rules';
import {
GitRepositoryToPackageDep,
RuleToBazelModulePackageDep,
bazelModulePackageDepToPackageDependency,
processModulePkgDeps,
@ -72,6 +73,19 @@ const singleVersionOverrideWithoutVersionAndRegistryPkgDep: BasePackageDep = {
depName: 'rules_foo',
skipReason: 'ignored',
};
const gitRepositoryForGithubPkgDep: BasePackageDep = {
datasource: GithubTagsDatasource.id,
depType: 'git_repository',
depName: 'rules_foo',
packageName: 'example/rules_foo',
currentDigest: '850cb49c8649e463b80ef7984e7c744279746170',
};
const gitRepositoryForUnsupportedPkgDep: BasePackageDep = {
depType: 'git_repository',
depName: 'rules_foo',
currentDigest: '850cb49c8649e463b80ef7984e7c744279746170',
skipReason: 'unsupported-datasource',
};
describe('modules/manager/bazel-module/rules', () => {
describe('RuleToBazelModulePackageDep', () => {
@ -129,6 +143,30 @@ describe('modules/manager/bazel-module/rules', () => {
});
});
describe('GitRepositoryToPackageDep', () => {
// Fixture: git_repository whose remote is on GitHub (supported datasource).
const gitRepositoryWithGihubHost = fragments.record({
rule: fragments.string('git_repository'),
name: fragments.string('rules_foo'),
remote: fragments.string('https://github.com/example/rules_foo.git'),
commit: fragments.string('850cb49c8649e463b80ef7984e7c744279746170'),
});
// Fixture: same rule, but hosted somewhere without datasource support.
const gitRepositoryWithUnsupportedHost = fragments.record({
rule: fragments.string('git_repository'),
name: fragments.string('rules_foo'),
remote: fragments.string('https://nobuenos.com/example/rules_foo.git'),
commit: fragments.string('850cb49c8649e463b80ef7984e7c744279746170'),
});
it.each`
msg | a | exp
${'git_repository, GitHub host'} | ${gitRepositoryWithGihubHost} | ${gitRepositoryForGithubPkgDep}
${'git_repository, unsupported host'} | ${gitRepositoryWithUnsupportedHost} | ${gitRepositoryForUnsupportedPkgDep}
`('.parse() with $msg', ({ a, exp }) => {
const pkgDep = GitRepositoryToPackageDep.parse(a);
expect(pkgDep).toEqual(exp);
});
});
describe('.toPackageDependencies()', () => {
const expectedBazelDepNoOverrides: PackageDependency[] = [bazelDepPkgDep];
const expectedBazelDepAndGitOverride: PackageDependency[] = [

View file

@ -242,3 +242,28 @@ export function toPackageDependencies(
): PackageDependency[] {
return collectByModule(packageDeps).map(processModulePkgDeps).flat();
}
export const GitRepositoryToPackageDep = RecordFragmentSchema.extend({
children: z.object({
rule: StringFragmentSchema.extend({
value: z.literal('git_repository'),
}),
name: StringFragmentSchema,
remote: StringFragmentSchema,
commit: StringFragmentSchema,
}),
}).transform(({ children: { rule, name, remote, commit } }): BasePackageDep => {
const gitRepo: BasePackageDep = {
depType: rule.value,
depName: name.value,
currentDigest: commit.value,
};
const ghPackageName = githubPackageName(remote.value);
if (is.nonEmptyString(ghPackageName)) {
gitRepo.datasource = GithubTagsDatasource.id;
gitRepo.packageName = ghPackageName;
} else {
gitRepo.skipReason = 'unsupported-datasource';
}
return gitRepo;
});

View file

@ -1,4 +1,6 @@
import { regEx } from '../../../util/regex';
import type { HelmRepository } from './schema';
import type { FluxManifest } from './types';
export const systemManifestFileNameRegex = '(?:^|/)gotk-components\\.ya?ml$';
@ -8,3 +10,19 @@ export const systemManifestHeaderRegex =
export function isSystemManifest(file: string): boolean {
return regEx(systemManifestFileNameRegex).test(file);
}
// Gather every HelmRepository resource found across the given manifests.
// Only 'resource' manifests carry resources; other kinds are skipped.
export function collectHelmRepos(manifests: FluxManifest[]): HelmRepository[] {
  const repos: HelmRepository[] = [];
  for (const manifest of manifests) {
    if (manifest.kind !== 'resource') {
      continue;
    }
    for (const resource of manifest.resources) {
      if (resource.kind === 'HelmRepository') {
        repos.push(resource);
      }
    }
  }
  return repos;
}

View file

@ -21,7 +21,11 @@ import type {
PackageFile,
PackageFileContent,
} from '../types';
import { isSystemManifest, systemManifestHeaderRegex } from './common';
import {
collectHelmRepos,
isSystemManifest,
systemManifestHeaderRegex,
} from './common';
import { FluxResource, type HelmRepository } from './schema';
import type {
FluxManagerData,
@ -102,6 +106,39 @@ function resolveGitRepositoryPerSourceTag(
}
}
/**
 * Mutate `dep` in place based on the HelmRepositories that matched its
 * sourceRef.
 *
 * Non-OCI repositories contribute their URL to dep.registryUrls. An OCI
 * repository instead switches the dep to the Docker datasource and rewrites
 * its packageName to an OCI path (if several OCI repos match, the last one
 * wins, mirroring the original map-based implementation). With no matches
 * at all, the dep is skipped as 'unknown-registry'.
 */
function resolveHelmRepository(
  dep: PackageDependency,
  matchingRepositories: HelmRepository[],
  registryAliases: Record<string, string> | undefined,
): void {
  if (!matchingRepositories.length) {
    dep.skipReason = 'unknown-registry';
    return;
  }

  const registryUrls: string[] = [];
  for (const repo of matchingRepositories) {
    if (repo.spec.type === 'oci' || isOCIRegistry(repo.spec.url)) {
      // Change datasource to Docker
      dep.datasource = DockerDatasource.id;
      // Ensure the URL is a valid OCI path
      dep.packageName = getDep(
        `${removeOCIPrefix(repo.spec.url)}/${dep.depName}`,
        false,
        registryAliases,
      ).depName;
    } else {
      registryUrls.push(repo.spec.url);
    }
  }

  // Only keep registryUrls on the dep when at least one plain URL was found.
  if (registryUrls.length) {
    dep.registryUrls = registryUrls;
  } else {
    delete dep.registryUrls;
  }
}
function resolveSystemManifest(
manifest: SystemFluxManifest,
): PackageDependency<FluxManagerData>[] {
@ -126,7 +163,8 @@ function resolveResourceManifest(
for (const resource of manifest.resources) {
switch (resource.kind) {
case 'HelmRelease': {
const depName = resource.spec.chart.spec.chart;
const chartSpec = resource.spec.chart.spec;
const depName = chartSpec.chart;
const dep: PackageDependency = {
depName,
currentValue: resource.spec.chart.spec.version,
@ -142,40 +180,12 @@ function resolveResourceManifest(
const matchingRepositories = helmRepositories.filter(
(rep) =>
rep.kind === resource.spec.chart.spec.sourceRef?.kind &&
rep.metadata.name === resource.spec.chart.spec.sourceRef.name &&
rep.kind === chartSpec.sourceRef?.kind &&
rep.metadata.name === chartSpec.sourceRef.name &&
rep.metadata.namespace ===
(resource.spec.chart.spec.sourceRef.namespace ??
resource.metadata?.namespace),
(chartSpec.sourceRef.namespace ?? resource.metadata?.namespace),
);
if (matchingRepositories.length) {
dep.registryUrls = matchingRepositories
.map((repo) => {
if (repo.spec.type === 'oci' || isOCIRegistry(repo.spec.url)) {
// Change datasource to Docker
dep.datasource = DockerDatasource.id;
// Ensure the URL is a valid OCI path
dep.packageName = getDep(
`${removeOCIPrefix(repo.spec.url)}/${
resource.spec.chart.spec.chart
}`,
false,
registryAliases,
).depName;
return null;
} else {
return repo.spec.url;
}
})
.filter(is.string);
// if registryUrls is empty, delete it from dep
if (!dep.registryUrls?.length) {
delete dep.registryUrls;
}
} else {
dep.skipReason = 'unknown-registry';
}
resolveHelmRepository(dep, matchingRepositories, registryAliases);
deps.push(dep);
break;
}
@ -252,14 +262,7 @@ export function extractPackageFile(
if (!manifest) {
return null;
}
const helmRepositories: HelmRepository[] = [];
if (manifest.kind === 'resource') {
for (const resource of manifest.resources) {
if (resource.kind === 'HelmRepository') {
helmRepositories.push(resource);
}
}
}
const helmRepositories = collectHelmRepos([manifest]);
let deps: PackageDependency[] | null = null;
switch (manifest.kind) {
case 'system':
@ -293,16 +296,7 @@ export async function extractAllPackageFiles(
}
}
const helmRepositories: HelmRepository[] = [];
for (const manifest of manifests) {
if (manifest.kind === 'resource') {
for (const resource of manifest.resources) {
if (resource.kind === 'HelmRepository') {
helmRepositories.push(resource);
}
}
}
}
const helmRepositories = collectHelmRepos(manifests);
for (const manifest of manifests) {
let deps: PackageDependency[] | null = null;

View file

@ -16,6 +16,10 @@ url = "last.url"
[[tool.poetry.source]]
name = "five"
[[tool.poetry.source]]
name = "invalid-url"
url = "invalid-url"
[build-system]
requires = ["poetry_core>=1.0", "wheel"]
build-backend = "poetry.masonry.api"

View file

@ -1,4 +1,5 @@
import { codeBlock } from 'common-tags';
import { GoogleAuth as _googleAuth } from 'google-auth-library';
import { mockDeep } from 'jest-mock-extended';
import { join } from 'upath';
import { envMock, mockExecAll } from '../../../../test/exec-util';
@ -15,16 +16,26 @@ import { updateArtifacts } from '.';
const pyproject1toml = Fixtures.get('pyproject.1.toml');
const pyproject10toml = Fixtures.get('pyproject.10.toml');
const pyproject13toml = `[[tool.poetry.source]]
name = "some-gar-repo"
url = "https://someregion-python.pkg.dev/some-project/some-repo/simple/"
[build-system]
requires = ["poetry_core>=1.0", "wheel"]
build-backend = "poetry.masonry.api"
`;
jest.mock('../../../util/exec/env');
jest.mock('../../../util/fs');
jest.mock('../../datasource', () => mockDeep());
jest.mock('../../../util/host-rules', () => mockDeep());
jest.mock('google-auth-library');
process.env.CONTAINERBASE = 'true';
const datasource = mocked(_datasource);
const hostRules = mocked(_hostRules);
const googleAuth = mocked(_googleAuth);
const adminConfig: RepoGlobalConfig = {
localDir: join('/tmp/github/some/repo'),
@ -198,7 +209,99 @@ describe('modules/manager/poetry/artifacts', () => {
},
},
]);
expect(hostRules.find.mock.calls).toHaveLength(5);
expect(hostRules.find.mock.calls).toHaveLength(7);
expect(execSnapshots).toMatchObject([
{
cmd: 'poetry update --lock --no-interaction dep1',
options: {
env: {
POETRY_HTTP_BASIC_ONE_PASSWORD: 'passwordOne',
POETRY_HTTP_BASIC_ONE_USERNAME: 'usernameOne',
POETRY_HTTP_BASIC_TWO_USERNAME: 'usernameTwo',
POETRY_HTTP_BASIC_FOUR_OH_FOUR_PASSWORD: 'passwordFour',
},
},
},
]);
});
// Exercises the Google Artifact Registry flow: a *.pkg.dev source with no
// matching host rule gets a Google access token injected as
// oauth2accesstoken basic-auth env vars for poetry.
it('passes Google Artifact Registry credentials environment vars', async () => {
// poetry.lock
fs.getSiblingFileName.mockReturnValueOnce('poetry.lock');
fs.readLocalFile.mockResolvedValueOnce(null);
// pyproject.lock
fs.getSiblingFileName.mockReturnValueOnce('pyproject.lock');
fs.readLocalFile.mockResolvedValueOnce('[metadata]\n');
const execSnapshots = mockExecAll();
fs.readLocalFile.mockResolvedValueOnce('New poetry.lock');
// Mock a Google auth client that successfully yields an access token.
googleAuth.mockImplementationOnce(
jest.fn().mockImplementationOnce(() => ({
getAccessToken: jest.fn().mockResolvedValue('some-token'),
})),
);
const updatedDeps = [{ depName: 'dep1' }];
expect(
await updateArtifacts({
packageFileName: 'pyproject.toml',
updatedDeps,
newPackageFileContent: pyproject13toml,
config,
}),
).toEqual([
{
file: {
type: 'addition',
path: 'pyproject.lock',
contents: 'New poetry.lock',
},
},
]);
expect(hostRules.find.mock.calls).toHaveLength(3);
expect(execSnapshots).toMatchObject([
{
cmd: 'poetry update --lock --no-interaction dep1',
options: {
env: {
POETRY_HTTP_BASIC_SOME_GAR_REPO_USERNAME: 'oauth2accesstoken',
POETRY_HTTP_BASIC_SOME_GAR_REPO_PASSWORD: 'some-token',
},
},
},
]);
});
it('continues if Google auth is not configured', async () => {
// poetry.lock
fs.getSiblingFileName.mockReturnValueOnce('poetry.lock');
fs.readLocalFile.mockResolvedValueOnce(null);
// pyproject.lock
fs.getSiblingFileName.mockReturnValueOnce('pyproject.lock');
fs.readLocalFile.mockResolvedValueOnce('[metadata]\n');
const execSnapshots = mockExecAll();
fs.readLocalFile.mockResolvedValueOnce('New poetry.lock');
googleAuth.mockImplementation(
jest.fn().mockImplementation(() => ({
getAccessToken: jest.fn().mockResolvedValue(undefined),
})),
);
const updatedDeps = [{ depName: 'dep1' }];
expect(
await updateArtifacts({
packageFileName: 'pyproject.toml',
updatedDeps,
newPackageFileContent: pyproject13toml,
config,
}),
).toEqual([
{
file: {
type: 'addition',
path: 'pyproject.lock',
contents: 'New poetry.lock',
},
},
]);
expect(hostRules.find.mock.calls).toHaveLength(3);
expect(execSnapshots).toMatchObject([
{ cmd: 'poetry update --lock --no-interaction dep1' },
]);

View file

@ -17,7 +17,9 @@ import { find } from '../../../util/host-rules';
import { regEx } from '../../../util/regex';
import { Result } from '../../../util/result';
import { parse as parseToml } from '../../../util/toml';
import { parseUrl } from '../../../util/url';
import { PypiDatasource } from '../../datasource/pypi';
import { getGoogleAuthTokenRaw } from '../../datasource/util';
import type { UpdateArtifact, UpdateArtifactsResult } from '../types';
import { Lockfile, PoetrySchemaToml } from './schema';
import type { PoetryFile, PoetrySource } from './types';
@ -101,7 +103,7 @@ function getPoetrySources(content: string, fileName: string): PoetrySource[] {
return [];
}
if (!pyprojectFile.tool?.poetry) {
logger.debug(`{$fileName} contains no poetry section`);
logger.debug(`${fileName} contains no poetry section`);
return [];
}
@ -115,20 +117,42 @@ function getPoetrySources(content: string, fileName: string): PoetrySource[] {
return sourceArray;
}
function getMatchingHostRule(url: string | undefined): HostRule {
async function getMatchingHostRule(url: string | undefined): Promise<HostRule> {
const scopedMatch = find({ hostType: PypiDatasource.id, url });
return is.nonEmptyObject(scopedMatch) ? scopedMatch : find({ url });
const hostRule = is.nonEmptyObject(scopedMatch) ? scopedMatch : find({ url });
if (hostRule) {
return hostRule;
}
const parsedUrl = parseUrl(url);
if (!parsedUrl) {
logger.once.debug(`Failed to parse URL ${url}`);
return {};
}
if (parsedUrl.hostname.endsWith('.pkg.dev')) {
const accessToken = await getGoogleAuthTokenRaw();
if (accessToken) {
return {
username: 'oauth2accesstoken',
password: accessToken,
};
}
logger.once.debug(`Could not get Google access token (url=${url})`);
}
return {};
}
function getSourceCredentialVars(
async function getSourceCredentialVars(
pyprojectContent: string,
packageFileName: string,
): NodeJS.ProcessEnv {
): Promise<NodeJS.ProcessEnv> {
const poetrySources = getPoetrySources(pyprojectContent, packageFileName);
const envVars: NodeJS.ProcessEnv = {};
for (const source of poetrySources) {
const matchingHostRule = getMatchingHostRule(source.url);
const matchingHostRule = await getMatchingHostRule(source.url);
const formattedSourceName = source.name
.replace(regEx(/(\.|-)+/g), '_')
.toUpperCase();
@ -192,7 +216,10 @@ export async function updateArtifacts({
config.constraints?.poetry ??
getPoetryRequirement(newPackageFileContent, existingLockFileContent);
const extraEnv = {
...getSourceCredentialVars(newPackageFileContent, packageFileName),
...(await getSourceCredentialVars(
newPackageFileContent,
packageFileName,
)),
...getGitEnvironmentVariables(['poetry']),
PIP_CACHE_DIR: await ensureCacheDir('pip'),
};

View file

@ -13,11 +13,18 @@ repos:
rev: 19.3b0
hooks:
- id: black
language: python
additional_dependencies:
- "request==1.1.1"
- "" # broken pypi package
- repo: https://gitlab.com/psf/black
# should also detect gitlab
rev: 19.3b0
hooks:
- id: black
# missing language, not extracted
additional_dependencies:
- "urllib==24.9.0"
- repo: http://gitlab.com/psf/black
# should also detect http
rev: 19.3b0
@ -48,3 +55,7 @@ repos:
- repo: some_invalid_url
# case with invalid url.
rev: v1.0.0
# pre-commit meta hooks
- repo: meta
hooks: []

View file

@ -10,6 +10,14 @@ exports[`modules/manager/pre-commit/extract extractPackageFile() extracts from c
"depType": "repository",
"packageName": "pre-commit/pre-commit-hooks",
},
{
"currentValue": "==1.1.1",
"currentVersion": "1.1.1",
"datasource": "pypi",
"depName": "request",
"depType": "pre-commit-python",
"packageName": "request",
},
{
"currentValue": "19.3b0",
"datasource": "github-tags",

View file

@ -2,6 +2,7 @@ import { mockDeep } from 'jest-mock-extended';
import { Fixtures } from '../../../../test/fixtures';
import { mocked } from '../../../../test/util';
import * as _hostRules from '../../../util/host-rules';
import { PypiDatasource } from '../../datasource/pypi';
import { extractPackageFile } from '.';
jest.mock('../../../util/host-rules', () => mockDeep());
@ -81,6 +82,14 @@ describe('modules/manager/pre-commit/extract', () => {
expect(result).toMatchSnapshot({
deps: [
{ depName: 'pre-commit/pre-commit-hooks', currentValue: 'v3.3.0' },
{
currentValue: '==1.1.1',
currentVersion: '1.1.1',
datasource: PypiDatasource.id,
depName: 'request',
depType: 'pre-commit-python',
packageName: 'request',
},
{ depName: 'psf/black', currentValue: '19.3b0' },
{ depName: 'psf/black', currentValue: '19.3b0' },
{ depName: 'psf/black', currentValue: '19.3b0' },

View file

@ -7,6 +7,7 @@ import { regEx } from '../../../util/regex';
import { parseSingleYaml } from '../../../util/yaml';
import { GithubTagsDatasource } from '../../datasource/github-tags';
import { GitlabTagsDatasource } from '../../datasource/gitlab-tags';
import { pep508ToPackageDependency } from '../pep621/utils';
import type { PackageDependency, PackageFileContent } from '../types';
import {
matchesPrecommitConfigHeuristic,
@ -137,6 +138,23 @@ function findDependencies(precommitFile: PreCommitConfig): PackageDependency[] {
}
const packageDependencies: PackageDependency[] = [];
precommitFile.repos.forEach((item) => {
// meta hooks is defined from pre-commit and doesn't support `additional_dependencies`
if (item.repo !== 'meta') {
item.hooks?.forEach((hook) => {
// normally language are not defined in yaml
// only support it when it's explicitly defined.
// this avoid to parse hooks from pre-commit-hooks.yaml from git repo
if (hook.language === 'python') {
hook.additional_dependencies?.map((req) => {
const dep = pep508ToPackageDependency('pre-commit-python', req);
if (dep) {
packageDependencies.push(dep);
}
});
}
});
}
if (matchesPrecommitDependencyHeuristic(item)) {
logger.trace(item, 'Matched pre-commit dependency spec');
const repository = String(item.repo);

View file

@ -26,3 +26,33 @@ To enable the `pre-commit` manager, add the following config:
```
Alternatively, add `:enablePreCommit` to your `extends` array.
### Additional Dependencies
Renovate has partial support for `additional_dependencies`: currently Python only.
For Python hooks, you must **explicitly add the `language` key** to hooks that use `additional_dependencies`,
so that Renovate knows what kind of dependencies they are.
For example, this works for `request`:
```yaml
- repo: https://github.com/psf/black
rev: 19.3b0
hooks:
- id: black
language: python
additional_dependencies:
- 'request==1.1.1'
```
This won't work:
```yaml
- repo: https://github.com/psf/black
rev: 19.3b0
hooks:
- id: black
additional_dependencies:
- 'request==1.1.1'
```

View file

@ -2,7 +2,13 @@ export interface PreCommitConfig {
repos: PreCommitDependency[];
}
export interface PreCommitHook {
language?: string;
additional_dependencies?: Array<string>;
}
export interface PreCommitDependency {
repo: string;
hooks?: Array<PreCommitHook>;
rev: string;
}

View file

@ -1531,6 +1531,57 @@ describe('modules/platform/github/index', () => {
});
});
describe('getIssue()', () => {
// Repos with issues disabled short-circuit to null without an API call.
it('returns null if issues disabled', async () => {
const scope = httpMock.scope(githubApiHost);
initRepoMock(scope, 'some/repo', { hasIssuesEnabled: false });
await github.initRepo({ repository: 'some/repo' });
const res = await github.getIssue(1);
expect(res).toBeNull();
});
// Happy path: the REST payload is mapped, with updated_at exposed as
// lastModified.
it('returns issue', async () => {
const scope = httpMock.scope(githubApiHost);
initRepoMock(scope, 'some/repo');
const issue = {
number: 1,
state: 'open',
title: 'title-1',
body: 'body-1',
};
scope
.get('/repos/some/repo/issues/1')
.reply(200, { ...issue, updated_at: '2022-01-01T00:00:00Z' });
await github.initRepo({ repository: 'some/repo' });
const res = await github.getIssue(1);
expect(res).toMatchObject({
...issue,
lastModified: '2022-01-01T00:00:00Z',
});
});
it('returns null if issue not found', async () => {
const scope = httpMock.scope(githubApiHost);
initRepoMock(scope, 'some/repo');
scope.get('/repos/some/repo/issues/1').reply(404);
await github.initRepo({ repository: 'some/repo' });
const res = await github.getIssue(1);
expect(res).toBeNull();
});
// HTTP 410 Gone marks a deleted issue: null result plus a debug log line.
it('logs debug message if issue deleted', async () => {
const scope = httpMock.scope(githubApiHost);
initRepoMock(scope, 'some/repo');
scope.get('/repos/some/repo/issues/1').reply(410);
await github.initRepo({ repository: 'some/repo' });
const res = await github.getIssue(1);
expect(res).toBeNull();
expect(logger.logger.debug).toHaveBeenCalledWith(
'Issue #1 has been deleted',
);
});
});
describe('findIssue()', () => {
it('returns null if no issue', async () => {
httpMock

View file

@ -1231,7 +1231,6 @@ export async function getIssueList(): Promise<Issue[]> {
}
export async function getIssue(number: number): Promise<Issue | null> {
// istanbul ignore if
if (config.hasIssuesEnabled === false) {
return null;
}
@ -1246,8 +1245,12 @@ export async function getIssue(number: number): Promise<Issue | null> {
);
GithubIssueCache.updateIssue(issue);
return issue;
} catch (err) /* istanbul ignore next */ {
} catch (err) {
logger.debug({ err, number }, 'Error getting issue');
if (err.response?.statusCode === 410) {
logger.debug(`Issue #${number} has been deleted`);
GithubIssueCache.deleteIssue(number);
}
return null;
}
}

View file

@ -159,6 +159,32 @@ describe('modules/platform/github/issue', () => {
});
});
// deleteIssue() must drop only the targeted entry, leaving the cache
// structure itself intact.
it('removes particular issue from the cache', () => {
cache.platform = {
github: {
issuesCache: {
'1': {
number: 1,
body: 'body-1',
state: 'open',
title: 'title-1',
lastModified: '2020-01-01T00:00:00.000Z',
},
},
},
};
GithubIssueCache.deleteIssue(1);
expect(cache).toEqual({
platform: {
github: {
issuesCache: {},
},
},
});
});
});
it('reconciles cache', () => {
cache.platform = {
github: {

View file

@ -85,6 +85,13 @@ export class GithubIssueCache {
}
}
static deleteIssue(number: number): void {
const cacheData = this.data;
if (cacheData) {
delete cacheData[number];
}
}
/**
* At the moment of repo initialization, repository cache is not available.
* What we can do is to store issues for later reconciliation.

View file

@ -1361,9 +1361,12 @@ export async function ensureComment({
if (topic) {
logger.debug(`Ensuring comment "${massagedTopic!}" in #${number}`);
body = `### ${topic}\n\n${sanitizedContent}`;
body = body
.replace(regEx(/Pull Request/g), 'Merge Request')
.replace(regEx(/PR/g), 'MR');
body = smartTruncate(
body
.replace(regEx(/Pull Request/g), 'Merge Request')
.replace(regEx(/PR/g), 'MR'),
maxBodyLength(),
);
comments.forEach((comment: { body: string; id: number }) => {
if (comment.body.startsWith(`### ${massagedTopic!}\n\n`)) {
commentId = comment.id;
@ -1372,7 +1375,7 @@ export async function ensureComment({
});
} else {
logger.debug(`Ensuring content-only comment in #${number}`);
body = `${sanitizedContent}`;
body = smartTruncate(`${sanitizedContent}`, maxBodyLength());
comments.forEach((comment: { body: string; id: number }) => {
if (comment.body === body) {
commentId = comment.id;

View file

@ -4,7 +4,7 @@ import { platform } from '../../../modules/platform';
import * as repositoryCache from '../../../util/cache/repository';
import { clearRenovateRefs } from '../../../util/git';
import { PackageFiles } from '../package-files';
import { validateReconfigureBranch } from '../reconfigure';
import { checkReconfigureBranch } from '../reconfigure';
import { pruneStaleBranches } from './prune';
import {
runBranchSummary,
@ -16,7 +16,7 @@ export async function finalizeRepo(
config: RenovateConfig,
branchList: string[],
): Promise<void> {
await validateReconfigureBranch(config);
await checkReconfigureBranch(config);
await repositoryCache.saveCache();
await pruneStaleBranches(config, branchList);
await ensureIssuesClosing();

View file

@ -9,7 +9,7 @@ import { scm } from '../../../modules/platform/scm';
import { getBranchList, setUserRepoConfig } from '../../../util/git';
import { escapeRegExp, regEx } from '../../../util/regex';
import { uniqueStrings } from '../../../util/string';
import { getReconfigureBranchName } from '../reconfigure';
import { getReconfigureBranchName } from '../reconfigure/utils';
async function cleanUpBranches(
config: RenovateConfig,

View file

@ -1,242 +1,42 @@
import { mock } from 'jest-mock-extended';
import type { RenovateConfig } from '../../../../test/util';
import { fs, git, mocked, partial, platform, scm } from '../../../../test/util';
import { logger, mocked, scm } from '../../../../test/util';
import { GlobalConfig } from '../../../config/global';
import { logger } from '../../../logger';
import type { Pr } from '../../../modules/platform/types';
import * as _cache from '../../../util/cache/repository';
import type { LongCommitSha } from '../../../util/git/types';
import * as _merge from '../init/merge';
import { validateReconfigureBranch } from '.';
import * as _validate from './validate';
import { checkReconfigureBranch } from '.';
jest.mock('../../../util/cache/repository');
jest.mock('../../../util/fs');
jest.mock('../../../util/git');
jest.mock('../init/merge');
jest.mock('./validate');
const cache = mocked(_cache);
const merge = mocked(_merge);
const validate = mocked(_validate);
describe('workers/repository/reconfigure/index', () => {
const config: RenovateConfig = {
branchPrefix: 'prefix/',
baseBranch: 'base',
statusCheckNames: partial<RenovateConfig['statusCheckNames']>({
configValidation: 'renovate/config-validation',
}),
};
beforeEach(() => {
config.repository = 'some/repo';
merge.detectConfigFile.mockResolvedValue('renovate.json');
scm.branchExists.mockResolvedValue(true);
cache.getCache.mockReturnValue({});
git.getBranchCommit.mockReturnValue('sha' as LongCommitSha);
fs.readLocalFile.mockResolvedValue(null);
platform.getBranchStatusCheck.mockResolvedValue(null);
GlobalConfig.reset();
scm.branchExists.mockResolvedValue(true);
validate.validateReconfigureBranch.mockResolvedValue(undefined);
});
it('no effect when running with platform=local', async () => {
GlobalConfig.set({ platform: 'local' });
await validateReconfigureBranch(config);
expect(logger.debug).toHaveBeenCalledWith(
await checkReconfigureBranch(config);
expect(logger.logger.debug).toHaveBeenCalledWith(
'Not attempting to reconfigure when running with local platform',
);
});
it('no effect on repo with no reconfigure branch', async () => {
scm.branchExists.mockResolvedValueOnce(false);
await validateReconfigureBranch(config);
expect(logger.debug).toHaveBeenCalledWith('No reconfigure branch found');
});
it('logs error if config file search fails', async () => {
const err = new Error();
merge.detectConfigFile.mockRejectedValueOnce(err as never);
await validateReconfigureBranch(config);
expect(logger.error).toHaveBeenCalledWith(
{ err },
'Error while searching for config file in reconfigure branch',
await checkReconfigureBranch(config);
expect(logger.logger.debug).toHaveBeenCalledWith(
'No reconfigure branch found',
);
});
it('throws error if config file not found in reconfigure branch', async () => {
merge.detectConfigFile.mockResolvedValue(null);
await validateReconfigureBranch(config);
expect(logger.warn).toHaveBeenCalledWith(
'No config file found in reconfigure branch',
);
});
it('logs error if config file is unreadable', async () => {
const err = new Error();
fs.readLocalFile.mockRejectedValueOnce(err as never);
await validateReconfigureBranch(config);
expect(logger.error).toHaveBeenCalledWith(
{ err },
'Error while reading config file',
);
});
it('throws error if config file is empty', async () => {
await validateReconfigureBranch(config);
expect(logger.warn).toHaveBeenCalledWith('Empty or invalid config file');
});
it('throws error if config file content is invalid', async () => {
fs.readLocalFile.mockResolvedValueOnce(`
{
"name":
}
`);
await validateReconfigureBranch(config);
expect(logger.error).toHaveBeenCalledWith(
{ err: expect.any(Object) },
'Error while parsing config file',
);
expect(platform.setBranchStatus).toHaveBeenCalledWith({
branchName: 'prefix/reconfigure',
context: 'renovate/config-validation',
description: 'Validation Failed - Unparsable config file',
state: 'red',
});
});
it('handles failed validation', async () => {
fs.readLocalFile.mockResolvedValueOnce(`
{
"enabledManagers": ["docker"]
}
`);
await validateReconfigureBranch(config);
expect(logger.debug).toHaveBeenCalledWith(
{ errors: expect.any(String) },
'Validation Errors',
);
expect(platform.setBranchStatus).toHaveBeenCalledWith({
branchName: 'prefix/reconfigure',
context: 'renovate/config-validation',
description: 'Validation Failed',
state: 'red',
});
});
it('adds comment if reconfigure PR exists', async () => {
fs.readLocalFile.mockResolvedValueOnce(`
{
"enabledManagers": ["docker"]
}
`);
platform.findPr.mockResolvedValueOnce(mock<Pr>({ number: 1 }));
await validateReconfigureBranch(config);
expect(logger.debug).toHaveBeenCalledWith(
{ errors: expect.any(String) },
'Validation Errors',
);
expect(platform.setBranchStatus).toHaveBeenCalled();
expect(platform.ensureComment).toHaveBeenCalled();
});
it('handles successful validation', async () => {
const pJson = `
{
"renovate": {
"enabledManagers": ["npm"]
}
}
`;
merge.detectConfigFile.mockResolvedValue('package.json');
fs.readLocalFile.mockResolvedValueOnce(pJson).mockResolvedValueOnce(pJson);
await validateReconfigureBranch(config);
expect(platform.setBranchStatus).toHaveBeenCalledWith({
branchName: 'prefix/reconfigure',
context: 'renovate/config-validation',
description: 'Validation Successful',
state: 'green',
});
});
it('skips adding status check if statusCheckNames.configValidation is null', async () => {
cache.getCache.mockReturnValueOnce({
reconfigureBranchCache: {
reconfigureBranchSha: 'new-sha',
isConfigValid: false,
},
});
await validateReconfigureBranch({
...config,
statusCheckNames: partial<RenovateConfig['statusCheckNames']>({
configValidation: null,
}),
});
expect(logger.debug).toHaveBeenCalledWith(
'Status check is null or an empty string, skipping status check addition.',
);
expect(platform.setBranchStatus).not.toHaveBeenCalled();
});
it('skips adding status check if statusCheckNames.configValidation is empty string', async () => {
cache.getCache.mockReturnValueOnce({
reconfigureBranchCache: {
reconfigureBranchSha: 'new-sha',
isConfigValid: false,
},
});
await validateReconfigureBranch({
...config,
statusCheckNames: partial<RenovateConfig['statusCheckNames']>({
configValidation: '',
}),
});
expect(logger.debug).toHaveBeenCalledWith(
'Status check is null or an empty string, skipping status check addition.',
);
expect(platform.setBranchStatus).not.toHaveBeenCalled();
});
it('skips validation if cache is valid', async () => {
cache.getCache.mockReturnValueOnce({
reconfigureBranchCache: {
reconfigureBranchSha: 'sha',
isConfigValid: false,
},
});
await validateReconfigureBranch(config);
expect(logger.debug).toHaveBeenCalledWith(
'Skipping validation check as branch sha is unchanged',
);
});
it('skips validation if status check present', async () => {
cache.getCache.mockReturnValueOnce({
reconfigureBranchCache: {
reconfigureBranchSha: 'new_sha',
isConfigValid: false,
},
});
platform.getBranchStatusCheck.mockResolvedValueOnce('green');
await validateReconfigureBranch(config);
expect(logger.debug).toHaveBeenCalledWith(
'Skipping validation check because status check already exists.',
);
});
it('handles non-default config file', async () => {
merge.detectConfigFile.mockResolvedValue('.renovaterc');
fs.readLocalFile.mockResolvedValueOnce(`
{
"enabledManagers": ["npm",]
}
`);
await validateReconfigureBranch(config);
expect(platform.setBranchStatus).toHaveBeenCalledWith({
branchName: 'prefix/reconfigure',
context: 'renovate/config-validation',
description: 'Validation Successful',
state: 'green',
});
it('validates reconfigure branch', async () => {
await expect(checkReconfigureBranch(config)).toResolve();
});
});

View file

@ -1,49 +1,15 @@
import is from '@sindresorhus/is';
import JSON5 from 'json5';
import { GlobalConfig } from '../../../config/global';
import type { RenovateConfig } from '../../../config/types';
import { validateConfig } from '../../../config/validation';
import { logger } from '../../../logger';
import { platform } from '../../../modules/platform';
import { ensureComment } from '../../../modules/platform/comment';
import { scm } from '../../../modules/platform/scm';
import type { BranchStatus } from '../../../types';
import { getCache } from '../../../util/cache/repository';
import { readLocalFile } from '../../../util/fs';
import { getBranchCommit } from '../../../util/git';
import { regEx } from '../../../util/regex';
import { detectConfigFile } from '../init/merge';
import {
deleteReconfigureBranchCache,
setReconfigureBranchCache,
} from './reconfigure-cache';
import { deleteReconfigureBranchCache } from './reconfigure-cache';
import { getReconfigureBranchName } from './utils';
import { validateReconfigureBranch } from './validate';
async function setBranchStatus(
branchName: string,
description: string,
state: BranchStatus,
context?: string | null,
): Promise<void> {
if (!is.nonEmptyString(context)) {
// already logged this case when validating the status check
return;
}
await platform.setBranchStatus({
branchName,
context,
description,
state,
});
}
export function getReconfigureBranchName(prefix: string): string {
return `${prefix}reconfigure`;
}
export async function validateReconfigureBranch(
export async function checkReconfigureBranch(
config: RenovateConfig,
): Promise<void> {
logger.debug('validateReconfigureBranch()');
logger.debug('checkReconfigureBranch()');
if (GlobalConfig.get('platform') === 'local') {
logger.debug(
'Not attempting to reconfigure when running with local platform',
@ -51,10 +17,8 @@ export async function validateReconfigureBranch(
return;
}
const context = config.statusCheckNames?.configValidation;
const branchName = getReconfigureBranchName(config.branchPrefix!);
const branchExists = await scm.branchExists(branchName);
const reconfigureBranch = getReconfigureBranchName(config.branchPrefix!);
const branchExists = await scm.branchExists(reconfigureBranch);
// this is something the user initiates, so skip if no branch exists
if (!branchExists) {
@ -63,141 +27,5 @@ export async function validateReconfigureBranch(
return;
}
// look for config file
// 1. check reconfigure branch cache and use the configFileName if it exists
// 2. checkout reconfigure branch and look for the config file, don't assume default configFileName
const branchSha = getBranchCommit(branchName)!;
const cache = getCache();
let configFileName: string | null = null;
const reconfigureCache = cache.reconfigureBranchCache;
// only use valid cached information
if (reconfigureCache?.reconfigureBranchSha === branchSha) {
logger.debug('Skipping validation check as branch sha is unchanged');
return;
}
if (context) {
const validationStatus = await platform.getBranchStatusCheck(
branchName,
context,
);
// if old status check is present skip validation
if (is.nonEmptyString(validationStatus)) {
logger.debug(
'Skipping validation check because status check already exists.',
);
return;
}
} else {
logger.debug(
'Status check is null or an empty string, skipping status check addition.',
);
}
try {
await scm.checkoutBranch(branchName);
configFileName = await detectConfigFile();
} catch (err) {
logger.error(
{ err },
'Error while searching for config file in reconfigure branch',
);
}
if (!is.nonEmptyString(configFileName)) {
logger.warn('No config file found in reconfigure branch');
await setBranchStatus(
branchName,
'Validation Failed - No config file found',
'red',
context,
);
setReconfigureBranchCache(branchSha, false);
await scm.checkoutBranch(config.defaultBranch!);
return;
}
let configFileRaw: string | null = null;
try {
configFileRaw = await readLocalFile(configFileName, 'utf8');
} catch (err) {
logger.error({ err }, 'Error while reading config file');
}
if (!is.nonEmptyString(configFileRaw)) {
logger.warn('Empty or invalid config file');
await setBranchStatus(
branchName,
'Validation Failed - Empty/Invalid config file',
'red',
context,
);
setReconfigureBranchCache(branchSha, false);
await scm.checkoutBranch(config.baseBranch!);
return;
}
let configFileParsed: any;
try {
configFileParsed = JSON5.parse(configFileRaw);
// no need to confirm renovate field in package.json we already do it in `detectConfigFile()`
if (configFileName === 'package.json') {
configFileParsed = configFileParsed.renovate;
}
} catch (err) {
logger.error({ err }, 'Error while parsing config file');
await setBranchStatus(
branchName,
'Validation Failed - Unparsable config file',
'red',
context,
);
setReconfigureBranchCache(branchSha, false);
await scm.checkoutBranch(config.baseBranch!);
return;
}
// perform validation and provide a passing or failing check run based on result
const validationResult = await validateConfig('repo', configFileParsed);
// failing check
if (validationResult.errors.length > 0) {
logger.debug(
{ errors: validationResult.errors.map((err) => err.message).join(', ') },
'Validation Errors',
);
// add comment to reconfigure PR if it exists
const branchPr = await platform.findPr({
branchName,
state: 'open',
includeOtherAuthors: true,
});
if (branchPr) {
let body = `There is an error with this repository's Renovate configuration that needs to be fixed.\n\n`;
body += `Location: \`${configFileName}\`\n`;
body += `Message: \`${validationResult.errors
.map((e) => e.message)
.join(', ')
.replace(regEx(/`/g), "'")}\`\n`;
await ensureComment({
number: branchPr.number,
topic: 'Action Required: Fix Renovate Configuration',
content: body,
});
}
await setBranchStatus(branchName, 'Validation Failed', 'red', context);
setReconfigureBranchCache(branchSha, false);
await scm.checkoutBranch(config.baseBranch!);
return;
}
// passing check
await setBranchStatus(branchName, 'Validation Successful', 'green', context);
setReconfigureBranchCache(branchSha, true);
await scm.checkoutBranch(config.baseBranch!);
await validateReconfigureBranch(config);
}

View file

@ -0,0 +1,3 @@
/**
 * Build the name of the repository's reconfigure branch.
 *
 * @param prefix - the configured `branchPrefix`, e.g. `'renovate/'`
 * @returns the prefix with the literal suffix `reconfigure` appended,
 *          e.g. `'renovate/reconfigure'`
 */
export function getReconfigureBranchName(prefix: string): string {
  return prefix.concat('reconfigure');
}

View file

@ -0,0 +1,228 @@
import { mock } from 'jest-mock-extended';
import type { RenovateConfig } from '../../../../test/util';
import { fs, git, mocked, partial, platform, scm } from '../../../../test/util';
import { GlobalConfig } from '../../../config/global';
import { logger } from '../../../logger';
import type { Pr } from '../../../modules/platform/types';
import * as _cache from '../../../util/cache/repository';
import type { LongCommitSha } from '../../../util/git/types';
import * as _merge from '../init/merge';
import { validateReconfigureBranch } from './validate';
jest.mock('../../../util/cache/repository');
jest.mock('../../../util/fs');
jest.mock('../../../util/git');
jest.mock('../init/merge');
const cache = mocked(_cache);
const merge = mocked(_merge);
/**
 * Unit tests for validateReconfigureBranch().
 *
 * Filesystem, git, repository-cache and platform modules are jest-mocked
 * (see the jest.mock calls above), so each test drives one path through the
 * validation flow and asserts on logger calls and platform status-check /
 * comment interactions only.
 */
describe('workers/repository/reconfigure/validate', () => {
  // Shared base config; `repository` is (re)set in beforeEach because the
  // object is reused across tests.
  const config: RenovateConfig = {
    branchPrefix: 'prefix/',
    baseBranch: 'base',
    statusCheckNames: partial<RenovateConfig['statusCheckNames']>({
      configValidation: 'renovate/config-validation',
    }),
  };

  beforeEach(() => {
    // Happy-path defaults; individual tests override with *Once mocks.
    config.repository = 'some/repo';
    merge.detectConfigFile.mockResolvedValue('renovate.json');
    scm.branchExists.mockResolvedValue(true);
    cache.getCache.mockReturnValue({});
    git.getBranchCommit.mockReturnValue('sha' as LongCommitSha);
    fs.readLocalFile.mockResolvedValue(null);
    platform.getBranchStatusCheck.mockResolvedValue(null);
    GlobalConfig.reset();
  });

  // detectConfigFile() rejecting must be logged, not thrown.
  it('logs error if config file search fails', async () => {
    const err = new Error();
    merge.detectConfigFile.mockRejectedValueOnce(err as never);
    await validateReconfigureBranch(config);
    expect(logger.error).toHaveBeenCalledWith(
      { err },
      'Error while searching for config file in reconfigure branch',
    );
  });

  it('throws error if config file not found in reconfigure branch', async () => {
    merge.detectConfigFile.mockResolvedValue(null);
    await validateReconfigureBranch(config);
    expect(logger.warn).toHaveBeenCalledWith(
      'No config file found in reconfigure branch',
    );
  });

  // readLocalFile() rejecting must be logged, not thrown.
  it('logs error if config file is unreadable', async () => {
    const err = new Error();
    fs.readLocalFile.mockRejectedValueOnce(err as never);
    await validateReconfigureBranch(config);
    expect(logger.error).toHaveBeenCalledWith(
      { err },
      'Error while reading config file',
    );
  });

  // Default readLocalFile mock resolves to null -> "empty" path.
  it('throws error if config file is empty', async () => {
    await validateReconfigureBranch(config);
    expect(logger.warn).toHaveBeenCalledWith('Empty or invalid config file');
  });

  // Unparsable JSON5 -> red status check with "Unparsable" description.
  it('throws error if config file content is invalid', async () => {
    fs.readLocalFile.mockResolvedValueOnce(`
      {
        "name":
      }
    `);
    await validateReconfigureBranch(config);
    expect(logger.error).toHaveBeenCalledWith(
      { err: expect.any(Object) },
      'Error while parsing config file',
    );
    expect(platform.setBranchStatus).toHaveBeenCalledWith({
      branchName: 'prefix/reconfigure',
      context: 'renovate/config-validation',
      description: 'Validation Failed - Unparsable config file',
      state: 'red',
    });
  });

  // Parsable config that fails validateConfig() -> red status check.
  it('handles failed validation', async () => {
    fs.readLocalFile.mockResolvedValueOnce(`
      {
        "enabledManagers": ["docker"]
      }
    `);
    await validateReconfigureBranch(config);
    expect(logger.debug).toHaveBeenCalledWith(
      { errors: expect.any(String) },
      'Validation Errors',
    );
    expect(platform.setBranchStatus).toHaveBeenCalledWith({
      branchName: 'prefix/reconfigure',
      context: 'renovate/config-validation',
      description: 'Validation Failed',
      state: 'red',
    });
  });

  // When an open reconfigure PR exists, validation errors are also posted
  // as a PR comment via ensureComment().
  it('adds comment if reconfigure PR exists', async () => {
    fs.readLocalFile.mockResolvedValueOnce(`
      {
        "enabledManagers": ["docker"]
      }
    `);
    platform.findPr.mockResolvedValueOnce(mock<Pr>({ number: 1 }));
    await validateReconfigureBranch(config);
    expect(logger.debug).toHaveBeenCalledWith(
      { errors: expect.any(String) },
      'Validation Errors',
    );
    expect(platform.setBranchStatus).toHaveBeenCalled();
    expect(platform.ensureComment).toHaveBeenCalled();
  });

  // package.json is read twice (detect + validate), hence two *Once mocks.
  it('handles successful validation', async () => {
    const pJson = `
      {
        "renovate": {
          "enabledManagers": ["npm"]
        }
      }
    `;
    merge.detectConfigFile.mockResolvedValue('package.json');
    fs.readLocalFile.mockResolvedValueOnce(pJson).mockResolvedValueOnce(pJson);
    await validateReconfigureBranch(config);
    expect(platform.setBranchStatus).toHaveBeenCalledWith({
      branchName: 'prefix/reconfigure',
      context: 'renovate/config-validation',
      description: 'Validation Successful',
      state: 'green',
    });
  });

  it('skips adding status check if statusCheckNames.configValidation is null', async () => {
    cache.getCache.mockReturnValueOnce({
      reconfigureBranchCache: {
        reconfigureBranchSha: 'new-sha',
        isConfigValid: false,
      },
    });
    await validateReconfigureBranch({
      ...config,
      statusCheckNames: partial<RenovateConfig['statusCheckNames']>({
        configValidation: null,
      }),
    });
    expect(logger.debug).toHaveBeenCalledWith(
      'Status check is null or an empty string, skipping status check addition.',
    );
    expect(platform.setBranchStatus).not.toHaveBeenCalled();
  });

  it('skips adding status check if statusCheckNames.configValidation is empty string', async () => {
    cache.getCache.mockReturnValueOnce({
      reconfigureBranchCache: {
        reconfigureBranchSha: 'new-sha',
        isConfigValid: false,
      },
    });
    await validateReconfigureBranch({
      ...config,
      statusCheckNames: partial<RenovateConfig['statusCheckNames']>({
        configValidation: '',
      }),
    });
    expect(logger.debug).toHaveBeenCalledWith(
      'Status check is null or an empty string, skipping status check addition.',
    );
    expect(platform.setBranchStatus).not.toHaveBeenCalled();
  });

  // Cached sha 'sha' matches the mocked getBranchCommit() value -> skip.
  it('skips validation if cache is valid', async () => {
    cache.getCache.mockReturnValueOnce({
      reconfigureBranchCache: {
        reconfigureBranchSha: 'sha',
        isConfigValid: false,
      },
    });
    await validateReconfigureBranch(config);
    expect(logger.debug).toHaveBeenCalledWith(
      'Skipping validation check as branch sha is unchanged',
    );
  });

  // Cached sha differs, but an existing status check short-circuits.
  it('skips validation if status check present', async () => {
    cache.getCache.mockReturnValueOnce({
      reconfigureBranchCache: {
        reconfigureBranchSha: 'new_sha',
        isConfigValid: false,
      },
    });
    platform.getBranchStatusCheck.mockResolvedValueOnce('green');
    await validateReconfigureBranch(config);
    expect(logger.debug).toHaveBeenCalledWith(
      'Skipping validation check because status check already exists.',
    );
  });

  // JSON5 tolerates the trailing comma in the config body.
  it('handles non-default config file', async () => {
    merge.detectConfigFile.mockResolvedValue('.renovaterc');
    fs.readLocalFile.mockResolvedValueOnce(`
      {
        "enabledManagers": ["npm",]
      }
    `);
    await validateReconfigureBranch(config);
    expect(platform.setBranchStatus).toHaveBeenCalledWith({
      branchName: 'prefix/reconfigure',
      context: 'renovate/config-validation',
      description: 'Validation Successful',
      state: 'green',
    });
  });
});

View file

@ -0,0 +1,184 @@
import is from '@sindresorhus/is';
import JSON5 from 'json5';
import type { RenovateConfig } from '../../../config/types';
import { validateConfig } from '../../../config/validation';
import { logger } from '../../../logger';
import { platform } from '../../../modules/platform';
import { ensureComment } from '../../../modules/platform/comment';
import { scm } from '../../../modules/platform/scm';
import type { BranchStatus } from '../../../types';
import { getCache } from '../../../util/cache/repository';
import { readLocalFile } from '../../../util/fs';
import { getBranchCommit } from '../../../util/git';
import { regEx } from '../../../util/regex';
import { detectConfigFile } from '../init/merge';
import { setReconfigureBranchCache } from './reconfigure-cache';
import { getReconfigureBranchName } from './utils';
/**
 * Report a config-validation result as a branch status on the platform.
 *
 * No-op when `context` is null/empty: the caller has already logged that
 * case while validating the status-check name, so nothing is posted.
 */
async function setBranchStatus(
  branchName: string,
  description: string,
  state: BranchStatus,
  context?: string | null,
): Promise<void> {
  if (is.nonEmptyString(context)) {
    await platform.setBranchStatus({
      branchName,
      context,
      description,
      state,
    });
  }
}
/**
 * Validate the config file on the repository's reconfigure branch
 * (`<branchPrefix>reconfigure`) and report the result.
 *
 * Flow:
 *  1. Skip if the branch sha matches the cached `reconfigureBranchCache` sha,
 *     or if a status check for `config.statusCheckNames.configValidation`
 *     already exists on the branch.
 *  2. Check out the reconfigure branch, detect and read the config file,
 *     parse it as JSON5 (using the `renovate` field for `package.json`).
 *  3. Run validateConfig() and post a green/red branch status; on failure,
 *     also comment on an open reconfigure PR if one exists.
 *
 * Every exit path restores the working tree by checking out the base branch
 * and updates the reconfigure-branch cache with the validation outcome.
 */
export async function validateReconfigureBranch(
  config: RenovateConfig,
): Promise<void> {
  logger.debug('validateReconfigureBranch()');
  const context = config.statusCheckNames?.configValidation;
  const branchName = getReconfigureBranchName(config.branchPrefix!);

  // look for config file
  // 1. check reconfigure branch cache and use the configFileName if it exists
  // 2. checkout reconfigure branch and look for the config file, don't assume default configFileName
  const branchSha = getBranchCommit(branchName)!;
  const cache = getCache();
  let configFileName: string | null = null;
  const reconfigureCache = cache.reconfigureBranchCache;
  // only use valid cached information
  if (reconfigureCache?.reconfigureBranchSha === branchSha) {
    logger.debug('Skipping validation check as branch sha is unchanged');
    return;
  }

  if (context) {
    const validationStatus = await platform.getBranchStatusCheck(
      branchName,
      context,
    );
    // if old status check is present skip validation
    if (is.nonEmptyString(validationStatus)) {
      logger.debug(
        'Skipping validation check because status check already exists.',
      );
      return;
    }
  } else {
    // validation still runs below; only the status-check posting is skipped
    // (setBranchStatus() ignores an empty context)
    logger.debug(
      'Status check is null or an empty string, skipping status check addition.',
    );
  }

  try {
    await scm.checkoutBranch(branchName);
    configFileName = await detectConfigFile();
  } catch (err) {
    // non-fatal: falls through to the "no config file" handling below
    logger.error(
      { err },
      'Error while searching for config file in reconfigure branch',
    );
  }

  if (!is.nonEmptyString(configFileName)) {
    logger.warn('No config file found in reconfigure branch');
    await setBranchStatus(
      branchName,
      'Validation Failed - No config file found',
      'red',
      context,
    );
    setReconfigureBranchCache(branchSha, false);
    // NOTE(review): this exit checks out `defaultBranch` while every other
    // exit below checks out `baseBranch` — confirm this is intentional
    await scm.checkoutBranch(config.defaultBranch!);
    return;
  }

  let configFileRaw: string | null = null;
  try {
    configFileRaw = await readLocalFile(configFileName, 'utf8');
  } catch (err) {
    // non-fatal: falls through to the "empty/invalid" handling below
    logger.error({ err }, 'Error while reading config file');
  }

  if (!is.nonEmptyString(configFileRaw)) {
    logger.warn('Empty or invalid config file');
    await setBranchStatus(
      branchName,
      'Validation Failed - Empty/Invalid config file',
      'red',
      context,
    );
    setReconfigureBranchCache(branchSha, false);
    await scm.checkoutBranch(config.baseBranch!);
    return;
  }

  let configFileParsed: any;
  try {
    // JSON5 also accepts plain JSON plus comments/trailing commas
    configFileParsed = JSON5.parse(configFileRaw);
    // no need to confirm renovate field in package.json we already do it in `detectConfigFile()`
    if (configFileName === 'package.json') {
      configFileParsed = configFileParsed.renovate;
    }
  } catch (err) {
    logger.error({ err }, 'Error while parsing config file');
    await setBranchStatus(
      branchName,
      'Validation Failed - Unparsable config file',
      'red',
      context,
    );
    setReconfigureBranchCache(branchSha, false);
    await scm.checkoutBranch(config.baseBranch!);
    return;
  }

  // perform validation and provide a passing or failing check based on result
  const validationResult = await validateConfig('repo', configFileParsed);

  // failing check
  if (validationResult.errors.length > 0) {
    logger.debug(
      { errors: validationResult.errors.map((err) => err.message).join(', ') },
      'Validation Errors',
    );
    // includeOtherAuthors: the reconfigure branch/PR is user-created,
    // not authored by Renovate itself
    const reconfigurePr = await platform.findPr({
      branchName,
      state: 'open',
      includeOtherAuthors: true,
    });
    // add comment to reconfigure PR if it exists
    if (reconfigurePr) {
      let body = `There is an error with this repository's Renovate configuration that needs to be fixed.\n\n`;
      body += `Location: \`${configFileName}\`\n`;
      // backticks in error messages are replaced so they cannot break the
      // surrounding inline-code markdown
      body += `Message: \`${validationResult.errors
        .map((e) => e.message)
        .join(', ')
        .replace(regEx(/`/g), "'")}\`\n`;

      await ensureComment({
        number: reconfigurePr.number,
        topic: 'Action Required: Fix Renovate Configuration',
        content: body,
      });
    }

    await setBranchStatus(branchName, 'Validation Failed', 'red', context);
    setReconfigureBranchCache(branchSha, false);
    await scm.checkoutBranch(config.baseBranch!);
    return;
  }

  // passing check
  await setBranchStatus(branchName, 'Validation Successful', 'green', context);
  setReconfigureBranchCache(branchSha, true);
  await scm.checkoutBranch(config.baseBranch!);
  return;
}

View file

@ -336,7 +336,7 @@
"jest-mock-extended": "3.0.7",
"jest-snapshot": "29.7.0",
"markdownlint-cli2": "0.17.1",
"memfs": "4.15.2",
"memfs": "4.15.3",
"nock": "13.5.6",
"npm-run-all2": "7.0.2",
"nyc": "17.1.0",

View file

@ -581,8 +581,8 @@ importers:
specifier: 0.17.1
version: 0.17.1
memfs:
specifier: 4.15.2
version: 4.15.2
specifier: 4.15.3
version: 4.15.3
nock:
specifier: 13.5.6
version: 13.5.6
@ -4555,8 +4555,8 @@ packages:
mdurl@2.0.0:
resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==}
memfs@4.15.2:
resolution: {integrity: sha512-n8/qP8AT6CtY6kxCPYgYVusT5rS6axaT66dD3tYi2lm+l1iMH7YYpmW8H/qL5bfV4YvInCCgUDAWIRvrNS7kbQ==}
memfs@4.15.3:
resolution: {integrity: sha512-vR/g1SgqvKJgAyYla+06G4p/EOcEmwhYuVb1yc1ixcKf8o/sh7Zngv63957ZSNd1xrZJoinmNyDf2LzuP8WJXw==}
engines: {node: '>= 4.0.0'}
memorystream@0.3.1:
@ -11769,7 +11769,7 @@ snapshots:
mdurl@2.0.0: {}
memfs@4.15.2:
memfs@4.15.3:
dependencies:
'@jsonjoy.com/json-pack': 1.1.1(tslib@2.8.1)
'@jsonjoy.com/util': 1.5.0(tslib@2.8.1)

View file

@ -5,19 +5,19 @@ ARG BASE_IMAGE_TYPE=slim
# --------------------------------------
# slim image
# --------------------------------------
FROM ghcr.io/renovatebot/base-image:9.29.0@sha256:10e27273241a0ba63d3a298a7b1e178dbb75b84da6bc2ea7a71db7c9d1a4971c AS slim-base
FROM ghcr.io/renovatebot/base-image:9.29.1@sha256:db4b70c00fb197babca9dd92be612bef044d7a35d933d19c668864f84b52d1f8 AS slim-base
# --------------------------------------
# full image
# --------------------------------------
FROM ghcr.io/renovatebot/base-image:9.29.0-full@sha256:7b2353855c0f59b9efdb93ce9356aff5dad7d5102f8947c4ebc906855be9177c AS full-base
FROM ghcr.io/renovatebot/base-image:9.29.1-full@sha256:4880c7aae10ed892d49c6c5573418014605ce2824c978dbcc04382a2c26bb0df AS full-base
ENV RENOVATE_BINARY_SOURCE=global
# --------------------------------------
# build image
# --------------------------------------
FROM --platform=$BUILDPLATFORM ghcr.io/renovatebot/base-image:9.29.0@sha256:10e27273241a0ba63d3a298a7b1e178dbb75b84da6bc2ea7a71db7c9d1a4971c AS build
FROM --platform=$BUILDPLATFORM ghcr.io/renovatebot/base-image:9.29.1@sha256:db4b70c00fb197babca9dd92be612bef044d7a35d933d19c668864f84b52d1f8 AS build
# We want a specific node version here
# renovate: datasource=node-version