Merge branch 'main' of https://github.com/renovatebot/renovate into fix-auto-merge-if-plus-one

Felipe Santos 2024-12-29 12:41:33 -03:00
commit 74137a8460
182 changed files with 6742 additions and 3442 deletions


@@ -1 +1 @@
-FROM ghcr.io/containerbase/devcontainer:13.0.24
+FROM ghcr.io/containerbase/devcontainer:13.5.5


@@ -34,7 +34,7 @@ runs:
   - name: Check cache miss for MacOS
     id: macos-cache
-    uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+    uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
     with:
       path: node_modules
       key: ${{ env.MACOS_KEY }}
@@ -43,7 +43,7 @@ runs:
   - name: Check cache miss for Windows
     id: windows-cache
-    uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+    uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
     with:
       path: node_modules
       key: ${{ env.WINDOWS_KEY }}


@@ -34,7 +34,7 @@ runs:
   - name: Restore `node_modules`
     id: node-modules-restore
-    uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+    uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
     with:
       path: node_modules
       key: ${{ env.CACHE_KEY }}
@@ -64,7 +64,7 @@ runs:
   - name: Cache and restore `pnpm store`
     if: env.CACHE_HIT != 'true'
-    uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+    uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
     with:
       path: ${{ env.PNPM_STORE }}
       key: |
@@ -87,7 +87,7 @@ runs:
   - name: Write `node_modules` cache
     if: inputs.save-cache == 'true' && env.CACHE_HIT != 'true'
-    uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+    uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
     with:
       path: node_modules
       key: ${{ env.CACHE_KEY }}


@@ -31,7 +31,7 @@ concurrency:
 env:
   DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
   NODE_VERSION: 22
-  PDM_VERSION: 2.21.0 # renovate: datasource=pypi depName=pdm
+  PDM_VERSION: 2.22.1 # renovate: datasource=pypi depName=pdm
   DRY_RUN: true
   TEST_LEGACY_DECRYPTION: true
   SPARSE_CHECKOUT: |-
@@ -209,7 +209,7 @@ jobs:
           os: ${{ runner.os }}
       - name: Restore eslint cache
-        uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/eslint
           key: eslint-main-cache
@@ -228,7 +228,7 @@ jobs:
       - name: Save eslint cache
         if: github.event_name == 'push'
-        uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/eslint
           key: eslint-main-cache
@@ -255,7 +255,7 @@ jobs:
           os: ${{ runner.os }}
       - name: Restore prettier cache
-        uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/prettier
           key: prettier-main-cache
@@ -280,7 +280,7 @@ jobs:
       - name: Save prettier cache
         if: github.event_name == 'push'
-        uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/prettier
           key: prettier-main-cache
@@ -373,7 +373,7 @@ jobs:
           os: ${{ runner.os }}
       - name: Cache jest
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/jest
           key: |
@@ -411,7 +411,7 @@ jobs:
       - name: Save coverage artifacts
         if: (success() || failure()) && github.event.pull_request.draft != true && matrix.coverage
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: ${{ matrix.upload-artifact-name }}
           path: |
@@ -438,7 +438,7 @@ jobs:
           merge-multiple: true
       - name: Codecov
-        uses: codecov/codecov-action@015f24e6818733317a2da2edd6290ab26238649a # v5.0.7
+        uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303 # v5.1.2
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           directory: coverage/lcov
@@ -567,7 +567,7 @@ jobs:
         run: pnpm test-e2e:pack
       - name: Upload
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: renovate-package
           path: renovate-0.0.0-semantic-release.tgz
@@ -611,7 +611,7 @@ jobs:
         run: pnpm test:docs
       - name: Upload
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: docs
           path: tmp/docs/
@@ -684,7 +684,7 @@ jobs:
           show-progress: false
       - name: docker-config
-        uses: containerbase/internal-tools@fa96b70003f221771f8c015cd3f598818ebf4d78 # v3.5.4
+        uses: containerbase/internal-tools@c440de95307545d23ff0e0b57018147e02ae217f # v3.5.15
         with:
           command: docker-config


@@ -41,7 +41,7 @@ jobs:
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
         with:
           languages: javascript
@@ -51,7 +51,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/autobuild@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0

       # Command-line programs to run using the OS shell.
       # 📚 https://git.io/JvXDl
@@ -65,4 +65,4 @@ jobs:
       #   make release

       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0


@@ -0,0 +1,22 @@
+name: 'Find issues with missing labels'
+
+on:
+  schedule:
+    # Run every Sunday at midnight
+    - cron: '0 0 * * 0'
+
+jobs:
+  check-unlabeled-issues:
+    runs-on: ubuntu-latest
+    permissions:
+      issues: write
+    env:
+      GH_TOKEN: ${{ github.token }}
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Search for issues with missing labels
+        run: bash ./tools/find-issues-with-missing-labels.sh


@@ -43,7 +43,7 @@ jobs:
       # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
       # format to the repository Actions tab.
       - name: 'Upload artifact'
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: SARIF file
           path: results.sarif
@@ -51,6 +51,6 @@ jobs:
       # Upload the results to GitHub's code scanning dashboard.
       - name: 'Upload to code-scanning'
-        uses: github/codeql-action/upload-sarif@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
         with:
           sarif_file: results.sarif


@@ -31,7 +31,7 @@ jobs:
           format: 'sarif'
           output: 'trivy-results.sarif'
-      - uses: github/codeql-action/upload-sarif@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+      - uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
         with:
           sarif_file: trivy-results.sarif
           category: 'docker-image-${{ matrix.tag }}'


@@ -42,7 +42,7 @@ jobs:
         run: pnpm prettier-fix
       - name: Create pull request
-        uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5
+        uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # v7.0.6
         with:
           author: 'Renovate Bot <renovate@whitesourcesoftware.com>'
           branch: 'chore/update-static-data'


@@ -1 +1 @@
-3.13.0
+3.13.1


@@ -221,3 +221,15 @@ Add a label `auto:retry-latest` to any Discussion where the user should retry th
 </details>

 Apply the `self-hosted` label when an issue is applicable only to users who self-administer their own bot.
+
+## Automated check for Issues with missing labels
+
+We have a GitHub Action (`find-issues-with-missing-labels.yml`) to find issues on our repository that are missing labels.
+Any Issues with missing labels will be put in a list in a new "error" Issue.
+The Action runs each week.
+
+### Apply the correct labels manually
+
+The Action will _not_ fix any badly labeled issues.
+This means that you, or we, must apply the correct labels to any affected Issue.

(Binary image file changed; contents not shown. Before: 5.5 KiB. After: 15 KiB.)


@@ -1,12 +1,24 @@
 # Renovate configuration overview

-When Renovate runs on a repository, the final config used is derived from the:
+Each time Renovate runs on a repository it reads the configuration files listed below and creates a final config.
+This final config describes what Renovate will do during its run.
+The final config is internal to Renovate, and is _not_ saved or cached for a later run.
+But you can always find the final config in Renovate's logs.

-- Default config
-- Global config
-- Inherited config
-- Repository config
-- Resolved presets referenced in config
+Renovate reads the configuration files in this order (from top to bottom):
+
+1. Default config
+2. Global config
+   - File config
+   - Environment config
+   - CLI config
+3. Inherited config
+4. Resolved presets referenced in config
+5. Repository config
+
+Items with a higher number override items that have lower numbers.
+If the item has the `mergeable` property, it will merge with lower numbers instead.

 <!-- prettier-ignore -->
 !!! note
@@ -159,6 +171,19 @@ Inherited config may use all Repository config settings, and any Global config o
 For information on how the Mend Renovate App supports Inherited config, see the dedicated "Mend Renovate App Config" section toward the end of this page.

+#### Presets handling
+
+If the inherited config contains `extends` presets, then Renovate will:
+
+1. Resolve the presets
+1. Add the resolved preset config to the beginning of the inherited config
+1. Merge the presets on top of the global config
+
+##### You can not ignore presets from inherited config
+
+You can _not_ use `ignorePresets` in your repository config to ignore presets _within_ inherited config.
+This is because inherited config is resolved _before_ the repository config.
+
 ### Repository config

 Repository config is the config loaded from a config file in the repository.
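To make the ordering concrete, here is a made-up illustration (not part of the diff): a global config and a repository config that both set `labels`. The repository value wins because it is resolved later, while `packageRules` has the `mergeable` property, so its entries are concatenated instead. The package names are invented for the example.

```json title="Global config (illustrative)"
{
  "labels": ["from-global"],
  "packageRules": [{ "matchPackageNames": ["left-pad"], "enabled": false }]
}
```

```json title="Repository config (illustrative)"
{
  "labels": ["from-repo"]
}
```

The final config would end up with `labels: ["from-repo"]` while still keeping the global `packageRules` entry.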


@@ -456,11 +456,19 @@ For `sbt` note that Renovate will update the version string only for packages th
 ## cloneSubmodules

-Enabling this option will mean that any detected Git submodules will be cloned at time of repository clone.
+Enabling this option will mean that detected Git submodules will be cloned at time of repository clone.
+By default all will be cloned, but this can be customized by configuring `cloneSubmodulesFilter` too.
 Submodules are always cloned recursively.

 Important: private submodules aren't supported by Renovate, unless the underlying `ssh` layer already has the correct permissions.

+## cloneSubmodulesFilter
+
+Use this option together with `cloneSubmodules` if you wish to clone only a subset of submodules.
+This config option supports regex and glob filters, including negative matches.
+For more details on this syntax see Renovate's [string pattern matching documentation](./string-pattern-matching.md).
+
 ## commitBody

 Configure this if you wish Renovate to add a commit body, otherwise Renovate uses a regular single-line commit.
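A minimal repository-config sketch combining the two submodule options documented in the hunk above; the filter patterns are invented for illustration:

```json
{
  "cloneSubmodules": true,
  "cloneSubmodulesFilter": ["libs/*", "!libs/legacy"]
}
```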
@@ -769,6 +777,7 @@ Example:
   "customManagers": [
     {
       "customType": "regex",
+      "fileMatch": ["values.yaml$"],
       "matchStrings": [
         "ENV .*?_VERSION=(?<currentValue>.*) # (?<datasource>.*?)/(?<depName>.*?)\\s"
       ]
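For reference, once the new `fileMatch` line is in place the example resolves to this complete custom-manager block (all fields taken from the hunk above):

```json
{
  "customManagers": [
    {
      "customType": "regex",
      "fileMatch": ["values.yaml$"],
      "matchStrings": [
        "ENV .*?_VERSION=(?<currentValue>.*) # (?<datasource>.*?)/(?<depName>.*?)\\s"
      ]
    }
  ]
}
```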
@@ -2246,6 +2255,7 @@ Supported lock files:
 - `pubspec.lock`
 - `pyproject.toml`
 - `requirements.txt`
+- `uv.lock`
 - `yarn.lock`

 Support for new lock files may be added via feature request.
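For context (not part of the diff): `lockFileMaintenance` is disabled by default, so a repository wanting its newly supported `uv.lock` refreshed would opt in like this:

```json
{
  "lockFileMaintenance": { "enabled": true }
}
```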
@@ -3347,6 +3357,7 @@ Table with options:
 | `gomodTidyE` | Run `go mod tidy -e` after Go module updates. |
 | `gomodUpdateImportPaths` | Update source import paths on major module updates, using [mod](https://github.com/marwan-at-work/mod). |
 | `gomodSkipVendor` | Never run `go mod vendor` after Go module updates. |
+| `gomodVendor` | Always run `go mod vendor` after Go module updates even if vendor files aren't detected. |
 | `helmUpdateSubChartArchives` | Update subchart archives in the `/charts` folder. |
 | `npmDedupe` | Run `npm install` with `--prefer-dedupe` for npm >= 7 or `npm dedupe` after `package-lock.json` update for npm <= 6. |
 | `pnpmDedupe` | Run `pnpm dedupe --config.ignore-scripts=true` after `pnpm-lock.yaml` updates. |
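A repository opting into the new `gomodVendor` behavior would list it in `postUpdateOptions`; the combination with `gomodTidy` below is an illustrative assumption, not from the diff:

```json
{
  "postUpdateOptions": ["gomodTidy", "gomodVendor"]
}
```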
@@ -3612,7 +3623,7 @@ Behavior:
 - `bump` = e.g. bump the range even if the new version satisfies the existing range, e.g. `^1.0.0` -> `^1.1.0`
 - `replace` = Replace the range with a newer one if the new version falls outside it, and update nothing otherwise
 - `widen` = Widen the range with newer one, e.g. `^1.0.0` -> `^1.0.0 || ^2.0.0`
-- `update-lockfile` = Update the lock file when in-range updates are available, otherwise `replace` for updates out of range. Works for `bundler`, `cargo`, `composer`, `npm`, `yarn`, `pnpm`, `terraform` and `poetry` so far
+- `update-lockfile` = Update the lock file when in-range updates are available, otherwise `replace` for updates out of range. Works for `bundler`, `cargo`, `composer`, `gleam`, `npm`, `yarn`, `pnpm`, `terraform` and `poetry` so far
 - `in-range-only` = Update the lock file when in-range updates are available, ignore package file updates

 Renovate's `"auto"` strategy works like this for npm:
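A sketch of scoping the strategy to the newly supported `gleam` manager via a package rule (the rule itself is illustrative):

```json
{
  "packageRules": [
    {
      "matchManagers": ["gleam"],
      "rangeStrategy": "update-lockfile"
    }
  ]
}
```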
@@ -3690,6 +3701,7 @@ This feature works with the following managers:
 - [`ansible`](modules/manager/ansible/index.md)
 - [`bitbucket-pipelines`](modules/manager/bitbucket-pipelines/index.md)
+- [`circleci`](modules/manager/circleci/index.md)
 - [`docker-compose`](modules/manager/docker-compose/index.md)
 - [`dockerfile`](modules/manager/dockerfile/index.md)
 - [`droneci`](modules/manager/droneci/index.md)
@@ -3784,7 +3796,7 @@ If enabled Renovate tries to determine PR reviewers by matching rules defined in
 Read the docs for your platform for details on syntax and allowed file locations:

 - [GitHub Docs, About code owners](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)
-- [GitLab, Code Owners](https://docs.gitlab.com/ee/user/project/code_owners.html)
+- [GitLab, Code Owners](https://docs.gitlab.com/ee/user/project/codeowners/)
 - [Bitbucket, Set up and use code owners](https://support.atlassian.com/bitbucket-cloud/docs/set-up-and-use-code-owners/)

 ## reviewersSampleSize
@@ -3852,6 +3864,11 @@ You could then configure a schedule like this at the repository level:
 This would mean that Renovate can run for 7 hours each night, plus all the time on weekends.
 Note how the above example makes use of the "OR" logic of combining multiple schedules in the array.

+<!-- prettier-ignore -->
+!!! note
+    If both the day of the week _and_ the day of the month are restricted in the schedule, then Renovate only runs when both the day of the month _and_ day of the week match!
+    For example: `* * 1-7 * 4` means Renovate only runs on the _first_ Thursday of the month.
+
 It's common to use `schedule` in combination with [`timezone`](#timezone).
 You should configure [`updateNotScheduled=false`](#updatenotscheduled) if you want the schedule more strictly enforced so that _updates_ to existing branches aren't pushed out of schedule.
 You can also configure [`automergeSchedule`](#automergeschedule) to limit the hours in which branches/PRs are _automerged_ (if [`automerge`](#automerge) is configured).
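The note's cron example, written out as a repository config (illustrative):

```json
{
  "schedule": ["* * 1-7 * 4"]
}
```

Renovate would then run only when the day of the month is 1-7 _and_ the day of the week is Thursday, i.e. the first Thursday of each month.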


@@ -307,7 +307,7 @@ Renovate will get the credentials with the [`google-auth-library`](https://www.n
           service_account: ${{ env.SERVICE_ACCOUNT }}
       - name: renovate
-        uses: renovatebot/github-action@v41.0.5
+        uses: renovatebot/github-action@v41.0.7
         env:
           RENOVATE_HOST_RULES: |
             [
@@ -478,7 +478,7 @@ Make sure to install the Google Cloud SDK into the custom image, as you need the
 For example:

 ```Dockerfile
-FROM renovate/renovate:39.42.4
+FROM renovate/renovate:39.82.1
 # Include the "Docker tip" which you can find here https://cloud.google.com/sdk/docs/install
 # under "Installation" for "Debian/Ubuntu"
 RUN ...


@@ -9,29 +9,42 @@ Requirements:
 Create a `docker-compose.yaml` and `otel-collector-config.yml` file as seen below in a folder.

 ```yaml title="docker-compose.yaml"
-version: '3'
+name: renovate-otel-demo
+
 services:
-  # Jaeger
+  # Jaeger for storing traces
   jaeger:
-    image: jaegertracing/all-in-one:1.63.0
+    image: jaegertracing/jaeger:2.1.0
     ports:
-      - '16686:16686'
-      - '4317'
+      - '16686:16686' # Web UI
+      - '4317' # OTLP gRPC
+      - '4318' # OTLP HTTP
+
+  # Prometheus for storing metrics
+  prometheus:
+    image: prom/prometheus:v3.0.1
+    ports:
+      - '9090:9090' # Web UI
+      - '4318' # OTLP HTTP
+    command:
+      - --web.enable-otlp-receiver
+      # Mirror these flags from the Dockerfile, because `command` overwrites the default flags.
+      # https://github.com/prometheus/prometheus/blob/5b5fee08af4c73230b2dae35964816f7b3c29351/Dockerfile#L23-L24
+      - --config.file=/etc/prometheus/prometheus.yml
+      - --storage.tsdb.path=/prometheus

   otel-collector:
-    image: otel/opentelemetry-collector-contrib:0.114.0
-    command: ['--config=/etc/otel-collector-config.yml']
+    # Using the Contrib version to access the spanmetrics connector.
+    # If you don't need the spanmetrics connector, you can use the standard version
+    image: otel/opentelemetry-collector-contrib:0.116.1
     volumes:
-      - ./otel-collector-config.yml:/etc/otel-collector-config.yml
+      - ./otel-collector-config.yml:/etc/otelcol-contrib/config.yaml
     ports:
-      - '1888:1888' # pprof extension
-      - '13133:13133' # health_check extension
-      - '55679:55679' # zpages extension
-      - '4318:4318' # OTLP HTTP
-      - '4317:4317' # OTLP GRPC
-      - '9123:9123' # Prometheus exporter
+      - '4318:4318' # OTLP HTTP ( exposed to the host )
+      - '4317:4317' # OTLP gRPC ( exposed to the host )
     depends_on:
       - jaeger
+      - prometheus
 ```
 ```yaml title="otel-collector-config.yml"
@@ -39,28 +52,36 @@ receivers:
   otlp:
     protocols:
       grpc:
+        endpoint: 0.0.0.0:4317
       http:
+        endpoint: 0.0.0.0:4318

 exporters:
   otlp/jaeger:
     endpoint: jaeger:4317
     tls:
       insecure: true
-  logging:
-  prometheus:
-    endpoint: '0.0.0.0:9123'
+  otlphttp/prometheus:
+    endpoint: http://prometheus:9090/api/v1/otlp
+  debug:
+    # verbosity: normal

-processors:
-  batch:
+connectors:
   spanmetrics:
-    metrics_exporter: prometheus
-    latency_histogram_buckets: [10ms, 100ms, 250ms, 1s, 30s, 1m, 5m]
+    histogram:
+      exponential:
     dimensions:
       - name: http.method
         default: GET
       - name: http.status_code
       - name: http.host
     dimensions_cache_size: 1000
     aggregation_temporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE'
+    exemplars:
+      enabled: true
+
+processors:
+  batch:

 extensions:
   health_check:
@@ -72,12 +93,23 @@ service:
   pipelines:
     traces:
       receivers: [otlp]
-      exporters: [otlp/jaeger, logging]
-      processors: [spanmetrics, batch]
+      exporters:
+        - otlp/jaeger
+        # Send traces to connector for metrics calculation
+        - spanmetrics
+        # Enable debug exporter to see traces in the logs
+        #- debug
+      processors: [batch]
     metrics:
-      receivers: [otlp]
-      exporters: [prometheus]
+      receivers:
+        - otlp # Receive metrics from Renovate.
+        - spanmetrics # Receive metrics calculated by the spanmetrics connector.
+      processors: [batch]
+      exporters:
+        - otlphttp/prometheus
+        # Enable debug exporter to see metrics in the logs
+        # - debug
 ```
 Start setup using this command inside the folder containing the files created in the earlier steps:

@@ -86,7 +118,11 @@ Start setup using this command inside the folder containing the files created in
 ```
 docker-compose up
 ```

-This command will start an [OpenTelemetry Collector](https://github.com/open-telemetry/opentelemetry-collector-contrib) and an instance of [Jaeger](https://www.jaegertracing.io/).
+This command will start:
+
+- an [OpenTelemetry Collector](https://github.com/open-telemetry/opentelemetry-collector-contrib)
+- an instance of [Jaeger for traces](https://www.jaegertracing.io/)
+- and [Prometheus](https://prometheus.io/)

 Jaeger will be now reachable under [http://localhost:16686](http://localhost:16686).

@@ -97,7 +133,8 @@ To start Renovate with OpenTelemetry enabled run following command, after pointi
 ```
 docker run \
   --rm \
-  -e OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318 \
+  --network renovate-otel-demo_default \
+  -e OTEL_EXPORTER_OTLP_ENDPOINT=http://otel-collector:4318 \
   -v "/path/to/your/config.js:/usr/src/app/config.js" \
   renovate/renovate:latest
 ```
@@ -130,100 +167,90 @@ You should be able to see now the full trace view which shows each HTTP request
 ### Metrics

 Additional to the received traces some metrics are calculated.
-This is achieved using the [spanmetricsprocessor](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/spanmetricsprocessor).
-The previous implemented setup will produce following metrics, which are exposed under [http://localhost:9123/metrics](http://localhost:9123/metrics):
+This is achieved using the [spanmetrics connector](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/connector/spanmetricsconnector).
+The previously implemented setup will produce the following metrics, which are pushed to [Prometheus](http://localhost:9090):

 ```
-# HELP calls_total
-# TYPE calls_total counter
 ### Example of internal spans
-calls_total{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 3
-calls_total{operation="run",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 2
+traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="run", status_code="STATUS_CODE_UNSET"} 2
-### Example of http calls from Renovate to external services
-calls_total{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 9
-...
-# HELP latency
-# TYPE latency histogram
-### Example of internal spans
-latency_bucket{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET",le="0.1"} 0
-...
-latency_bucket{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET",le="9.223372036854775e+12"} 3
-latency_bucket{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET",le="+Inf"} 3
-latency_sum{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 30947.4689
-latency_count{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 3
-...
 ### Example of http calls from Renovate to external services
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="0.1"} 0
+traces_span_metrics_calls_total{http_host="api.github.com:443", http_method="POST", http_status_code="200", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_CLIENT", span_name="POST", status_code="STATUS_CODE_UNSET"} 4
+### Example histogram metrics
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="8", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 0
 ...
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="250"} 3
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="9.223372036854775e+12"} 9
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="+Inf"} 9
-latency_sum{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 2306.1385999999998
-latency_count{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 9
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="2000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="5000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="15000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="10000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="+Inf", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_sum{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 4190.694209
+traces_span_metrics_duration_milliseconds_count{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
 ```
-The [spanmetricsprocessor](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/spanmetricsprocessor) creates two sets of metrics.
+The [spanmetrics connector](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/connector/spanmetricsconnector) creates two sets of metrics.

 #### Calls metric

-At first there are the `calls_total` metrics which display how often specific trace spans have been observed.
+At first there are the `traces_span_metrics_calls_total` metrics.
+These metrics show how often _specific_ trace spans have been observed.

 For example:

-`calls_total{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 3` signals that 3 repositories have been renovated.
-`calls_total{operation="run",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 1` represents how often Renovate has been run.
+- `traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repositories", status_code="STATUS_CODE_UNSET"} 2` signals that 2 repositories have been renovated.
+- `traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="run", status_code="STATUS_CODE_UNSET"} 1` represents how often Renovate has been run.

 If we combine this using the Prometheus Query Language (PromQL), we can calculate the average count of repositories each Renovate run handles.

 ```
-calls_total{operation="renovate repository",service_name="renovate"} / calls_total{operation="run",service_name="renovate"}
+traces_span_metrics_calls_total{span_name="repository",service_name="renovate"} / traces_span_metrics_calls_total{span_name="run",service_name="renovate"}
 ```

-This metrics is also for spans generated by http calls:
+These metrics are generated for HTTP call spans too:

 ```yaml
-calls_total{http_host="registry.terraform.io:443",http_method="GET",http_status_code="200",operation="HTTPS GET",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 5
+traces_span_metrics_calls_total{http_host="prometheus-community.github.io:443", http_method="GET", http_status_code="200", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_CLIENT", span_name="GET", status_code="STATUS_CODE_UNSET"} 5
 ```

 #### Latency buckets

-The second class of metrics exposed are the latency focused latency buckets which allow to create [heatmaps](https://grafana.com/docs/grafana/latest/basics/intro-histograms/#heatmaps).
+The second class of metrics exposed are the latency-focused buckets, which allow creating [heatmaps](https://grafana.com/docs/grafana/latest/basics/intro-histograms/#heatmaps).

 A request is added to a bucket if the latency is bigger than the bucket value (`le`): `request_duration => le`

 As an example, if we receive a request which needs `1.533s` to complete, we get the following metrics:

 ```
-latency_bucket{http_host="api.github.com:443",le="0.1"} 0
-latency_bucket{http_host="api.github.com:443",le="1"} 0
-latency_bucket{http_host="api.github.com:443",le="2"} 1
-latency_bucket{http_host="api.github.com:443",le="6"} 1
-latency_bucket{http_host="api.github.com:443",le="10"} 1
-latency_bucket{http_host="api.github.com:443",le="100"} 1
-latency_bucket{http_host="api.github.com:443",le="250"} 1
-latency_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 1
-latency_bucket{http_host="api.github.com:443",le="+Inf"} 1
-latency_sum{http_host="api.github.com:443"} 1.533
-latency_count{http_host="api.github.com:443"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="0.1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="2"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="6"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="10"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="100"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="250"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="+Inf"} 1
+traces_span_metrics_duration_milliseconds_sum{http_host="api.github.com:443"} 1.533
+traces_span_metrics_duration_milliseconds_count{http_host="api.github.com:443"} 1
 ```

 Now we have another request which this time takes 10s to complete:

 ```
-latency_bucket{http_host="api.github.com:443",le="0.1"} 0
-latency_bucket{http_host="api.github.com:443",le="1"} 0
-latency_bucket{http_host="api.github.com:443",le="2"} 1
-latency_bucket{http_host="api.github.com:443",le="6"} 1
-latency_bucket{http_host="api.github.com:443",le="10"} 2
-latency_bucket{http_host="api.github.com:443",le="100"} 2
-latency_bucket{http_host="api.github.com:443",le="250"} 2
-latency_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 2
-latency_bucket{http_host="api.github.com:443",le="+Inf"} 2
-latency_sum{http_host="api.github.com:443"} 11.533
-latency_count{http_host="api.github.com:443"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="0.1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="2"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="6"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="10"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="100"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="250"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="+Inf"} 2
+traces_span_metrics_duration_milliseconds_sum{http_host="api.github.com:443"} 11.533
+traces_span_metrics_duration_milliseconds_count{http_host="api.github.com:443"} 2
 ```

 More about the functionality can be found on the Prometheus page for [metric types](https://prometheus.io/docs/concepts/metric_types/#histogram).


@@ -25,8 +25,8 @@ It builds `latest` based on the `main` branch and all SemVer tags are published
 ```sh title="Example of valid tags"
 docker run --rm renovate/renovate
 docker run --rm renovate/renovate:39
-docker run --rm renovate/renovate:39.42
-docker run --rm renovate/renovate:39.42.4
+docker run --rm renovate/renovate:39.82
+docker run --rm renovate/renovate:39.82.1
 ```

 <!-- prettier-ignore -->
@@ -62,7 +62,7 @@ spec:
       - name: renovate
         # Update this to the latest available and then enable Renovate on
         # the manifest
-        image: renovate/renovate:39.42.4
+        image: renovate/renovate:39.82.1
         args:
           - user/repo
         # Environment Variables
@@ -121,7 +121,7 @@ spec:
   template:
     spec:
       containers:
-        - image: renovate/renovate:39.42.4
+        - image: renovate/renovate:39.82.1
           name: renovate-bot
           env: # For illustration purposes, please use secrets.
             - name: RENOVATE_PLATFORM
@@ -367,7 +367,7 @@ spec:
       containers:
         - name: renovate
           # Update this to the latest available and then enable Renovate on the manifest
-          image: renovate/renovate:39.42.4
+          image: renovate/renovate:39.82.1
           volumeMounts:
             - name: ssh-key-volume
               readOnly: true


@@ -29,36 +29,6 @@ The Renovate team only fixes bugs in an older version if:
 If you're using the Mend Renovate App, you don't need to do anything, as the Renovate maintainers update it regularly.
 If you're self hosting Renovate, use the latest release if possible.

-## When is the Mend Renovate App updated with new Renovate versions?
-
-The Renovate maintainers manually update the app.
-The maintainers don't follow any release schedule or release cadence.
-This means the Mend Renovate App can lag a few hours to a week behind the open source version.
-
-Major releases of Renovate are held back until the maintainers are reasonably certain it works for most users.
-
-## How can I see which version the Mend Renovate app is using?
-
-Follow these steps to see which version the Mend Renovate app is on:
-
-1. Go to the [Mend Developer Portal](https://developer.mend.io/)
-1. Sign in to the Renovate app with your GitHub or Bitbucket account
-1. Select your organization
-1. Select a installed repository
-1. Select a job from the _Recent jobs_ overview
-1. Select the _Info_ Log Level from the dropdown menu
-1. You should see something like this:
-
-   ```
-   INFO: Repository started
-   {
-     "renovateVersion": "39.11.5"
-   }
-   ```
-
-<!-- prettier-ignore -->
-!!! tip
-    The PRs that Renovate creates have a link to the "repository job log" in the footer of the PR body text.
-
 ## Renovate core features not supported on all platforms

 | Feature | Platforms which lack feature | See Renovate issue(s) |


@@ -499,7 +499,7 @@ private-package==1.2.3
 #### Packages that Renovate needs

-Renovate relies on `pip`'s integration with the Python [keyring](https://pypi.org/project/keyring/) package along with the [keyrigs.envvars](https://pypi.org/project/keyrings.envvars/) backend for this.
+Renovate relies on `pip`'s integration with the Python [keyring](https://pypi.org/project/keyring/) package along with the [keyrings.envvars](https://pypi.org/project/keyrings.envvars/) backend for this.

 ##### Self-hosting Renovate

@@ -511,7 +511,7 @@ But if you are self-hosting Renovate and:
 - _not_ running Renovate in a Containerbase environment
 - or, _not_ using the Docker sidecar container

-Then you must install the Python keyring package and the keyrigs.envvars package into your self-hosted environment.
+Then you must install the Python keyring package and the keyrings.envvars package into your self-hosted environment.
### poetry ### poetry


@@ -1,4 +1,4 @@
-![Renovate banner](https://app.renovatebot.com/images/whitesource_renovate_660_220.jpg){ loading=lazy }
+![Renovate banner](./assets/images/mend-renovate-cli-banner.jpg){ loading=lazy }

 # Renovate documentation


@@ -4,12 +4,13 @@ The information on this page is for the Mend-hosted cloud apps:

 - Renovate App on GitHub
 - Mend App on Bitbucket
+- Mend App on Azure DevOps

 If you self-host, you can skip reading this page.

 ## :warning: Migrate secrets in your Renovate config file :warning:

-The Mend-hosted cloud app will stop reading secrets from the Renovate config file in your repository on 01-Oct-2024.
+Use of encrypted secrets in the Mend Renovate cloud apps has been deprecated and soon the apps will stop reading secrets from the Renovate config file in your repository.
 You must migrate any secrets you currently keep in the Renovate config file, and put them in the app settings page on [developer.mend.io](https://developer.mend.io).
 To add secrets you must have admin-level rights.


@@ -17,6 +17,36 @@ The Renovate logs for the Mend-hosted apps are on the [Mend Developer Portal](ht
 Reading the logs can help you understand the configuration that Renovate used.

+## Renovate Version
+
+The Renovate version used by the Mend-hosted apps is updated manually by the maintainers of the app.
+The maintainers don't follow any release schedule or release cadence, but try to update at least once a week.
+This means the Mend Renovate App can lag a few hours to a week behind the open source version.
+
+Major releases of Renovate are held back until the maintainers are reasonably certain it works for most users.
+
+### Which version is the Mend Renovate app using?
+
+Follow these steps to see which version the Mend Renovate app used for a specific job:
+
+1. Sign in to the [Mend Developer Portal](https://developer.mend.io/) with your GitHub or Bitbucket account
+1. Select your organization
+1. Select an installed repository
+1. Select a job from the _Recent jobs_ overview
+1. Select the _Info_ Log Level from the dropdown menu
+1. You should see something like this:
+
+   ```
+   INFO: Repository started
+   {
+     "renovateVersion": "39.11.5"
+   }
+   ```
+
+<!-- prettier-ignore -->
+!!! tip
+    The PRs that Renovate creates have a link to the "repository job log" in the footer of the PR body text.
+
 ## Onboarding behavior

 ### Installing Renovate into all repositories leads to silent mode
## Onboarding behavior ## Onboarding behavior
### Installing Renovate into all repositories leads to silent mode ### Installing Renovate into all repositories leads to silent mode


@@ -8,7 +8,7 @@ They can be referenced from the Renovate config files inside the repo using `{{

 ## Old method

-This method will stop working on 01-Oct-2024:
+This method is deprecated:

 ```json title="Put encrypted secret in Renovate config"
 {
@@ -25,7 +25,7 @@ This method will stop working on 01-Oct-2024:

 ## New method

-This is the new method, that you should start using:
+This is the new method that you should start using:

 ```json title="Reference the app secret in the Renovate config"
 {


@@ -130,6 +130,23 @@ If you use Azure DevOps:
 The username of the PAT must match the username of the _user of the PAT_.
 The generated `nuget.config` forces the basic authentication, which cannot be overridden externally!

+## Ignoring package files when using presets
+
+Because the `nuget` manager has a dedicated `ignorePaths` entry in the `:ignoreModulesAndTests` preset, if you're using any presets that extend it (like `config:recommended`), you need to put your `ignorePaths` inside the `nuget` section for it to be merged.
+For example:
+
+```json
+{
+  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+  "extends": ["config:recommended"],
+  "nuget": {
+    "ignorePaths": ["IgnoreThisPackage/**"]
+  }
+}
+```
+
+Otherwise, all `nuget.ignorePaths` values in `:ignoreModulesAndTests` will override values you put inside `ignorePaths` at the top-level config.
+
 ## Future work

 We welcome contributions or feature requests to support more patterns or use cases.
## Future work ## Future work
We welcome contributions or feature requests to support more patterns or use cases. We welcome contributions or feature requests to support more patterns or use cases.


@@ -39,6 +39,10 @@ This includes the following:

 If set to any value, Renovate will stop using the Docker Hub API (`https://hub.docker.com`) to fetch tags and instead use the normal Docker API for images pulled from `https://index.docker.io`.

+## `RENOVATE_X_ENCRYPTED_STRICT`
+
+If set to `"true"`, a config error Issue will be raised in case repository config contains `encrypted` objects without any `privateKey` defined.
+
 ## `RENOVATE_X_EXEC_GPID_HANDLE`

 If set, Renovate will terminate the whole process group of a terminated child process spawned by Renovate.


@@ -88,7 +88,7 @@ Pinning your development dependencies means you, and your team, are using the sa
 This makes the developer-tool side of your builds reproducible.
 Debugging faulty versions of your tools is easier, because you can use Git to check out different versions of the tools.

-### Why updating often is easier, faster and safer
+## Why updating often is easier, faster and safer

 You may think that updating takes too much time.
 But updating regularly actually _saves_ you time, because:

@@ -98,14 +98,14 @@ But updating regularly actually _saves_ you time, because:
 - You'll be ready for CVE patches
 - You'll look for ways to automate the updates

-#### Regular updates tend to be small
+### Regular updates tend to be small

 Firstly, when you update regularly, updates tend to be small.
 The update's changelogs are small, quick to read, and easy to understand.
 You probably only need to make changes in a few places (if at all) to merge the PR and get going again.
 Because you're reading the changelogs regularly, you'll get a feel for the direction of the upstream project.

-#### Applying `major` updates is easier
+### Applying `major` updates is easier

 Secondly, when you're current with upstream, `major` updates are easier.
 This is because you already:

@@ -114,14 +114,14 @@ This is because you already:
 - use the latest names for features/variables
 - read the previous changelogs

-#### You'll be ready for CVE patches
+### You'll be ready for CVE patches

 Thirdly, you'll be ready when an upstream package releases a patch for a critical CVE.
 If you're current, you can review and merge Renovate's PR quickly.
 When you're behind on updates, you'll have a bad time, because you must read _more_ changelogs and make _more_ changes before you can merge the critical patch.

-#### You'll look for ways to automate the updates
+### You'll look for ways to automate the updates

 Finally, when you're updating often, you'll start looking for ways to automate the updates.
 You may start to [`automerge`](./configuration-options.md#automerge) development dependencies like Prettier, or ESLint when the linter passes.


@@ -1,53 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`config/massage massageConfig does not massage lockFileMaintenance 1`] = `
-{
-  "packageRules": [
-    {
-      "lockFileMaintenance": {
-        "enabled": true,
-      },
-      "matchBaseBranches": [
-        "release/ft10/1.9.x",
-      ],
-      "matchManagers": [
-        "helmv3",
-      ],
-      "schedule": [
-        "at any time",
-      ],
-    },
-  ],
-}
-`;
-
-exports[`config/massage massageConfig massages packageRules matchUpdateTypes 1`] = `
-{
-  "packageRules": [
-    {
-      "matchPackageNames": [
-        "foo",
-      ],
-      "separateMajorMinor": false,
-    },
-    {
-      "matchPackageNames": [
-        "foo",
-      ],
-      "matchUpdateTypes": [
-        "minor",
-      ],
-      "semanticCommitType": "feat",
-    },
-    {
-      "matchPackageNames": [
-        "foo",
-      ],
-      "matchUpdateTypes": [
-        "patch",
-      ],
-      "semanticCommitType": "fix",
-    },
-  ],
-}
-`;


@@ -12,6 +12,7 @@ describe('config/decrypt', () => {
   beforeEach(() => {
     config = {};
     GlobalConfig.reset();
+    delete process.env.RENOVATE_X_ENCRYPTED_STRICT;
   });

   it('returns empty with no privateKey', async () => {
@@ -30,5 +31,14 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.a).toBeUndefined();
     });
+
+    it('throws exception if encrypted found but no privateKey', async () => {
+      config.encrypted = { a: '1' };
+      process.env.RENOVATE_X_ENCRYPTED_STRICT = 'true';
+      await expect(decryptConfig(config, repository)).rejects.toThrow(
+        'config-validation',
+      );
+    });
   });
 });


@ -1,4 +1,5 @@
import is from '@sindresorhus/is'; import is from '@sindresorhus/is';
import { CONFIG_VALIDATION } from '../constants/error-messages';
import { logger } from '../logger'; import { logger } from '../logger';
import { regEx } from '../util/regex'; import { regEx } from '../util/regex';
import { addSecretForSanitizing } from '../util/sanitize'; import { addSecretForSanitizing } from '../util/sanitize';
@ -173,7 +174,15 @@ export async function decryptConfig(
} }
} }
} else { } else {
logger.error('Found encrypted data but no privateKey'); if (process.env.RENOVATE_X_ENCRYPTED_STRICT === 'true') {
const error = new Error(CONFIG_VALIDATION);
error.validationSource = 'config';
error.validationError = 'Encrypted config unsupported';
error.validationMessage = `This config contains an encrypted object at location \`$.${key}\` but no privateKey is configured. To support encrypted config, the Renovate administrator must configure a \`privateKey\` in Global Configuration.`;
throw error;
} else {
logger.error('Found encrypted data but no privateKey');
}
} }
delete decryptedConfig.encrypted; delete decryptedConfig.encrypted;
} else if (is.array(val)) { } else if (is.array(val)) {
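A short sketch of the strict mode introduced above, assuming the same `decryptConfig(config, repository)` signature the test uses and no `privateKey` configured:

```ts
// In a Jest test: with RENOVATE_X_ENCRYPTED_STRICT=true and no privateKey,
// decryptConfig() now rejects with a CONFIG_VALIDATION error instead of
// merely logging an error and continuing.
process.env.RENOVATE_X_ENCRYPTED_STRICT = 'true';
await expect(
  decryptConfig({ encrypted: { npmToken: 'xxx' } }, 'some/repo'),
).rejects.toThrow('config-validation');
```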


@ -32,14 +32,23 @@ describe('config/index', () => {
it('merges packageRules', () => { it('merges packageRules', () => {
const parentConfig = { ...defaultConfig }; const parentConfig = { ...defaultConfig };
Object.assign(parentConfig, { Object.assign(parentConfig, {
packageRules: [{ a: 1 }, { a: 2 }], packageRules: [
{ matchPackageNames: ['pkg1'] },
{ matchPackageNames: ['pkg2'] },
],
}); });
const childConfig = { const childConfig = {
packageRules: [{ a: 3 }, { a: 4 }], packageRules: [
{ matchPackageNames: ['pkg3'] },
{ matchPackageNames: ['pkg4'] },
],
}; };
const config = mergeChildConfig(parentConfig, childConfig); const config = mergeChildConfig(parentConfig, childConfig);
expect(config.packageRules.map((rule) => rule.a)).toMatchObject([ expect(config.packageRules).toMatchObject([
1, 2, 3, 4, { matchPackageNames: ['pkg1'] },
{ matchPackageNames: ['pkg2'] },
{ matchPackageNames: ['pkg3'] },
{ matchPackageNames: ['pkg4'] },
]); ]);
}); });
@ -95,9 +104,15 @@ describe('config/index', () => {
it('handles null child packageRules', () => { it('handles null child packageRules', () => {
const parentConfig = { ...defaultConfig }; const parentConfig = { ...defaultConfig };
parentConfig.packageRules = [{ a: 3 }, { a: 4 }]; parentConfig.packageRules = [
{ matchPackageNames: ['pkg1'] },
{ matchPackageNames: ['pkg2'] },
];
const config = mergeChildConfig(parentConfig, {}); const config = mergeChildConfig(parentConfig, {});
expect(config.packageRules).toHaveLength(2); expect(config.packageRules).toMatchObject([
{ matchPackageNames: ['pkg1'] },
{ matchPackageNames: ['pkg2'] },
]);
}); });
it('handles undefined childConfig', () => { it('handles undefined childConfig', () => {
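The merge semantics these tests pin down can be sketched briefly:

```ts
// mergeChildConfig concatenates packageRules: the child's rules are appended
// after the parent's, preserving order, rather than being deep-merged.
const merged = mergeChildConfig(
  { packageRules: [{ matchPackageNames: ['pkg1'] }] },
  { packageRules: [{ matchPackageNames: ['pkg2'] }] },
);
// merged.packageRules ->
//   [{ matchPackageNames: ['pkg1'] }, { matchPackageNames: ['pkg2'] }]
```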


@ -17,15 +17,6 @@ describe('config/massage', () => {
expect(Array.isArray(res.schedule)).toBeTrue(); expect(Array.isArray(res.schedule)).toBeTrue();
}); });
it('massages npmToken', () => {
const config: RenovateConfig = {
npmToken: 'some-token',
};
expect(massage.massageConfig(config)).toEqual({
npmrc: '//registry.npmjs.org/:_authToken=some-token\n',
});
});
it('massages packageRules matchUpdateTypes', () => { it('massages packageRules matchUpdateTypes', () => {
const config: RenovateConfig = { const config: RenovateConfig = {
packageRules: [ packageRules: [
@ -42,7 +33,24 @@ describe('config/massage', () => {
], ],
}; };
const res = massage.massageConfig(config); const res = massage.massageConfig(config);
expect(res).toMatchSnapshot(); expect(res).toEqual({
packageRules: [
{
matchPackageNames: ['foo'],
separateMajorMinor: false,
},
{
matchPackageNames: ['foo'],
matchUpdateTypes: ['minor'],
semanticCommitType: 'feat',
},
{
matchPackageNames: ['foo'],
matchUpdateTypes: ['patch'],
semanticCommitType: 'fix',
},
],
});
expect(res.packageRules).toHaveLength(3); expect(res.packageRules).toHaveLength(3);
}); });
@ -73,7 +81,18 @@ describe('config/massage', () => {
], ],
}; };
const res = massage.massageConfig(config); const res = massage.massageConfig(config);
expect(res).toMatchSnapshot(); expect(res).toEqual({
packageRules: [
{
lockFileMaintenance: {
enabled: true,
},
matchBaseBranches: ['release/ft10/1.9.x'],
matchManagers: ['helmv3'],
schedule: ['at any time'],
},
],
});
expect(res.packageRules).toHaveLength(1); expect(res.packageRules).toHaveLength(1);
}); });
}); });


@ -21,9 +21,6 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
for (const [key, val] of Object.entries(config)) { for (const [key, val] of Object.entries(config)) {
if (allowedStrings.includes(key) && is.string(val)) { if (allowedStrings.includes(key) && is.string(val)) {
massagedConfig[key] = [val]; massagedConfig[key] = [val];
} else if (key === 'npmToken' && is.string(val) && val.length < 50) {
massagedConfig.npmrc = `//registry.npmjs.org/:_authToken=${val}\n`;
delete massagedConfig.npmToken;
} else if (is.array(val)) { } else if (is.array(val)) {
massagedConfig[key] = []; massagedConfig[key] = [];
val.forEach((item) => { val.forEach((item) => {
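What remains of the massage step, per the `schedule` test above, is the wrapping of allowed string options into arrays; a minimal sketch:

```ts
// massageConfig still wraps allowed string options into arrays; the
// npmToken-to-npmrc conversion removed above no longer applies.
const res = massageConfig({ schedule: 'before 5am' });
// res.schedule -> ['before 5am']
// res.npmrc is no longer derived from npmToken
```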


@ -52,6 +52,7 @@ const options: RenovateOptions[] = [
subType: 'string', subType: 'string',
globalOnly: true, globalOnly: true,
patternMatch: true, patternMatch: true,
mergeable: true,
}, },
{ {
name: 'detectGlobalManagerConfig', name: 'detectGlobalManagerConfig',
@ -515,7 +516,7 @@ const options: RenovateOptions[] = [
description: description:
'Change this value to override the default Renovate sidecar image.', 'Change this value to override the default Renovate sidecar image.',
type: 'string', type: 'string',
default: 'ghcr.io/containerbase/sidecar:13.0.24', default: 'ghcr.io/containerbase/sidecar:13.5.5',
globalOnly: true, globalOnly: true,
}, },
{ {
@ -2389,6 +2390,7 @@ const options: RenovateOptions[] = [
'gomodTidyE', 'gomodTidyE',
'gomodUpdateImportPaths', 'gomodUpdateImportPaths',
'gomodSkipVendor', 'gomodSkipVendor',
'gomodVendor',
'helmUpdateSubChartArchives', 'helmUpdateSubChartArchives',
'npmDedupe', 'npmDedupe',
'pnpmDedupe', 'pnpmDedupe',
@ -2855,6 +2857,14 @@ const options: RenovateOptions[] = [
type: 'boolean', type: 'boolean',
default: false, default: false,
}, },
{
name: 'cloneSubmodulesFilter',
description:
'List of submodule names or patterns to clone when cloneSubmodules=true.',
type: 'array',
subType: 'string',
default: ['*'],
},
{ {
name: 'ignorePrAuthor', name: 'ignorePrAuthor',
description: description:
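A hypothetical usage sketch for the new option (the filter values are illustrative):

```ts
// Clone only the matching submodules when cloneSubmodules is enabled; the
// default filter ['*'] keeps the previous clone-everything behavior.
const repoConfig = {
  cloneSubmodules: true,
  cloneSubmodulesFilter: ['libs/*', 'vendor/openssl'],
};
```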


@ -513,485 +513,6 @@ describe('config/presets/index', () => {
}); });
}); });
describe('parsePreset', () => {
// default namespace
it('returns default package name', () => {
expect(presets.parsePreset(':base')).toEqual({
repo: 'default',
params: undefined,
presetName: 'base',
presetPath: undefined,
presetSource: 'internal',
});
});
it('parses github', () => {
expect(presets.parsePreset('github>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'github',
});
});
it('handles special chars', () => {
expect(presets.parsePreset('github>some/repo:foo+bar')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'foo+bar',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github subfiles', () => {
expect(presets.parsePreset('github>some/repo:somefile')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github subfiles with preset name', () => {
expect(
presets.parsePreset('github>some/repo:somefile/somepreset'),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile/somepreset',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github file with preset name with .json extension', () => {
expect(presets.parsePreset('github>some/repo:somefile.json')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github file with preset name with .json5 extension', () => {
expect(presets.parsePreset('github>some/repo:somefile.json5')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json5',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github subfiles with preset name with .json extension', () => {
expect(
presets.parsePreset('github>some/repo:somefile.json/somepreset'),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json/somepreset',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github subfiles with preset name with .json5 extension', () => {
expect(
presets.parsePreset('github>some/repo:somefile.json5/somepreset'),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json5/somepreset',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github subfiles with preset and sub-preset name', () => {
expect(
presets.parsePreset(
'github>some/repo:somefile/somepreset/somesubpreset',
),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile/somepreset/somesubpreset',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github subdirectories', () => {
expect(
presets.parsePreset('github>some/repo//somepath/somesubpath/somefile'),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile',
presetPath: 'somepath/somesubpath',
presetSource: 'github',
});
});
it('parses github toplevel file using subdirectory syntax', () => {
expect(presets.parsePreset('github>some/repo//somefile')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses gitlab', () => {
expect(presets.parsePreset('gitlab>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'gitlab',
});
});
it('parses gitea', () => {
expect(presets.parsePreset('gitea>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'gitea',
});
});
it('parses local', () => {
expect(presets.parsePreset('local>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local with spaces', () => {
expect(presets.parsePreset('local>A2B CD/A2B_Renovate')).toEqual({
repo: 'A2B CD/A2B_Renovate',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local with subdirectory', () => {
expect(
presets.parsePreset('local>some-group/some-repo//some-dir/some-file'),
).toEqual({
repo: 'some-group/some-repo',
params: undefined,
presetName: 'some-file',
presetPath: 'some-dir',
presetSource: 'local',
});
});
it('parses local with spaces and subdirectory', () => {
expect(
presets.parsePreset('local>A2B CD/A2B_Renovate//some-dir/some-file'),
).toEqual({
repo: 'A2B CD/A2B_Renovate',
params: undefined,
presetName: 'some-file',
presetPath: 'some-dir',
presetSource: 'local',
});
});
it('parses local with sub preset and tag', () => {
expect(
presets.parsePreset(
'local>some-group/some-repo:some-file/subpreset#1.2.3',
),
).toEqual({
repo: 'some-group/some-repo',
params: undefined,
presetName: 'some-file/subpreset',
presetPath: undefined,
presetSource: 'local',
tag: '1.2.3',
});
});
it('parses local with subdirectory and tag', () => {
expect(
presets.parsePreset(
'local>some-group/some-repo//some-dir/some-file#1.2.3',
),
).toEqual({
repo: 'some-group/some-repo',
params: undefined,
presetName: 'some-file',
presetPath: 'some-dir',
presetSource: 'local',
tag: '1.2.3',
});
});
it('parses local with subdirectory and branch/tag with a slash', () => {
expect(
presets.parsePreset(
'local>PROJECT/repository//path/to/preset#feature/branch',
),
).toEqual({
repo: 'PROJECT/repository',
params: undefined,
presetName: 'preset',
presetPath: 'path/to',
presetSource: 'local',
tag: 'feature/branch',
});
});
it('parses local with sub preset and branch/tag with a slash', () => {
expect(
presets.parsePreset(
'local>PROJECT/repository:preset/subpreset#feature/branch',
),
).toEqual({
repo: 'PROJECT/repository',
params: undefined,
presetName: 'preset/subpreset',
presetPath: undefined,
presetSource: 'local',
tag: 'feature/branch',
});
});
it('parses no prefix as local', () => {
expect(presets.parsePreset('some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local Bitbucket user repo with preset name', () => {
expect(presets.parsePreset('local>~john_doe/repo//somefile')).toEqual({
repo: '~john_doe/repo',
params: undefined,
presetName: 'somefile',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local Bitbucket user repo', () => {
expect(presets.parsePreset('local>~john_doe/renovate-config')).toEqual({
repo: '~john_doe/renovate-config',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('returns default package name with params', () => {
expect(presets.parsePreset(':group(packages/eslint, eslint)')).toEqual({
repo: 'default',
params: ['packages/eslint', 'eslint'],
presetName: 'group',
presetPath: undefined,
presetSource: 'internal',
});
});
// scoped namespace
it('returns simple scope', () => {
expect(presets.parsePreset('@somescope')).toEqual({
repo: '@somescope/renovate-config',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns simple scope and params', () => {
expect(presets.parsePreset('@somescope(param1)')).toEqual({
repo: '@somescope/renovate-config',
params: ['param1'],
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and default', () => {
expect(presets.parsePreset('@somescope/somepackagename')).toEqual({
repo: '@somescope/somepackagename',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and params and default', () => {
expect(
presets.parsePreset(
'@somescope/somepackagename(param1, param2, param3)',
),
).toEqual({
repo: '@somescope/somepackagename',
params: ['param1', 'param2', 'param3'],
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with presetName', () => {
expect(presets.parsePreset('@somescope:somePresetName')).toEqual({
repo: '@somescope/renovate-config',
params: undefined,
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with presetName and params', () => {
expect(presets.parsePreset('@somescope:somePresetName(param1)')).toEqual({
repo: '@somescope/renovate-config',
params: ['param1'],
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and presetName', () => {
expect(
presets.parsePreset('@somescope/somepackagename:somePresetName'),
).toEqual({
repo: '@somescope/somepackagename',
params: undefined,
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and presetName and params', () => {
expect(
presets.parsePreset(
'@somescope/somepackagename:somePresetName(param1, param2)',
),
).toEqual({
repo: '@somescope/somepackagename',
params: ['param1', 'param2'],
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
// non-scoped namespace
it('returns non-scoped default', () => {
expect(presets.parsePreset('somepackage')).toEqual({
repo: 'renovate-config-somepackage',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns non-scoped package name', () => {
expect(presets.parsePreset('somepackage:webapp')).toEqual({
repo: 'renovate-config-somepackage',
params: undefined,
presetName: 'webapp',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns non-scoped package name full', () => {
expect(presets.parsePreset('renovate-config-somepackage:webapp')).toEqual(
{
repo: 'renovate-config-somepackage',
params: undefined,
presetName: 'webapp',
presetPath: undefined,
presetSource: 'npm',
},
);
});
it('returns non-scoped package name with params', () => {
expect(presets.parsePreset('somepackage:webapp(param1)')).toEqual({
repo: 'renovate-config-somepackage',
params: ['param1'],
presetName: 'webapp',
presetPath: undefined,
presetSource: 'npm',
});
});
it('parses HTTPS URLs', () => {
expect(
presets.parsePreset(
'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
),
).toEqual({
repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
params: undefined,
presetName: '',
presetPath: undefined,
presetSource: 'http',
});
});
it('parses HTTP URLs', () => {
expect(
presets.parsePreset(
'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
),
).toEqual({
repo: 'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
params: undefined,
presetName: '',
presetPath: undefined,
presetSource: 'http',
});
});
it('parses HTTPS URLs with parameters', () => {
expect(
presets.parsePreset(
'https://my.server/gitea/renovate-config/raw/branch/main/default.json(param1)',
),
).toEqual({
repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
params: ['param1'],
presetName: '',
presetPath: undefined,
presetSource: 'http',
});
});
});
describe('getPreset', () => { describe('getPreset', () => {
it('handles removed presets with a migration', async () => { it('handles removed presets with a migration', async () => {
const res = await presets.getPreset(':base', {}); const res = await presets.getPreset(':base', {});


@ -24,7 +24,8 @@ import * as http from './http';
import * as internal from './internal'; import * as internal from './internal';
import * as local from './local'; import * as local from './local';
import * as npm from './npm'; import * as npm from './npm';
import type { ParsedPreset, Preset, PresetApi } from './types'; import { parsePreset } from './parse';
import type { Preset, PresetApi } from './types';
import { import {
PRESET_DEP_NOT_FOUND, PRESET_DEP_NOT_FOUND,
PRESET_INVALID, PRESET_INVALID,
@ -46,13 +47,6 @@ const presetSources: Record<string, PresetApi> = {
const presetCacheNamespace = 'preset'; const presetCacheNamespace = 'preset';
const nonScopedPresetWithSubdirRegex = regEx(
/^(?<repo>~?[\w\-. /]+?)\/\/(?:(?<presetPath>[\w\-./]+)\/)?(?<presetName>[\w\-.]+)(?:#(?<tag>[\w\-./]+?))?$/,
);
const gitPresetRegex = regEx(
/^(?<repo>~?[\w\-. /]+)(?::(?<presetName>[\w\-.+/]+))?(?:#(?<tag>[\w\-./]+?))?$/,
);
export function replaceArgs( export function replaceArgs(
obj: string, obj: string,
argMapping: Record<string, any>, argMapping: Record<string, any>,
@ -105,120 +99,6 @@ export function replaceArgs(
return obj; return obj;
} }
export function parsePreset(input: string): ParsedPreset {
let str = input;
let presetSource: string | undefined;
let presetPath: string | undefined;
let repo: string;
let presetName: string;
let tag: string | undefined;
let params: string[] | undefined;
if (str.startsWith('github>')) {
presetSource = 'github';
str = str.substring('github>'.length);
} else if (str.startsWith('gitlab>')) {
presetSource = 'gitlab';
str = str.substring('gitlab>'.length);
} else if (str.startsWith('gitea>')) {
presetSource = 'gitea';
str = str.substring('gitea>'.length);
} else if (str.startsWith('local>')) {
presetSource = 'local';
str = str.substring('local>'.length);
} else if (str.startsWith('http://') || str.startsWith('https://')) {
presetSource = 'http';
} else if (
!str.startsWith('@') &&
!str.startsWith(':') &&
str.includes('/')
) {
presetSource = 'local';
}
str = str.replace(regEx(/^npm>/), '');
presetSource = presetSource ?? 'npm';
if (str.includes('(')) {
params = str
.slice(str.indexOf('(') + 1, -1)
.split(',')
.map((elem) => elem.trim());
str = str.slice(0, str.indexOf('('));
}
if (presetSource === 'http') {
return { presetSource, repo: str, presetName: '', params };
}
const presetsPackages = [
'compatibility',
'config',
'customManagers',
'default',
'docker',
'group',
'helpers',
'mergeConfidence',
'monorepo',
'npm',
'packages',
'preview',
'replacements',
'schedule',
'security',
'workarounds',
];
if (
presetsPackages.some((presetPackage) => str.startsWith(`${presetPackage}:`))
) {
presetSource = 'internal';
[repo, presetName] = str.split(':');
} else if (str.startsWith(':')) {
// default namespace
presetSource = 'internal';
repo = 'default';
presetName = str.slice(1);
} else if (str.startsWith('@')) {
// scoped namespace
[, repo] = regEx(/(@.*?)(:|$)/).exec(str)!;
str = str.slice(repo.length);
if (!repo.includes('/')) {
repo += '/renovate-config';
}
if (str === '') {
presetName = 'default';
} else {
presetName = str.slice(1);
}
} else if (str.includes('//')) {
// non-scoped namespace with a subdirectory preset
// Validation
if (str.includes(':')) {
throw new Error(PRESET_PROHIBITED_SUBPRESET);
}
if (!nonScopedPresetWithSubdirRegex.test(str)) {
throw new Error(PRESET_INVALID);
}
({ repo, presetPath, presetName, tag } =
nonScopedPresetWithSubdirRegex.exec(str)!.groups!);
} else {
({ repo, presetName, tag } = gitPresetRegex.exec(str)!.groups!);
if (presetSource === 'npm' && !repo.startsWith('renovate-config-')) {
repo = `renovate-config-${repo}`;
}
if (!is.nonEmptyString(presetName)) {
presetName = 'default';
}
}
return {
presetSource,
presetPath,
repo,
presetName,
tag,
params,
};
}
export async function getPreset( export async function getPreset(
preset: string, preset: string,
baseConfig?: RenovateConfig, baseConfig?: RenovateConfig,


@ -96,7 +96,7 @@ export const presets: Record<string, Preset> = {
packageRules: [ packageRules: [
{ {
automerge: true, automerge: true,
matchCurrentVersion: '>= 1.0.0', matchCurrentVersion: '!/^0/',
matchUpdateTypes: ['minor', 'patch'], matchUpdateTypes: ['minor', 'patch'],
}, },
], ],


@ -0,0 +1,11 @@
import type { Preset } from '../types';
/* eslint sort-keys: ["error", "asc", {caseSensitive: false, natural: true}] */
export const presets: Record<string, Preset> = {
safeEnv: {
allowedEnv: ['GO*'],
description:
'Hopefully safe environment variables to allow users to configure.',
},
};
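Assuming this ships in the new internal `global` preset group (registered further below), a self-hosted config could reference it like so:

```ts
// Sketch: extending the new preset to allow GO* environment variables through.
const globalConfig = {
  extends: ['global:safeEnv'], // resolves to { allowedEnv: ['GO*'] }
};
```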


@ -0,0 +1,13 @@
import { presets } from './group';
const exceptions = new Set(['monorepos', 'recommended']);
describe('config/presets/internal/group', () => {
const presetNames = Object.keys(presets).filter(
(name) => !exceptions.has(name),
);
it.each(presetNames)('group:%s contains packageRules', (name: string) => {
expect(presets[name]).toHaveProperty('packageRules');
});
});


@ -111,14 +111,19 @@ const staticGroups = {
}, },
fusionjs: { fusionjs: {
description: 'Group Fusion.js packages together.', description: 'Group Fusion.js packages together.',
matchPackageNames: [ packageRules: [
'fusion-cli', {
'fusion-core', groupName: 'Fusion.js packages',
'fusion-test-utils', matchPackageNames: [
'fusion-tokens', 'fusion-cli',
'fusion-plugin-**', 'fusion-core',
'fusion-react**', 'fusion-test-utils',
'fusion-apollo**', 'fusion-tokens',
'fusion-plugin-**',
'fusion-react**',
'fusion-apollo**',
],
},
], ],
}, },
githubArtifactActions: { githubArtifactActions: {
@ -311,9 +316,10 @@ const staticGroups = {
'k8s.io/cluster-bootstrap**', 'k8s.io/cluster-bootstrap**',
'k8s.io/code-generator**', 'k8s.io/code-generator**',
'k8s.io/component-base**', 'k8s.io/component-base**',
'k8s.io/component-helpers**',
'k8s.io/controller-manager**', 'k8s.io/controller-manager**',
'k8s.io/cri-api**', 'k8s.io/cri-api**',
// 'k8s.io/csi-api', has not go.mod set up and does not follow the versioning of other repos // 'k8s.io/csi-api', has no go.mod set up and does not follow the versioning of other repos
'k8s.io/csi-translation-lib**', 'k8s.io/csi-translation-lib**',
'k8s.io/kube-aggregator**', 'k8s.io/kube-aggregator**',
'k8s.io/kube-controller-manager**', 'k8s.io/kube-controller-manager**',
@ -341,6 +347,16 @@ const staticGroups = {
}, },
], ],
}, },
micrometer: {
description:
"Group Micrometer packages together, e.g. 'io.micrometer:micrometer-core'.",
packageRules: [
{
groupName: 'micrometer',
matchPackageNames: ['io.micrometer:micrometer-**'],
},
],
},
nodeJs: { nodeJs: {
description: description:
"Group anything that looks like Node.js together so that it's updated together.", "Group anything that looks like Node.js together so that it's updated together.",
@ -462,6 +478,7 @@ const staticGroups = {
'group:jestPlusTypes', 'group:jestPlusTypes',
'group:jwtFramework', 'group:jwtFramework',
'group:kubernetes', 'group:kubernetes',
'group:micrometer',
'group:phpstan', 'group:phpstan',
'group:polymer', 'group:polymer',
'group:react', 'group:react',


@ -30,7 +30,8 @@ describe('config/presets/internal/index', () => {
const config = await resolveConfigPresets( const config = await resolveConfigPresets(
massageConfig(presetConfig), massageConfig(presetConfig),
); );
const res = await validateConfig('repo', config, true); const configType = groupName === 'global' ? 'global' : 'repo';
const res = await validateConfig(configType, config, true);
expect(res.errors).toHaveLength(0); expect(res.errors).toHaveLength(0);
expect(res.warnings).toHaveLength(0); expect(res.warnings).toHaveLength(0);
} catch (err) { } catch (err) {


@ -3,6 +3,7 @@ import * as configPreset from './config';
import * as customManagersPreset from './custom-managers'; import * as customManagersPreset from './custom-managers';
import * as defaultPreset from './default'; import * as defaultPreset from './default';
import * as dockerPreset from './docker'; import * as dockerPreset from './docker';
import * as globalPreset from './global';
import * as groupPreset from './group'; import * as groupPreset from './group';
import * as helpersPreset from './helpers'; import * as helpersPreset from './helpers';
import * as mergeConfidence from './merge-confidence'; import * as mergeConfidence from './merge-confidence';
@ -22,6 +23,7 @@ export const groups: Record<string, Record<string, Preset>> = {
customManagers: customManagersPreset.presets, customManagers: customManagersPreset.presets,
default: defaultPreset.presets, default: defaultPreset.presets,
docker: dockerPreset.presets, docker: dockerPreset.presets,
global: globalPreset.presets,
group: groupPreset.presets, group: groupPreset.presets,
helpers: helpersPreset.presets, helpers: helpersPreset.presets,
mergeConfidence: mergeConfidence.presets, mergeConfidence: mergeConfidence.presets,


@ -135,7 +135,8 @@ export const presets: Record<string, Preset> = {
}, },
react: { react: {
description: 'All React packages.', description: 'All React packages.',
matchPackageNames: ['@types/react', 'react**'], matchDatasources: ['npm'],
matchPackageNames: ['@types/react**', 'react**'],
}, },
stylelint: { stylelint: {
description: 'All Stylelint packages.', description: 'All Stylelint packages.',


@ -0,0 +1,462 @@
import { parsePreset } from './parse';
describe('config/presets/parse', () => {
describe('parsePreset', () => {
// default namespace
it('returns default package name', () => {
expect(parsePreset(':base')).toEqual({
repo: 'default',
params: undefined,
presetName: 'base',
presetPath: undefined,
presetSource: 'internal',
});
});
it('parses github', () => {
expect(parsePreset('github>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'github',
});
});
it('handles special chars', () => {
expect(parsePreset('github>some/repo:foo+bar')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'foo+bar',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github subfiles', () => {
expect(parsePreset('github>some/repo:somefile')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github subfiles with preset name', () => {
expect(parsePreset('github>some/repo:somefile/somepreset')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile/somepreset',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github file with preset name with .json extension', () => {
expect(parsePreset('github>some/repo:somefile.json')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github file with preset name with .json5 extension', () => {
expect(parsePreset('github>some/repo:somefile.json5')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json5',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github subfiles with preset name with .json extension', () => {
expect(parsePreset('github>some/repo:somefile.json/somepreset')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json/somepreset',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
});
});
it('parses github subfiles with preset name with .json5 extension', () => {
expect(parsePreset('github>some/repo:somefile.json5/somepreset')).toEqual(
{
repo: 'some/repo',
params: undefined,
presetName: 'somefile.json5/somepreset',
presetPath: undefined,
presetSource: 'github',
tag: undefined,
},
);
});
it('parses github subfiles with preset and sub-preset name', () => {
expect(
parsePreset('github>some/repo:somefile/somepreset/somesubpreset'),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile/somepreset/somesubpreset',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses github subdirectories', () => {
expect(
parsePreset('github>some/repo//somepath/somesubpath/somefile'),
).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile',
presetPath: 'somepath/somesubpath',
presetSource: 'github',
});
});
it('parses github toplevel file using subdirectory syntax', () => {
expect(parsePreset('github>some/repo//somefile')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'somefile',
presetPath: undefined,
presetSource: 'github',
});
});
it('parses gitlab', () => {
expect(parsePreset('gitlab>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'gitlab',
});
});
it('parses gitea', () => {
expect(parsePreset('gitea>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'gitea',
});
});
it('parses local', () => {
expect(parsePreset('local>some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local with spaces', () => {
expect(parsePreset('local>A2B CD/A2B_Renovate')).toEqual({
repo: 'A2B CD/A2B_Renovate',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local with subdirectory', () => {
expect(
parsePreset('local>some-group/some-repo//some-dir/some-file'),
).toEqual({
repo: 'some-group/some-repo',
params: undefined,
presetName: 'some-file',
presetPath: 'some-dir',
presetSource: 'local',
});
});
it('parses local with spaces and subdirectory', () => {
expect(
parsePreset('local>A2B CD/A2B_Renovate//some-dir/some-file'),
).toEqual({
repo: 'A2B CD/A2B_Renovate',
params: undefined,
presetName: 'some-file',
presetPath: 'some-dir',
presetSource: 'local',
});
});
it('parses local with sub preset and tag', () => {
expect(
parsePreset('local>some-group/some-repo:some-file/subpreset#1.2.3'),
).toEqual({
repo: 'some-group/some-repo',
params: undefined,
presetName: 'some-file/subpreset',
presetPath: undefined,
presetSource: 'local',
tag: '1.2.3',
});
});
it('parses local with subdirectory and tag', () => {
expect(
parsePreset('local>some-group/some-repo//some-dir/some-file#1.2.3'),
).toEqual({
repo: 'some-group/some-repo',
params: undefined,
presetName: 'some-file',
presetPath: 'some-dir',
presetSource: 'local',
tag: '1.2.3',
});
});
it('parses local with subdirectory and branch/tag with a slash', () => {
expect(
parsePreset('local>PROJECT/repository//path/to/preset#feature/branch'),
).toEqual({
repo: 'PROJECT/repository',
params: undefined,
presetName: 'preset',
presetPath: 'path/to',
presetSource: 'local',
tag: 'feature/branch',
});
});
it('parses local with sub preset and branch/tag with a slash', () => {
expect(
parsePreset('local>PROJECT/repository:preset/subpreset#feature/branch'),
).toEqual({
repo: 'PROJECT/repository',
params: undefined,
presetName: 'preset/subpreset',
presetPath: undefined,
presetSource: 'local',
tag: 'feature/branch',
});
});
it('parses no prefix as local', () => {
expect(parsePreset('some/repo')).toEqual({
repo: 'some/repo',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local Bitbucket user repo with preset name', () => {
expect(parsePreset('local>~john_doe/repo//somefile')).toEqual({
repo: '~john_doe/repo',
params: undefined,
presetName: 'somefile',
presetPath: undefined,
presetSource: 'local',
});
});
it('parses local Bitbucket user repo', () => {
expect(parsePreset('local>~john_doe/renovate-config')).toEqual({
repo: '~john_doe/renovate-config',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'local',
});
});
it('returns default package name with params', () => {
expect(parsePreset(':group(packages/eslint, eslint)')).toEqual({
repo: 'default',
params: ['packages/eslint', 'eslint'],
presetName: 'group',
presetPath: undefined,
presetSource: 'internal',
});
});
// scoped namespace
it('returns simple scope', () => {
expect(parsePreset('@somescope')).toEqual({
repo: '@somescope/renovate-config',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns simple scope and params', () => {
expect(parsePreset('@somescope(param1)')).toEqual({
repo: '@somescope/renovate-config',
params: ['param1'],
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and default', () => {
expect(parsePreset('@somescope/somepackagename')).toEqual({
repo: '@somescope/somepackagename',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and params and default', () => {
expect(
parsePreset('@somescope/somepackagename(param1, param2, param3)'),
).toEqual({
repo: '@somescope/somepackagename',
params: ['param1', 'param2', 'param3'],
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with presetName', () => {
expect(parsePreset('@somescope:somePresetName')).toEqual({
repo: '@somescope/renovate-config',
params: undefined,
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with presetName and params', () => {
expect(parsePreset('@somescope:somePresetName(param1)')).toEqual({
repo: '@somescope/renovate-config',
params: ['param1'],
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and presetName', () => {
expect(parsePreset('@somescope/somepackagename:somePresetName')).toEqual({
repo: '@somescope/somepackagename',
params: undefined,
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns scope with repo and presetName and params', () => {
expect(
parsePreset(
'@somescope/somepackagename:somePresetName(param1, param2)',
),
).toEqual({
repo: '@somescope/somepackagename',
params: ['param1', 'param2'],
presetName: 'somePresetName',
presetPath: undefined,
presetSource: 'npm',
});
});
// non-scoped namespace
it('returns non-scoped default', () => {
expect(parsePreset('somepackage')).toEqual({
repo: 'renovate-config-somepackage',
params: undefined,
presetName: 'default',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns non-scoped package name', () => {
expect(parsePreset('somepackage:webapp')).toEqual({
repo: 'renovate-config-somepackage',
params: undefined,
presetName: 'webapp',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns non-scoped package name full', () => {
expect(parsePreset('renovate-config-somepackage:webapp')).toEqual({
repo: 'renovate-config-somepackage',
params: undefined,
presetName: 'webapp',
presetPath: undefined,
presetSource: 'npm',
});
});
it('returns non-scoped package name with params', () => {
expect(parsePreset('somepackage:webapp(param1)')).toEqual({
repo: 'renovate-config-somepackage',
params: ['param1'],
presetName: 'webapp',
presetPath: undefined,
presetSource: 'npm',
});
});
it('parses HTTPS URLs', () => {
expect(
parsePreset(
'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
),
).toEqual({
repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
params: undefined,
presetName: '',
presetPath: undefined,
presetSource: 'http',
});
});
it('parses HTTP URLs', () => {
expect(
parsePreset(
'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
),
).toEqual({
repo: 'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
params: undefined,
presetName: '',
presetPath: undefined,
presetSource: 'http',
});
});
it('parses HTTPS URLs with parameters', () => {
expect(
parsePreset(
'https://my.server/gitea/renovate-config/raw/branch/main/default.json(param1)',
),
).toEqual({
repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
params: ['param1'],
presetName: '',
presetPath: undefined,
presetSource: 'http',
});
});
});
});

lib/config/presets/parse.ts (new file, 126 lines)

@ -0,0 +1,126 @@
import is from '@sindresorhus/is';
import { regEx } from '../../util/regex';
import type { ParsedPreset } from './types';
import { PRESET_INVALID, PRESET_PROHIBITED_SUBPRESET } from './util';
const nonScopedPresetWithSubdirRegex = regEx(
/^(?<repo>~?[\w\-. /]+?)\/\/(?:(?<presetPath>[\w\-./]+)\/)?(?<presetName>[\w\-.]+)(?:#(?<tag>[\w\-./]+?))?$/,
);
const gitPresetRegex = regEx(
/^(?<repo>~?[\w\-. /]+)(?::(?<presetName>[\w\-.+/]+))?(?:#(?<tag>[\w\-./]+?))?$/,
);
export function parsePreset(input: string): ParsedPreset {
let str = input;
let presetSource: string | undefined;
let presetPath: string | undefined;
let repo: string;
let presetName: string;
let tag: string | undefined;
let params: string[] | undefined;
if (str.startsWith('github>')) {
presetSource = 'github';
str = str.substring('github>'.length);
} else if (str.startsWith('gitlab>')) {
presetSource = 'gitlab';
str = str.substring('gitlab>'.length);
} else if (str.startsWith('gitea>')) {
presetSource = 'gitea';
str = str.substring('gitea>'.length);
} else if (str.startsWith('local>')) {
presetSource = 'local';
str = str.substring('local>'.length);
} else if (str.startsWith('http://') || str.startsWith('https://')) {
presetSource = 'http';
} else if (
!str.startsWith('@') &&
!str.startsWith(':') &&
str.includes('/')
) {
presetSource = 'local';
}
str = str.replace(regEx(/^npm>/), '');
presetSource = presetSource ?? 'npm';
if (str.includes('(')) {
params = str
.slice(str.indexOf('(') + 1, -1)
.split(',')
.map((elem) => elem.trim());
str = str.slice(0, str.indexOf('('));
}
if (presetSource === 'http') {
return { presetSource, repo: str, presetName: '', params };
}
const presetsPackages = [
'compatibility',
'config',
'customManagers',
'default',
'docker',
'global',
'group',
'helpers',
'mergeConfidence',
'monorepo',
'npm',
'packages',
'preview',
'replacements',
'schedule',
'security',
'workarounds',
];
if (
presetsPackages.some((presetPackage) => str.startsWith(`${presetPackage}:`))
) {
presetSource = 'internal';
[repo, presetName] = str.split(':');
} else if (str.startsWith(':')) {
// default namespace
presetSource = 'internal';
repo = 'default';
presetName = str.slice(1);
} else if (str.startsWith('@')) {
// scoped namespace
[, repo] = regEx(/(@.*?)(:|$)/).exec(str)!;
str = str.slice(repo.length);
if (!repo.includes('/')) {
repo += '/renovate-config';
}
if (str === '') {
presetName = 'default';
} else {
presetName = str.slice(1);
}
} else if (str.includes('//')) {
// non-scoped namespace with a subdirectory preset
// Validation
if (str.includes(':')) {
throw new Error(PRESET_PROHIBITED_SUBPRESET);
}
if (!nonScopedPresetWithSubdirRegex.test(str)) {
throw new Error(PRESET_INVALID);
}
({ repo, presetPath, presetName, tag } =
nonScopedPresetWithSubdirRegex.exec(str)!.groups!);
} else {
({ repo, presetName, tag } = gitPresetRegex.exec(str)!.groups!);
if (presetSource === 'npm' && !repo.startsWith('renovate-config-')) {
repo = `renovate-config-${repo}`;
}
if (!is.nonEmptyString(presetName)) {
presetName = 'default';
}
}
return {
presetSource,
presetPath,
repo,
presetName,
tag,
params,
};
}
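A brief usage sketch of the extracted parser; the expected shape follows the tests above:

```ts
import { parsePreset } from './parse';

// A GitHub-hosted preset in a subdirectory, pinned to a tag:
parsePreset('github>some/repo//some-dir/some-file#1.2.3');
// -> { presetSource: 'github', presetPath: 'some-dir', repo: 'some/repo',
//      presetName: 'some-file', tag: '1.2.3', params: undefined }
```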


@ -239,6 +239,7 @@ export interface RenovateConfig
baseBranch?: string; baseBranch?: string;
defaultBranch?: string; defaultBranch?: string;
branchList?: string[]; branchList?: string[];
cloneSubmodulesFilter?: string[];
description?: string | string[]; description?: string | string[];
force?: RenovateConfig; force?: RenovateConfig;
errors?: ValidationMessage[]; errors?: ValidationMessage[];
@ -305,6 +306,9 @@ export interface RenovateConfig
statusCheckNames?: Record<StatusCheckKey, string | null>; statusCheckNames?: Record<StatusCheckKey, string | null>;
env?: UserEnv; env?: UserEnv;
logLevelRemap?: LogLevelRemap[]; logLevelRemap?: LogLevelRemap[];
branchTopic?: string;
additionalBranchPrefix?: string;
} }
const CustomDatasourceFormats = ['json', 'plain', 'yaml', 'html'] as const; const CustomDatasourceFormats = ['json', 'plain', 'yaml', 'html'] as const;


@ -0,0 +1,17 @@
import { getParentName } from './utils';
describe('config/validation-helpers/utils', () => {
describe('getParentName()', () => {
it('ignores encrypted in root', () => {
expect(getParentName('encrypted')).toBeEmptyString();
});
it('handles array types', () => {
expect(getParentName('hostRules[1]')).toBe('hostRules');
});
it('handles encrypted within array types', () => {
expect(getParentName('hostRules[0].encrypted')).toBe('hostRules');
});
});
});


@ -0,0 +1,138 @@
import is from '@sindresorhus/is';
import { logger } from '../../logger';
import type {
RegexManagerConfig,
RegexManagerTemplates,
} from '../../modules/manager/custom/regex/types';
import { regEx } from '../../util/regex';
import type { ValidationMessage } from '../types';
export function getParentName(parentPath: string | undefined): string {
return parentPath
? parentPath
.replace(regEx(/\.?encrypted$/), '')
.replace(regEx(/\[\d+\]$/), '')
.split('.')
.pop()!
: '.';
}
export function validatePlainObject(
val: Record<string, unknown>,
): true | string {
for (const [key, value] of Object.entries(val)) {
if (!is.string(value)) {
return key;
}
}
return true;
}
export function validateNumber(
key: string,
val: unknown,
allowsNegative: boolean,
currentPath?: string,
subKey?: string,
): ValidationMessage[] {
const errors: ValidationMessage[] = [];
const path = `${currentPath}${subKey ? '.' + subKey : ''}`;
if (is.number(val)) {
if (val < 0 && !allowsNegative) {
errors.push({
topic: 'Configuration Error',
message: `Configuration option \`${path}\` should be a positive integer. Found negative value instead.`,
});
}
} else {
errors.push({
topic: 'Configuration Error',
message: `Configuration option \`${path}\` should be an integer. Found: ${JSON.stringify(
val,
)} (${typeof val}).`,
});
}
return errors;
}
/** An option is a "false global" if it has the same name as a global-only option
 * but is actually just a field of a non-global option, or a field of one of that
 * option's children. E.g. `token`: it is a global option used as the bot's token,
 * but it can also be the token used for a platform inside the hostRules
 * configuration.
 */
export function isFalseGlobal(
optionName: string,
parentPath?: string,
): boolean {
if (parentPath?.includes('hostRules')) {
if (
optionName === 'token' ||
optionName === 'username' ||
optionName === 'password'
) {
return true;
}
}
return false;
}
function hasField(
customManager: Partial<RegexManagerConfig>,
field: string,
): boolean {
const templateField = `${field}Template` as keyof RegexManagerTemplates;
return !!(
customManager[templateField] ??
customManager.matchStrings?.some((matchString) =>
matchString.includes(`(?<${field}>`),
)
);
}
export function validateRegexManagerFields(
customManager: Partial<RegexManagerConfig>,
currentPath: string,
errors: ValidationMessage[],
): void {
if (is.nonEmptyArray(customManager.matchStrings)) {
for (const matchString of customManager.matchStrings) {
try {
regEx(matchString);
} catch (err) {
logger.debug(
{ err },
'customManager.matchStrings regEx validation error',
);
errors.push({
topic: 'Configuration Error',
message: `Invalid regExp for ${currentPath}: \`${matchString}\``,
});
}
}
} else {
errors.push({
topic: 'Configuration Error',
message: `Each Custom Manager must contain a non-empty matchStrings array`,
});
}
const mandatoryFields = ['currentValue', 'datasource'];
for (const field of mandatoryFields) {
if (!hasField(customManager, field)) {
errors.push({
topic: 'Configuration Error',
message: `Regex Managers must contain ${field}Template configuration or regex group named ${field}`,
});
}
}
const nameFields = ['depName', 'packageName'];
if (!nameFields.some((field) => hasField(customManager, field))) {
errors.push({
topic: 'Configuration Error',
message: `Regex Managers must contain depName or packageName regex groups or templates`,
});
}
}
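`validateNumber` now takes `allowsNegative` explicitly instead of consulting `optionAllowsNegativeIntegers` itself; a short sketch of the call pattern used in `validation.ts` below:

```ts
// Callers resolve allowsNegative from optionAllowsNegativeIntegers first,
// then delegate; a negative value with allowsNegative=false yields an error.
const errors = validateNumber('prConcurrentLimit', -1, false, 'prConcurrentLimit');
// -> [{ topic: 'Configuration Error',
//       message: '... should be a positive integer. Found negative value instead.' }]
```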


@ -4,22 +4,6 @@ import type { RenovateConfig } from './types';
import * as configValidation from './validation'; import * as configValidation from './validation';
describe('config/validation', () => { describe('config/validation', () => {
describe('getParentName()', () => {
it('ignores encrypted in root', () => {
expect(configValidation.getParentName('encrypted')).toBeEmptyString();
});
it('handles array types', () => {
expect(configValidation.getParentName('hostRules[1]')).toBe('hostRules');
});
it('handles encrypted within array types', () => {
expect(configValidation.getParentName('hostRules[0].encrypted')).toBe(
'hostRules',
);
});
});
describe('validateConfig(config)', () => { describe('validateConfig(config)', () => {
it('returns deprecation warnings', async () => { it('returns deprecation warnings', async () => {
const config = { const config = {


@ -1,11 +1,6 @@
import is from '@sindresorhus/is'; import is from '@sindresorhus/is';
import { logger } from '../logger';
import { allManagersList, getManagerList } from '../modules/manager'; import { allManagersList, getManagerList } from '../modules/manager';
import { isCustomManager } from '../modules/manager/custom'; import { isCustomManager } from '../modules/manager/custom';
import type {
RegexManagerConfig,
RegexManagerTemplates,
} from '../modules/manager/custom/regex/types';
import type { CustomManager } from '../modules/manager/custom/types'; import type { CustomManager } from '../modules/manager/custom/types';
import type { HostRule } from '../types'; import type { HostRule } from '../types';
import { getExpression } from '../util/jsonata'; import { getExpression } from '../util/jsonata';
@ -39,6 +34,13 @@ import { allowedStatusCheckStrings } from './types';
import * as managerValidator from './validation-helpers/managers'; import * as managerValidator from './validation-helpers/managers';
import * as matchBaseBranchesValidator from './validation-helpers/match-base-branches'; import * as matchBaseBranchesValidator from './validation-helpers/match-base-branches';
import * as regexOrGlobValidator from './validation-helpers/regex-glob-matchers'; import * as regexOrGlobValidator from './validation-helpers/regex-glob-matchers';
import {
getParentName,
isFalseGlobal,
validateNumber,
validatePlainObject,
validateRegexManagerFields,
} from './validation-helpers/utils';
const options = getOptions(); const options = getOptions();
@ -84,42 +86,6 @@ function isIgnored(key: string): boolean {
return ignoredNodes.includes(key); return ignoredNodes.includes(key);
} }
function validatePlainObject(val: Record<string, unknown>): true | string {
for (const [key, value] of Object.entries(val)) {
if (!is.string(value)) {
return key;
}
}
return true;
}
function validateNumber(
key: string,
val: unknown,
currentPath?: string,
subKey?: string,
): ValidationMessage[] {
const errors: ValidationMessage[] = [];
const path = `${currentPath}${subKey ? '.' + subKey : ''}`;
if (is.number(val)) {
if (val < 0 && !optionAllowsNegativeIntegers.has(key)) {
errors.push({
topic: 'Configuration Error',
message: `Configuration option \`${path}\` should be a positive integer. Found negative value instead.`,
});
}
} else {
errors.push({
topic: 'Configuration Error',
message: `Configuration option \`${path}\` should be an integer. Found: ${JSON.stringify(
val,
)} (${typeof val}).`,
});
}
return errors;
}
function getUnsupportedEnabledManagers(enabledManagers: string[]): string[] { function getUnsupportedEnabledManagers(enabledManagers: string[]): string[] {
return enabledManagers.filter( return enabledManagers.filter(
(manager) => !allManagersList.includes(manager.replace('custom.', '')), (manager) => !allManagersList.includes(manager.replace('custom.', '')),
@ -186,16 +152,6 @@ function initOptions(): void {
optionsInitialized = true; optionsInitialized = true;
} }
export function getParentName(parentPath: string | undefined): string {
return parentPath
? parentPath
.replace(regEx(/\.?encrypted$/), '')
.replace(regEx(/\[\d+\]$/), '')
.split('.')
.pop()!
: '.';
}
export async function validateConfig( export async function validateConfig(
configType: 'global' | 'inherit' | 'repo', configType: 'global' | 'inherit' | 'repo',
config: RenovateConfig, config: RenovateConfig,
@ -370,7 +326,8 @@ export async function validateConfig(
}); });
} }
} else if (type === 'integer') { } else if (type === 'integer') {
errors.push(...validateNumber(key, val, currentPath)); const allowsNegative = optionAllowsNegativeIntegers.has(key);
errors.push(...validateNumber(key, val, allowsNegative, currentPath));
} else if (type === 'array' && val) { } else if (type === 'array' && val) {
if (is.array(val)) { if (is.array(val)) {
for (const [subIndex, subval] of val.entries()) { for (const [subIndex, subval] of val.entries()) {
@ -865,65 +822,6 @@ export async function validateConfig(
return { errors, warnings }; return { errors, warnings };
} }
function hasField(
customManager: Partial<RegexManagerConfig>,
field: string,
): boolean {
const templateField = `${field}Template` as keyof RegexManagerTemplates;
return !!(
customManager[templateField] ??
customManager.matchStrings?.some((matchString) =>
matchString.includes(`(?<${field}>`),
)
);
}
function validateRegexManagerFields(
customManager: Partial<RegexManagerConfig>,
currentPath: string,
errors: ValidationMessage[],
): void {
if (is.nonEmptyArray(customManager.matchStrings)) {
for (const matchString of customManager.matchStrings) {
try {
regEx(matchString);
} catch (err) {
logger.debug(
{ err },
'customManager.matchStrings regEx validation error',
);
errors.push({
topic: 'Configuration Error',
message: `Invalid regExp for ${currentPath}: \`${matchString}\``,
});
}
}
} else {
errors.push({
topic: 'Configuration Error',
message: `Each Custom Manager must contain a non-empty matchStrings array`,
});
}
const mandatoryFields = ['currentValue', 'datasource'];
for (const field of mandatoryFields) {
if (!hasField(customManager, field)) {
errors.push({
topic: 'Configuration Error',
message: `Regex Managers must contain ${field}Template configuration or regex group named ${field}`,
});
}
}
const nameFields = ['depName', 'packageName'];
if (!nameFields.some((field) => hasField(customManager, field))) {
errors.push({
topic: 'Configuration Error',
message: `Regex Managers must contain depName or packageName regex groups or templates`,
});
}
}
/** /**
* Basic validation for global config options * Basic validation for global config options
*/ */
@ -1013,7 +911,8 @@ async function validateGlobalConfig(
}); });
} }
} else if (type === 'integer') { } else if (type === 'integer') {
warnings.push(...validateNumber(key, val, currentPath)); const allowsNegative = optionAllowsNegativeIntegers.has(key);
warnings.push(...validateNumber(key, val, allowsNegative, currentPath));
} else if (type === 'boolean') { } else if (type === 'boolean') {
if (val !== true && val !== false) { if (val !== true && val !== false) {
warnings.push({ warnings.push({
@ -1079,8 +978,15 @@ async function validateGlobalConfig(
} }
} else if (key === 'cacheTtlOverride') { } else if (key === 'cacheTtlOverride') {
for (const [subKey, subValue] of Object.entries(val)) { for (const [subKey, subValue] of Object.entries(val)) {
const allowsNegative = optionAllowsNegativeIntegers.has(key);
warnings.push( warnings.push(
...validateNumber(key, subValue, currentPath, subKey), ...validateNumber(
key,
subValue,
allowsNegative,
currentPath,
subKey,
),
); );
} }
} else { } else {
@ -1101,22 +1007,3 @@ async function validateGlobalConfig(
} }
} }
} }
/** An option is a false global if it has the same name as a global only option
* but is actually just the field of a non global option or field an children of the non global option
* eg. token: it's global option used as the bot's token as well and
* also it can be the token used for a platform inside the hostRules configuration
*/
function isFalseGlobal(optionName: string, parentPath?: string): boolean {
if (parentPath?.includes('hostRules')) {
if (
optionName === 'token' ||
optionName === 'username' ||
optionName === 'password'
) {
return true;
}
}
return false;
}


@ -15,7 +15,7 @@
"flake8": "https://flake8.pycqa.org/en/latest/release-notes/index.html", "flake8": "https://flake8.pycqa.org/en/latest/release-notes/index.html",
"django-storages": "https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst", "django-storages": "https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst",
"lxml": "https://git.launchpad.net/lxml/plain/CHANGES.txt", "lxml": "https://git.launchpad.net/lxml/plain/CHANGES.txt",
"mypy": "https://mypy-lang.blogspot.com/", "mypy": "https://mypy.readthedocs.io/en/latest/changelog.html",
"phonenumbers": "https://github.com/daviddrysdale/python-phonenumbers/blob/dev/python/HISTORY.md", "phonenumbers": "https://github.com/daviddrysdale/python-phonenumbers/blob/dev/python/HISTORY.md",
"pycountry": "https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt", "pycountry": "https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt",
"django-debug-toolbar": "https://django-debug-toolbar.readthedocs.io/en/latest/changes.html", "django-debug-toolbar": "https://django-debug-toolbar.readthedocs.io/en/latest/changes.html",


@ -55,10 +55,6 @@
"https://github.com/awslabs/aws-sdk-rust" "https://github.com/awslabs/aws-sdk-rust"
], ],
"awsappsync": "https://github.com/awslabs/aws-mobile-appsync-sdk-js", "awsappsync": "https://github.com/awslabs/aws-mobile-appsync-sdk-js",
"axis2": [
"https://gitbox.apache.org/repos/asf?p=axis-axis2-java-core.git;a=summary",
"https://github.com/apache/axis-axis2-java-core"
],
"azure-functions-dotnet-worker": "https://github.com/Azure/azure-functions-dotnet-worker", "azure-functions-dotnet-worker": "https://github.com/Azure/azure-functions-dotnet-worker",
"azure azure-libraries-for-net": "https://github.com/Azure/azure-libraries-for-net", "azure azure-libraries-for-net": "https://github.com/Azure/azure-libraries-for-net",
"azure azure-sdk-for-net": "https://github.com/Azure/azure-sdk-for-net", "azure azure-sdk-for-net": "https://github.com/Azure/azure-sdk-for-net",
@ -281,6 +277,7 @@
"embla-carousel": "https://github.com/davidjerleke/embla-carousel", "embla-carousel": "https://github.com/davidjerleke/embla-carousel",
"emojibase": "https://github.com/milesj/emojibase", "emojibase": "https://github.com/milesj/emojibase",
"emotion": "https://github.com/emotion-js/emotion", "emotion": "https://github.com/emotion-js/emotion",
"envelop": "https://github.com/n1ru4l/envelop",
"eslint": "https://github.com/eslint/eslint", "eslint": "https://github.com/eslint/eslint",
"eslint-config-globex": "https://github.com/GlobexDesignsInc/eslint-config-globex", "eslint-config-globex": "https://github.com/GlobexDesignsInc/eslint-config-globex",
"eslint-stylistic": "https://github.com/eslint-stylistic/eslint-stylistic", "eslint-stylistic": "https://github.com/eslint-stylistic/eslint-stylistic",
@ -307,9 +304,12 @@
"go-cloud": "https://github.com/google/go-cloud", "go-cloud": "https://github.com/google/go-cloud",
"google-api-dotnet-client": "https://github.com/googleapis/google-api-dotnet-client", "google-api-dotnet-client": "https://github.com/googleapis/google-api-dotnet-client",
"grafana": "https://github.com/grafana/grafana", "grafana": "https://github.com/grafana/grafana",
"graphiql": "https://github.com/graphql/graphiql",
"graphql-hive-gateway": "https://github.com/graphql-hive/gateway",
"graphql-mesh": "https://github.com/Urigo/graphql-mesh", "graphql-mesh": "https://github.com/Urigo/graphql-mesh",
"graphql-modules": "https://github.com/Urigo/graphql-modules", "graphql-modules": "https://github.com/Urigo/graphql-modules",
"graphql-tools": "https://github.com/ardatan/graphql-tools", "graphql-tools": "https://github.com/ardatan/graphql-tools",
"graphql-yoga": "https://github.com/dotansimha/graphql-yoga",
"graphqlcodegenerator": [ "graphqlcodegenerator": [
"https://github.com/dotansimha/graphql-code-generator-community", "https://github.com/dotansimha/graphql-code-generator-community",
"https://github.com/dotansimha/graphql-code-generator", "https://github.com/dotansimha/graphql-code-generator",
@ -321,6 +321,7 @@
"grpc-java": "https://github.com/grpc/grpc-java", "grpc-java": "https://github.com/grpc/grpc-java",
"gstreamer-rust": "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs", "gstreamer-rust": "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs",
"guava": "https://github.com/google/guava", "guava": "https://github.com/google/guava",
"happy-dom": "https://github.com/capricorn86/happy-dom",
"Hangfire": "https://github.com/HangfireIO/Hangfire", "Hangfire": "https://github.com/HangfireIO/Hangfire",
"hickory-dns": "https://github.com/hickory-dns/hickory-dns", "hickory-dns": "https://github.com/hickory-dns/hickory-dns",
"infrastructure-ui": "https://github.com/instructure/instructure-ui", "infrastructure-ui": "https://github.com/instructure/instructure-ui",
@ -328,9 +329,12 @@
"istanbuljs": "https://github.com/istanbuljs/istanbuljs", "istanbuljs": "https://github.com/istanbuljs/istanbuljs",
"jackson": [ "jackson": [
"https://github.com/FasterXML/jackson", "https://github.com/FasterXML/jackson",
"https://github.com/FasterXML/jackson-annotations",
"https://github.com/FasterXML/jackson-core",
"https://github.com/FasterXML/jackson-databind", "https://github.com/FasterXML/jackson-databind",
"https://github.com/FasterXML/jackson-dataformats-binary", "https://github.com/FasterXML/jackson-dataformats-binary",
"https://github.com/FasterXML/jackson-dataformats-text", "https://github.com/FasterXML/jackson-dataformats-text",
"https://github.com/FasterXML/jackson-jaxrs-providers",
"https://github.com/FasterXML/jackson-module-kotlin" "https://github.com/FasterXML/jackson-module-kotlin"
], ],
"jasmine": "https://github.com/jasmine/jasmine", "jasmine": "https://github.com/jasmine/jasmine",
@ -357,7 +361,9 @@
"lerna-lite": "https://github.com/lerna-lite/lerna-lite", "lerna-lite": "https://github.com/lerna-lite/lerna-lite",
"lexical": "https://github.com/facebook/lexical", "lexical": "https://github.com/facebook/lexical",
"linguijs": "https://github.com/lingui/js-lingui", "linguijs": "https://github.com/lingui/js-lingui",
"linkifyjs": "https://github.com/nfrasser/linkifyjs",
"log4j2": "https://github.com/apache/logging-log4j2", "log4j2": "https://github.com/apache/logging-log4j2",
"logback": "https://github.com/qos-ch/logback",
"loopback": [ "loopback": [
"https://github.com/strongloop/loopback-next", "https://github.com/strongloop/loopback-next",
"https://github.com/loopbackio/loopback-next" "https://github.com/loopbackio/loopback-next"
@ -424,9 +430,13 @@
"opentelemetry-erlang": "https://github.com/open-telemetry/opentelemetry-erlang", "opentelemetry-erlang": "https://github.com/open-telemetry/opentelemetry-erlang",
"opentelemetry-erlang-contrib": "https://github.com/open-telemetry/opentelemetry-erlang-contrib", "opentelemetry-erlang-contrib": "https://github.com/open-telemetry/opentelemetry-erlang-contrib",
"opentelemetry-go": "https://github.com/open-telemetry/opentelemetry-go", "opentelemetry-go": "https://github.com/open-telemetry/opentelemetry-go",
"opentelemetry-go-contrib": "https://github.com/open-telemetry/opentelemetry-go-contrib",
"opentelemetry-java": "https://github.com/open-telemetry/opentelemetry-java", "opentelemetry-java": "https://github.com/open-telemetry/opentelemetry-java",
"opentelemetry-java-contrib": "https://github.com/open-telemetry/opentelemetry-java-contrib",
"opentelemetry-js": "https://github.com/open-telemetry/opentelemetry-js", "opentelemetry-js": "https://github.com/open-telemetry/opentelemetry-js",
"opentelemetry-js-contrib": "https://github.com/open-telemetry/opentelemetry-js-contrib",
"opentelemetry-rust": "https://github.com/open-telemetry/opentelemetry-rust", "opentelemetry-rust": "https://github.com/open-telemetry/opentelemetry-rust",
"opentelemetry-rust-contrib": "https://github.com/open-telemetry/opentelemetry-rust-contrib",
"orleans": "https://github.com/dotnet/orleans", "orleans": "https://github.com/dotnet/orleans",
"panda-css": "https://github.com/chakra-ui/panda", "panda-css": "https://github.com/chakra-ui/panda",
"parcel": "https://github.com/parcel-bundler/parcel", "parcel": "https://github.com/parcel-bundler/parcel",
@ -442,6 +452,7 @@
"pollyjs": "https://github.com/Netflix/pollyjs", "pollyjs": "https://github.com/Netflix/pollyjs",
"pothos": "https://github.com/hayes/pothos", "pothos": "https://github.com/hayes/pothos",
"pouchdb": "https://github.com/pouchdb/pouchdb", "pouchdb": "https://github.com/pouchdb/pouchdb",
"powermock": "https://github.com/powermock/powermock",
"prisma": "https://github.com/prisma/prisma", "prisma": "https://github.com/prisma/prisma",
"prometheus-net": "https://github.com/prometheus-net/prometheus-net", "prometheus-net": "https://github.com/prometheus-net/prometheus-net",
"promster": "https://github.com/tdeekens/promster", "promster": "https://github.com/tdeekens/promster",
@ -478,6 +489,7 @@
"sanity": "https://github.com/sanity-io/sanity", "sanity": "https://github.com/sanity-io/sanity",
"serilog-ui": "https://github.com/serilog-contrib/serilog-ui", "serilog-ui": "https://github.com/serilog-contrib/serilog-ui",
"scaffdog": "https://github.com/scaffdog/scaffdog", "scaffdog": "https://github.com/scaffdog/scaffdog",
"sea-orm": "https://github.com/SeaQL/sea-orm",
"secretlint": "https://github.com/secretlint/secretlint", "secretlint": "https://github.com/secretlint/secretlint",
"sendgrid-nodejs": "https://github.com/sendgrid/sendgrid-nodejs", "sendgrid-nodejs": "https://github.com/sendgrid/sendgrid-nodejs",
"sentry-dotnet": "https://github.com/getsentry/sentry-dotnet", "sentry-dotnet": "https://github.com/getsentry/sentry-dotnet",
@ -493,6 +505,7 @@
"skiasharp": "https://github.com/mono/SkiaSharp", "skiasharp": "https://github.com/mono/SkiaSharp",
"slack-net": "https://github.com/soxtoby/SlackNet", "slack-net": "https://github.com/soxtoby/SlackNet",
"slf4j": "https://github.com/qos-ch/slf4j", "slf4j": "https://github.com/qos-ch/slf4j",
"slim-message-bus": "https://github.com/zarusz/SlimMessageBus",
"spectre-console": "https://github.com/spectreconsole/spectre.console", "spectre-console": "https://github.com/spectreconsole/spectre.console",
"springfox": "https://github.com/springfox/springfox", "springfox": "https://github.com/springfox/springfox",
"steeltoe": "https://github.com/SteeltoeOSS/steeltoe", "steeltoe": "https://github.com/SteeltoeOSS/steeltoe",
@ -540,6 +553,7 @@
"unhead": "https://github.com/unjs/unhead", "unhead": "https://github.com/unjs/unhead",
"unocss": "https://github.com/unocss/unocss", "unocss": "https://github.com/unocss/unocss",
"uppy": "https://github.com/transloadit/uppy", "uppy": "https://github.com/transloadit/uppy",
"utoipa": "https://github.com/juhaku/utoipa",
"vaadin-hilla": "https://github.com/vaadin/hilla", "vaadin-hilla": "https://github.com/vaadin/hilla",
"vaadinWebComponents": "https://github.com/vaadin/web-components", "vaadinWebComponents": "https://github.com/vaadin/web-components",
"visx": "https://github.com/airbnb/visx", "visx": "https://github.com/airbnb/visx",
@ -581,12 +595,17 @@
"apache-poi": "/^org.apache.poi:/", "apache-poi": "/^org.apache.poi:/",
"aws-java-sdk": "/^com.amazonaws:aws-java-sdk-/", "aws-java-sdk": "/^com.amazonaws:aws-java-sdk-/",
"aws-java-sdk-v2": "/^software.amazon.awssdk:/", "aws-java-sdk-v2": "/^software.amazon.awssdk:/",
"axis2": "/^org.apache.axis2:/",
"babel6": "/^babel6$/", "babel6": "/^babel6$/",
"clarity": ["/^@cds//", "/^@clr//"], "clarity": ["/^@cds//", "/^@clr//"],
"embroider": "/^@embroider//", "embroider": "/^@embroider//",
"forge": "/^@forge//", "forge": "/^@forge//",
"fullcalendar": "/^@fullcalendar//", "fullcalendar": "/^@fullcalendar//",
"hotchocolate": "/^HotChocolate\\./", "hotchocolate": "/^HotChocolate\\./",
"oracle-database": [
"/^com.oracle.database.jdbc:/",
"/^com.oracle.database.nls:/"
],
"prometheus-simpleclient": "/^io.prometheus:simpleclient/", "prometheus-simpleclient": "/^io.prometheus:simpleclient/",
"russh": ["/^russh$/", "/^russh-keys$/"], "russh": ["/^russh$/", "/^russh-keys$/"],
"spfx": ["/^@microsoft/sp-/", "/^@microsoft/eslint-.+-spfx$/"], "spfx": ["/^@microsoft/sp-/", "/^@microsoft/eslint-.+-spfx$/"],

View file

@ -59,10 +59,12 @@ describe('instrumentation/index', () => {
_registeredSpanProcessors: [ _registeredSpanProcessors: [
{ {
_exporter: { _exporter: {
_transport: { _delegate: {
_transport: { _transport: {
_parameters: { _transport: {
url: 'https://collector.example.com/v1/traces', _parameters: {
url: 'https://collector.example.com/v1/traces',
},
}, },
}, },
}, },
@ -88,10 +90,12 @@ describe('instrumentation/index', () => {
{ _exporter: {} }, { _exporter: {} },
{ {
_exporter: { _exporter: {
_transport: { _delegate: {
_transport: { _transport: {
_parameters: { _transport: {
url: 'https://collector.example.com/v1/traces', _parameters: {
url: 'https://collector.example.com/v1/traces',
},
}, },
}, },
}, },

View file

@ -1,8 +1,10 @@
import type { WriteStream } from 'node:fs'; import type { WriteStream } from 'node:fs';
import bunyan from 'bunyan';
import fs from 'fs-extra'; import fs from 'fs-extra';
import { partial } from '../../test/util'; import { partial } from '../../test/util';
import { add } from '../util/host-rules'; import { add } from '../util/host-rules';
import { addSecretForSanitizing as addSecret } from '../util/sanitize'; import { addSecretForSanitizing as addSecret } from '../util/sanitize';
import type { RenovateLogger } from './renovate-logger';
import { import {
addMeta, addMeta,
addStream, addStream,
@ -17,16 +19,38 @@ import {
setMeta, setMeta,
} from '.'; } from '.';
const initialContext = 'initial_context';
jest.unmock('.'); jest.unmock('.');
jest.mock('nanoid', () => ({
nanoid: () => 'initial_context',
}));
const bunyanDebugSpy = jest.spyOn(bunyan.prototype, 'debug');
describe('logger/index', () => { describe('logger/index', () => {
it('inits', () => { it('inits', () => {
expect(logger).toBeDefined(); expect(logger).toBeDefined();
}); });
it('uses an auto-generated log context', () => {
logger.debug('');
expect(bunyanDebugSpy).toHaveBeenCalledWith(
{ logContext: initialContext },
'',
);
});
it('sets and gets context', () => { it('sets and gets context', () => {
setContext('123test'); const logContext = '123test';
expect(getContext()).toBe('123test'); const msg = 'test';
setContext(logContext);
logger.debug(msg);
expect(getContext()).toBe(logContext);
expect(bunyanDebugSpy).toHaveBeenCalledWith({ logContext }, msg);
}); });
it('supports logging with metadata', () => { it('supports logging with metadata', () => {
@ -41,16 +65,62 @@ describe('logger/index', () => {
expect(() => logger.debug('some meta')).not.toThrow(); expect(() => logger.debug('some meta')).not.toThrow();
}); });
it('sets meta', () => { describe('meta functions', () => {
expect(() => setMeta({ any: 'test' })).not.toThrow(); beforeEach(() => {
}); setContext(initialContext);
});
it('adds meta', () => { it('sets meta', () => {
expect(() => addMeta({ new: 'test' })).not.toThrow(); const logMeta = { foo: 'foo' };
}); const meta = { bar: 'bar' };
setMeta(meta);
it('removes meta', () => { logger.debug(logMeta, '');
expect(() => removeMeta(['new'])).not.toThrow();
expect(bunyanDebugSpy).toHaveBeenCalledWith(
{ logContext: initialContext, ...meta, ...logMeta },
'',
);
expect(bunyanDebugSpy).toHaveBeenCalledTimes(1);
});
it('adds meta', () => {
const logMeta = { foo: 'foo' };
const meta = { bar: 'bar' };
addMeta(meta);
logger.debug(logMeta, '');
expect(bunyanDebugSpy).toHaveBeenCalledWith(
{ logContext: initialContext, ...meta, ...logMeta },
'',
);
expect(bunyanDebugSpy).toHaveBeenCalledTimes(1);
});
it('removes meta', () => {
const logMeta = { foo: 'foo' };
const meta = { bar: 'bar' };
setMeta(meta);
logger.debug(logMeta, '');
expect(bunyanDebugSpy).toHaveBeenCalledWith(
{ logContext: initialContext, ...meta, ...logMeta },
'',
);
expect(bunyanDebugSpy).toHaveBeenCalledTimes(1);
removeMeta(Object.keys(meta));
logger.debug(logMeta, '');
expect(bunyanDebugSpy).toHaveBeenCalledWith(
{ logContext: initialContext, ...logMeta },
'',
);
expect(bunyanDebugSpy).toHaveBeenCalledTimes(2);
});
}); });
it('sets level', () => { it('sets level', () => {
@ -59,15 +129,30 @@ describe('logger/index', () => {
expect(logLevel()).toBe('debug'); expect(logLevel()).toBe('debug');
}); });
it('should create a child logger', () => {
const childLogger = (logger as RenovateLogger).childLogger();
const loggerSpy = jest.spyOn(logger, 'debug');
const childLoggerSpy = jest.spyOn(childLogger, 'debug');
childLogger.debug('test');
expect(loggerSpy).toHaveBeenCalledTimes(0);
expect(childLoggerSpy).toHaveBeenCalledTimes(1);
expect(childLoggerSpy).toHaveBeenCalledWith('test');
});
it('saves problems', () => { it('saves problems', () => {
addSecret('p4$$w0rd'); addSecret('p4$$w0rd');
levels('stdout', 'fatal'); levels('stdout', 'fatal');
logger.fatal('fatal error');
logger.error('some meta'); logger.error('some meta');
logger.error({ some: 'meta', password: 'super secret' }); logger.error({ some: 'meta', password: 'super secret' });
logger.error({ some: 'meta' }, 'message'); logger.error({ some: 'meta' }, 'message');
logger.warn('a warning with a p4$$w0rd'); logger.warn('a warning with a p4$$w0rd');
logger.trace('ignored');
logger.info('ignored'); logger.info('ignored');
expect(getProblems()).toMatchObject([ expect(getProblems()).toMatchObject([
{ msg: 'fatal error' },
{ msg: 'some meta' }, { msg: 'some meta' },
{ some: 'meta', password: '***********' }, { some: 'meta', password: '***********' },
{ some: 'meta', msg: 'message' }, { some: 'meta', msg: 'message' },

View file

@ -6,9 +6,8 @@ import upath from 'upath';
import cmdSerializer from './cmd-serializer'; import cmdSerializer from './cmd-serializer';
import configSerializer from './config-serializer'; import configSerializer from './config-serializer';
import errSerializer from './err-serializer'; import errSerializer from './err-serializer';
import { once, reset as onceReset } from './once';
import { RenovateStream } from './pretty-stdout'; import { RenovateStream } from './pretty-stdout';
import { getRemappedLevel } from './remap'; import { RenovateLogger } from './renovate-logger';
import type { BunyanRecord, Logger } from './types'; import type { BunyanRecord, Logger } from './types';
import { import {
ProblemStream, ProblemStream,
@ -17,161 +16,120 @@ import {
withSanitizer, withSanitizer,
} from './utils'; } from './utils';
let logContext: string = getEnv('LOG_CONTEXT') ?? nanoid();
let curMeta: Record<string, unknown> = {};
const problems = new ProblemStream(); const problems = new ProblemStream();
let stdoutLevel = validateLogLevel(getEnv('LOG_LEVEL'), 'info'); let stdoutLevel = validateLogLevel(getEnv('LOG_LEVEL'), 'info');
const stdout: bunyan.Stream = {
name: 'stdout',
level: stdoutLevel,
stream: process.stdout,
};
export function logLevel(): bunyan.LogLevelString { export function logLevel(): bunyan.LogLevelString {
return stdoutLevel; return stdoutLevel;
} }
// istanbul ignore if: not testable export function createDefaultStreams(
if (getEnv('LOG_FORMAT') !== 'json') { stdoutLevel: bunyan.LogLevelString,
// TODO: typings (#9615) problems: ProblemStream,
const prettyStdOut = new RenovateStream() as any; logFile: string | undefined,
prettyStdOut.pipe(process.stdout); ): bunyan.Stream[] {
stdout.stream = prettyStdOut; const stdout: bunyan.Stream = {
stdout.type = 'raw'; name: 'stdout',
level: stdoutLevel,
stream: process.stdout,
};
// istanbul ignore if: not testable
if (getEnv('LOG_FORMAT') !== 'json') {
// TODO: typings (#9615)
const prettyStdOut = new RenovateStream() as any;
prettyStdOut.pipe(process.stdout);
stdout.stream = prettyStdOut;
stdout.type = 'raw';
}
const problemsStream: bunyan.Stream = {
name: 'problems',
level: 'warn' as bunyan.LogLevel,
stream: problems as any,
type: 'raw',
};
// istanbul ignore next: not easily testable
const logFileStream: bunyan.Stream | undefined = is.string(logFile)
? createLogFileStream(logFile)
: undefined;
return [stdout, problemsStream, logFileStream].filter(
Boolean,
) as bunyan.Stream[];
} }
const bunyanLogger = bunyan.createLogger({ // istanbul ignore next: not easily testable
name: 'renovate', function createLogFileStream(logFile: string): bunyan.Stream {
serializers: { // Ensure log file directory exists
body: configSerializer,
cmd: cmdSerializer,
config: configSerializer,
migratedConfig: configSerializer,
originalConfig: configSerializer,
presetConfig: configSerializer,
oldConfig: configSerializer,
newConfig: configSerializer,
err: errSerializer,
},
streams: [
stdout,
{
name: 'problems',
level: 'warn' as bunyan.LogLevel,
stream: problems as any,
type: 'raw',
},
].map(withSanitizer),
});
const logFactory = (
_level: bunyan.LogLevelString,
): ((p1: unknown, p2: unknown) => void) => {
return (p1: any, p2: any): void => {
let level = _level;
if (p2) {
// meta and msg provided
const msg = p2;
const meta: Record<string, unknown> = { logContext, ...curMeta, ...p1 };
const remappedLevel = getRemappedLevel(msg);
// istanbul ignore if: not testable
if (remappedLevel) {
meta.oldLevel = level;
level = remappedLevel;
}
bunyanLogger[level](meta, msg);
} else if (is.string(p1)) {
// only message provided
const msg = p1;
const meta: Record<string, unknown> = { logContext, ...curMeta };
const remappedLevel = getRemappedLevel(msg);
// istanbul ignore if: not testable
if (remappedLevel) {
meta.oldLevel = level;
level = remappedLevel;
}
bunyanLogger[level](meta, msg);
} else {
// only meta provided
bunyanLogger[level]({ logContext, ...curMeta, ...p1 });
}
};
};
const loggerLevels: bunyan.LogLevelString[] = [
'trace',
'debug',
'info',
'warn',
'error',
'fatal',
];
export const logger: Logger = { once: { reset: onceReset } } as any;
loggerLevels.forEach((loggerLevel) => {
logger[loggerLevel] = logFactory(loggerLevel) as never;
const logOnceFn = (p1: any, p2: any): void => {
once(() => {
const logFn = logger[loggerLevel];
if (is.undefined(p2)) {
logFn(p1);
} else {
logFn(p1, p2);
}
}, logOnceFn);
};
logger.once[loggerLevel] = logOnceFn as never;
});
const logFile = getEnv('LOG_FILE');
// istanbul ignore if: not easily testable
if (is.string(logFile)) {
// ensure log file directory exists
const directoryName = upath.dirname(logFile); const directoryName = upath.dirname(logFile);
fs.ensureDirSync(directoryName); fs.ensureDirSync(directoryName);
addStream({ return {
name: 'logfile', name: 'logfile',
path: logFile, path: logFile,
level: validateLogLevel(getEnv('LOG_FILE_LEVEL'), 'debug'), level: validateLogLevel(getEnv('LOG_FILE_LEVEL'), 'debug'),
};
}
function serializedSanitizedLogger(streams: bunyan.Stream[]): bunyan {
return bunyan.createLogger({
name: 'renovate',
serializers: {
body: configSerializer,
cmd: cmdSerializer,
config: configSerializer,
migratedConfig: configSerializer,
originalConfig: configSerializer,
presetConfig: configSerializer,
oldConfig: configSerializer,
newConfig: configSerializer,
err: errSerializer,
},
streams: streams.map(withSanitizer),
}); });
} }
const defaultStreams = createDefaultStreams(
stdoutLevel,
problems,
getEnv('LOG_FILE'),
);
const bunyanLogger = serializedSanitizedLogger(defaultStreams);
const logContext = getEnv('LOG_CONTEXT') ?? nanoid();
const loggerInternal = new RenovateLogger(bunyanLogger, logContext, {});
export const logger: Logger = loggerInternal;
export function setContext(value: string): void { export function setContext(value: string): void {
logContext = value; loggerInternal.logContext = value;
} }
export function getContext(): any { export function getContext(): any {
return logContext; return loggerInternal.logContext;
} }
// setMeta overrides existing meta, may remove fields if no longer existing // setMeta overrides existing meta, may remove fields if no longer existing
export function setMeta(obj: Record<string, unknown>): void { export function setMeta(obj: Record<string, unknown>): void {
curMeta = { ...obj }; loggerInternal.setMeta(obj);
} }
// addMeta overrides or adds fields but does not remove any // addMeta overrides or adds fields but does not remove any
export function addMeta(obj: Record<string, unknown>): void { export function addMeta(obj: Record<string, unknown>): void {
curMeta = { ...curMeta, ...obj }; loggerInternal.addMeta(obj);
} }
// removeMeta removes the provided fields from meta // removeMeta removes the provided fields from meta
export function removeMeta(fields: string[]): void { export function removeMeta(fields: string[]): void {
Object.keys(curMeta).forEach((key) => { loggerInternal.removeMeta(fields);
if (fields.includes(key)) {
delete curMeta[key];
}
});
} }
export /* istanbul ignore next */ function addStream( export /* istanbul ignore next */ function addStream(
stream: bunyan.Stream, stream: bunyan.Stream,
): void { ): void {
bunyanLogger.addStream(withSanitizer(stream)); loggerInternal.addStream(stream);
} }
/** /**

View file

@ -0,0 +1,156 @@
import is from '@sindresorhus/is';
import type * as bunyan from 'bunyan';
import { once, reset as onceReset } from './once';
import { getRemappedLevel } from './remap';
import type { Logger } from './types';
import { getMessage, toMeta, withSanitizer } from './utils';
const loggerLevels: bunyan.LogLevelString[] = [
'trace',
'debug',
'info',
'warn',
'error',
'fatal',
];
type LoggerFunction = (p1: string | Record<string, any>, p2?: string) => void;
export class RenovateLogger implements Logger {
readonly logger: Logger = { once: { reset: onceReset } } as any;
readonly once = this.logger.once;
constructor(
private readonly bunyanLogger: bunyan,
private context: string,
private meta: Record<string, unknown>,
) {
for (const level of loggerLevels) {
this.logger[level] = this.logFactory(level) as never;
this.logger.once[level] = this.logOnceFn(level);
}
}
trace(p1: string): void;
trace(p1: Record<string, any>, p2?: string): void;
trace(p1: string | Record<string, any>, p2?: string): void {
this.log('trace', p1, p2);
}
debug(p1: string): void;
debug(p1: Record<string, any>, p2?: string): void;
debug(p1: string | Record<string, any>, p2?: string): void {
this.log('debug', p1, p2);
}
info(p1: string): void;
info(p1: Record<string, any>, p2?: string): void;
info(p1: string | Record<string, any>, p2?: string): void {
this.log('info', p1, p2);
}
warn(p1: string): void;
warn(p1: Record<string, any>, p2?: string): void;
warn(p1: string | Record<string, any>, p2?: string): void {
this.log('warn', p1, p2);
}
error(p1: string): void;
error(p1: Record<string, any>, p2?: string): void;
error(p1: string | Record<string, any>, p2?: string): void {
this.log('error', p1, p2);
}
fatal(p1: string): void;
fatal(p1: Record<string, any>, p2?: string): void;
fatal(p1: string | Record<string, any>, p2?: string): void {
this.log('fatal', p1, p2);
}
addStream(stream: bunyan.Stream): void {
this.bunyanLogger.addStream(withSanitizer(stream));
}
childLogger(): RenovateLogger {
return new RenovateLogger(
this.bunyanLogger.child({}),
this.context,
this.meta,
);
}
get logContext(): string {
return this.context;
}
set logContext(context: string) {
this.context = context;
}
setMeta(obj: Record<string, unknown>): void {
this.meta = { ...obj };
}
addMeta(obj: Record<string, unknown>): void {
this.meta = { ...this.meta, ...obj };
}
removeMeta(fields: string[]): void {
for (const key of Object.keys(this.meta)) {
if (fields.includes(key)) {
delete this.meta[key];
}
}
}
private logFactory(_level: bunyan.LogLevelString): LoggerFunction {
return (p1: string | Record<string, any>, p2?: string): void => {
const meta: Record<string, unknown> = {
logContext: this.context,
...this.meta,
...toMeta(p1),
};
const msg = getMessage(p1, p2);
let level = _level;
if (is.string(msg)) {
const remappedLevel = getRemappedLevel(msg);
// istanbul ignore if: not easily testable
if (remappedLevel) {
meta.oldLevel = level;
level = remappedLevel;
}
this.bunyanLogger[level](meta, msg);
} else {
this.bunyanLogger[level](meta);
}
};
}
private logOnceFn(level: bunyan.LogLevelString): LoggerFunction {
const logOnceFn = (p1: string | Record<string, any>, p2?: string): void => {
once(() => {
const logFn = this[level].bind(this); // bind to the instance.
if (is.string(p1)) {
logFn(p1);
} else {
logFn(p1, p2);
}
}, logOnceFn);
};
return logOnceFn;
}
private log(
level: bunyan.LogLevelString,
p1: string | Record<string, any>,
p2?: string,
): void {
const logFn = this.logger[level];
if (is.string(p1)) {
logFn(p1);
} else {
logFn(p1, p2);
}
}
}
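
A minimal usage sketch of the class above; the logger name and context value are illustrative:

```ts
import bunyan from 'bunyan';
import { RenovateLogger } from './renovate-logger';

const log = new RenovateLogger(
  bunyan.createLogger({ name: 'example' }),
  'ctx-123',
  {},
);

log.addMeta({ repository: 'some/repo' });
log.info('starting'); // meta: { logContext: 'ctx-123', repository: 'some/repo' }
log.once.info('logged a single time until log.once.reset() is called');

// A child logger shares context and meta but writes through bunyan's child().
const child = log.childLogger();
child.debug({ step: 1 }, 'child message');
```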

View file

@ -339,3 +339,16 @@ export function getEnv(key: string): string | undefined {
.map((v) => v?.toLowerCase().trim()) .map((v) => v?.toLowerCase().trim())
.find(is.nonEmptyStringAndNotWhitespace); .find(is.nonEmptyStringAndNotWhitespace);
} }
export function getMessage(
p1: string | Record<string, any>,
p2?: string,
): string | undefined {
return is.string(p1) ? p1 : p2;
}
export function toMeta(
p1: string | Record<string, any>,
): Record<string, unknown> {
return is.object(p1) ? p1 : {};
}
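
These helpers split the logger's overloaded `(p1, p2)` arguments into message and metadata; a quick sketch of the mapping, imported as in `renovate-logger.ts`:

```ts
import { getMessage, toMeta } from './utils';

getMessage('only a message'); // -> 'only a message'
getMessage({ some: 'meta' }, 'msg'); // -> 'msg'
getMessage({ some: 'meta' }); // -> undefined

toMeta({ some: 'meta' }); // -> { some: 'meta' }
toMeta('only a message'); // -> {}
```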

View file

@ -38,6 +38,7 @@ import { GlasskubePackagesDatasource } from './glasskube-packages';
import { GoDatasource } from './go'; import { GoDatasource } from './go';
import { GolangVersionDatasource } from './golang-version'; import { GolangVersionDatasource } from './golang-version';
import { GradleVersionDatasource } from './gradle-version'; import { GradleVersionDatasource } from './gradle-version';
import { HackageDatasource } from './hackage';
import { HelmDatasource } from './helm'; import { HelmDatasource } from './helm';
import { HermitDatasource } from './hermit'; import { HermitDatasource } from './hermit';
import { HexDatasource } from './hex'; import { HexDatasource } from './hex';
@ -111,6 +112,7 @@ api.set(GlasskubePackagesDatasource.id, new GlasskubePackagesDatasource());
api.set(GoDatasource.id, new GoDatasource()); api.set(GoDatasource.id, new GoDatasource());
api.set(GolangVersionDatasource.id, new GolangVersionDatasource()); api.set(GolangVersionDatasource.id, new GolangVersionDatasource());
api.set(GradleVersionDatasource.id, new GradleVersionDatasource()); api.set(GradleVersionDatasource.id, new GradleVersionDatasource());
api.set(HackageDatasource.id, new HackageDatasource());
api.set(HelmDatasource.id, new HelmDatasource()); api.set(HelmDatasource.id, new HelmDatasource());
api.set(HermitDatasource.id, new HermitDatasource()); api.set(HermitDatasource.id, new HermitDatasource());
api.set(HexDatasource.id, new HexDatasource()); api.set(HexDatasource.id, new HexDatasource());

View file

@ -1,9 +1,16 @@
This datasource returns the database engine versions available for use on [AWS RDS](https://aws.amazon.com/rds/) via the AWS API. This datasource returns the database engine versions available for use on [AWS RDS](https://aws.amazon.com/rds/) via the AWS API.
Generally speaking, all publicly released database versions are available for use on RDS. Generally speaking, all publicly released database versions are available for use on RDS.
However, new versions may not be available on RDS for a few weeks or months after their release while AWS tests them. However, new versions may not be available on RDS for a few weeks or months after their release while AWS tests them.
In addition, AWS may pull existing versions if serious problems arise during their use. In addition, AWS may pull existing versions if serious problems arise during their use.
**AWS API configuration** <!-- prettier-ignore -->
!!! warning
The default versioning of the `aws-rds` datasource is _not_ compatible with [AWS Aurora](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)!
If you use AWS Aurora, you must set your own custom versioning.
Scroll down to see an example.
### AWS API configuration
Since the datasource uses the AWS SDK for JavaScript, you can configure it like other AWS Tools. Since the datasource uses the AWS SDK for JavaScript, you can configure it like other AWS Tools.
You can use common AWS configuration options, for example: You can use common AWS configuration options, for example:
@ -14,9 +21,7 @@ You can use common AWS configuration options, for example:
Read the [AWS Developer Guide - Configuring the SDK for JavaScript](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/configuring-the-jssdk.html) for more information on these configuration options. Read the [AWS Developer Guide - Configuring the SDK for JavaScript](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/configuring-the-jssdk.html) for more information on these configuration options.
The minimal IAM privileges required for this datasource are: ```json title="Minimal IAM privileges needed for this datasource"
```json
{ {
"Sid": "AllowDBEngineVersionLookup", "Sid": "AllowDBEngineVersionLookup",
"Effect": "Allow", "Effect": "Allow",
@ -27,7 +32,7 @@ The minimal IAM privileges required for this datasource are:
Read the [AWS RDS IAM reference](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonrds.html) for more information. Read the [AWS RDS IAM reference](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonrds.html) for more information.
**Usage** ### Usage
Because Renovate has no manager for the AWS RDS datasource, you need to help Renovate by configuring the custom manager to identify the RDS dependencies you want updated. Because Renovate has no manager for the AWS RDS datasource, you need to help Renovate by configuring the custom manager to identify the RDS dependencies you want updated.
@ -53,16 +58,14 @@ For example:
[{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}] [{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}]
``` ```
Here's an example of using the custom manager to configure this datasource: ```json title="Using a custom manager to configure this datasource"
```json
{ {
"customManagers": [ "customManagers": [
{ {
"customType": "regex", "customType": "regex",
"fileMatch": ["\\.yaml$"], "fileMatch": ["\\.yaml$"],
"matchStrings": [ "matchStrings": [
".*amiFilter=(?<lookupName>.+?)[ ]*\n[ ]*(?<depName>[a-zA-Z0-9-_:]*)[ ]*?:[ ]*?[\"|']?(?<currentValue>[.\\d]+)[\"|']?.*" ".*rdsFilter=(?<lookupName>.+?)[ ]*\n[ ]*(?<depName>[a-zA-Z0-9-_:]*)[ ]*?:[ ]*?[\"|']?(?<currentValue>[.\\d]+)[\"|']?.*"
], ],
"datasourceTemplate": "aws-rds" "datasourceTemplate": "aws-rds"
} }
@ -74,6 +77,33 @@ The configuration above matches every YAML file, and recognizes these lines:
```yaml ```yaml
spec: spec:
# amiFilter=[{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}] # rdsFilter=[{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}]
engineVersion: 5.7.34 engineVersion: 5.7.34
``` ```
#### Using Terraform, `aws-rds` datasource and Aurora MySQL
Here is a Renovate configuration for updating Aurora MySQL engine versions in Terraform files with the `aws-rds` datasource:
```json
{
"customManagers": [
{
"description": "Update RDS",
"customType": "regex",
"fileMatch": [".+\\.tf$"],
"matchStrings": [
"\\s*#\\s*renovate:\\s*rdsFilter=(?<lookupName>.+?) depName=(?<depName>.*) versioning=(?<versioning>.*)\\s*.*_version\\s*=\\s*\"(?<currentValue>.*)\""
],
"datasourceTemplate": "aws-rds"
}
]
}
```
The configuration above updates an AWS RDS engine version inside a Terraform file via the custom manager, matching lines like:
```hcl
# renovate:rdsFilter=[{"Name":"engine","Values":["aurora-mysql"]},{"Name":"engine-version","Values":["8.0"]}] depName=aurora-mysql versioning=loose
engine_version = "8.0.mysql_aurora.3.05.2"
```

View file

@ -13,7 +13,7 @@ describe('modules/datasource/github-runners/index', () => {
releases: [ releases: [
{ version: '16.04', isDeprecated: true }, { version: '16.04', isDeprecated: true },
{ version: '18.04', isDeprecated: true }, { version: '18.04', isDeprecated: true },
{ version: '20.04' }, { version: '20.04', isDeprecated: true },
{ version: '22.04' }, { version: '22.04' },
{ version: '24.04' }, { version: '24.04' },
], ],
@ -58,6 +58,7 @@ describe('modules/datasource/github-runners/index', () => {
{ version: '2016', isDeprecated: true }, { version: '2016', isDeprecated: true },
{ version: '2019' }, { version: '2019' },
{ version: '2022' }, { version: '2022' },
{ version: '2025', isStable: false },
], ],
sourceUrl: 'https://github.com/actions/runner-images', sourceUrl: 'https://github.com/actions/runner-images',
}); });

View file

@ -19,7 +19,7 @@ export class GithubRunnersDatasource extends Datasource {
ubuntu: [ ubuntu: [
{ version: '24.04' }, { version: '24.04' },
{ version: '22.04' }, { version: '22.04' },
{ version: '20.04' }, { version: '20.04', isDeprecated: true },
{ version: '18.04', isDeprecated: true }, { version: '18.04', isDeprecated: true },
{ version: '16.04', isDeprecated: true }, { version: '16.04', isDeprecated: true },
], ],
@ -39,6 +39,7 @@ export class GithubRunnersDatasource extends Datasource {
{ version: '10.15', isDeprecated: true }, { version: '10.15', isDeprecated: true },
], ],
windows: [ windows: [
{ version: '2025', isStable: false },
{ version: '2022' }, { version: '2022' },
{ version: '2019' }, { version: '2019' },
{ version: '2016', isDeprecated: true }, { version: '2016', isDeprecated: true },

View file

@ -494,7 +494,10 @@ describe('modules/datasource/go/releases-goproxy', () => {
.get('.v2/@latest') .get('.v2/@latest')
.reply(200, { Version: 'v2.4.0' }) .reply(200, { Version: 'v2.4.0' })
.get('.v3/@v/list') .get('.v3/@v/list')
.reply(200, ['v3.0.0', 'v3.0.1', ' \n'].join('\n')) .reply(
200,
['v1.0.0', 'v2.0.0', 'v3.0.0', 'v3.0.1', 'v4.0.0', ' \n'].join('\n'),
)
.get('.v3/@v/v3.0.0.info') .get('.v3/@v/v3.0.0.info')
.reply(200, { Version: 'v3.0.0', Time: '2022-05-21T10:33:21Z' }) .reply(200, { Version: 'v3.0.0', Time: '2022-05-21T10:33:21Z' })
.get('.v3/@v/v3.0.1.info') .get('.v3/@v/v3.0.1.info')
@ -602,8 +605,6 @@ describe('modules/datasource/go/releases-goproxy', () => {
.get('/@v/list') .get('/@v/list')
.reply(200) .reply(200)
.get('/@latest') .get('/@latest')
.reply(404)
.get('/v2/@v/list')
.reply(404); .reply(404);
const res = await datasource.getReleases({ const res = await datasource.getReleases({
@ -621,9 +622,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
.get('/@v/list') .get('/@v/list')
.reply(200) .reply(200)
.get('/@latest') .get('/@latest')
.reply(200, { Version: 'v0.0.0-20230905200255-921286631fa9' }) .reply(200, { Version: 'v0.0.0-20230905200255-921286631fa9' });
.get('/v2/@v/list')
.reply(404);
const res = await datasource.getReleases({ const res = await datasource.getReleases({
packageName: 'github.com/google/btree', packageName: 'github.com/google/btree',

View file

@ -213,9 +213,24 @@ export class GoProxyDatasource extends Datasource {
major += 1; // v0 and v1 are the same module major += 1; // v0 and v1 are the same module
} }
let releases: Release[] = [];
try { try {
const res = await this.listVersions(baseUrl, pkg); const res = await this.listVersions(baseUrl, pkg);
const releases = await p.map(res, async (versionInfo) => {
// Artifactory returns all versions in any major (past and future),
// so starting from v2 we filter them to avoid an infinite loop
const filteredReleases = res.filter(({ version }) => {
if (major < 2) {
return true;
}
return (
version.split(regEx(/[^\d]+/)).find(is.truthy) === major.toString()
);
});
releases = await p.map(filteredReleases, async (versionInfo) => {
const { version, newDigest, releaseTimestamp } = versionInfo; const { version, newDigest, releaseTimestamp } = versionInfo;
if (releaseTimestamp) { if (releaseTimestamp) {
@ -258,6 +273,10 @@ export class GoProxyDatasource extends Datasource {
} }
} }
} }
if (!releases.length) {
break;
}
} }
return result; return result;
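
To make the filter above concrete, a small sketch of what the first-numeric-component check evaluates to (import paths assumed to match this file's siblings):

```ts
import is from '@sindresorhus/is';
import { regEx } from '../../../util/regex';

const major = 3;
for (const version of ['v1.0.0', 'v3.0.0', 'v3.0.1', 'v4.0.0']) {
  // 'v3.0.1'.split(/[^\d]+/) -> ['', '3', '0', '1']; find(is.truthy) -> '3'
  const firstNumeric = version.split(regEx(/[^\d]+/)).find(is.truthy);
  console.log(version, firstNumeric === major.toString()); // only v3.x is kept
}
```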

View file

@ -0,0 +1,57 @@
import { getPkgReleases } from '..';
import * as httpMock from '../../../../test/http-mock';
import { HackageDatasource, versionToRelease } from './index';
const baseUrl = 'https://hackage.haskell.org/';
describe('modules/datasource/hackage/index', () => {
describe('versionToRelease', () => {
it('should make release with given version', () => {
expect(
versionToRelease('3.1.0', 'base', 'http://localhost').version,
).toBe('3.1.0');
});
});
describe('getReleases', () => {
it('return null with empty registryUrl', async () => {
expect(
await new HackageDatasource().getReleases({
packageName: 'base',
registryUrl: undefined,
}),
).toBeNull();
});
it('returns null for 404', async () => {
httpMock.scope(baseUrl).get('/package/base.json').reply(404);
expect(
await getPkgReleases({
datasource: HackageDatasource.id,
packageName: 'base',
}),
).toBeNull();
});
it('returns release for 200', async () => {
httpMock
.scope(baseUrl)
.get('/package/base.json')
.reply(200, { '4.20.0.1': 'normal' });
expect(
await getPkgReleases({
datasource: HackageDatasource.id,
packageName: 'base',
}),
).toEqual({
registryUrl: baseUrl,
releases: [
{
changelogUrl: baseUrl + 'package/base-4.20.0.1/changelog',
version: '4.20.0.1',
},
],
});
});
});
});

View file

@ -0,0 +1,54 @@
import is from '@sindresorhus/is';
import { joinUrlParts } from '../../../util/url';
import * as pvpVersioning from '../../versioning/pvp';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, Release, ReleaseResult } from '../types';
import { HackagePackageMetadata } from './schema';
export class HackageDatasource extends Datasource {
static readonly id = 'hackage';
constructor() {
super(HackageDatasource.id);
}
override readonly defaultVersioning = pvpVersioning.id;
override readonly customRegistrySupport = false;
override readonly defaultRegistryUrls = ['https://hackage.haskell.org/'];
async getReleases(config: GetReleasesConfig): Promise<ReleaseResult | null> {
const { registryUrl, packageName } = config;
if (!is.nonEmptyString(registryUrl)) {
return null;
}
const massagedPackageName = encodeURIComponent(packageName);
const url = joinUrlParts(
registryUrl,
'package',
`${massagedPackageName}.json`,
);
const res = await this.http.getJson(url, HackagePackageMetadata);
const keys = Object.keys(res.body);
return {
releases: keys.map((version) =>
versionToRelease(version, packageName, registryUrl),
),
};
}
}
export function versionToRelease(
version: string,
packageName: string,
registryUrl: string,
): Release {
return {
version,
changelogUrl: joinUrlParts(
registryUrl,
'package',
`${packageName}-${version}`,
'changelog',
),
};
}

View file

@ -0,0 +1,7 @@
This datasource uses
[the Hackage JSON API](https://hackage.haskell.org/api#package-info-json)
to fetch versions for published Haskell packages.
While not all packages use [PVP](https://pvp.haskell.org), the majority do.
This datasource therefore defaults to PVP versioning.
The versioning can be overridden with `packageRules`, e.g. via `matchDatasources`.
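
A minimal sketch of such an override; the package name and the choice of `semver` are illustrative only:

```json
{
  "packageRules": [
    {
      "matchDatasources": ["hackage"],
      "matchPackageNames": ["some-package"],
      "versioning": "semver"
    }
  ]
}
```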

View file

@ -0,0 +1,3 @@
import { z } from 'zod';
export const HackagePackageMetadata = z.record(z.string());

View file

@ -19,7 +19,9 @@
"licenses": [ "licenses": [
"MIT" "MIT"
], ],
"links": {}, "links": {
"GitHub": "https://github.com/renovate_test/private_package"
},
"maintainers": [] "maintainers": []
}, },
"name": "private_package", "name": "private_package",

View file

@ -104,6 +104,7 @@ exports[`modules/datasource/hex/index getReleases processes a private repo with
"version": "0.1.1", "version": "0.1.1",
}, },
], ],
"sourceUrl": "https://github.com/renovate_test/private_package",
} }
`; `;

View file

@ -168,6 +168,7 @@ describe('modules/datasource/hex/index', () => {
expect(result).toEqual({ expect(result).toEqual({
homepage: 'https://hex.pm/packages/renovate_test/private_package', homepage: 'https://hex.pm/packages/renovate_test/private_package',
sourceUrl: 'https://github.com/renovate_test/private_package',
registryUrl: 'https://hex.pm', registryUrl: 'https://hex.pm',
releases: [ releases: [
{ releaseTimestamp: '2021-08-04T15:26:26.500Z', version: '0.1.0' }, { releaseTimestamp: '2021-08-04T15:26:26.500Z', version: '0.1.0' },

View file

@ -8,9 +8,21 @@ export const HexRelease = z
html_url: z.string().optional(), html_url: z.string().optional(),
meta: z meta: z
.object({ .object({
links: z.object({ links: z
Github: z.string(), .record(z.string())
}), .transform((links) =>
Object.fromEntries(
Object.entries(links).map(([key, value]) => [
key.toLowerCase(),
value,
]),
),
)
.pipe(
z.object({
github: z.string(),
}),
),
}) })
.nullable() .nullable()
.catch(null), .catch(null),
@ -53,8 +65,8 @@ export const HexRelease = z
releaseResult.homepage = hexResponse.html_url; releaseResult.homepage = hexResponse.html_url;
} }
if (hexResponse.meta?.links?.Github) { if (hexResponse.meta?.links?.github) {
releaseResult.sourceUrl = hexResponse.meta.links.Github; releaseResult.sourceUrl = hexResponse.meta.links.github;
} }
return releaseResult; return releaseResult;
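
To illustrate the schema change above, a standalone zod sketch of the key normalization (sample URLs are hypothetical): links arrive with arbitrary casing, are lowercased, then piped into a schema requiring `github`:

```ts
import { z } from 'zod';

// Same shape as the `links` field above: lowercase all keys, then
// require a `github` entry regardless of the original casing.
const Links = z
  .record(z.string())
  .transform((links) =>
    Object.fromEntries(
      Object.entries(links).map(([key, value]) => [key.toLowerCase(), value]),
    ),
  )
  .pipe(z.object({ github: z.string() }));

// Both casings parse to { github: '...' }:
Links.parse({ GitHub: 'https://github.com/renovate_test/private_package' });
Links.parse({ github: 'https://github.com/renovate_test/private_package' });
```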

View file

@ -46,7 +46,10 @@ describe('modules/datasource/maven/s3', () => {
Bucket: 'repobucket', Bucket: 'repobucket',
Key: 'org/example/package/maven-metadata.xml', Key: 'org/example/package/maven-metadata.xml',
}) })
.resolvesOnce({ Body: meta as never }); .resolvesOnce({
Body: meta as never,
LastModified: new Date('2020-01-01T00:00Z'),
});
const res = await get('org.example:package', baseUrlS3); const res = await get('org.example:package', baseUrlS3);
@ -89,7 +92,7 @@ describe('modules/datasource/maven/s3', () => {
{ {
failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml', failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
}, },
'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars', 'Maven S3 lookup error: credentials provider error, check "AWS_ACCESS_KEY_ID" and "AWS_SECRET_ACCESS_KEY" variables',
); );
}); });
@ -108,7 +111,7 @@ describe('modules/datasource/maven/s3', () => {
{ {
failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml', failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
}, },
'Dependency lookup failed. Please a correct AWS_REGION env var', 'Maven S3 lookup error: missing region, check "AWS_REGION" variable',
); );
}); });
@ -127,7 +130,7 @@ describe('modules/datasource/maven/s3', () => {
{ {
failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml', failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
}, },
'S3 url not found', 'Maven S3 lookup error: object not found',
); );
}); });
@ -146,10 +149,23 @@ describe('modules/datasource/maven/s3', () => {
{ {
failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml', failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
}, },
'S3 url not found', 'Maven S3 lookup error: object not found',
); );
}); });
it('returns null for Deleted marker', async () => {
s3mock
.on(GetObjectCommand, {
Bucket: 'repobucket',
Key: 'org/example/package/maven-metadata.xml',
})
.resolvesOnce({ DeleteMarker: true });
const res = await get('org.example:package', baseUrlS3);
expect(res).toBeNull();
});
it('returns null for unknown error', async () => { it('returns null for unknown error', async () => {
s3mock s3mock
.on(GetObjectCommand, { .on(GetObjectCommand, {
@ -163,10 +179,10 @@ describe('modules/datasource/maven/s3', () => {
expect(res).toBeNull(); expect(res).toBeNull();
expect(logger.debug).toHaveBeenCalledWith( expect(logger.debug).toHaveBeenCalledWith(
{ {
err: expect.objectContaining({ message: 'Unknown error' }),
failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml', failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
message: 'Unknown error',
}, },
'Unknown S3 download error', 'Maven S3 lookup error: unknown error',
); );
}); });
@ -178,9 +194,6 @@ describe('modules/datasource/maven/s3', () => {
}) })
.resolvesOnce({}); .resolvesOnce({});
expect(await get('org.example:package', baseUrlS3)).toBeNull(); expect(await get('org.example:package', baseUrlS3)).toBeNull();
expect(logger.debug).toHaveBeenCalledWith(
"Expecting Readable response type got 'undefined' type instead",
);
}); });
}); });
}); });

View file

@ -1,4 +1,5 @@
import type { XmlDocument } from 'xmldoc'; import type { XmlDocument } from 'xmldoc';
import type { Result } from '../../../util/result';
import type { ReleaseResult } from '../types'; import type { ReleaseResult } from '../types';
export interface MavenDependency { export interface MavenDependency {
@ -19,3 +20,30 @@ export type DependencyInfo = Pick<
ReleaseResult, ReleaseResult,
'homepage' | 'sourceUrl' | 'packageScope' 'homepage' | 'sourceUrl' | 'packageScope'
>; >;
export interface MavenFetchSuccess<T = string> {
isCacheable?: boolean;
lastModified?: string;
data: T;
}
export type MavenFetchError =
| { type: 'invalid-url' }
| { type: 'host-disabled' }
| { type: 'not-found' }
| { type: 'host-error' }
| { type: 'permission-issue' }
| { type: 'temporary-error' }
| { type: 'maven-central-temporary-error'; err: Error }
| { type: 'connection-error' }
| { type: 'unsupported-host' }
| { type: 'unsupported-format' }
| { type: 'unsupported-protocol' }
| { type: 'credentials-error' }
| { type: 'missing-aws-region' }
| { type: 'unknown'; err: Error };
export type MavenFetchResult<T = string> = Result<
MavenFetchSuccess<T>,
MavenFetchError
>;
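
A small sketch of producing and consuming this result type; it assumes the `Result` util's `ok`/`err`/`transform`/`unwrapOrNull` methods used elsewhere in this diff:

```ts
import { Result } from '../../../util/result';
import type { MavenFetchError, MavenFetchResult } from './types';

function parseBody(body: string | null): MavenFetchResult {
  if (body === null) {
    return Result.err({ type: 'not-found' } satisfies MavenFetchError);
  }
  return Result.ok({ data: body, isCacheable: true });
}

// Map the success payload and fall back to null on any error,
// mirroring downloadHttpContent() in util.ts.
const content = parseBody('<metadata/>')
  .transform(({ data }) => data)
  .unwrapOrNull();
```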

View file

@ -2,6 +2,7 @@ import type Request from 'got/dist/source/core';
import { partial } from '../../../../test/util'; import { partial } from '../../../../test/util';
import { HOST_DISABLED } from '../../../constants/error-messages'; import { HOST_DISABLED } from '../../../constants/error-messages';
import { Http, HttpError } from '../../../util/http'; import { Http, HttpError } from '../../../util/http';
import type { MavenFetchError } from './types';
import { import {
checkResource, checkResource,
downloadHttpProtocol, downloadHttpProtocol,
@ -55,9 +56,12 @@ describe('modules/datasource/maven/util', () => {
}); });
describe('downloadS3Protocol', () => { describe('downloadS3Protocol', () => {
it('returns null for non-S3 URLs', async () => { it('fails for non-S3 URLs', async () => {
const res = await downloadS3Protocol(new URL('http://not-s3.com/')); const res = await downloadS3Protocol(new URL('http://not-s3.com/'));
expect(res).toBeNull(); expect(res.unwrap()).toEqual({
ok: false,
err: { type: 'invalid-url' } satisfies MavenFetchError,
});
}); });
}); });
@ -67,7 +71,10 @@ describe('modules/datasource/maven/util', () => {
get: () => Promise.reject(httpError({ message: HOST_DISABLED })), get: () => Promise.reject(httpError({ message: HOST_DISABLED })),
}); });
const res = await downloadHttpProtocol(http, 'some://'); const res = await downloadHttpProtocol(http, 'some://');
expect(res).toBeNull(); expect(res.unwrap()).toEqual({
ok: false,
err: { type: 'host-disabled' } satisfies MavenFetchError,
});
}); });
it('returns empty for host error', async () => { it('returns empty for host error', async () => {
@ -75,7 +82,10 @@ describe('modules/datasource/maven/util', () => {
get: () => Promise.reject(httpError({ code: 'ETIMEDOUT' })), get: () => Promise.reject(httpError({ code: 'ETIMEDOUT' })),
}); });
const res = await downloadHttpProtocol(http, 'some://'); const res = await downloadHttpProtocol(http, 'some://');
expect(res).toBeNull(); expect(res.unwrap()).toEqual({
ok: false,
err: { type: 'host-error' } satisfies MavenFetchError,
});
}); });
it('returns empty for temporary error', async () => { it('returns empty for temporary error', async () => {
@ -83,7 +93,10 @@ describe('modules/datasource/maven/util', () => {
get: () => Promise.reject(httpError({ code: 'ECONNRESET' })), get: () => Promise.reject(httpError({ code: 'ECONNRESET' })),
}); });
const res = await downloadHttpProtocol(http, 'some://'); const res = await downloadHttpProtocol(http, 'some://');
expect(res).toBeNull(); expect(res.unwrap()).toEqual({
ok: false,
err: { type: 'temporary-error' } satisfies MavenFetchError,
});
}); });
it('returns empty for connection error', async () => { it('returns empty for connection error', async () => {
@ -91,7 +104,10 @@ describe('modules/datasource/maven/util', () => {
get: () => Promise.reject(httpError({ code: 'ECONNREFUSED' })), get: () => Promise.reject(httpError({ code: 'ECONNREFUSED' })),
}); });
const res = await downloadHttpProtocol(http, 'some://'); const res = await downloadHttpProtocol(http, 'some://');
expect(res).toBeNull(); expect(res.unwrap()).toEqual({
ok: false,
err: { type: 'connection-error' } satisfies MavenFetchError,
});
}); });
it('returns empty for unsupported error', async () => { it('returns empty for unsupported error', async () => {
@ -100,7 +116,10 @@ describe('modules/datasource/maven/util', () => {
Promise.reject(httpError({ name: 'UnsupportedProtocolError' })), Promise.reject(httpError({ name: 'UnsupportedProtocolError' })),
}); });
const res = await downloadHttpProtocol(http, 'some://'); const res = await downloadHttpProtocol(http, 'some://');
expect(res).toBeNull(); expect(res.unwrap()).toEqual({
ok: false,
err: { type: 'unsupported-host' } satisfies MavenFetchError,
});
}); });
}); });

View file

@ -20,6 +20,8 @@ import type {
DependencyInfo, DependencyInfo,
HttpResourceCheckResult, HttpResourceCheckResult,
MavenDependency, MavenDependency,
MavenFetchResult,
MavenFetchSuccess,
MavenXml, MavenXml,
} from './types'; } from './types';
@ -69,121 +71,183 @@ export async function downloadHttpProtocol(
http: Http, http: Http,
pkgUrl: URL | string, pkgUrl: URL | string,
opts: HttpOptions = {}, opts: HttpOptions = {},
): Promise<HttpResponse | null> { ): Promise<MavenFetchResult> {
const url = pkgUrl.toString(); const url = pkgUrl.toString();
const res = await Result.wrap(http.get(url, opts)) const fetchResult = await Result.wrap<HttpResponse, Error>(
.onError((err) => { http.get(url, opts),
)
.transform((res): MavenFetchSuccess => {
const result: MavenFetchSuccess = { data: res.body };
if (!res.authorization) {
result.isCacheable = true;
}
const lastModified = normalizeDate(res?.headers?.['last-modified']);
if (lastModified) {
result.lastModified = lastModified;
}
return result;
})
.catch((err): MavenFetchResult => {
// istanbul ignore next: never happens, needs for type narrowing // istanbul ignore next: never happens, needs for type narrowing
if (!(err instanceof HttpError)) { if (!(err instanceof HttpError)) {
return; return Result.err({ type: 'unknown', err });
} }
const failedUrl = url; const failedUrl = url;
if (err.message === HOST_DISABLED) { if (err.message === HOST_DISABLED) {
logger.trace({ failedUrl }, 'Host disabled'); logger.trace({ failedUrl }, 'Host disabled');
return; return Result.err({ type: 'host-disabled' });
} }
if (isNotFoundError(err)) { if (isNotFoundError(err)) {
logger.trace({ failedUrl }, `Url not found`); logger.trace({ failedUrl }, `Url not found`);
return; return Result.err({ type: 'not-found' });
} }
if (isHostError(err)) { if (isHostError(err)) {
logger.debug(`Cannot connect to host ${failedUrl}`); logger.debug(`Cannot connect to host ${failedUrl}`);
return; return Result.err({ type: 'host-error' });
} }
if (isPermissionsIssue(err)) { if (isPermissionsIssue(err)) {
logger.debug( logger.debug(
`Dependency lookup unauthorized. Please add authentication with a hostRule for ${failedUrl}`, `Dependency lookup unauthorized. Please add authentication with a hostRule for ${failedUrl}`,
); );
return; return Result.err({ type: 'permission-issue' });
} }
if (isTemporaryError(err)) { if (isTemporaryError(err)) {
logger.debug({ failedUrl, err }, 'Temporary error'); logger.debug({ failedUrl, err }, 'Temporary error');
return; if (getHost(url) === getHost(MAVEN_REPO)) {
return Result.err({ type: 'maven-central-temporary-error', err });
} else {
return Result.err({ type: 'temporary-error' });
}
} }
if (isConnectionError(err)) { if (isConnectionError(err)) {
logger.debug(`Connection refused to maven registry ${failedUrl}`); logger.debug(`Connection refused to maven registry ${failedUrl}`);
return; return Result.err({ type: 'connection-error' });
} }
if (isUnsupportedHostError(err)) { if (isUnsupportedHostError(err)) {
logger.debug(`Unsupported host ${failedUrl}`); logger.debug(`Unsupported host ${failedUrl}`);
return; return Result.err({ type: 'unsupported-host' });
} }
logger.info({ failedUrl, err }, 'Unknown HTTP download error'); logger.info({ failedUrl, err }, 'Unknown HTTP download error');
}) return Result.err({ type: 'unknown', err });
.catch((err): Result<HttpResponse | 'silent-error', ExternalHostError> => { });
if (
err instanceof HttpError &&
isTemporaryError(err) &&
getHost(url) === getHost(MAVEN_REPO)
) {
return Result.err(new ExternalHostError(err));
}
return Result.ok('silent-error'); const { err } = fetchResult.unwrap();
}) if (err?.type === 'maven-central-temporary-error') {
.unwrapOrThrow(); throw new ExternalHostError(err.err);
if (res === 'silent-error') {
return null;
} }
return res; return fetchResult;
}
export async function downloadHttpContent(
http: Http,
pkgUrl: URL | string,
opts: HttpOptions = {},
): Promise<string | null> {
const fetchResult = await downloadHttpProtocol(http, pkgUrl, opts);
return fetchResult.transform(({ data }) => data).unwrapOrNull();
} }
function isS3NotFound(err: Error): boolean { function isS3NotFound(err: Error): boolean {
return err.message === 'NotFound' || err.message === 'NoSuchKey'; return err.message === 'NotFound' || err.message === 'NoSuchKey';
} }
export async function downloadS3Protocol(pkgUrl: URL): Promise<string | null> { export async function downloadS3Protocol(
pkgUrl: URL,
): Promise<MavenFetchResult> {
logger.trace({ url: pkgUrl.toString() }, `Attempting to load S3 dependency`); logger.trace({ url: pkgUrl.toString() }, `Attempting to load S3 dependency`);
try {
const s3Url = parseS3Url(pkgUrl); const s3Url = parseS3Url(pkgUrl);
if (s3Url === null) { if (!s3Url) {
return null; return Result.err({ type: 'invalid-url' });
}
const { Body: res } = await getS3Client().send(new GetObjectCommand(s3Url));
if (res instanceof Readable) {
return streamToString(res);
}
logger.debug(
`Expecting Readable response type got '${typeof res}' type instead`,
);
} catch (err) {
const failedUrl = pkgUrl.toString();
if (err.name === 'CredentialsProviderError') {
logger.debug(
{ failedUrl },
'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars',
);
} else if (err.message === 'Region is missing') {
logger.debug(
{ failedUrl },
'Dependency lookup failed. Please a correct AWS_REGION env var',
);
} else if (isS3NotFound(err)) {
logger.trace({ failedUrl }, `S3 url not found`);
} else {
logger.debug(
{ failedUrl, message: err.message },
'Unknown S3 download error',
);
}
} }
return null;
return await Result.wrap(() => {
const command = new GetObjectCommand(s3Url);
const client = getS3Client();
return client.send(command);
})
.transform(
async ({
Body,
LastModified,
DeleteMarker,
}): Promise<MavenFetchResult> => {
if (DeleteMarker) {
logger.trace(
{ failedUrl: pkgUrl.toString() },
'Maven S3 lookup error: DeleteMarker encountered',
);
return Result.err({ type: 'not-found' });
}
if (!(Body instanceof Readable)) {
logger.debug(
{ failedUrl: pkgUrl.toString() },
'Maven S3 lookup error: unsupported Body type',
);
return Result.err({ type: 'unsupported-format' });
}
const data = await streamToString(Body);
const result: MavenFetchSuccess = { data };
const lastModified = normalizeDate(LastModified);
if (lastModified) {
result.lastModified = lastModified;
}
return Result.ok(result);
},
)
.catch((err): MavenFetchResult => {
if (!(err instanceof Error)) {
return Result.err(err);
}
const failedUrl = pkgUrl.toString();
if (err.name === 'CredentialsProviderError') {
logger.debug(
{ failedUrl },
'Maven S3 lookup error: credentials provider error, check "AWS_ACCESS_KEY_ID" and "AWS_SECRET_ACCESS_KEY" variables',
);
return Result.err({ type: 'credentials-error' });
}
if (err.message === 'Region is missing') {
logger.debug(
{ failedUrl },
'Maven S3 lookup error: missing region, check "AWS_REGION" variable',
);
return Result.err({ type: 'missing-aws-region' });
}
if (isS3NotFound(err)) {
logger.trace({ failedUrl }, 'Maven S3 lookup error: object not found');
return Result.err({ type: 'not-found' });
}
logger.debug({ failedUrl, err }, 'Maven S3 lookup error: unknown error');
return Result.err({ type: 'unknown', err });
});
}
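The refactor above hinges on returning a typed `Result` instead of `null`. For readers new to the pattern, here is a minimal self-contained sketch of how a `MavenFetchResult`-style value flows through `transform` and `unwrapOrNull`, the way `downloadHttpContent` uses it; this is an illustration only, not Renovate's actual `Result` class from `util/result`:

```ts
// Minimal stand-in types -- assumptions for illustration, not Renovate's API.
type MavenFetchError = { type: 'not-found' } | { type: 'unknown'; err: Error };

class SketchResult<T> {
  private constructor(
    private readonly value: T | null,
    private readonly error: MavenFetchError | null,
  ) {}

  static ok<T>(value: T): SketchResult<T> {
    return new SketchResult<T>(value, null);
  }

  static err<T>(error: MavenFetchError): SketchResult<T> {
    return new SketchResult<T>(null, error);
  }

  // Map the success value; errors pass through untouched.
  transform<U>(fn: (value: T) => U): SketchResult<U> {
    return this.error
      ? SketchResult.err<U>(this.error)
      : SketchResult.ok(fn(this.value as T));
  }

  unwrapOrNull(): T | null {
    return this.value;
  }
}

// Mirrors downloadHttpContent: fetch, keep only `data`, collapse errors to null.
const fetched = SketchResult.ok({ data: '<metadata/>', isCacheable: true });
const content = fetched.transform(({ data }) => data).unwrapOrNull();
console.log(content); // '<metadata/>'
```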
export async function downloadArtifactRegistryProtocol(
  http: Http,
  pkgUrl: URL,
): Promise<MavenFetchResult> {
  const opts: HttpOptions = {};
  const host = pkgUrl.host;
  const path = pkgUrl.pathname;
@@ -312,32 +376,30 @@ export async function downloadMavenXml(
  const protocol = pkgUrl.protocol;

  if (protocol === 'http:' || protocol === 'https:') {
    const rawResult = await downloadHttpProtocol(http, pkgUrl);
    const xmlResult = rawResult.transform(({ isCacheable, data }): MavenXml => {
      const xml = new XmlDocument(data);
      return { isCacheable, xml };
    });
    return xmlResult.unwrapOr({});
  }

  if (protocol === 'artifactregistry:') {
    const rawResult = await downloadArtifactRegistryProtocol(http, pkgUrl);
    const xmlResult = rawResult.transform(({ isCacheable, data }): MavenXml => {
      const xml = new XmlDocument(data);
      return { isCacheable, xml };
    });
    return xmlResult.unwrapOr({});
  }

  if (protocol === 's3:') {
    const rawResult = await downloadS3Protocol(pkgUrl);
    const xmlResult = rawResult.transform(({ isCacheable, data }): MavenXml => {
      const xml = new XmlDocument(data);
      return { xml };
    });
    return xmlResult.unwrapOr({});
  }

  logger.debug(
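Each protocol branch above ends by wrapping the fetched body in `XmlDocument`. A short standalone example of the `xmldoc` API in play here (`valueWithPath` is also what the sbt datasources below use to read POM fields); the sample document is invented:

```ts
import { XmlDocument } from 'xmldoc';

const pom = new XmlDocument(`
  <project>
    <url>https://example.com/project</url>
    <version>1.2.3</version>
  </project>
`);

console.log(pom.valueWithPath('url')); // https://example.com/project
console.log(pom.valueWithPath('version')); // 1.2.3
```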
View file
@@ -4,7 +4,7 @@ exports[`modules/datasource/orb/index getReleases processes homeUrl 1`] = `
{
  "homepage": "https://google.com",
  "isPrivate": false,
  "registryUrl": "https://circleci.com",
  "releases": [
    {
      "releaseTimestamp": "2018-12-11T05:28:14.080Z",
@@ -53,7 +53,7 @@ exports[`modules/datasource/orb/index getReleases processes real data 1`] = `
{
  "homepage": "https://circleci.com/developer/orbs/orb/hyper-expanse/library-release-workflows",
  "isPrivate": false,
  "registryUrl": "https://circleci.com",
  "releases": [
    {
      "releaseTimestamp": "2018-12-11T05:28:14.080Z",
View file
@@ -92,5 +92,18 @@ describe('modules/datasource/orb/index', () => {
    expect(res).toMatchSnapshot();
    expect(res?.homepage).toBe('https://google.com');
  });
it('supports other registries', async () => {
httpMock
.scope('https://cci.internal.dev')
.post('/graphql-unstable')
.reply(200, orbData);
const res = await getPkgReleases({
datasource,
packageName: 'hyper-expanse/library-release-workflows',
registryUrls: ['https://cci.internal.dev'],
});
expect(res?.registryUrl).toBe('https://cci.internal.dev');
});
  });
});
View file
@@ -1,5 +1,6 @@
import { logger } from '../../../logger';
import { cache } from '../../../util/cache/package/decorator';
import { joinUrlParts } from '../../../util/url';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, ReleaseResult } from '../types';
import type { OrbResponse } from './types';
@@ -27,9 +28,10 @@ export class OrbDatasource extends Datasource {
    super(OrbDatasource.id);
  }

  override readonly customRegistrySupport = true;
  override readonly defaultRegistryUrls = ['https://circleci.com/'];
  override readonly registryStrategy = 'hunt';

  override readonly releaseTimestampSupport = true;
  override readonly releaseTimestampNote =
@@ -47,7 +49,7 @@ export class OrbDatasource extends Datasource {
    if (!registryUrl) {
      return null;
    }

    const url = joinUrlParts(registryUrl, 'graphql-unstable');
    const body = {
      query,
      variables: { packageName, maxVersions: MAX_VERSIONS },
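The switch from string concatenation to `joinUrlParts` is what makes custom registry URLs work whether or not they end in a slash. A minimal sketch of such a join helper (an assumption for illustration, not Renovate's actual `util/url` implementation):

```ts
// Strip stray slashes from each part, then join with exactly one slash.
function joinUrlPartsSketch(...parts: string[]): string {
  return parts
    .map((part) => part.replace(/^\/+|\/+$/g, ''))
    .filter((part) => part.length > 0)
    .join('/');
}

console.log(joinUrlPartsSketch('https://circleci.com/', 'graphql-unstable'));
// https://circleci.com/graphql-unstable
console.log(joinUrlPartsSketch('https://cci.internal.dev', 'graphql-unstable'));
// https://cci.internal.dev/graphql-unstable
```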
View file
@@ -149,9 +149,9 @@ describe('modules/datasource/sbt-package/index', () => {
        .get('/org/example/example_2.12/')
        .reply(200, `<a href='1.2.3/'>1.2.3/</a>`)
        .get('/org/example/example_2.12/1.2.3/example-1.2.3.pom')
        .reply(404)
        .get('/org/example/example_2.12/1.2.3/example_2.12-1.2.3.pom')
        .reply(404);

      const res = await getPkgReleases({
        versioning: mavenVersioning.id,
@@ -267,7 +267,7 @@ describe('modules/datasource/sbt-package/index', () => {
          `,
        )
        .get('/org/example/example_2.13/1.2.3/example_2.13-1.2.3.pom')
        .reply(404);

      const res = await getPkgReleases({
        versioning: mavenVersioning.id,
View file
@@ -10,8 +10,7 @@ import * as ivyVersioning from '../../versioning/ivy';
import { compare } from '../../versioning/maven/compare';
import { MavenDatasource } from '../maven';
import { MAVEN_REPO } from '../maven/common';
import { downloadHttpContent, downloadHttpProtocol } from '../maven/util';
import type {
  GetReleasesConfig,
  PostprocessReleaseConfig,
@@ -88,8 +87,11 @@ export class SbtPackageDatasource extends MavenDatasource {
    let dependencyUrl: string | undefined;
    let packageUrls: string[] | undefined;
    for (const packageRootUrl of packageRootUrls) {
      const packageRootContent = await downloadHttpContent(
        this.http,
        packageRootUrl,
      );
      if (!packageRootContent) {
        continue;
      }
@@ -103,7 +105,7 @@ export class SbtPackageDatasource extends MavenDatasource {
      dependencyUrl = trimTrailingSlash(packageRootUrl);

      const rootPath = new URL(packageRootUrl).pathname;
      const artifactSubdirs = extractPageLinks(packageRootContent, (href) => {
        const path = href.replace(rootPath, '');

        if (
@@ -149,15 +151,15 @@ export class SbtPackageDatasource extends MavenDatasource {
    const allVersions = new Set<string>();
    for (const pkgUrl of packageUrls) {
      const packageContent = await downloadHttpContent(this.http, pkgUrl);
      // istanbul ignore if
      if (!packageContent) {
        invalidPackageUrls.add(pkgUrl);
        continue;
      }

      const rootPath = new URL(pkgUrl).pathname;
      const versions = extractPageLinks(packageContent, (href) => {
        const path = href.replace(rootPath, '');
        if (path.startsWith('.')) {
          return null;
@@ -275,20 +277,20 @@ export class SbtPackageDatasource extends MavenDatasource {
      }

      const res = await downloadHttpProtocol(this.http, pomUrl);
      const { val } = res.unwrap();
      if (!val) {
        invalidPomFiles.add(pomUrl);
        continue;
      }

      const result: PomInfo = {};

      const releaseTimestamp = val.lastModified;
      if (releaseTimestamp) {
        result.releaseTimestamp = releaseTimestamp;
      }

      const pomXml = new XmlDocument(val.data);

      const homepage = pomXml.valueWithPath('url');
      if (homepage) {
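Both sbt datasources lean on `extractPageLinks` to turn directory-listing HTML into candidate paths. A rough standalone sketch of what such a helper does (an illustration only, not Renovate's actual implementation from `sbt-package/util`):

```ts
// Pull hrefs out of a listing page and run each through a filter-map callback.
function extractPageLinksSketch(
  content: string,
  filterMap: (href: string) => string | null,
): string[] {
  const result = new Set<string>();
  for (const match of content.matchAll(/href=["']([^"']+)["']/gi)) {
    const mapped = filterMap(match[1]);
    if (mapped !== null) {
      result.add(mapped);
    }
  }
  return [...result];
}

const listing = `<a href='1.2.3/'>1.2.3/</a><a href='../'>..</a>`;
console.log(
  extractPageLinksSketch(listing, (href) =>
    href.startsWith('.') ? null : href.replace(/\/$/, ''),
  ),
); // [ '1.2.3' ]
```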
View file
@@ -7,7 +7,7 @@ import * as ivyVersioning from '../../versioning/ivy';
import { compare } from '../../versioning/maven/compare';
import { Datasource } from '../datasource';
import { MAVEN_REPO } from '../maven/common';
import { downloadHttpContent } from '../maven/util';
import { extractPageLinks, getLatestVersion } from '../sbt-package/util';
import type {
  GetReleasesConfig,
@@ -43,8 +43,7 @@ export class SbtPluginDatasource extends Datasource {
    scalaVersion: string,
  ): Promise<string[] | null> {
    const pkgUrl = ensureTrailingSlash(searchRoot);
    const indexContent = await downloadHttpContent(this.http, pkgUrl);
    if (indexContent) {
      const rootPath = new URL(pkgUrl).pathname;
      let artifactSubdirs = extractPageLinks(indexContent, (href) => {
@@ -84,8 +83,7 @@ export class SbtPluginDatasource extends Datasource {
    const releases: string[] = [];
    for (const searchSubdir of artifactSubdirs) {
      const pkgUrl = ensureTrailingSlash(`${searchRoot}/${searchSubdir}`);
      const content = await downloadHttpContent(this.http, pkgUrl);
      if (content) {
        const rootPath = new URL(pkgUrl).pathname;
        const subdirReleases = extractPageLinks(content, (href) => {
@@ -133,8 +131,7 @@ export class SbtPluginDatasource extends Datasource {
    for (const pomFileName of pomFileNames) {
      const pomUrl = `${searchRoot}/${artifactDir}/${version}/${pomFileName}`;
      const content = await downloadHttpContent(this.http, pomUrl);
      if (content) {
        const pomXml = new XmlDocument(content);
@@ -173,13 +170,16 @@ export class SbtPluginDatasource extends Datasource {
      return href;
    };

    const searchRootContent = await downloadHttpContent(
      this.http,
      ensureTrailingSlash(searchRoot),
    );
    if (searchRootContent) {
      const releases: string[] = [];
      const scalaVersionItems = extractPageLinks(
        searchRootContent,
        hrefFilterMap,
      );
      const scalaVersions = scalaVersionItems.map((x) =>
        x.replace(regEx(/^scala_/), ''),
      );
@@ -188,24 +188,22 @@ export class SbtPluginDatasource extends Datasource {
        : scalaVersions;
      for (const searchVersion of searchVersions) {
        const searchSubRoot = `${searchRoot}/scala_${searchVersion}`;
        const subRootContent = await downloadHttpContent(
          this.http,
          ensureTrailingSlash(searchSubRoot),
        );
        if (subRootContent) {
          const sbtVersionItems = extractPageLinks(
            subRootContent,
            hrefFilterMap,
          );
          for (const sbtItem of sbtVersionItems) {
            const releasesRoot = `${searchSubRoot}/${sbtItem}`;
            const releasesIndexContent = await downloadHttpContent(
              this.http,
              ensureTrailingSlash(releasesRoot),
            );
            if (releasesIndexContent) {
              const releasesParsed = extractPageLinks(
                releasesIndexContent,
                hrefFilterMap,
              );
View file
@@ -44,7 +44,7 @@ resources:
  - container: linux
    image: ubuntu:24.04
  - container: python
    image: python:3.13@sha256:cea505b81701dd9e46b8dde96eaa8054c4bd2035dbb660edeb7af947ed38a0ad

stages:
  - stage: StageOne
View file
@@ -4,6 +4,7 @@ import { Fixtures } from '../../../../test/fixtures';
import { GlobalConfig } from '../../../config/global';
import type { RepoGlobalConfig } from '../../../config/types';
import { BazelDatasource } from '../../datasource/bazel';
import { DockerDatasource } from '../../datasource/docker';
import { GithubTagsDatasource } from '../../datasource/github-tags';
import { MavenDatasource } from '../../datasource/maven';
import * as parser from './parser';
@@ -290,6 +291,60 @@ describe('modules/manager/bazel-module/extract', () => {
    ]);
  });
it('returns oci.pull dependencies', async () => {
const input = codeBlock`
oci.pull(
name = "nginx_image",
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
image = "index.docker.io/library/nginx",
platforms = ["linux/amd64"],
tag = "1.27.1",
)
`;
const result = await extractPackageFile(input, 'MODULE.bazel');
if (!result) {
throw new Error('Expected a result.');
}
expect(result.deps).toEqual([
{
datasource: DockerDatasource.id,
depType: 'oci_pull',
depName: 'nginx_image',
packageName: 'index.docker.io/library/nginx',
currentValue: '1.27.1',
currentDigest:
'sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720',
},
]);
});
it('returns oci.pull dependencies without tags', async () => {
const input = codeBlock`
oci.pull(
name = "nginx_image",
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
image = "index.docker.io/library/nginx",
platforms = ["linux/amd64"],
)
`;
const result = await extractPackageFile(input, 'MODULE.bazel');
if (!result) {
throw new Error('Expected a result.');
}
expect(result.deps).toEqual([
{
datasource: DockerDatasource.id,
depType: 'oci_pull',
depName: 'nginx_image',
packageName: 'index.docker.io/library/nginx',
currentDigest:
'sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720',
},
]);
});
  it('returns maven.install and bazel_dep dependencies together', async () => {
    const input = codeBlock`
      bazel_dep(name = "bazel_jar_jar", version = "0.1.0")
View file
@@ -7,6 +7,7 @@ import * as bazelrc from './bazelrc';
import type { RecordFragment } from './fragments';
import { parse } from './parser';
import { RuleToMavenPackageDep, fillRegistryUrls } from './parser/maven';
import { RuleToDockerPackageDep } from './parser/oci';
import { RuleToBazelModulePackageDep } from './rules';
import * as rules from './rules';
@@ -18,11 +19,16 @@ export async function extractPackageFile(
    const records = parse(content);
    const pfc = await extractBazelPfc(records, packageFile);
    const mavenDeps = extractMavenDeps(records);
    const dockerDeps = LooseArray(RuleToDockerPackageDep).parse(records);

    if (mavenDeps.length) {
      pfc.deps.push(...mavenDeps);
    }

    if (dockerDeps.length) {
      pfc.deps.push(...dockerDeps);
    }

    return pfc.deps.length ? pfc : null;
  } catch (err) {
    logger.debug({ err, packageFile }, 'Failed to parse bazel module file.');
View file
@@ -1,5 +1,6 @@
import type { Category } from '../../../constants';
import { BazelDatasource } from '../../datasource/bazel';
import { DockerDatasource } from '../../datasource/docker';
import { GithubTagsDatasource } from '../../datasource/github-tags';
import { MavenDatasource } from '../../datasource/maven';
import { extractPackageFile } from './extract';
@@ -10,11 +11,12 @@ export const url = 'https://bazel.build/external/module';
export const categories: Category[] = ['bazel'];

export const defaultConfig = {
  fileMatch: ['(^|/|\\.)MODULE\\.bazel$'],
};

export const supportedDatasources = [
  BazelDatasource.id,
  DockerDatasource.id,
  GithubTagsDatasource.id,
  MavenDatasource.id,
];
View file
@@ -286,5 +286,34 @@ describe('modules/manager/bazel-module/parser/index', () => {
        ),
      ]);
    });
it('finds oci.pull', () => {
const input = codeBlock`
oci.pull(
name = "nginx_image",
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
image = "index.docker.io/library/nginx",
platforms = ["linux/amd64"],
tag = "1.27.1",
)
`;
const res = parse(input);
expect(res).toEqual([
fragments.record(
{
rule: fragments.string('oci_pull'),
name: fragments.string('nginx_image'),
digest: fragments.string(
'sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720',
),
image: fragments.string('index.docker.io/library/nginx'),
platforms: fragments.array([fragments.string('linux/amd64')], true),
tag: fragments.string('1.27.1'),
},
true,
),
]);
});
  });
});
View file
@@ -3,8 +3,9 @@ import { Ctx } from '../context';
import type { RecordFragment } from '../fragments';
import { mavenRules } from './maven';
import { moduleRules } from './module';
import { ociRules } from './oci';

const rule = q.alt<Ctx>(moduleRules, mavenRules, ociRules);

const query = q.tree<Ctx>({
  type: 'root-tree',
View file
@@ -0,0 +1,41 @@
import { query as q } from 'good-enough-parser';
import { z } from 'zod';
import { DockerDatasource } from '../../../datasource/docker';
import type { PackageDependency } from '../../types';
import type { Ctx } from '../context';
import { RecordFragmentSchema, StringFragmentSchema } from '../fragments';
import { kvParams } from './common';
export const RuleToDockerPackageDep = RecordFragmentSchema.extend({
children: z.object({
rule: StringFragmentSchema.extend({
value: z.literal('oci_pull'),
}),
name: StringFragmentSchema,
image: StringFragmentSchema,
tag: StringFragmentSchema.optional(),
digest: StringFragmentSchema.optional(),
}),
}).transform(
({ children: { rule, name, image, tag, digest } }): PackageDependency => ({
datasource: DockerDatasource.id,
depType: rule.value,
depName: name.value,
packageName: image.value,
currentValue: tag?.value,
currentDigest: digest?.value,
}),
);
export const ociRules = q
.sym<Ctx>('oci')
.op('.')
.sym('pull', (ctx, token) => ctx.startRule('oci_pull'))
.join(
q.tree({
type: 'wrapped-tree',
maxDepth: 1,
search: kvParams,
postHandler: (ctx) => ctx.endRule(),
}),
);
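The `RuleToDockerPackageDep` schema above maps a parsed rule fragment straight to a `PackageDependency` via zod's `.transform`. A self-contained sketch of the same mapping, using plain zod without Renovate's fragment wrapper types (the simplified record shape is an assumption; field names follow the oci.pull example):

```ts
import { z } from 'zod';

const OciPullRecord = z
  .object({
    rule: z.literal('oci_pull'),
    name: z.string(),
    image: z.string(),
    tag: z.string().optional(),
    digest: z.string().optional(),
  })
  .transform(({ rule, name, image, tag, digest }) => ({
    datasource: 'docker',
    depType: rule,
    depName: name,
    packageName: image,
    currentValue: tag,
    currentDigest: digest,
  }));

console.log(
  OciPullRecord.parse({
    rule: 'oci_pull',
    name: 'nginx_image',
    image: 'index.docker.io/library/nginx',
    tag: '1.27.1',
  }),
);
```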
View file
@@ -1,5 +1,7 @@
The `bazel-module` manager can update [Bazel module (bzlmod)](https://bazel.build/external/module) enabled workspaces.

### Maven

It also takes care of Maven artifacts initialized with [bzlmod](https://github.com/bazelbuild/rules_jvm_external/blob/master/docs/bzlmod.md). For simplicity, the name of the extension variable is limited to `maven*`. For example:

```
@@ -26,3 +28,21 @@ maven.artifact(
    version = "1.11.1",
)
```
### Docker
Similarly, it updates Docker / OCI images pulled with [oci_pull](https://github.com/bazel-contrib/rules_oci/blob/main/docs/pull.md).
Note that the extension must be called `oci`:
```
oci = use_extension("@rules_oci//oci:extensions.bzl", "oci")
oci.pull(
name = "nginx_image",
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
image = "index.docker.io/library/nginx",
platforms = ["linux/amd64"],
tag = "1.27.1",
)
```
View file
@@ -41,101 +41,198 @@ describe('modules/manager/bun/artifacts', () => {
      expect(await updateArtifacts(updateArtifact)).toBeNull();
    });
  describe('when using .lockb lockfile format', () => {
    it('skips if cannot read lock file', async () => {
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lockb'] },
      ];
      expect(await updateArtifacts(updateArtifact)).toBeNull();
    });

    it('returns null if lock content unchanged', async () => {
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lockb'] },
      ];
      const oldLock = Buffer.from('old');
      fs.readFile.mockResolvedValueOnce(oldLock as never);
      fs.readFile.mockResolvedValueOnce(oldLock as never);
      expect(await updateArtifacts(updateArtifact)).toBeNull();
    });

    it('returns updated lock content', async () => {
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lockb'] },
      ];
      const oldLock = Buffer.from('old');
      fs.readFile.mockResolvedValueOnce(oldLock as never);
      const newLock = Buffer.from('new');
      fs.readFile.mockResolvedValueOnce(newLock as never);
      expect(await updateArtifacts(updateArtifact)).toEqual([
        {
          file: {
            path: 'bun.lockb',
            type: 'addition',
            contents: newLock,
          },
        },
      ]);
    });

    it('supports lockFileMaintenance', async () => {
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lockb'] },
      ];
      updateArtifact.config.updateType = 'lockFileMaintenance';
      const oldLock = Buffer.from('old');
      fs.readFile.mockResolvedValueOnce(oldLock as never);
      const newLock = Buffer.from('new');
      fs.readFile.mockResolvedValueOnce(newLock as never);
      expect(await updateArtifacts(updateArtifact)).toEqual([
        {
          file: {
            path: 'bun.lockb',
            type: 'addition',
            contents: newLock,
          },
        },
      ]);
    });

    it('handles temporary error', async () => {
      const execError = new ExecError(TEMPORARY_ERROR, {
        cmd: '',
        stdout: '',
        stderr: '',
        options: { encoding: 'utf8' },
      });
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lockb'] },
      ];
      const oldLock = Buffer.from('old');
      fs.readFile.mockResolvedValueOnce(oldLock as never);
      exec.mockRejectedValueOnce(execError);
      await expect(updateArtifacts(updateArtifact)).rejects.toThrow(
        TEMPORARY_ERROR,
      );
    });

    it('handles full error', async () => {
      const execError = new ExecError('nope', {
        cmd: '',
        stdout: '',
        stderr: '',
        options: { encoding: 'utf8' },
      });
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lockb'] },
      ];
      const oldLock = Buffer.from('old');
      fs.readFile.mockResolvedValueOnce(oldLock as never);
      exec.mockRejectedValueOnce(execError);
      expect(await updateArtifacts(updateArtifact)).toEqual([
        { artifactError: { lockFile: 'bun.lockb', stderr: 'nope' } },
      ]);
    });
  });

  describe('when using .lock lockfile format', () => {
    it('skips if cannot read lock file', async () => {
      updateArtifact.updatedDeps = [
        { manager: 'bun', lockFiles: ['bun.lock'] },
      ];
      expect(await updateArtifacts(updateArtifact)).toBeNull();
    });
it('returns null if lock content unchanged', async () => {
updateArtifact.updatedDeps = [
{ manager: 'bun', lockFiles: ['bun.lock'] },
];
const oldLock = Buffer.from('old');
fs.readFile.mockResolvedValueOnce(oldLock as never);
fs.readFile.mockResolvedValueOnce(oldLock as never);
expect(await updateArtifacts(updateArtifact)).toBeNull();
});
it('returns updated lock content', async () => {
updateArtifact.updatedDeps = [
{ manager: 'bun', lockFiles: ['bun.lock'] },
];
const oldLock = Buffer.from('old');
fs.readFile.mockResolvedValueOnce(oldLock as never);
const newLock = Buffer.from('new');
fs.readFile.mockResolvedValueOnce(newLock as never);
expect(await updateArtifacts(updateArtifact)).toEqual([
{
file: {
path: 'bun.lock',
type: 'addition',
contents: newLock,
},
},
]);
});
it('supports lockFileMaintenance', async () => {
updateArtifact.updatedDeps = [
{ manager: 'bun', lockFiles: ['bun.lock'] },
];
updateArtifact.config.updateType = 'lockFileMaintenance';
const oldLock = Buffer.from('old');
fs.readFile.mockResolvedValueOnce(oldLock as never);
const newLock = Buffer.from('new');
fs.readFile.mockResolvedValueOnce(newLock as never);
expect(await updateArtifacts(updateArtifact)).toEqual([
{
file: {
path: 'bun.lock',
type: 'addition',
contents: newLock,
},
},
]);
});
it('handles temporary error', async () => {
const execError = new ExecError(TEMPORARY_ERROR, {
cmd: '',
stdout: '',
stderr: '',
options: { encoding: 'utf8' },
});
updateArtifact.updatedDeps = [
{ manager: 'bun', lockFiles: ['bun.lock'] },
];
const oldLock = Buffer.from('old');
fs.readFile.mockResolvedValueOnce(oldLock as never);
exec.mockRejectedValueOnce(execError);
await expect(updateArtifacts(updateArtifact)).rejects.toThrow(
TEMPORARY_ERROR,
);
});
it('handles full error', async () => {
const execError = new ExecError('nope', {
cmd: '',
stdout: '',
stderr: '',
options: { encoding: 'utf8' },
});
updateArtifact.updatedDeps = [
{ manager: 'bun', lockFiles: ['bun.lock'] },
];
const oldLock = Buffer.from('old');
fs.readFile.mockResolvedValueOnce(oldLock as never);
exec.mockRejectedValueOnce(execError);
expect(await updateArtifacts(updateArtifact)).toEqual([
{ artifactError: { lockFile: 'bun.lock', stderr: 'nope' } },
]);
}); });
  });

  describe('bun command execution', () => {
    it('check install options with configs', async () => {
const lockfileFormats = ['bun.lockb', 'bun.lock'];
      const testCases = [
        {
          allowScripts: undefined,
@@ -184,38 +281,40 @@ describe('modules/manager/bun/artifacts', () => {
        },
      ];

      for (const lockFile of lockfileFormats) {
        for (const testCase of testCases) {
          GlobalConfig.set({
            ...globalConfig,
            allowScripts: testCase.allowScripts,
          });
          const updateArtifact: UpdateArtifact = {
            config: { ignoreScripts: testCase.ignoreScripts },
            newPackageFileContent: '',
            packageFileName: '',
            updatedDeps: [{ manager: 'bun', lockFiles: [lockFile] }],
          };

          const oldLock = Buffer.from('old');
          fs.readFile.mockResolvedValueOnce(oldLock as never);
          const newLock = Buffer.from('new');
          fs.readFile.mockResolvedValueOnce(newLock as never);

          await updateArtifacts(updateArtifact);
          expect(exec).toHaveBeenCalledWith(testCase.expectedCmd, {
            cwdFile: '',
            docker: {},
            toolConstraints: [
              {
                toolName: 'bun',
              },
            ],
            userConfiguredEnv: undefined,
          });

          exec.mockClear();
          GlobalConfig.reset();
        }
      }
    });
  });
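For orientation, the artifact flow these tests mock out is: read the lockfile, run `bun install`, read it again, and only report an updated artifact when the bytes actually changed. A minimal sketch of that comparison step using Node's Buffer API (the helper name and the elided install step are illustrative, not Renovate's code):

```ts
import { readFile } from 'node:fs/promises';

async function lockFileChanged(lockFileName: string): Promise<Buffer | null> {
  const before = await readFile(lockFileName);
  // ... run `bun install` here ...
  const after = await readFile(lockFileName);
  // Buffer.equals does a byte-wise comparison of the two snapshots.
  return before.equals(after) ? null : after;
}
```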
View file
@@ -9,60 +9,120 @@ describe('modules/manager/bun/extract', () => {
    expect(await extractAllPackageFiles({}, ['package.json'])).toEqual([]);
  });

  describe('when using the .lockb lockfile format', () => {
    it('ignores missing package.json file', async () => {
      expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
    });

    it('ignores invalid package.json file', async () => {
      (fs.readLocalFile as jest.Mock).mockResolvedValueOnce('invalid');
      expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
    });

    it('handles null response', async () => {
      fs.getSiblingFileName.mockReturnValueOnce('package.json');
      fs.readLocalFile.mockResolvedValueOnce(
        // This package.json returns null from the extractor
        JSON.stringify({
          _id: 1,
          _args: 1,
          _from: 1,
        }),
      );
      expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
    });

    it('parses valid package.json file', async () => {
      fs.getSiblingFileName.mockReturnValueOnce('package.json');
      fs.readLocalFile.mockResolvedValueOnce(
        JSON.stringify({
          name: 'test',
          version: '0.0.1',
          dependencies: {
            dep1: '1.0.0',
          },
        }),
      );
      expect(await extractAllPackageFiles({}, ['bun.lockb'])).toMatchObject([
        {
          deps: [
            {
              currentValue: '1.0.0',
              datasource: 'npm',
              depName: 'dep1',
              depType: 'dependencies',
              prettyDepType: 'dependency',
            },
          ],
          extractedConstraints: {},
          lockFiles: ['bun.lockb'],
          managerData: {
            hasPackageManager: false,
            packageJsonName: 'test',
          },
          packageFile: 'package.json',
          packageFileVersion: '0.0.1',
        },
      ]);
    });
  });

describe('when using the .lock lockfile format', () => {
it('ignores missing package.json file', async () => {
expect(await extractAllPackageFiles({}, ['bun.lock'])).toEqual([]);
});
it('ignores invalid package.json file', async () => {
(fs.readLocalFile as jest.Mock).mockResolvedValueOnce('invalid');
expect(await extractAllPackageFiles({}, ['bun.lock'])).toEqual([]);
});
it('handles null response', async () => {
fs.getSiblingFileName.mockReturnValueOnce('package.json');
fs.readLocalFile.mockResolvedValueOnce(
// This package.json returns null from the extractor
JSON.stringify({
_id: 1,
_args: 1,
_from: 1,
}),
);
expect(await extractAllPackageFiles({}, ['bun.lock'])).toEqual([]);
});
it('parses valid package.json file', async () => {
fs.getSiblingFileName.mockReturnValueOnce('package.json');
fs.readLocalFile.mockResolvedValueOnce(
JSON.stringify({
name: 'test',
version: '0.0.1',
dependencies: {
dep1: '1.0.0',
},
}),
);
expect(await extractAllPackageFiles({}, ['bun.lock'])).toMatchObject([
{
deps: [
{
currentValue: '1.0.0',
datasource: 'npm',
depName: 'dep1',
depType: 'dependencies',
prettyDepType: 'dependency',
},
],
extractedConstraints: {},
lockFiles: ['bun.lock'],
managerData: {
hasPackageManager: false,
packageJsonName: 'test',
},
packageFile: 'package.json',
packageFileVersion: '0.0.1',
},
]);
});
}); });
});
View file
@@ -18,7 +18,12 @@ export async function extractAllPackageFiles(
): Promise<PackageFile[]> {
  const packageFiles: PackageFile<NpmManagerData>[] = [];

  for (const matchedFile of matchedFiles) {
    if (
      !(
        matchesFileName(matchedFile, 'bun.lockb') ||
        matchesFileName(matchedFile, 'bun.lock')
      )
    ) {
      logger.warn({ matchedFile }, 'Invalid bun lockfile match');
      continue;
    }
View file
@@ -13,7 +13,7 @@ export const supersedesManagers = ['npm'];
export const supportsLockFileMaintenance = true;

export const defaultConfig = {
  fileMatch: ['(^|/)bun\\.lockb?$'],
  digest: {
    prBodyDefinitions: {
      Change:
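A quick standalone check of the widened `fileMatch` pattern: the double backslashes above are JSON-escaped, so the actual regex is `(^|/)bun\.lockb?$`, and the optional `b?` is what adds `bun.lock` support alongside `bun.lockb`:

```ts
const bunLockFile = /(^|\/)bun\.lockb?$/;

console.log(bunLockFile.test('bun.lockb')); // true
console.log(bunLockFile.test('packages/app/bun.lock')); // true
console.log(bunLockFile.test('bun.lockfile')); // false
```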
View file
@@ -505,11 +505,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
    },
  },
  {
    "currentDigest": "0b799ead604f900ed50685e9b2d469cd2befba5b",
    "datasource": "git-refs",
    "depName": "health_check",
    "managerData": {
      "lineNumber": 53,
    },
    "packageName": "https://github.com/ianheggie/health_check",
    "sourceUrl": "https://github.com/ianheggie/health_check",
  },
  {
    "currentValue": "'~> 4.3'",
@@ -539,12 +542,15 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
    },
  },
  {
    "currentDigest": "54b17ba8c7d8d20a16dfc65d1775241833219cf2",
    "currentValue": "'~> 0.6'",
    "datasource": "git-refs",
    "depName": "http_parser.rb",
    "managerData": {
      "lineNumber": 57,
    },
    "packageName": "https://github.com/tmm1/http_parser.rb",
    "sourceUrl": "https://github.com/tmm1/http_parser.rb",
  },
  {
    "currentValue": "'~> 1.3'",
@@ -591,11 +597,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
    },
  },
  {
    "currentDigest": "fd184883048b922b176939f851338d0a4971a532",
    "datasource": "git-refs",
    "depName": "nilsimsa",
    "managerData": {
      "lineNumber": 63,
    },
    "packageName": "https://github.com/witgo/nilsimsa",
    "sourceUrl": "https://github.com/witgo/nilsimsa",
  },
  {
    "currentValue": "'~> 1.10'",
@@ -660,11 +669,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
    },
  },
  {
    "currentDigest": "58465d2e213991f8afb13b984854a49fcdcc980c",
    "datasource": "git-refs",
    "depName": "posix-spawn",
    "managerData": {
      "lineNumber": 71,
    },
    "packageName": "https://github.com/rtomayko/posix-spawn",
    "sourceUrl": "https://github.com/rtomayko/posix-spawn",
  },
  {
    "currentValue": "'~> 2.1'",
@@ -899,11 +911,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
    },
  },
  {
    "currentDigest": "e742697a0906e74e8bb777ef98137bc3955d981d",
    "datasource": "git-refs",
    "depName": "json-ld",
    "managerData": {
      "lineNumber": 99,
    },
    "packageName": "https://github.com/ruby-rdf/json-ld.git",
    "sourceUrl": "https://github.com/ruby-rdf/json-ld",
  },
  {
    "currentValue": "'~> 3.0'",
@@ -1494,11 +1509,13 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    },
  },
  {
    "datasource": "git-refs",
    "depName": "webpacker",
    "managerData": {
      "lineNumber": 16,
    },
    "packageName": "https://github.com/rails/webpacker",
    "sourceUrl": "https://github.com/rails/webpacker",
  },
  {
    "currentValue": ""~> 3.1.11"",
@@ -1681,7 +1698,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    },
  },
  {
    "currentValue": "update-pg",
    "datasource": "git-refs",
    "depName": "queue_classic",
    "depTypes": [
      "job",
    ],
    "managerData": {
      "lineNumber": 54,
    },
    "packageName": "https://github.com/rafaelfranca/queue_classic",
    "sourceUrl": "https://github.com/rafaelfranca/queue_classic",
  },
  {
    "datasource": "rubygems",
@@ -1791,7 +1811,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    },
  },
  {
    "currentValue": "close-race",
    "datasource": "git-refs",
    "depName": "websocket-client-simple",
    "depTypes": [
      "cable",
    ],
    "managerData": {
      "lineNumber": 71,
    },
    "packageName": "https://github.com/matthewd/websocket-client-simple",
    "sourceUrl": "https://github.com/matthewd/websocket-client-simple",
  },
  {
    "datasource": "rubygems",
@@ -2024,15 +2047,19 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    },
  },
  {
    "currentValue": "master",
    "datasource": "git-refs",
    "depName": "activerecord-jdbcsqlite3-adapter",
    "lockedVersion": "52.1-java",
    "managerData": {
      "lineNumber": 129,
    },
    "packageName": "https://github.com/jruby/activerecord-jdbc-adapter",
    "sourceUrl": "https://github.com/jruby/activerecord-jdbc-adapter",
  },
  {
    "currentValue": "master",
    "datasource": "git-refs",
    "depName": "activerecord-jdbcmysql-adapter",
    "depTypes": [
      "db",
@@ -2041,9 +2068,12 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    "managerData": {
      "lineNumber": 131,
    },
    "packageName": "https://github.com/jruby/activerecord-jdbc-adapter",
    "sourceUrl": "https://github.com/jruby/activerecord-jdbc-adapter",
  },
  {
    "currentValue": "master",
    "datasource": "git-refs",
    "depName": "activerecord-jdbcpostgresql-adapter",
    "depTypes": [
      "db",
@@ -2052,6 +2082,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    "managerData": {
      "lineNumber": 132,
    },
    "packageName": "https://github.com/jruby/activerecord-jdbc-adapter",
    "sourceUrl": "https://github.com/jruby/activerecord-jdbc-adapter",
  },
  {
    "currentValue": "">= 1.3.0"",
@@ -2104,11 +2136,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
    },
  },
  {
    "currentValue": "master",
    "datasource": "git-refs",
    "depName": "activerecord-oracle_enhanced-adapter",
    "managerData": {
      "lineNumber": 154,
    },
    "packageName": "https://github.com/rsim/oracle-enhanced",
    "sourceUrl": "https://github.com/rsim/oracle-enhanced",
  },
  {
    "datasource": "rubygems",
View file
@@ -171,14 +171,22 @@ describe('modules/manager/bundler/extract', () => {
    it('parses inline source in Gemfile', async () => {
      const sourceInlineGemfile = codeBlock`
        baz = 'https://gems.baz.com'
        gem 'inline_gem'
        gem "inline_source_gem", source: 'https://gems.foo.com'
        gem 'inline_source_gem_with_version', "~> 1", source: 'https://gems.bar.com'
        gem 'inline_source_gem_with_variable_source', source: baz
        gem 'inline_source_gem_with_variable_source_and_require_after', source: baz, require: %w[inline_source_gem]
        gem "inline_source_gem_with_require_after", source: 'https://gems.foo.com', require: %w[inline_source_gem]
        gem "inline_source_gem_with_require_before", require: %w[inline_source_gem], source: 'https://gems.foo.com'
        gem "inline_source_gem_with_group_before", group: :production, source: 'https://gems.foo.com'
      `;
      fs.readLocalFile.mockResolvedValueOnce(sourceInlineGemfile);
      const res = await extractPackageFile(sourceInlineGemfile, 'Gemfile');
      expect(res).toMatchObject({
        deps: [
          {
            depName: 'inline_gem',
          },
          {
            depName: 'inline_source_gem',
            registryUrls: ['https://gems.foo.com'],
@@ -192,6 +200,104 @@ describe('modules/manager/bundler/extract', () => {
          {
            depName: 'inline_source_gem_with_variable_source',
            registryUrls: ['https://gems.baz.com'],
          },
{
depName: 'inline_source_gem_with_variable_source_and_require_after',
registryUrls: ['https://gems.baz.com'],
},
{
depName: 'inline_source_gem_with_require_after',
registryUrls: ['https://gems.foo.com'],
},
{
depName: 'inline_source_gem_with_require_before',
registryUrls: ['https://gems.foo.com'],
},
{
depName: 'inline_source_gem_with_group_before',
registryUrls: ['https://gems.foo.com'],
},
],
});
});
it('parses git refs in Gemfile', async () => {
const gitRefGemfile = codeBlock`
gem 'foo', git: 'https://github.com/foo/foo', ref: 'fd184883048b922b176939f851338d0a4971a532'
gem 'bar', git: 'https://github.com/bar/bar', tag: 'v1.0.0'
gem 'baz', github: 'baz/baz', branch: 'master'
`;
fs.readLocalFile.mockResolvedValueOnce(gitRefGemfile);
const res = await extractPackageFile(gitRefGemfile, 'Gemfile');
expect(res).toMatchObject({
deps: [
{
depName: 'foo',
packageName: 'https://github.com/foo/foo',
sourceUrl: 'https://github.com/foo/foo',
currentDigest: 'fd184883048b922b176939f851338d0a4971a532',
datasource: 'git-refs',
},
{
depName: 'bar',
packageName: 'https://github.com/bar/bar',
sourceUrl: 'https://github.com/bar/bar',
currentValue: 'v1.0.0',
datasource: 'git-refs',
},
{
depName: 'baz',
packageName: 'https://github.com/baz/baz',
sourceUrl: 'https://github.com/baz/baz',
currentValue: 'master',
datasource: 'git-refs',
},
],
});
});
it('parses multiple current values Gemfile', async () => {
const multipleValuesGemfile = codeBlock`
gem 'gem_without_values'
gem 'gem_with_one_value', ">= 3.0.5"
gem 'gem_with_multiple_values', ">= 3.0.5", "< 3.2"
`;
fs.readLocalFile.mockResolvedValueOnce(multipleValuesGemfile);
const res = await extractPackageFile(multipleValuesGemfile, 'Gemfile');
expect(res).toMatchObject({
deps: [
{
depName: 'gem_without_values',
},
{
depName: 'gem_with_one_value',
currentValue: '">= 3.0.5"',
},
{
depName: 'gem_with_multiple_values',
currentValue: '">= 3.0.5", "< 3.2"',
},
],
});
});
it('skips local gems in Gemfile', async () => {
const pathGemfile = codeBlock`
gem 'foo', path: 'vendor/foo'
gem 'bar'
`;
fs.readLocalFile.mockResolvedValueOnce(pathGemfile);
const res = await extractPackageFile(pathGemfile, 'Gemfile');
expect(res).toMatchObject({
deps: [
{
depName: 'foo',
skipReason: 'internal-package',
},
{
depName: 'bar',
},
        ],
      });
    });
View file
@@ -2,6 +2,7 @@ import is from '@sindresorhus/is';
import { logger } from '../../../logger';
import { readLocalFile } from '../../../util/fs';
import { newlineRegex, regEx } from '../../../util/regex';
import { GitRefsDatasource } from '../../datasource/git-refs';
import { RubyVersionDatasource } from '../../datasource/ruby-version';
import { RubygemsDatasource } from '../../datasource/rubygems';
import type { PackageDependency, PackageFileContent } from '../types';
@@ -12,6 +13,20 @@ function formatContent(input: string): string {
  return input.replace(regEx(/^ {2}/), '') + '\n'; //remove leading whitespace and add a new line at the end
}
const variableMatchRegex = regEx(
`^(?<key>\\w+)\\s*=\\s*['"](?<value>[^'"]+)['"]`,
);
const gemMatchRegex = regEx(
`^\\s*gem\\s+(['"])(?<depName>[^'"]+)(['"])(\\s*,\\s*(?<currentValue>(['"])[^'"]+['"](\\s*,\\s*['"][^'"]+['"])?))?`,
);
const sourceMatchRegex = regEx(
`source:\\s*((?:['"](?<registryUrl>[^'"]+)['"])|(?<sourceName>\\w+))?`,
);
const gitRefsMatchRegex = regEx(
`((git:\\s*['"](?<gitUrl>[^'"]+)['"])|(\\s*,\\s*github:\\s*['"](?<repoName>[^'"]+)['"]))(\\s*,\\s*branch:\\s*['"](?<branchName>[^'"]+)['"])?(\\s*,\\s*ref:\\s*['"](?<refName>[^'"]+)['"])?(\\s*,\\s*tag:\\s*['"](?<tagName>[^'"]+)['"])?`,
);
const pathMatchRegex = regEx(`path:\\s*['"](?<path>[^'"]+)['"]`);
export async function extractPackageFile(
  content: string,
  packageFile?: string,
@@ -114,9 +129,6 @@ export async function extractPackageFile(
      });
    }

    const variableMatch = variableMatchRegex.exec(line);
    if (variableMatch) {
      if (variableMatch.groups?.key) {
@@ -124,28 +136,56 @@ export async function extractPackageFile(
      }
    }

    const gemMatch = gemMatchRegex.exec(line)?.groups;

    if (gemMatch) {
      const dep: PackageDependency = {
        depName: gemMatch.depName,
        managerData: { lineNumber },
        datasource: RubygemsDatasource.id,
      };

      if (gemMatch.currentValue) {
        const currentValue = gemMatch.currentValue;
        dep.currentValue = currentValue;
      }

      const pathMatch = pathMatchRegex.exec(line)?.groups;
      if (pathMatch) {
        dep.skipReason = 'internal-package';
      }

      const sourceMatch = sourceMatchRegex.exec(line)?.groups;
      if (sourceMatch) {
        if (sourceMatch.registryUrl) {
          dep.registryUrls = [sourceMatch.registryUrl];
        } else if (sourceMatch.sourceName) {
          dep.registryUrls = [variables[sourceMatch.sourceName]];
        }
      }

      const gitRefsMatch = gitRefsMatchRegex.exec(line)?.groups;
      if (gitRefsMatch) {
        if (gitRefsMatch.gitUrl) {
          const gitUrl = gitRefsMatch.gitUrl;
          dep.packageName = gitUrl;
          if (gitUrl.startsWith('https://')) {
            dep.sourceUrl = gitUrl.replace(/\.git$/, '');
          }
        } else if (gitRefsMatch.repoName) {
          dep.packageName = `https://github.com/${gitRefsMatch.repoName}`;
          dep.sourceUrl = dep.packageName;
        }
        if (gitRefsMatch.refName) {
          dep.currentDigest = gitRefsMatch.refName;
        } else if (gitRefsMatch.branchName) {
          dep.currentValue = gitRefsMatch.branchName;
        } else if (gitRefsMatch.tagName) {
          dep.currentValue = gitRefsMatch.tagName;
        }
        dep.datasource = GitRefsDatasource.id;
      }

      res.deps.push(dep);
    }
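To see what the new `gitRefsMatchRegex` captures, here is a quick standalone run of the pattern defined above against one of the sample Gemfile lines from the tests (plain `RegExp` here instead of Renovate's `regEx` wrapper):

```ts
const gitRefsMatchRegex =
  /((git:\s*['"](?<gitUrl>[^'"]+)['"])|(\s*,\s*github:\s*['"](?<repoName>[^'"]+)['"]))(\s*,\s*branch:\s*['"](?<branchName>[^'"]+)['"])?(\s*,\s*ref:\s*['"](?<refName>[^'"]+)['"])?(\s*,\s*tag:\s*['"](?<tagName>[^'"]+)['"])?/;

const line = `gem 'bar', git: 'https://github.com/bar/bar', tag: 'v1.0.0'`;
console.log(gitRefsMatchRegex.exec(line)?.groups);
// { gitUrl: 'https://github.com/bar/bar', repoName: undefined,
//   branchName: undefined, refName: undefined, tagName: 'v1.0.0' }
```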
View file
@@ -13,6 +13,33 @@ describe('modules/manager/circleci/extract', () => {
    expect(extractPackageFile('nothing here')).toBeNull();
  });
it('handles registry alias', () => {
const res = extractPackageFile(
'executors:\n my-executor:\n docker:\n - image: quay.io/myName/myPackage:0.6.2',
'',
{
registryAliases: {
'quay.io': 'my-quay-mirror.registry.com',
'index.docker.io': 'my-docker-mirror.registry.com',
},
},
);
expect(res).toEqual({
deps: [
{
autoReplaceStringTemplate:
'quay.io/myName/myPackage:{{#if newValue}}{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}',
currentDigest: undefined,
currentValue: '0.6.2',
datasource: 'docker',
depName: 'my-quay-mirror.registry.com/myName/myPackage',
depType: 'docker',
replaceString: 'quay.io/myName/myPackage:0.6.2',
},
],
});
});
  it('extracts multiple image and resolves yaml anchors', () => {
    const res = extractPackageFile(file1);
    expect(res?.deps).toEqual([
@@ -222,5 +249,65 @@ describe('modules/manager/circleci/extract', () => {
      },
    ]);
  });
it('extracts orb definitions', () => {
const res = extractPackageFile(codeBlock`
version: 2.1
orbs:
myorb:
orbs:
python: circleci/python@2.1.1
executors:
python:
docker:
- image: cimg/python:3.9
jobs:
test_image:
docker:
- image: cimg/python:3.7
steps:
- checkout
workflows:
Test:
jobs:
- myorb/test_image`);
expect(res).toEqual({
deps: [
{
currentValue: '2.1.1',
datasource: 'orb',
depName: 'python',
depType: 'orb',
packageName: 'circleci/python',
versioning: 'npm',
},
{
autoReplaceStringTemplate:
'{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}',
currentDigest: undefined,
currentValue: '3.9',
datasource: 'docker',
depName: 'cimg/python',
depType: 'docker',
replaceString: 'cimg/python:3.9',
},
{
autoReplaceStringTemplate:
'{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}',
currentDigest: undefined,
currentValue: '3.7',
datasource: 'docker',
depName: 'cimg/python',
depType: 'docker',
replaceString: 'cimg/python:3.7',
},
],
});
});
  });
});
View file
@@ -4,20 +4,21 @@ import { parseSingleYaml } from '../../../util/yaml';
 import { OrbDatasource } from '../../datasource/orb';
 import * as npmVersioning from '../../versioning/npm';
 import { getDep } from '../dockerfile/extract';
-import type { PackageDependency, PackageFileContent } from '../types';
-import { CircleCiFile, type CircleCiJob } from './schema';
+import type {
+  ExtractConfig,
+  PackageDependency,
+  PackageFileContent,
+} from '../types';
+import { CircleCiFile, type CircleCiJob, type CircleCiOrb } from './schema';
 
-export function extractPackageFile(
-  content: string,
-  packageFile?: string,
-): PackageFileContent | null {
+function extractDefinition(
+  definition: CircleCiOrb | CircleCiFile,
+  config?: ExtractConfig,
+): PackageDependency[] {
   const deps: PackageDependency[] = [];
-  try {
-    const parsed = parseSingleYaml(content, {
-      customSchema: CircleCiFile,
-    });
-    for (const [key, orb] of Object.entries(parsed.orbs ?? {})) {
+
+  for (const [key, orb] of Object.entries(definition.orbs ?? {})) {
+    if (typeof orb === 'string') {
       const [packageName, currentValue] = orb.split('@');
       deps.push({
@@ -28,25 +29,44 @@ export function extractPackageFile(
         versioning: npmVersioning.id,
         datasource: OrbDatasource.id,
       });
+    } else {
+      deps.push(...extractDefinition(orb, config));
     }
+  }
 
   // extract environments
   const environments: CircleCiJob[] = [
-    Object.values(parsed.executors ?? {}),
-    Object.values(parsed.jobs ?? {}),
+    Object.values(definition.executors ?? {}),
+    Object.values(definition.jobs ?? {}),
   ].flat();
   for (const job of environments) {
     for (const dockerElement of coerceArray(job.docker)) {
       deps.push({
-        ...getDep(dockerElement.image),
+        ...getDep(dockerElement.image, true, config?.registryAliases),
         depType: 'docker',
       });
     }
   }
+
+  return deps;
+}
+
+export function extractPackageFile(
+  content: string,
+  packageFile?: string,
+  config?: ExtractConfig,
+): PackageFileContent | null {
+  const deps: PackageDependency[] = [];
+  try {
+    const parsed = parseSingleYaml(content, {
+      customSchema: CircleCiFile,
+    });
+
+    deps.push(...extractDefinition(parsed, config));
+
     for (const alias of coerceArray(parsed.aliases)) {
       deps.push({
-        ...getDep(alias.image),
+        ...getDep(alias.image, true, config?.registryAliases),
         depType: 'docker',
       });
     }
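The heart of the change: extractDefinition now calls itself for inline (nested) orb definitions, so orb references and docker images are collected at any depth, as the new test above exercises. The same walk in isolation, with simplified types:

// Simplified model of the recursive orb walk, not Renovate's actual API.
type OrbMap = Record<string, string | { orbs?: OrbMap }>;

function collectOrbRefs(orbs: OrbMap = {}): string[] {
  const refs: string[] = [];
  for (const orb of Object.values(orbs)) {
    if (typeof orb === 'string') {
      refs.push(orb); // a plain reference, e.g. 'circleci/python@2.1.1'
    } else {
      refs.push(...collectOrbRefs(orb.orbs)); // recurse into inline orbs
    }
  }
  return refs;
}

// collectOrbRefs({ myorb: { orbs: { python: 'circleci/python@2.1.1' } } })
// → ['circleci/python@2.1.1']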

lib/modules/manager/circleci/schema.ts

@@ -4,14 +4,31 @@ export const CircleCiDocker = z.object({
   image: z.string(),
 });
 
-export type CircleCiJob = z.infer<typeof CircleCiJob>;
 export const CircleCiJob = z.object({
   docker: z.array(CircleCiDocker).optional(),
 });
+export type CircleCiJob = z.infer<typeof CircleCiJob>;
+
+const baseOrb = z.object({
+  executors: z.record(z.string(), CircleCiJob).optional(),
+  jobs: z.record(z.string(), CircleCiJob).optional(),
+});
+
+type Orb = z.infer<typeof baseOrb> & {
+  orbs?: Record<string, string | Orb>;
+};
+
+export const CircleCiOrb: z.ZodType<Orb> = baseOrb.extend({
+  orbs: z.lazy(() =>
+    z.record(z.string(), z.union([z.string(), CircleCiOrb])).optional(),
+  ),
+});
+export type CircleCiOrb = z.infer<typeof CircleCiOrb>;
 
 export const CircleCiFile = z.object({
   aliases: z.array(CircleCiDocker).optional(),
   executors: z.record(z.string(), CircleCiJob).optional(),
   jobs: z.record(z.string(), CircleCiJob).optional(),
-  orbs: z.record(z.string()).optional(),
+  orbs: z.record(z.string(), z.union([z.string(), CircleCiOrb])).optional(),
 });
+export type CircleCiFile = z.infer<typeof CircleCiFile>;
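CircleCiOrb is self-referential (an orb may define further orbs), which Zod cannot infer on its own. The schema above therefore declares the recursive type by hand and wraps the self-reference in z.lazy, so the constant is not dereferenced before it is initialized. The same pattern reduced to a generic, self-contained example:

import { z } from 'zod';

// Generic version of the recursive-schema pattern used for CircleCiOrb.
const baseNode = z.object({ name: z.string() });

type Node = z.infer<typeof baseNode> & {
  children?: Record<string, string | Node>;
};

const Node: z.ZodType<Node> = baseNode.extend({
  // z.lazy defers evaluation until parse time, when `Node` exists.
  children: z.lazy(() =>
    z.record(z.string(), z.union([z.string(), Node])).optional(),
  ),
});

// Node.parse({ name: 'root', children: { a: 'leaf', b: { name: 'inner' } } })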

lib/modules/manager/custom/regex/extract.ts

@@ -2,13 +2,13 @@ import is from '@sindresorhus/is';
 import type { Category } from '../../../../constants';
 import type {
   ExtractConfig,
+  MaybePromise,
   PackageDependency,
   PackageFileContent,
-  Result,
 } from '../../types';
+import { validMatchFields } from '../utils';
 import { handleAny, handleCombination, handleRecursive } from './strategies';
 import type { RegexManagerConfig, RegexManagerTemplates } from './types';
-import { validMatchFields } from './utils';
 
 export const categories: Category[] = ['custom'];
 
@@ -22,7 +22,7 @@ export function extractPackageFile(
   content: string,
   packageFile: string,
   config: ExtractConfig,
-): Result<PackageFileContent | null> {
+): MaybePromise<PackageFileContent | null> {
   let deps: PackageDependency[];
   switch (config.matchStringsStrategy) {
     default:
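Result<T> and MaybePromise<T> both describe an extractor that may return its value synchronously or as a promise; the rename just makes that intent explicit. A sketch of the conventional shape — an assumption, since the real definitions live in the shared types module:

// Assumed definition (conventional shape for this kind of alias):
type MaybePromise<T> = T | Promise<T>;

// Callers that must handle both cases can normalize with Promise.resolve():
async function callExtract(
  extract: () => MaybePromise<string | null>,
): Promise<string | null> {
  return await Promise.resolve(extract());
}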

lib/modules/manager/custom/regex/strategies.ts

@@ -1,11 +1,10 @@
 import is from '@sindresorhus/is';
-import { logger } from '../../../../logger';
 import { regEx } from '../../../../util/regex';
 import type { PackageDependency } from '../../types';
+import { checkIsValidDependency } from '../utils';
 import type { RecursionParameter, RegexManagerConfig } from './types';
 import {
   createDependency,
-  isValidDependency,
   mergeExtractionTemplate,
   mergeGroups,
   regexMatchAll,
@@ -32,7 +31,7 @@ export function handleAny(
     )
     .filter(is.truthy)
     .filter((dep: PackageDependency) =>
-      checkIsValidDependency(dep, packageFile),
+      checkIsValidDependency(dep, packageFile, 'regex'),
     );
 }
@@ -61,7 +60,7 @@ export function handleCombination(
   return [createDependency(extraction, config)]
     .filter(is.truthy)
     .filter((dep: PackageDependency) =>
-      checkIsValidDependency(dep, packageFile),
+      checkIsValidDependency(dep, packageFile, 'regex'),
     );
 }
@@ -84,7 +83,7 @@ export function handleRecursive(
     })
     .filter(is.truthy)
     .filter((dep: PackageDependency) =>
-      checkIsValidDependency(dep, packageFile),
+      checkIsValidDependency(dep, packageFile, 'regex'),
     );
 }
@@ -116,23 +115,3 @@ function processRecursive(parameters: RecursionParameter): PackageDependency[] {
     });
   });
 }
-
-function checkIsValidDependency(
-  dep: PackageDependency,
-  packageFile: string,
-): boolean {
-  const isValid = isValidDependency(dep);
-  if (!isValid) {
-    const meta = {
-      packageDependency: dep,
-      packageFile,
-    };
-    logger.trace(
-      meta,
-      'Discovered a package dependency by matching regex, but it did not pass validation. Discarding',
-    );
-    return isValid;
-  }
-  return isValid;
-}
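The local helper disappears because validation now lives in the shared ../utils, with a new third argument naming the manager so the trace log can say who discarded the dependency. A sketch of its likely shape, inferred from the removed code and the new call sites — imports and paths are illustrative, not the exact shared implementation:

import { logger } from '../../../logger'; // import path illustrative
import type { PackageDependency } from '../types'; // import path illustrative
import { isValidDependency } from './utils'; // assumed to live beside the helper

export function checkIsValidDependency(
  dep: PackageDependency,
  packageFile: string,
  manager: string, // new: identifies the calling manager in trace logs
): boolean {
  const isValid = isValidDependency(dep);
  if (!isValid) {
    logger.trace(
      { packageDependency: dep, packageFile, manager },
      'Discovered a package dependency, but it did not pass validation. Discarding',
    );
  }
  return isValid;
}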

Some files were not shown because too many files have changed in this diff.