mirror of https://github.com/renovatebot/renovate.git
synced 2025-01-09 13:36:26 +00:00

Merge branch 'main' of https://github.com/renovatebot/renovate into fix-auto-merge-if-plus-one

commit 74137a8460
182 changed files with 6742 additions and 3442 deletions
@@ -1 +1 @@
-FROM ghcr.io/containerbase/devcontainer:13.0.24
+FROM ghcr.io/containerbase/devcontainer:13.5.5
@@ -34,7 +34,7 @@ runs:
 
     - name: Check cache miss for MacOS
       id: macos-cache
-      uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+      uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
       with:
        path: node_modules
        key: ${{ env.MACOS_KEY }}

@@ -43,7 +43,7 @@ runs:
 
     - name: Check cache miss for Windows
       id: windows-cache
-      uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+      uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
       with:
        path: node_modules
        key: ${{ env.WINDOWS_KEY }}
.github/actions/setup-node/action.yml vendored (6 lines changed)
@@ -34,7 +34,7 @@ runs:
 
     - name: Restore `node_modules`
      id: node-modules-restore
-      uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+      uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
      with:
        path: node_modules
        key: ${{ env.CACHE_KEY }}

@@ -64,7 +64,7 @@ runs:
 
    - name: Cache and restore `pnpm store`
      if: env.CACHE_HIT != 'true'
-      uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+      uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
      with:
        path: ${{ env.PNPM_STORE }}
        key: |

@@ -87,7 +87,7 @@ runs:
 
    - name: Write `node_modules` cache
      if: inputs.save-cache == 'true' && env.CACHE_HIT != 'true'
-      uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+      uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
      with:
        path: node_modules
        key: ${{ env.CACHE_KEY }}
.github/workflows/build.yml vendored (22 lines changed)
@@ -31,7 +31,7 @@ concurrency:
 env:
   DEFAULT_BRANCH: ${{ github.event.repository.default_branch }}
   NODE_VERSION: 22
-  PDM_VERSION: 2.21.0 # renovate: datasource=pypi depName=pdm
+  PDM_VERSION: 2.22.1 # renovate: datasource=pypi depName=pdm
   DRY_RUN: true
   TEST_LEGACY_DECRYPTION: true
   SPARSE_CHECKOUT: |-

@@ -209,7 +209,7 @@ jobs:
           os: ${{ runner.os }}
 
       - name: Restore eslint cache
-        uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/eslint
           key: eslint-main-cache

@@ -228,7 +228,7 @@ jobs:
 
       - name: Save eslint cache
         if: github.event_name == 'push'
-        uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/eslint
           key: eslint-main-cache

@@ -255,7 +255,7 @@ jobs:
           os: ${{ runner.os }}
 
       - name: Restore prettier cache
-        uses: actions/cache/restore@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/restore@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/prettier
           key: prettier-main-cache

@@ -280,7 +280,7 @@ jobs:
 
       - name: Save prettier cache
         if: github.event_name == 'push'
-        uses: actions/cache/save@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache/save@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/prettier
           key: prettier-main-cache

@@ -373,7 +373,7 @@ jobs:
           os: ${{ runner.os }}
 
       - name: Cache jest
-        uses: actions/cache@6849a6489940f00c2f30c0fb92c6274307ccb58a # v4.1.2
+        uses: actions/cache@1bd1e32a3bdc45362d1e726936510720a7c30a57 # v4.2.0
         with:
           path: .cache/jest
           key: |

@@ -411,7 +411,7 @@ jobs:
 
       - name: Save coverage artifacts
         if: (success() || failure()) && github.event.pull_request.draft != true && matrix.coverage
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: ${{ matrix.upload-artifact-name }}
           path: |

@@ -438,7 +438,7 @@ jobs:
           merge-multiple: true
 
       - name: Codecov
-        uses: codecov/codecov-action@015f24e6818733317a2da2edd6290ab26238649a # v5.0.7
+        uses: codecov/codecov-action@1e68e06f1dbfde0e4cefc87efeba9e4643565303 # v5.1.2
         with:
           token: ${{ secrets.CODECOV_TOKEN }}
           directory: coverage/lcov

@@ -567,7 +567,7 @@ jobs:
         run: pnpm test-e2e:pack
 
       - name: Upload
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: renovate-package
           path: renovate-0.0.0-semantic-release.tgz

@@ -611,7 +611,7 @@ jobs:
         run: pnpm test:docs
 
       - name: Upload
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: docs
           path: tmp/docs/

@@ -684,7 +684,7 @@ jobs:
           show-progress: false
 
       - name: docker-config
-        uses: containerbase/internal-tools@fa96b70003f221771f8c015cd3f598818ebf4d78 # v3.5.4
+        uses: containerbase/internal-tools@c440de95307545d23ff0e0b57018147e02ae217f # v3.5.15
         with:
           command: docker-config
 
.github/workflows/codeql-analysis.yml vendored (6 lines changed)
@@ -41,7 +41,7 @@ jobs:
 
       # Initializes the CodeQL tools for scanning.
       - name: Initialize CodeQL
-        uses: github/codeql-action/init@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/init@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
         with:
           languages: javascript
 

@@ -51,7 +51,7 @@ jobs:
       # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
       # If this step fails, then you should remove it and run the build manually (see below)
       - name: Autobuild
-        uses: github/codeql-action/autobuild@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/autobuild@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
 
       # ℹ️ Command-line programs to run using the OS shell.
       # 📚 https://git.io/JvXDl

@@ -65,4 +65,4 @@ jobs:
       #   make release
 
       - name: Perform CodeQL Analysis
-        uses: github/codeql-action/analyze@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/analyze@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
.github/workflows/find-issues-with-missing-labels.yml vendored (new file, 22 lines)
@@ -0,0 +1,22 @@
+name: 'Find issues with missing labels'
+
+on:
+  schedule:
+    # Run every Sunday at midnight
+    - cron: '0 0 * * 0'
+
+jobs:
+  check-unlabeled-issues:
+    runs-on: ubuntu-latest
+
+    permissions:
+      issues: write
+
+    env:
+      GH_TOKEN: ${{ github.token }}
+
+    steps:
+      - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
+
+      - name: Search for issues with missing labels
+        run: bash ./tools/find-issues-with-missing-labels.sh
.github/workflows/scorecard.yml vendored (4 lines changed)
@@ -43,7 +43,7 @@ jobs:
       # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
       # format to the repository Actions tab.
       - name: 'Upload artifact'
-        uses: actions/upload-artifact@b4b15b8c7c6ac21ea08fcf65892d2ee8f75cf882 # v4.4.3
+        uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
         with:
           name: SARIF file
           path: results.sarif

@@ -51,6 +51,6 @@ jobs:
 
       # Upload the results to GitHub's code scanning dashboard.
       - name: 'Upload to code-scanning'
-        uses: github/codeql-action/upload-sarif@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+        uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
         with:
           sarif_file: results.sarif
.github/workflows/trivy.yml vendored (2 lines changed)
@@ -31,7 +31,7 @@ jobs:
           format: 'sarif'
           output: 'trivy-results.sarif'
 
-      - uses: github/codeql-action/upload-sarif@f09c1c0a94de965c15400f5634aa42fac8fb8f88 # v3.27.5
+      - uses: github/codeql-action/upload-sarif@48ab28a6f5dbc2a99bf1e0131198dd8f1df78169 # v3.28.0
         with:
           sarif_file: trivy-results.sarif
           category: 'docker-image-${{ matrix.tag }}'
.github/workflows/update-data.yml vendored (2 lines changed)
@@ -42,7 +42,7 @@ jobs:
         run: pnpm prettier-fix
 
       - name: Create pull request
-        uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5
+        uses: peter-evans/create-pull-request@67ccf781d68cd99b580ae25a5c18a1cc84ffff1f # v7.0.6
         with:
           author: 'Renovate Bot <renovate@whitesourcesoftware.com>'
           branch: 'chore/update-static-data'
@@ -1 +1 @@
-3.13.0
+3.13.1
@@ -221,3 +221,15 @@ Add a label `auto:retry-latest` to any Discussion where the user should retry th
 </details>
 
 Apply the `self-hosted` label when an issue is applicable only to users who self-administer their own bot.
+
+## Automated check for Issues with missing labels
+
+We have a GitHub Action (`find-issues-with-missing-labels.yml`) to find issues on our repository that are missing labels.
+Any Issues with missing labels will be put in a list in a new "error" Issue.
+
+The Action runs each week.
+
+### Apply the correct labels manually
+
+The Action will _not_ fix any badly labeled issues.
+This means that you, or we, must apply the correct labels to any affected Issue.
Binary file not shown. (Image: 5.5 KiB before, 15 KiB after.)
@@ -1,12 +1,24 @@
 # Renovate configuration overview
 
-When Renovate runs on a repository, the final config used is derived from the:
+Each time Renovate runs on a repository it reads the configuration files listed below and creates a final config.
+This final config describes what Renovate will do during its run.
 
-- Default config
-- Global config
-- Inherited config
-- Repository config
-- Resolved presets referenced in config
+The final config is internal to Renovate, and is _not_ saved or cached for a later run.
+But you can always find the final config in Renovate's logs.
+
+Renovate reads the configuration files in this order (from top to bottom):
+
+1. Default config
+2. Global config
+   - File config
+   - Environment config
+   - CLI config
+3. Inherited config
+4. Resolved presets referenced in config
+5. Repository config
+
+Items with a higher number override items that have lower numbers.
+If the item has the `mergeable` property, it will merge with lower numbers instead.
 
 <!-- prettier-ignore -->
 !!! note

@@ -159,6 +171,19 @@ Inherited config may use all Repository config settings, and any Global config o
 
 For information on how the Mend Renovate App supports Inherited config, see the dedicated "Mend Renovate App Config" section toward the end of this page.
 
+#### Presets handling
+
+If the inherited config contains `extends` presets, then Renovate will:
+
+1. Resolve the presets
+1. Add the resolved preset config to the beginning of the inherited config
+1. Merge the presets on top of the global config
+
+##### You can not ignore presets from inherited config
+
+You can _not_ use `ignorePresets` in your repository config to ignore presets _within_ inherited config.
+This is because inherited config is resolved _before_ the repository config.
+
 ### Repository config
 
 Repository config is the config loaded from a config file in the repository.
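Note: to make the resolution order above concrete, a minimal hypothetical inherited config (values are illustrative, not part of this commit) could be:

```json
{
  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
  "extends": ["config:recommended"],
  "labels": ["dependencies"]
}
```

Here the `config:recommended` preset is resolved first and the inherited config's own `labels` is merged on top of it; a repository config that later sets `labels` would override both, because repository config is applied last.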
@@ -456,11 +456,19 @@ For `sbt` note that Renovate will update the version string only for packages th
 
 ## cloneSubmodules
 
-Enabling this option will mean that any detected Git submodules will be cloned at time of repository clone.
+Enabling this option will mean that detected Git submodules will be cloned at time of repository clone.
+By default all will be cloned, but this can be customized by configuring `cloneSubmodulesFilter` too.
+Submodules are always cloned recursively.
 
 Important: private submodules aren't supported by Renovate, unless the underlying `ssh` layer already has the correct permissions.
 
+## cloneSubmodulesFilter
+
+Use this option together with `cloneSubmodules` if you wish to clone only a subset of submodules.
+
+This config option supports regex and glob filters, including negative matches.
+For more details on this syntax see Renovate's [string pattern matching documentation](./string-pattern-matching.md).
+
 ## commitBody
 
 Configure this if you wish Renovate to add a commit body, otherwise Renovate uses a regular single-line commit.
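Note: a minimal sketch of how these two options combine (the submodule names are hypothetical):

```json
{
  "cloneSubmodules": true,
  "cloneSubmodulesFilter": ["vendor-*", "!vendor-legacy"]
}
```

Assuming the negative-match syntax from the string pattern matching docs, this would clone every submodule whose name starts with `vendor-` except `vendor-legacy`.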
@@ -769,6 +777,7 @@ Example:
   "customManagers": [
     {
+      "customType": "regex",
       "fileMatch": ["values.yaml$"],
       "matchStrings": [
         "ENV .*?_VERSION=(?<currentValue>.*) # (?<datasource>.*?)/(?<depName>.*?)\\s"
       ]
@@ -2246,6 +2255,7 @@ Supported lock files:
 - `pubspec.lock`
 - `pyproject.toml`
 - `requirements.txt`
+- `uv.lock`
 - `yarn.lock`
 
 Support for new lock files may be added via feature request.
@@ -3347,6 +3357,7 @@ Table with options:
 | `gomodTidyE` | Run `go mod tidy -e` after Go module updates. |
 | `gomodUpdateImportPaths` | Update source import paths on major module updates, using [mod](https://github.com/marwan-at-work/mod). |
+| `gomodSkipVendor` | Never run `go mod vendor` after Go module updates. |
 | `gomodVendor` | Always run `go mod vendor` after Go module updates even if vendor files aren't detected. |
 | `helmUpdateSubChartArchives` | Update subchart archives in the `/charts` folder. |
 | `npmDedupe` | Run `npm install` with `--prefer-dedupe` for npm >= 7 or `npm dedupe` after `package-lock.json` update for npm <= 6. |
 | `pnpmDedupe` | Run `pnpm dedupe --config.ignore-scripts=true` after `pnpm-lock.yaml` updates. |
@@ -3612,7 +3623,7 @@ Behavior:
 - `bump` = e.g. bump the range even if the new version satisfies the existing range, e.g. `^1.0.0` -> `^1.1.0`
 - `replace` = Replace the range with a newer one if the new version falls outside it, and update nothing otherwise
 - `widen` = Widen the range with newer one, e.g. `^1.0.0` -> `^1.0.0 || ^2.0.0`
-- `update-lockfile` = Update the lock file when in-range updates are available, otherwise `replace` for updates out of range. Works for `bundler`, `cargo`, `composer`, `npm`, `yarn`, `pnpm`, `terraform` and `poetry` so far
+- `update-lockfile` = Update the lock file when in-range updates are available, otherwise `replace` for updates out of range. Works for `bundler`, `cargo`, `composer`, `gleam`, `npm`, `yarn`, `pnpm`, `terraform` and `poetry` so far
 - `in-range-only` = Update the lock file when in-range updates are available, ignore package file updates
 
 Renovate's `"auto"` strategy works like this for npm:
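Note: a hypothetical per-package override of `rangeStrategy` might look like this (the package name is an example):

```json
{
  "rangeStrategy": "replace",
  "packageRules": [
    {
      "matchPackageNames": ["typescript"],
      "rangeStrategy": "bump"
    }
  ]
}
```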
@@ -3690,6 +3701,7 @@ This feature works with the following managers:
 
 - [`ansible`](modules/manager/ansible/index.md)
+- [`bitbucket-pipelines`](modules/manager/bitbucket-pipelines/index.md)
 - [`circleci`](modules/manager/circleci/index.md)
 - [`docker-compose`](modules/manager/docker-compose/index.md)
 - [`dockerfile`](modules/manager/dockerfile/index.md)
 - [`droneci`](modules/manager/droneci/index.md)
@@ -3784,7 +3796,7 @@ If enabled Renovate tries to determine PR reviewers by matching rules defined in
 Read the docs for your platform for details on syntax and allowed file locations:
 
 - [GitHub Docs, About code owners](https://docs.github.com/en/repositories/managing-your-repositorys-settings-and-features/customizing-your-repository/about-code-owners)
-- [GitLab, Code Owners](https://docs.gitlab.com/ee/user/project/code_owners.html)
+- [GitLab, Code Owners](https://docs.gitlab.com/ee/user/project/codeowners/)
 - [Bitbucket, Set up and use code owners](https://support.atlassian.com/bitbucket-cloud/docs/set-up-and-use-code-owners/)
 
 ## reviewersSampleSize
@@ -3852,6 +3864,11 @@ You could then configure a schedule like this at the repository level:
 This would mean that Renovate can run for 7 hours each night, plus all the time on weekends.
 Note how the above example makes use of the "OR" logic of combining multiple schedules in the array.
 
+<!-- prettier-ignore -->
+!!! note
+    If both the day of the week _and_ the day of the month are restricted in the schedule, then Renovate only runs when both the day of the month _and_ day of the week match!
+    For example: `* * 1-7 * 4` means Renovate only runs on the _first_ Thursday of the month.
+
 It's common to use `schedule` in combination with [`timezone`](#timezone).
 You should configure [`updateNotScheduled=false`](#updatenotscheduled) if you want the schedule more strictly enforced so that _updates_ to existing branches aren't pushed out of schedule.
 You can also configure [`automergeSchedule`](#automergeschedule) to limit the hours in which branches/PRs are _automerged_ (if [`automerge`](#automerge) is configured).
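Note: a sketch of a config that triggers the note above (timezone value is illustrative); with both a day-of-month and a day-of-week restriction, Renovate runs only when both match, i.e. the first Thursday of each month:

```json
{
  "schedule": ["* * 1-7 * 4"],
  "timezone": "America/Los_Angeles"
}
```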
@@ -307,7 +307,7 @@ Renovate will get the credentials with the [`google-auth-library`](https://www.n
           service_account: ${{ env.SERVICE_ACCOUNT }}
 
       - name: renovate
-        uses: renovatebot/github-action@v41.0.5
+        uses: renovatebot/github-action@v41.0.7
         env:
           RENOVATE_HOST_RULES: |
             [
@@ -478,7 +478,7 @@ Make sure to install the Google Cloud SDK into the custom image, as you need the
 For example:
 
 ```Dockerfile
-FROM renovate/renovate:39.42.4
+FROM renovate/renovate:39.82.1
 # Include the "Docker tip" which you can find here https://cloud.google.com/sdk/docs/install
 # under "Installation" for "Debian/Ubuntu"
 RUN ...
@@ -9,29 +9,42 @@ Requirements:
 Create a `docker-compose.yaml` and `otel-collector-config.yml` file as seen below in a folder.
 
 ```yaml title="docker-compose.yaml"
-version: '3'
+name: renovate-otel-demo
 
 services:
-  # Jaeger
+  # Jaeger for storing traces
   jaeger:
-    image: jaegertracing/all-in-one:1.63.0
+    image: jaegertracing/jaeger:2.1.0
     ports:
-      - '16686:16686'
-      - '4317'
+      - '16686:16686' # Web UI
+      - '4317' # OTLP gRPC
+      - '4318' # OTLP HTTP
+
+  # Prometheus for storing metrics
+  prometheus:
+    image: prom/prometheus:v3.0.1
+    ports:
+      - '9090:9090' # Web UI
+      - '4318' # OTLP HTTP
+    command:
+      - --web.enable-otlp-receiver
+      # Mirror these flags from the Dockerfile, because `command` overwrites the default flags.
+      # https://github.com/prometheus/prometheus/blob/5b5fee08af4c73230b2dae35964816f7b3c29351/Dockerfile#L23-L24
+      - --config.file=/etc/prometheus/prometheus.yml
+      - --storage.tsdb.path=/prometheus
 
   otel-collector:
-    image: otel/opentelemetry-collector-contrib:0.114.0
-    command: ['--config=/etc/otel-collector-config.yml']
+    # Using the Contrib version to access the spanmetrics connector.
+    # If you don't need the spanmetrics connector, you can use the standard version
+    image: otel/opentelemetry-collector-contrib:0.116.1
     volumes:
-      - ./otel-collector-config.yml:/etc/otel-collector-config.yml
+      - ./otel-collector-config.yml:/etc/otelcol-contrib/config.yaml
     ports:
       - '1888:1888' # pprof extension
       - '13133:13133' # health_check extension
       - '55679:55679' # zpages extension
-      - '4318:4318' # OTLP HTTP
-      - '4317:4317' # OTLP GRPC
-      - '9123:9123' # Prometheus exporter
+      - '4318:4318' # OTLP HTTP ( exposed to the host )
+      - '4317:4317' # OTLP gRPC ( exposed to the host )
+    depends_on:
+      - jaeger
+      - prometheus
 ```
 
 ```yaml title="otel-collector-config.yml"
@@ -39,28 +52,36 @@ receivers:
   otlp:
     protocols:
       grpc:
         endpoint: 0.0.0.0:4317
       http:
         endpoint: 0.0.0.0:4318
 
 exporters:
   otlp/jaeger:
     endpoint: jaeger:4317
     tls:
       insecure: true
-  logging:
-  prometheus:
-    endpoint: '0.0.0.0:9123'
+  otlphttp/prometheus:
+    endpoint: http://prometheus:9090/api/v1/otlp
+  debug:
+    # verbosity: normal
 
-processors:
-  batch:
+connectors:
   spanmetrics:
-    metrics_exporter: prometheus
-    latency_histogram_buckets: [10ms, 100ms, 250ms, 1s, 30s, 1m, 5m]
+    histogram:
+      exponential:
     dimensions:
       - name: http.method
         default: GET
       - name: http.status_code
       - name: http.host
-    dimensions_cache_size: 1000
-    aggregation_temporality: 'AGGREGATION_TEMPORALITY_CUMULATIVE'
+    exemplars:
+      enabled: true
+
+processors:
+  batch:
 
 extensions:
   health_check:
@@ -72,12 +93,23 @@ service:
   pipelines:
     traces:
       receivers: [otlp]
-      exporters: [otlp/jaeger, logging]
-      processors: [spanmetrics, batch]
+      exporters:
+        - otlp/jaeger
+        # Send traces to connector for metrics calculation
+        - spanmetrics
+        # Enable debug exporter to see traces in the logs
+        #- debug
+      processors: [batch]
 
     metrics:
-      receivers: [otlp]
-      exporters: [prometheus]
+      receivers:
+        - otlp # Receive metrics from Renovate.
+        - spanmetrics # Receive metrics calculated by the spanmetrics connector.
+      processors: [batch]
+      exporters:
+        - otlphttp/prometheus
+        # Enable debug exporter to see metrics in the logs
+        # - debug
 ```
 
 Start setup using this command inside the folder containing the files created in the earlier steps:
@@ -86,7 +118,11 @@ Start setup using this command inside the folder containing the files created in
 ```
 docker-compose up
 ```
 
-This command will start an [OpenTelemetry Collector](https://github.com/open-telemetry/opentelemetry-collector-contrib) and an instance of [Jaeger](https://www.jaegertracing.io/).
+This command will start:
+
+- an [OpenTelemetry Collector](https://github.com/open-telemetry/opentelemetry-collector-contrib)
+- an instance of [Jaeger for traces](https://www.jaegertracing.io/)
+- and [Prometheus](https://prometheus.io/)
 
 Jaeger will be now reachable under [http://localhost:16686](http://localhost:16686).
@@ -97,7 +133,8 @@ To start Renovate with OpenTelemetry enabled run following command, after pointi
 ```
 docker run \
   --rm \
-  -e OTEL_EXPORTER_OTLP_ENDPOINT=http://localhost:4318 \
+  --network renovate-otel-demo_default \
+  -e OTEL_EXPORTER_OTLP_ENDPOINT=http://otel-collector:4318 \
   -v "/path/to/your/config.js:/usr/src/app/config.js" \
   renovate/renovate:latest
 ```
@@ -130,100 +167,90 @@ You should be able to see now the full trace view which shows each HTTP request
 ### Metrics
 
 Additional to the received traces some metrics are calculated.
-This is achieved using the [spanmetricsprocessor](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/spanmetricsprocessor).
-The previous implemented setup will produce following metrics, which are exposed under [http://localhost:9123/metrics](http://localhost:9123/metrics):
+This is achieved using the [spanmetrics connector](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/connector/spanmetricsconnector).
+The previously implemented setup will produce following metrics, which are pushed to [Prometheus](http://localhost:9090):
 
 ```
-# HELP calls_total
-# TYPE calls_total counter
-
 ### Example of internal spans
-calls_total{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 3
-calls_total{operation="run",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 1
-### Example of http calls from Renovate to external services
-calls_total{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 9
-
-...
-
-# HELP latency
-# TYPE latency histogram
-### Example of internal spans
-latency_bucket{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET",le="0.1"} 0
-...
-latency_bucket{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET",le="9.223372036854775e+12"} 3
-latency_bucket{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET",le="+Inf"} 3
-latency_sum{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 30947.4689
-latency_count{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 3
-
-...
+traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 2
+traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="run", status_code="STATUS_CODE_UNSET"} 2
 
 ### Example of http calls from Renovate to external services
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="0.1"} 0
+traces_span_metrics_calls_total{http_host="api.github.com:443", http_method="POST", http_status_code="200", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_CLIENT", span_name="POST", status_code="STATUS_CODE_UNSET"} 4
+
+### Example histogram metrics
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="8", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 0
 ...
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="250"} 3
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="9.223372036854775e+12"} 9
-latency_bucket{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET",le="+Inf"} 9
-latency_sum{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 2306.1385999999998
-latency_count{http_host="api.github.com:443",http_method="POST",http_status_code="200",operation="HTTPS POST",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 9
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="2000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="5000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="15000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="10000", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_method="GET", job="renovatebot.com/renovate", le="+Inf", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
+
+traces_span_metrics_duration_milliseconds_sum{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 4190.694209
+traces_span_metrics_duration_milliseconds_count{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repository", status_code="STATUS_CODE_UNSET"} 1
 ```
 
-The [spanmetricsprocessor](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/processor/spanmetricsprocessor) creates two sets of metrics.
+The [spanmetrics connector](https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/main/connector/spanmetricsconnector) creates two sets of metrics.
 
 #### Calls metric
 
-At first there are the `calls_total` metrics which display how often specific trace spans have been observed.
+At first there are the `traces_span_metrics_calls_total` metrics.
+These metrics show how often _specific_ trace spans have been observed.
 
 For example:
-`calls_total{operation="renovate repository",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 3` signals that 3 repositories have been renovated.
-`calls_total{operation="run",service_name="renovate",span_kind="SPAN_KIND_INTERNAL",status_code="STATUS_CODE_UNSET"} 1` represents how often Renovate has been run.
+
+- `traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="repositories", status_code="STATUS_CODE_UNSET"} 2` signals that 2 repositories have been renovated.
+- `traces_span_metrics_calls_total{http_method="GET", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_INTERNAL", span_name="run", status_code="STATUS_CODE_UNSET"} 1` represents how often Renovate has been run.
 
 If we combine this using the PrometheusQueryLanguage ( PromQL ), we can calculate the average count of repositories each Renovate run handles.
 
 ```
-calls_total{operation="renovate repository",service_name="renovate"} / calls_total{operation="run",service_name="renovate"}
+traces_span_metrics_calls_total{span_name="repository",service_name="renovate"} / traces_span_metrics_calls_total{span_name="run",service_name="renovate"}
 ```
 
-This metrics is also for spans generated by http calls:
+These metrics are generated for HTTP call spans too:
 
 ```yaml
-calls_total{http_host="registry.terraform.io:443",http_method="GET",http_status_code="200",operation="HTTPS GET",service_name="renovate",span_kind="SPAN_KIND_CLIENT",status_code="STATUS_CODE_UNSET"} 5
+traces_span_metrics_calls_total{http_host="prometheus-community.github.io:443", http_method="GET", http_status_code="200", job="renovatebot.com/renovate", service_name="renovate", span_kind="SPAN_KIND_CLIENT", span_name="GET", status_code="STATUS_CODE_UNSET"} 5
 ```
 
 #### Latency buckets
 
-The second class of metrics exposed are the latency focused latency buckets which allow to create [heatmaps](https://grafana.com/docs/grafana/latest/basics/intro-histograms/#heatmaps).
+The second class of metrics exposed are the latency-focused buckets, that allow creating [heatmaps](https://grafana.com/docs/grafana/latest/basics/intro-histograms/#heatmaps).
 A request is added to a bucket if the latency is bigger than the bucket value (`le`): `request_duration => le`
 
 As an example, if we receive a request which needs `1.533s` to complete, we get the following metrics:
 
 ```
-latency_bucket{http_host="api.github.com:443",le="0.1"} 0
-latency_bucket{http_host="api.github.com:443",le="1"} 0
-latency_bucket{http_host="api.github.com:443",le="2"} 1
-latency_bucket{http_host="api.github.com:443",le="6"} 1
-latency_bucket{http_host="api.github.com:443",le="10"} 1
-latency_bucket{http_host="api.github.com:443",le="100"} 1
-latency_bucket{http_host="api.github.com:443",le="250"} 1
-latency_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 1
-latency_bucket{http_host="api.github.com:443",le="+Inf"} 1
-latency_sum{http_host="api.github.com:443"} 1.533
-latency_count{http_host="api.github.com:443"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="0.1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="2"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="6"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="10"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="100"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="250"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="+Inf"} 1
+traces_span_metrics_duration_milliseconds_sum{http_host="api.github.com:443"} 1.533
+traces_span_metrics_duration_milliseconds_count{http_host="api.github.com:443"} 1
 ```
 
 Now we have another request which this time takes 10s to complete:
 
 ```
-latency_bucket{http_host="api.github.com:443",le="0.1"} 0
-latency_bucket{http_host="api.github.com:443",le="1"} 0
-latency_bucket{http_host="api.github.com:443",le="2"} 1
-latency_bucket{http_host="api.github.com:443",le="6"} 1
-latency_bucket{http_host="api.github.com:443",le="10"} 2
-latency_bucket{http_host="api.github.com:443",le="100"} 2
-latency_bucket{http_host="api.github.com:443",le="250"} 2
-latency_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 2
-latency_bucket{http_host="api.github.com:443",le="+Inf"} 2
-latency_sum{http_host="api.github.com:443"} 11.533
-latency_count{http_host="api.github.com:443"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="0.1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="1"} 0
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="2"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="6"} 1
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="10"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="100"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="250"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="9.223372036854775e+12"} 2
+traces_span_metrics_duration_milliseconds_bucket{http_host="api.github.com:443",le="+Inf"} 2
+traces_span_metrics_duration_milliseconds_sum{http_host="api.github.com:443"} 11.533
+traces_span_metrics_duration_milliseconds_count{http_host="api.github.com:443"} 2
 ```
 
 More about the functionality can be found on the Prometheus page for [metric types](https://prometheus.io/docs/concepts/metric_types/#histogram).
@@ -25,8 +25,8 @@ It builds `latest` based on the `main` branch and all SemVer tags are published
 ```sh title="Example of valid tags"
 docker run --rm renovate/renovate
 docker run --rm renovate/renovate:39
-docker run --rm renovate/renovate:39.42
-docker run --rm renovate/renovate:39.42.4
+docker run --rm renovate/renovate:39.82
+docker run --rm renovate/renovate:39.82.1
 ```
 
 <!-- prettier-ignore -->
@@ -62,7 +62,7 @@ spec:
         - name: renovate
           # Update this to the latest available and then enable Renovate on
           # the manifest
-          image: renovate/renovate:39.42.4
+          image: renovate/renovate:39.82.1
           args:
             - user/repo
           # Environment Variables
@@ -121,7 +121,7 @@ spec:
     template:
       spec:
         containers:
-          - image: renovate/renovate:39.42.4
+          - image: renovate/renovate:39.82.1
             name: renovate-bot
             env: # For illustration purposes, please use secrets.
               - name: RENOVATE_PLATFORM
|
|||
containers:
|
||||
- name: renovate
|
||||
# Update this to the latest available and then enable Renovate on the manifest
|
||||
image: renovate/renovate:39.42.4
|
||||
image: renovate/renovate:39.82.1
|
||||
volumeMounts:
|
||||
- name: ssh-key-volume
|
||||
readOnly: true
|
||||
|
|
|
@@ -29,36 +29,6 @@ The Renovate team only fixes bugs in an older version if:
 If you're using the Mend Renovate App, you don't need to do anything, as the Renovate maintainers update it regularly.
 If you're self hosting Renovate, use the latest release if possible.
 
-## When is the Mend Renovate App updated with new Renovate versions?
-
-The Renovate maintainers manually update the app.
-The maintainers don't follow any release schedule or release cadence.
-This means the Mend Renovate App can lag a few hours to a week behind the open source version.
-Major releases of Renovate are held back until the maintainers are reasonably certain it works for most users.
-
-## How can I see which version the Mend Renovate app is using?
-
-Follow these steps to see which version the Mend Renovate app is on:
-
-1. Go to the [Mend Developer Portal](https://developer.mend.io/)
-1. Sign in to the Renovate app with your GitHub or Bitbucket account
-1. Select your organization
-1. Select a installed repository
-1. Select a job from the _Recent jobs_ overview
-1. Select the _Info_ Log Level from the dropdown menu
-1. You should see something like this:
-
-   ```
-   INFO: Repository started
-   {
-     "renovateVersion": "39.11.5"
-   }
-   ```
-
-<!-- prettier-ignore -->
-!!! tip
-    The PRs that Renovate creates have a link to the "repository job log" in the footer of the PR body text.
-
 ## Renovate core features not supported on all platforms
 
 | Feature | Platforms which lack feature | See Renovate issue(s) |
@@ -499,7 +499,7 @@ private-package==1.2.3
 
 #### Packages that Renovate needs
 
-Renovate relies on `pip`'s integration with the Python [keyring](https://pypi.org/project/keyring/) package along with the [keyrigs.envvars](https://pypi.org/project/keyrings.envvars/) backend for this.
+Renovate relies on `pip`'s integration with the Python [keyring](https://pypi.org/project/keyring/) package along with the [keyrings.envvars](https://pypi.org/project/keyrings.envvars/) backend for this.
 
 ##### Self-hosting Renovate
 

@@ -511,7 +511,7 @@ But if you are self-hosting Renovate and:
 - _not_ running Renovate in a Containerbase environment
 - or, _not_ using the Docker sidecar container
 
-Then you must install the Python keyring package and the keyrigs.envvars package into your self-hosted environment.
+Then you must install the Python keyring package and the keyrings.envvars package into your self-hosted environment.
 
 ### poetry
 
@@ -1,4 +1,4 @@
-![Renovate banner](https://app.renovatebot.com/images/whitesource_renovate_660_220.jpg){ loading=lazy }
+![Renovate banner](./assets/images/mend-renovate-cli-banner.jpg){ loading=lazy }
 
 # Renovate documentation
 
@@ -4,12 +4,13 @@ The information on this page is for the Mend-hosted cloud apps:
 
 - Renovate App on GitHub
 - Mend App on Bitbucket
+- Mend App on Azure DevOps
 
 If you self-host, you can skip reading this page.
 
 ## :warning: Migrate secrets in your Renovate config file :warning:
 
-The Mend-hosted cloud app will stop reading secrets from the Renovate config file in your repository on 01-Oct-2024.
+Use of encrypted secrets in the Mend Renovate cloud apps has been deprecated and soon the apps will stop reading secrets from the Renovate config file in your repository.
 You must migrate any secrets you currently keep in the Renovate config file, and put them in the app settings page on [developer.mend.io](https://developer.mend.io).
 To add secrets you must have admin-level rights.
@@ -17,6 +17,36 @@ The Renovate logs for the Mend-hosted apps are on the [Mend Developer Portal](ht
 
 Reading the logs can help you understand the configuration that Renovate used.
 
+## Renovate Version
+
+The Renovate version used by the Mend-hosted apps is updated manually by the maintainers of the app.
+The maintainers don't follow any release schedule or release cadence, but try to update at least once a week.
+This means the Mend Renovate App can lag a few hours to a week behind the open source version.
+
+Major releases of Renovate are held back until the maintainers are reasonably certain it works for most users.
+
+### Which version is the Mend Renovate app using?
+
+Follow these steps to see which version the Mend Renovate app used for a specific job:
+
+1. Sign in to the [Mend Developer Portal](https://developer.mend.io/) with your GitHub or Bitbucket account
+1. Select your organization
+1. Select an installed repository
+1. Select a job from the _Recent jobs_ overview
+1. Select the _Info_ Log Level from the dropdown menu
+1. You should see something like this:
+
+   ```
+   INFO: Repository started
+   {
+     "renovateVersion": "39.11.5"
+   }
+   ```
+
+<!-- prettier-ignore -->
+!!! tip
+    The PRs that Renovate creates have a link to the "repository job log" in the footer of the PR body text.
+
 ## Onboarding behavior
 
 ### Installing Renovate into all repositories leads to silent mode
@@ -8,7 +8,7 @@ They can be referenced from the Renovate config files inside the repo using `{{
 
 ## Old method
 
-This method will stop working on 01-Oct-2024:
+This method is deprecated:
 
 ```json title="Put encrypted secret in Renovate config"
 {

@@ -25,7 +25,7 @@ This method will stop working on 01-Oct-2024:
 
 ## New method
 
-This is the new method, that you should start using:
+This is the new method that you should start using:
 
 ```json title="Reference the app secret in the Renovate config"
 {
@@ -130,6 +130,23 @@ If you use Azure DevOps:
 The username of the PAT must match the username of the _user of the PAT_.
 The generated `nuget.config` forces the basic authentication, which cannot be overridden externally!
 
+## Ignoring package files when using presets
+
+Because the `nuget` manager has a dedicated `ignorePaths` entry in the `:ignoreModulesAndTests` preset, if you're using any presets that extend it (like `config:recommended`), you need to put your `ignorePaths` inside the `nuget` section for it to be merged.
+For example:
+
+```json
+{
+  "$schema": "https://docs.renovatebot.com/renovate-schema.json",
+  "extends": ["config:recommended"],
+  "nuget": {
+    "ignorePaths": ["IgnoreThisPackage/**"]
+  }
+}
+```
+
+Otherwise, all `nuget.ignorePaths` values in `:ignoreModulesAndTests` will override values you put inside `ignorePaths` at the top-level config.
+
 ## Future work
 
 We welcome contributions or feature requests to support more patterns or use cases.
@@ -39,6 +39,10 @@ This includes the following:
 
 If set to any value, Renovate will stop using the Docker Hub API (`https://hub.docker.com`) to fetch tags and instead use the normal Docker API for images pulled from `https://index.docker.io`.
 
+## `RENOVATE_X_ENCRYPTED_STRICT`
+
+If set to `"true"`, a config error Issue will be raised in case repository config contains `encrypted` objects without any `privateKey` defined.
+
 ## `RENOVATE_X_EXEC_GPID_HANDLE`
 
 If set, Renovate will terminate the whole process group of a terminated child process spawned by Renovate.
@@ -88,7 +88,7 @@ Pinning your development dependencies means you, and your team, are using the sa
 This makes the developer-tool side of your builds reproducible.
 Debugging faulty versions of your tools is easier, because you can use Git to check out different versions of the tools.
 
-### Why updating often is easier, faster and safer
+## Why updating often is easier, faster and safer
 
 You may think that updating takes too much time.
 But updating regularly actually _saves_ you time, because:

@@ -98,14 +98,14 @@ But updating regularly actually _saves_ you time, because:
 - You'll be ready for CVE patches
 - You'll look for ways to automate the updates
 
-#### Regular updates tend to be small
+### Regular updates tend to be small
 
 Firstly, when you update regularly updates tend to be small.
 The update's changelogs are small, quick to read, and easy to understand.
 You probably only need to make changes in a few places (if at all) to merge the PR and get going again.
 Because you're reading the changelogs regularly, you'll get a feel for the direction of the upstream project.
 
-#### Applying `major` updates is easier
+### Applying `major` updates is easier
 
 Secondly, when you're current with upstream, `major` updates are easier.
 This is because you already:

@@ -114,14 +114,14 @@ This is because you already:
 - use the latest names for features/variables
 - read the previous changelogs
 
-#### You'll be ready for CVE patches
+### You'll be ready for CVE patches
 
 Thirdly, you'll be ready when a upstream package releases a patch for a critical CVE.
 If you're current, you can review and merge Renovate's PR quickly.
 
 When you're behind on updates, you'll have a bad time, because you must read _more_ changelogs and make _more_ changes before you can merge the critical patch.
 
-#### You'll look for ways to automate the updates
+### You'll look for ways to automate the updates
 
 Finally, when you're updating often, you'll start looking for ways to automate the updates.
 You may start to [`automerge`](./configuration-options.md#automerge) development dependencies like Prettier, or ESLint when the linter passes.
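Note: that automation step often ends up as a `packageRules` entry along these lines (a hedged sketch; the matched packages are examples, not a recommendation from this commit):

```json
{
  "packageRules": [
    {
      "matchPackageNames": ["prettier", "eslint"],
      "matchUpdateTypes": ["minor", "patch"],
      "automerge": true
    }
  ]
}
```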
@@ -1,53 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`config/massage massageConfig does not massage lockFileMaintenance 1`] = `
-{
-  "packageRules": [
-    {
-      "lockFileMaintenance": {
-        "enabled": true,
-      },
-      "matchBaseBranches": [
-        "release/ft10/1.9.x",
-      ],
-      "matchManagers": [
-        "helmv3",
-      ],
-      "schedule": [
-        "at any time",
-      ],
-    },
-  ],
-}
-`;
-
-exports[`config/massage massageConfig massages packageRules matchUpdateTypes 1`] = `
-{
-  "packageRules": [
-    {
-      "matchPackageNames": [
-        "foo",
-      ],
-      "separateMajorMinor": false,
-    },
-    {
-      "matchPackageNames": [
-        "foo",
-      ],
-      "matchUpdateTypes": [
-        "minor",
-      ],
-      "semanticCommitType": "feat",
-    },
-    {
-      "matchPackageNames": [
-        "foo",
-      ],
-      "matchUpdateTypes": [
-        "patch",
-      ],
-      "semanticCommitType": "fix",
-    },
-  ],
-}
-`;
@@ -12,6 +12,7 @@ describe('config/decrypt', () => {
   beforeEach(() => {
     config = {};
     GlobalConfig.reset();
+    delete process.env.RENOVATE_X_ENCRYPTED_STRICT;
   });
 
   it('returns empty with no privateKey', async () => {

@@ -30,5 +31,14 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.a).toBeUndefined();
     });
+
+    it('throws exception if encrypted found but no privateKey', async () => {
+      config.encrypted = { a: '1' };
+      process.env.RENOVATE_X_ENCRYPTED_STRICT = 'true';
+
+      await expect(decryptConfig(config, repository)).rejects.toThrow(
+        'config-validation',
+      );
+    });
   });
 });
@@ -1,4 +1,5 @@
 import is from '@sindresorhus/is';
+import { CONFIG_VALIDATION } from '../constants/error-messages';
 import { logger } from '../logger';
 import { regEx } from '../util/regex';
 import { addSecretForSanitizing } from '../util/sanitize';

@@ -173,7 +174,15 @@ export async function decryptConfig(
         }
       }
     } else {
-      logger.error('Found encrypted data but no privateKey');
+      if (process.env.RENOVATE_X_ENCRYPTED_STRICT === 'true') {
+        const error = new Error(CONFIG_VALIDATION);
+        error.validationSource = 'config';
+        error.validationError = 'Encrypted config unsupported';
+        error.validationMessage = `This config contains an encrypted object at location \`$.${key}\` but no privateKey is configured. To support encrypted config, the Renovate administrator must configure a \`privateKey\` in Global Configuration.`;
+        throw error;
+      } else {
+        logger.error('Found encrypted data but no privateKey');
+      }
     }
     delete decryptedConfig.encrypted;
   } else if (is.array(val)) {
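Note: for context, the `encrypted` objects this strict mode guards against look roughly like the following repository-config sketch (host and ciphertext are placeholders, not from this commit); without a global `privateKey`, setting `RENOVATE_X_ENCRYPTED_STRICT=true` turns this from a logged error into a config-validation error:

```json
{
  "hostRules": [
    {
      "matchHost": "registry.example.com",
      "encrypted": {
        "token": "<encrypted-payload>"
      }
    }
  ]
}
```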
@@ -32,14 +32,23 @@ describe('config/index', () => {
   it('merges packageRules', () => {
     const parentConfig = { ...defaultConfig };
     Object.assign(parentConfig, {
-      packageRules: [{ a: 1 }, { a: 2 }],
+      packageRules: [
+        { matchPackageNames: ['pkg1'] },
+        { matchPackageNames: ['pkg2'] },
+      ],
     });
     const childConfig = {
-      packageRules: [{ a: 3 }, { a: 4 }],
+      packageRules: [
+        { matchPackageNames: ['pkg3'] },
+        { matchPackageNames: ['pkg4'] },
+      ],
     };
     const config = mergeChildConfig(parentConfig, childConfig);
-    expect(config.packageRules.map((rule) => rule.a)).toMatchObject([
-      1, 2, 3, 4,
+    expect(config.packageRules).toMatchObject([
+      { matchPackageNames: ['pkg1'] },
+      { matchPackageNames: ['pkg2'] },
+      { matchPackageNames: ['pkg3'] },
+      { matchPackageNames: ['pkg4'] },
     ]);
   });
 

@@ -95,9 +104,15 @@ describe('config/index', () => {
 
   it('handles null child packageRules', () => {
     const parentConfig = { ...defaultConfig };
-    parentConfig.packageRules = [{ a: 3 }, { a: 4 }];
+    parentConfig.packageRules = [
+      { matchPackageNames: ['pkg1'] },
+      { matchPackageNames: ['pkg2'] },
+    ];
     const config = mergeChildConfig(parentConfig, {});
-    expect(config.packageRules).toHaveLength(2);
+    expect(config.packageRules).toMatchObject([
+      { matchPackageNames: ['pkg1'] },
+      { matchPackageNames: ['pkg2'] },
+    ]);
   });
 
   it('handles undefined childConfig', () => {
@@ -17,15 +17,6 @@ describe('config/massage', () => {
     expect(Array.isArray(res.schedule)).toBeTrue();
   });
 
-  it('massages npmToken', () => {
-    const config: RenovateConfig = {
-      npmToken: 'some-token',
-    };
-    expect(massage.massageConfig(config)).toEqual({
-      npmrc: '//registry.npmjs.org/:_authToken=some-token\n',
-    });
-  });
-
   it('massages packageRules matchUpdateTypes', () => {
     const config: RenovateConfig = {
       packageRules: [

@@ -42,7 +33,24 @@ describe('config/massage', () => {
       ],
     };
     const res = massage.massageConfig(config);
-    expect(res).toMatchSnapshot();
+    expect(res).toEqual({
+      packageRules: [
+        {
+          matchPackageNames: ['foo'],
+          separateMajorMinor: false,
+        },
+        {
+          matchPackageNames: ['foo'],
+          matchUpdateTypes: ['minor'],
+          semanticCommitType: 'feat',
+        },
+        {
+          matchPackageNames: ['foo'],
+          matchUpdateTypes: ['patch'],
+          semanticCommitType: 'fix',
+        },
+      ],
+    });
     expect(res.packageRules).toHaveLength(3);
   });
 

@@ -73,7 +81,18 @@ describe('config/massage', () => {
       ],
     };
     const res = massage.massageConfig(config);
-    expect(res).toMatchSnapshot();
+    expect(res).toEqual({
+      packageRules: [
+        {
+          lockFileMaintenance: {
+            enabled: true,
+          },
+          matchBaseBranches: ['release/ft10/1.9.x'],
+          matchManagers: ['helmv3'],
+          schedule: ['at any time'],
+        },
+      ],
+    });
     expect(res.packageRules).toHaveLength(1);
   });
 });
@@ -21,9 +21,6 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
   for (const [key, val] of Object.entries(config)) {
     if (allowedStrings.includes(key) && is.string(val)) {
       massagedConfig[key] = [val];
-    } else if (key === 'npmToken' && is.string(val) && val.length < 50) {
-      massagedConfig.npmrc = `//registry.npmjs.org/:_authToken=${val}\n`;
-      delete massagedConfig.npmToken;
     } else if (is.array(val)) {
      massagedConfig[key] = [];
      val.forEach((item) => {
@ -52,6 +52,7 @@ const options: RenovateOptions[] = [
|
|||
subType: 'string',
|
||||
globalOnly: true,
|
||||
patternMatch: true,
|
||||
mergeable: true,
|
||||
},
|
||||
{
|
||||
name: 'detectGlobalManagerConfig',
|
||||
|
@ -515,7 +516,7 @@ const options: RenovateOptions[] = [
|
|||
description:
|
||||
'Change this value to override the default Renovate sidecar image.',
|
||||
type: 'string',
|
||||
default: 'ghcr.io/containerbase/sidecar:13.0.24',
|
||||
default: 'ghcr.io/containerbase/sidecar:13.5.5',
|
||||
globalOnly: true,
|
||||
},
|
||||
{
|
||||
|
@ -2389,6 +2390,7 @@ const options: RenovateOptions[] = [
|
|||
'gomodTidyE',
|
||||
'gomodUpdateImportPaths',
|
||||
'gomodSkipVendor',
|
||||
'gomodVendor',
|
||||
'helmUpdateSubChartArchives',
|
||||
'npmDedupe',
|
||||
'pnpmDedupe',
|
||||
|
@ -2855,6 +2857,14 @@ const options: RenovateOptions[] = [
|
|||
type: 'boolean',
|
||||
default: false,
|
||||
},
|
||||
{
|
||||
name: 'cloneSubmodulesFilter',
|
||||
description:
|
||||
'List of submodules names or patterns to clone when cloneSubmodules=true.',
|
||||
type: 'array',
|
||||
subType: 'string',
|
||||
default: ['*'],
|
||||
},
|
||||
{
|
||||
name: 'ignorePrAuthor',
|
||||
description:
|
||||
|
|
|
@ -513,485 +513,6 @@ describe('config/presets/index', () => {

    });
  });

  describe('parsePreset', () => {
    // default namespace
    it('returns default package name', () => {
      expect(presets.parsePreset(':base')).toEqual({
        repo: 'default',
        params: undefined,
        presetName: 'base',
        presetPath: undefined,
        presetSource: 'internal',
      });
    });

    it('parses github', () => {
      expect(presets.parsePreset('github>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('handles special chars', () => {
      expect(presets.parsePreset('github>some/repo:foo+bar')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'foo+bar',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github subfiles', () => {
      expect(presets.parsePreset('github>some/repo:somefile')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github subfiles with preset name', () => {
      expect(
        presets.parsePreset('github>some/repo:somefile/somepreset'),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile/somepreset',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github file with preset name with .json extension', () => {
      expect(presets.parsePreset('github>some/repo:somefile.json')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github file with preset name with .json5 extension', () => {
      expect(presets.parsePreset('github>some/repo:somefile.json5')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json5',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github subfiles with preset name with .json extension', () => {
      expect(
        presets.parsePreset('github>some/repo:somefile.json/somepreset'),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json/somepreset',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github subfiles with preset name with .json5 extension', () => {
      expect(
        presets.parsePreset('github>some/repo:somefile.json5/somepreset'),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json5/somepreset',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github subfiles with preset and sub-preset name', () => {
      expect(
        presets.parsePreset(
          'github>some/repo:somefile/somepreset/somesubpreset',
        ),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile/somepreset/somesubpreset',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github subdirectories', () => {
      expect(
        presets.parsePreset('github>some/repo//somepath/somesubpath/somefile'),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: 'somepath/somesubpath',
        presetSource: 'github',
      });
    });

    it('parses github toplevel file using subdirectory syntax', () => {
      expect(presets.parsePreset('github>some/repo//somefile')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses gitlab', () => {
      expect(presets.parsePreset('gitlab>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'gitlab',
      });
    });

    it('parses gitea', () => {
      expect(presets.parsePreset('gitea>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'gitea',
      });
    });

    it('parses local', () => {
      expect(presets.parsePreset('local>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local with spaces', () => {
      expect(presets.parsePreset('local>A2B CD/A2B_Renovate')).toEqual({
        repo: 'A2B CD/A2B_Renovate',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local with subdirectory', () => {
      expect(
        presets.parsePreset('local>some-group/some-repo//some-dir/some-file'),
      ).toEqual({
        repo: 'some-group/some-repo',
        params: undefined,
        presetName: 'some-file',
        presetPath: 'some-dir',
        presetSource: 'local',
      });
    });

    it('parses local with spaces and subdirectory', () => {
      expect(
        presets.parsePreset('local>A2B CD/A2B_Renovate//some-dir/some-file'),
      ).toEqual({
        repo: 'A2B CD/A2B_Renovate',
        params: undefined,
        presetName: 'some-file',
        presetPath: 'some-dir',
        presetSource: 'local',
      });
    });

    it('parses local with sub preset and tag', () => {
      expect(
        presets.parsePreset(
          'local>some-group/some-repo:some-file/subpreset#1.2.3',
        ),
      ).toEqual({
        repo: 'some-group/some-repo',
        params: undefined,
        presetName: 'some-file/subpreset',
        presetPath: undefined,
        presetSource: 'local',
        tag: '1.2.3',
      });
    });

    it('parses local with subdirectory and tag', () => {
      expect(
        presets.parsePreset(
          'local>some-group/some-repo//some-dir/some-file#1.2.3',
        ),
      ).toEqual({
        repo: 'some-group/some-repo',
        params: undefined,
        presetName: 'some-file',
        presetPath: 'some-dir',
        presetSource: 'local',
        tag: '1.2.3',
      });
    });

    it('parses local with subdirectory and branch/tag with a slash', () => {
      expect(
        presets.parsePreset(
          'local>PROJECT/repository//path/to/preset#feature/branch',
        ),
      ).toEqual({
        repo: 'PROJECT/repository',
        params: undefined,
        presetName: 'preset',
        presetPath: 'path/to',
        presetSource: 'local',
        tag: 'feature/branch',
      });
    });

    it('parses local with sub preset and branch/tag with a slash', () => {
      expect(
        presets.parsePreset(
          'local>PROJECT/repository:preset/subpreset#feature/branch',
        ),
      ).toEqual({
        repo: 'PROJECT/repository',
        params: undefined,
        presetName: 'preset/subpreset',
        presetPath: undefined,
        presetSource: 'local',
        tag: 'feature/branch',
      });
    });

    it('parses no prefix as local', () => {
      expect(presets.parsePreset('some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local Bitbucket user repo with preset name', () => {
      expect(presets.parsePreset('local>~john_doe/repo//somefile')).toEqual({
        repo: '~john_doe/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local Bitbucket user repo', () => {
      expect(presets.parsePreset('local>~john_doe/renovate-config')).toEqual({
        repo: '~john_doe/renovate-config',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('returns default package name with params', () => {
      expect(presets.parsePreset(':group(packages/eslint, eslint)')).toEqual({
        repo: 'default',
        params: ['packages/eslint', 'eslint'],
        presetName: 'group',
        presetPath: undefined,
        presetSource: 'internal',
      });
    });

    // scoped namespace
    it('returns simple scope', () => {
      expect(presets.parsePreset('@somescope')).toEqual({
        repo: '@somescope/renovate-config',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns simple scope and params', () => {
      expect(presets.parsePreset('@somescope(param1)')).toEqual({
        repo: '@somescope/renovate-config',
        params: ['param1'],
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and default', () => {
      expect(presets.parsePreset('@somescope/somepackagename')).toEqual({
        repo: '@somescope/somepackagename',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and params and default', () => {
      expect(
        presets.parsePreset(
          '@somescope/somepackagename(param1, param2, param3)',
        ),
      ).toEqual({
        repo: '@somescope/somepackagename',
        params: ['param1', 'param2', 'param3'],
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with presetName', () => {
      expect(presets.parsePreset('@somescope:somePresetName')).toEqual({
        repo: '@somescope/renovate-config',
        params: undefined,
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with presetName and params', () => {
      expect(presets.parsePreset('@somescope:somePresetName(param1)')).toEqual({
        repo: '@somescope/renovate-config',
        params: ['param1'],
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and presetName', () => {
      expect(
        presets.parsePreset('@somescope/somepackagename:somePresetName'),
      ).toEqual({
        repo: '@somescope/somepackagename',
        params: undefined,
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and presetName and params', () => {
      expect(
        presets.parsePreset(
          '@somescope/somepackagename:somePresetName(param1, param2)',
        ),
      ).toEqual({
        repo: '@somescope/somepackagename',
        params: ['param1', 'param2'],
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    // non-scoped namespace
    it('returns non-scoped default', () => {
      expect(presets.parsePreset('somepackage')).toEqual({
        repo: 'renovate-config-somepackage',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns non-scoped package name', () => {
      expect(presets.parsePreset('somepackage:webapp')).toEqual({
        repo: 'renovate-config-somepackage',
        params: undefined,
        presetName: 'webapp',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns non-scoped package name full', () => {
      expect(presets.parsePreset('renovate-config-somepackage:webapp')).toEqual(
        {
          repo: 'renovate-config-somepackage',
          params: undefined,
          presetName: 'webapp',
          presetPath: undefined,
          presetSource: 'npm',
        },
      );
    });

    it('returns non-scoped package name with params', () => {
      expect(presets.parsePreset('somepackage:webapp(param1)')).toEqual({
        repo: 'renovate-config-somepackage',
        params: ['param1'],
        presetName: 'webapp',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('parses HTTPS URLs', () => {
      expect(
        presets.parsePreset(
          'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
        ),
      ).toEqual({
        repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
        params: undefined,
        presetName: '',
        presetPath: undefined,
        presetSource: 'http',
      });
    });

    it('parses HTTP URLs', () => {
      expect(
        presets.parsePreset(
          'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
        ),
      ).toEqual({
        repo: 'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
        params: undefined,
        presetName: '',
        presetPath: undefined,
        presetSource: 'http',
      });
    });

    it('parses HTTPS URLs with parameters', () => {
      expect(
        presets.parsePreset(
          'https://my.server/gitea/renovate-config/raw/branch/main/default.json(param1)',
        ),
      ).toEqual({
        repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
        params: ['param1'],
        presetName: '',
        presetPath: undefined,
        presetSource: 'http',
      });
    });
  });

  describe('getPreset', () => {
    it('handles removed presets with a migration', async () => {
      const res = await presets.getPreset(':base', {});
@ -24,7 +24,8 @@ import * as http from './http';

import * as internal from './internal';
import * as local from './local';
import * as npm from './npm';
import type { ParsedPreset, Preset, PresetApi } from './types';
import { parsePreset } from './parse';
import type { Preset, PresetApi } from './types';
import {
  PRESET_DEP_NOT_FOUND,
  PRESET_INVALID,

@ -46,13 +47,6 @@ const presetSources: Record<string, PresetApi> = {

const presetCacheNamespace = 'preset';

const nonScopedPresetWithSubdirRegex = regEx(
  /^(?<repo>~?[\w\-. /]+?)\/\/(?:(?<presetPath>[\w\-./]+)\/)?(?<presetName>[\w\-.]+)(?:#(?<tag>[\w\-./]+?))?$/,
);
const gitPresetRegex = regEx(
  /^(?<repo>~?[\w\-. /]+)(?::(?<presetName>[\w\-.+/]+))?(?:#(?<tag>[\w\-./]+?))?$/,
);

export function replaceArgs(
  obj: string,
  argMapping: Record<string, any>,

@ -105,120 +99,6 @@ export function replaceArgs(

  return obj;
}

export function parsePreset(input: string): ParsedPreset {
  let str = input;
  let presetSource: string | undefined;
  let presetPath: string | undefined;
  let repo: string;
  let presetName: string;
  let tag: string | undefined;
  let params: string[] | undefined;
  if (str.startsWith('github>')) {
    presetSource = 'github';
    str = str.substring('github>'.length);
  } else if (str.startsWith('gitlab>')) {
    presetSource = 'gitlab';
    str = str.substring('gitlab>'.length);
  } else if (str.startsWith('gitea>')) {
    presetSource = 'gitea';
    str = str.substring('gitea>'.length);
  } else if (str.startsWith('local>')) {
    presetSource = 'local';
    str = str.substring('local>'.length);
  } else if (str.startsWith('http://') || str.startsWith('https://')) {
    presetSource = 'http';
  } else if (
    !str.startsWith('@') &&
    !str.startsWith(':') &&
    str.includes('/')
  ) {
    presetSource = 'local';
  }
  str = str.replace(regEx(/^npm>/), '');
  presetSource = presetSource ?? 'npm';
  if (str.includes('(')) {
    params = str
      .slice(str.indexOf('(') + 1, -1)
      .split(',')
      .map((elem) => elem.trim());
    str = str.slice(0, str.indexOf('('));
  }
  if (presetSource === 'http') {
    return { presetSource, repo: str, presetName: '', params };
  }
  const presetsPackages = [
    'compatibility',
    'config',
    'customManagers',
    'default',
    'docker',
    'group',
    'helpers',
    'mergeConfidence',
    'monorepo',
    'npm',
    'packages',
    'preview',
    'replacements',
    'schedule',
    'security',
    'workarounds',
  ];
  if (
    presetsPackages.some((presetPackage) => str.startsWith(`${presetPackage}:`))
  ) {
    presetSource = 'internal';
    [repo, presetName] = str.split(':');
  } else if (str.startsWith(':')) {
    // default namespace
    presetSource = 'internal';
    repo = 'default';
    presetName = str.slice(1);
  } else if (str.startsWith('@')) {
    // scoped namespace
    [, repo] = regEx(/(@.*?)(:|$)/).exec(str)!;
    str = str.slice(repo.length);
    if (!repo.includes('/')) {
      repo += '/renovate-config';
    }
    if (str === '') {
      presetName = 'default';
    } else {
      presetName = str.slice(1);
    }
  } else if (str.includes('//')) {
    // non-scoped namespace with a subdirectory preset

    // Validation
    if (str.includes(':')) {
      throw new Error(PRESET_PROHIBITED_SUBPRESET);
    }
    if (!nonScopedPresetWithSubdirRegex.test(str)) {
      throw new Error(PRESET_INVALID);
    }
    ({ repo, presetPath, presetName, tag } =
      nonScopedPresetWithSubdirRegex.exec(str)!.groups!);
  } else {
    ({ repo, presetName, tag } = gitPresetRegex.exec(str)!.groups!);

    if (presetSource === 'npm' && !repo.startsWith('renovate-config-')) {
      repo = `renovate-config-${repo}`;
    }
    if (!is.nonEmptyString(presetName)) {
      presetName = 'default';
    }
  }

  return {
    presetSource,
    presetPath,
    repo,
    presetName,
    tag,
    params,
  };
}

export async function getPreset(
  preset: string,
  baseConfig?: RenovateConfig,

@ -96,7 +96,7 @@ export const presets: Record<string, Preset> = {

    packageRules: [
      {
        automerge: true,
        matchCurrentVersion: '>= 1.0.0',
        matchCurrentVersion: '!/^0/',
        matchUpdateTypes: ['minor', 'patch'],
      },
    ],

11
lib/config/presets/internal/global.ts
Normal file

@ -0,0 +1,11 @@

import type { Preset } from '../types';

/* eslint sort-keys: ["error", "asc", {caseSensitive: false, natural: true}] */

export const presets: Record<string, Preset> = {
  safeEnv: {
    allowedEnv: ['GO*'],
    description:
      'Hopefully safe environment variables to allow users to configure.',
  },
};
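The new `global:safeEnv` preset resolves through the ordinary `extends` mechanism like any other internal preset. A minimal sketch of how a self-hosted administrator might opt in (the surrounding global-config shape is assumed here, not shown in this diff):

// Hypothetical self-hosted config, for illustration only;
// extending the preset is equivalent to setting allowedEnv: ['GO*'].
const globalConfig = {
  extends: ['global:safeEnv'],
};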
13
lib/config/presets/internal/group.spec.ts
Normal file

@ -0,0 +1,13 @@

import { presets } from './group';

const exceptions = new Set(['monorepos', 'recommended']);

describe('config/presets/internal/group', () => {
  const presetNames = Object.keys(presets).filter(
    (name) => !exceptions.has(name),
  );

  it.each(presetNames)('group:%s contains packageRules', (name: string) => {
    expect(presets[name]).toHaveProperty('packageRules');
  });
});
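This spec enforces that every `group:*` preset apart from the listed exceptions exposes a `packageRules` array; the hunks below bring `fusionjs` into that shape and add `micrometer` already conforming. The invariant in isolation, as an illustration only (values copied from the micrometer entry added below):

// Shape the spec requires of each group preset:
const micrometer = {
  description:
    "Group Micrometer packages together, e.g. 'io.micrometer:micrometer-core'.",
  packageRules: [
    {
      groupName: 'micrometer',
      matchPackageNames: ['io.micrometer:micrometer-**'],
    },
  ],
};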
@ -111,14 +111,19 @@ const staticGroups = {

  },
  fusionjs: {
    description: 'Group Fusion.js packages together.',
    matchPackageNames: [
      'fusion-cli',
      'fusion-core',
      'fusion-test-utils',
      'fusion-tokens',
      'fusion-plugin-**',
      'fusion-react**',
      'fusion-apollo**',
    packageRules: [
      {
        groupName: 'Fusion.js packages',
        matchPackageNames: [
          'fusion-cli',
          'fusion-core',
          'fusion-test-utils',
          'fusion-tokens',
          'fusion-plugin-**',
          'fusion-react**',
          'fusion-apollo**',
        ],
      },
    ],
  },
  githubArtifactActions: {

@ -311,9 +316,10 @@ const staticGroups = {

          'k8s.io/cluster-bootstrap**',
          'k8s.io/code-generator**',
          'k8s.io/component-base**',
          'k8s.io/component-helpers**',
          'k8s.io/controller-manager**',
          'k8s.io/cri-api**',
          // 'k8s.io/csi-api', has not go.mod set up and does not follow the versioning of other repos
          // 'k8s.io/csi-api', has no go.mod set up and does not follow the versioning of other repos
          'k8s.io/csi-translation-lib**',
          'k8s.io/kube-aggregator**',
          'k8s.io/kube-controller-manager**',

@ -341,6 +347,16 @@ const staticGroups = {

      },
    ],
  },
  micrometer: {
    description:
      "Group Micrometer packages together, e.g. 'io.micrometer:micrometer-core'.",
    packageRules: [
      {
        groupName: 'micrometer',
        matchPackageNames: ['io.micrometer:micrometer-**'],
      },
    ],
  },
  nodeJs: {
    description:
      "Group anything that looks like Node.js together so that it's updated together.",

@ -462,6 +478,7 @@ const staticGroups = {

      'group:jestPlusTypes',
      'group:jwtFramework',
      'group:kubernetes',
      'group:micrometer',
      'group:phpstan',
      'group:polymer',
      'group:react',
@ -30,7 +30,8 @@ describe('config/presets/internal/index', () => {

        const config = await resolveConfigPresets(
          massageConfig(presetConfig),
        );
        const res = await validateConfig('repo', config, true);
        const configType = groupName === 'global' ? 'global' : 'repo';
        const res = await validateConfig(configType, config, true);
        expect(res.errors).toHaveLength(0);
        expect(res.warnings).toHaveLength(0);
      } catch (err) {

@ -3,6 +3,7 @@ import * as configPreset from './config';

import * as customManagersPreset from './custom-managers';
import * as defaultPreset from './default';
import * as dockerPreset from './docker';
import * as globalPreset from './global';
import * as groupPreset from './group';
import * as helpersPreset from './helpers';
import * as mergeConfidence from './merge-confidence';

@ -22,6 +23,7 @@ export const groups: Record<string, Record<string, Preset>> = {

  customManagers: customManagersPreset.presets,
  default: defaultPreset.presets,
  docker: dockerPreset.presets,
  global: globalPreset.presets,
  group: groupPreset.presets,
  helpers: helpersPreset.presets,
  mergeConfidence: mergeConfidence.presets,

@ -135,7 +135,8 @@ export const presets: Record<string, Preset> = {

  },
  react: {
    description: 'All React packages.',
    matchPackageNames: ['@types/react', 'react**'],
    matchDatasources: ['npm'],
    matchPackageNames: ['@types/react**', 'react**'],
  },
  stylelint: {
    description: 'All Stylelint packages.',
462
lib/config/presets/parse.spec.ts
Normal file

@ -0,0 +1,462 @@

import { parsePreset } from './parse';

describe('config/presets/parse', () => {
  describe('parsePreset', () => {
    // default namespace
    it('returns default package name', () => {
      expect(parsePreset(':base')).toEqual({
        repo: 'default',
        params: undefined,
        presetName: 'base',
        presetPath: undefined,
        presetSource: 'internal',
      });
    });

    it('parses github', () => {
      expect(parsePreset('github>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('handles special chars', () => {
      expect(parsePreset('github>some/repo:foo+bar')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'foo+bar',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github subfiles', () => {
      expect(parsePreset('github>some/repo:somefile')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github subfiles with preset name', () => {
      expect(parsePreset('github>some/repo:somefile/somepreset')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile/somepreset',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github file with preset name with .json extension', () => {
      expect(parsePreset('github>some/repo:somefile.json')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github file with preset name with .json5 extension', () => {
      expect(parsePreset('github>some/repo:somefile.json5')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json5',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github subfiles with preset name with .json extension', () => {
      expect(parsePreset('github>some/repo:somefile.json/somepreset')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile.json/somepreset',
        presetPath: undefined,
        presetSource: 'github',
        tag: undefined,
      });
    });

    it('parses github subfiles with preset name with .json5 extension', () => {
      expect(parsePreset('github>some/repo:somefile.json5/somepreset')).toEqual(
        {
          repo: 'some/repo',
          params: undefined,
          presetName: 'somefile.json5/somepreset',
          presetPath: undefined,
          presetSource: 'github',
          tag: undefined,
        },
      );
    });

    it('parses github subfiles with preset and sub-preset name', () => {
      expect(
        parsePreset('github>some/repo:somefile/somepreset/somesubpreset'),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile/somepreset/somesubpreset',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses github subdirectories', () => {
      expect(
        parsePreset('github>some/repo//somepath/somesubpath/somefile'),
      ).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: 'somepath/somesubpath',
        presetSource: 'github',
      });
    });

    it('parses github toplevel file using subdirectory syntax', () => {
      expect(parsePreset('github>some/repo//somefile')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: undefined,
        presetSource: 'github',
      });
    });

    it('parses gitlab', () => {
      expect(parsePreset('gitlab>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'gitlab',
      });
    });

    it('parses gitea', () => {
      expect(parsePreset('gitea>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'gitea',
      });
    });

    it('parses local', () => {
      expect(parsePreset('local>some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local with spaces', () => {
      expect(parsePreset('local>A2B CD/A2B_Renovate')).toEqual({
        repo: 'A2B CD/A2B_Renovate',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local with subdirectory', () => {
      expect(
        parsePreset('local>some-group/some-repo//some-dir/some-file'),
      ).toEqual({
        repo: 'some-group/some-repo',
        params: undefined,
        presetName: 'some-file',
        presetPath: 'some-dir',
        presetSource: 'local',
      });
    });

    it('parses local with spaces and subdirectory', () => {
      expect(
        parsePreset('local>A2B CD/A2B_Renovate//some-dir/some-file'),
      ).toEqual({
        repo: 'A2B CD/A2B_Renovate',
        params: undefined,
        presetName: 'some-file',
        presetPath: 'some-dir',
        presetSource: 'local',
      });
    });

    it('parses local with sub preset and tag', () => {
      expect(
        parsePreset('local>some-group/some-repo:some-file/subpreset#1.2.3'),
      ).toEqual({
        repo: 'some-group/some-repo',
        params: undefined,
        presetName: 'some-file/subpreset',
        presetPath: undefined,
        presetSource: 'local',
        tag: '1.2.3',
      });
    });

    it('parses local with subdirectory and tag', () => {
      expect(
        parsePreset('local>some-group/some-repo//some-dir/some-file#1.2.3'),
      ).toEqual({
        repo: 'some-group/some-repo',
        params: undefined,
        presetName: 'some-file',
        presetPath: 'some-dir',
        presetSource: 'local',
        tag: '1.2.3',
      });
    });

    it('parses local with subdirectory and branch/tag with a slash', () => {
      expect(
        parsePreset('local>PROJECT/repository//path/to/preset#feature/branch'),
      ).toEqual({
        repo: 'PROJECT/repository',
        params: undefined,
        presetName: 'preset',
        presetPath: 'path/to',
        presetSource: 'local',
        tag: 'feature/branch',
      });
    });

    it('parses local with sub preset and branch/tag with a slash', () => {
      expect(
        parsePreset('local>PROJECT/repository:preset/subpreset#feature/branch'),
      ).toEqual({
        repo: 'PROJECT/repository',
        params: undefined,
        presetName: 'preset/subpreset',
        presetPath: undefined,
        presetSource: 'local',
        tag: 'feature/branch',
      });
    });

    it('parses no prefix as local', () => {
      expect(parsePreset('some/repo')).toEqual({
        repo: 'some/repo',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local Bitbucket user repo with preset name', () => {
      expect(parsePreset('local>~john_doe/repo//somefile')).toEqual({
        repo: '~john_doe/repo',
        params: undefined,
        presetName: 'somefile',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('parses local Bitbucket user repo', () => {
      expect(parsePreset('local>~john_doe/renovate-config')).toEqual({
        repo: '~john_doe/renovate-config',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'local',
      });
    });

    it('returns default package name with params', () => {
      expect(parsePreset(':group(packages/eslint, eslint)')).toEqual({
        repo: 'default',
        params: ['packages/eslint', 'eslint'],
        presetName: 'group',
        presetPath: undefined,
        presetSource: 'internal',
      });
    });

    // scoped namespace
    it('returns simple scope', () => {
      expect(parsePreset('@somescope')).toEqual({
        repo: '@somescope/renovate-config',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns simple scope and params', () => {
      expect(parsePreset('@somescope(param1)')).toEqual({
        repo: '@somescope/renovate-config',
        params: ['param1'],
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and default', () => {
      expect(parsePreset('@somescope/somepackagename')).toEqual({
        repo: '@somescope/somepackagename',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and params and default', () => {
      expect(
        parsePreset('@somescope/somepackagename(param1, param2, param3)'),
      ).toEqual({
        repo: '@somescope/somepackagename',
        params: ['param1', 'param2', 'param3'],
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with presetName', () => {
      expect(parsePreset('@somescope:somePresetName')).toEqual({
        repo: '@somescope/renovate-config',
        params: undefined,
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with presetName and params', () => {
      expect(parsePreset('@somescope:somePresetName(param1)')).toEqual({
        repo: '@somescope/renovate-config',
        params: ['param1'],
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and presetName', () => {
      expect(parsePreset('@somescope/somepackagename:somePresetName')).toEqual({
        repo: '@somescope/somepackagename',
        params: undefined,
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns scope with repo and presetName and params', () => {
      expect(
        parsePreset(
          '@somescope/somepackagename:somePresetName(param1, param2)',
        ),
      ).toEqual({
        repo: '@somescope/somepackagename',
        params: ['param1', 'param2'],
        presetName: 'somePresetName',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    // non-scoped namespace
    it('returns non-scoped default', () => {
      expect(parsePreset('somepackage')).toEqual({
        repo: 'renovate-config-somepackage',
        params: undefined,
        presetName: 'default',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns non-scoped package name', () => {
      expect(parsePreset('somepackage:webapp')).toEqual({
        repo: 'renovate-config-somepackage',
        params: undefined,
        presetName: 'webapp',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns non-scoped package name full', () => {
      expect(parsePreset('renovate-config-somepackage:webapp')).toEqual({
        repo: 'renovate-config-somepackage',
        params: undefined,
        presetName: 'webapp',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('returns non-scoped package name with params', () => {
      expect(parsePreset('somepackage:webapp(param1)')).toEqual({
        repo: 'renovate-config-somepackage',
        params: ['param1'],
        presetName: 'webapp',
        presetPath: undefined,
        presetSource: 'npm',
      });
    });

    it('parses HTTPS URLs', () => {
      expect(
        parsePreset(
          'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
        ),
      ).toEqual({
        repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
        params: undefined,
        presetName: '',
        presetPath: undefined,
        presetSource: 'http',
      });
    });

    it('parses HTTP URLs', () => {
      expect(
        parsePreset(
          'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
        ),
      ).toEqual({
        repo: 'http://my.server/users/me/repos/renovate-presets/raw/default.json?at=refs%2Fheads%2Fmain',
        params: undefined,
        presetName: '',
        presetPath: undefined,
        presetSource: 'http',
      });
    });

    it('parses HTTPS URLs with parameters', () => {
      expect(
        parsePreset(
          'https://my.server/gitea/renovate-config/raw/branch/main/default.json(param1)',
        ),
      ).toEqual({
        repo: 'https://my.server/gitea/renovate-config/raw/branch/main/default.json',
        params: ['param1'],
        presetName: '',
        presetPath: undefined,
        presetSource: 'http',
      });
    });
  });
});
126
lib/config/presets/parse.ts
Normal file

@ -0,0 +1,126 @@

import is from '@sindresorhus/is';
import { regEx } from '../../util/regex';
import type { ParsedPreset } from './types';
import { PRESET_INVALID, PRESET_PROHIBITED_SUBPRESET } from './util';

const nonScopedPresetWithSubdirRegex = regEx(
  /^(?<repo>~?[\w\-. /]+?)\/\/(?:(?<presetPath>[\w\-./]+)\/)?(?<presetName>[\w\-.]+)(?:#(?<tag>[\w\-./]+?))?$/,
);
const gitPresetRegex = regEx(
  /^(?<repo>~?[\w\-. /]+)(?::(?<presetName>[\w\-.+/]+))?(?:#(?<tag>[\w\-./]+?))?$/,
);

export function parsePreset(input: string): ParsedPreset {
  let str = input;
  let presetSource: string | undefined;
  let presetPath: string | undefined;
  let repo: string;
  let presetName: string;
  let tag: string | undefined;
  let params: string[] | undefined;
  if (str.startsWith('github>')) {
    presetSource = 'github';
    str = str.substring('github>'.length);
  } else if (str.startsWith('gitlab>')) {
    presetSource = 'gitlab';
    str = str.substring('gitlab>'.length);
  } else if (str.startsWith('gitea>')) {
    presetSource = 'gitea';
    str = str.substring('gitea>'.length);
  } else if (str.startsWith('local>')) {
    presetSource = 'local';
    str = str.substring('local>'.length);
  } else if (str.startsWith('http://') || str.startsWith('https://')) {
    presetSource = 'http';
  } else if (
    !str.startsWith('@') &&
    !str.startsWith(':') &&
    str.includes('/')
  ) {
    presetSource = 'local';
  }
  str = str.replace(regEx(/^npm>/), '');
  presetSource = presetSource ?? 'npm';
  if (str.includes('(')) {
    params = str
      .slice(str.indexOf('(') + 1, -1)
      .split(',')
      .map((elem) => elem.trim());
    str = str.slice(0, str.indexOf('('));
  }
  if (presetSource === 'http') {
    return { presetSource, repo: str, presetName: '', params };
  }
  const presetsPackages = [
    'compatibility',
    'config',
    'customManagers',
    'default',
    'docker',
    'global',
    'group',
    'helpers',
    'mergeConfidence',
    'monorepo',
    'npm',
    'packages',
    'preview',
    'replacements',
    'schedule',
    'security',
    'workarounds',
  ];
  if (
    presetsPackages.some((presetPackage) => str.startsWith(`${presetPackage}:`))
  ) {
    presetSource = 'internal';
    [repo, presetName] = str.split(':');
  } else if (str.startsWith(':')) {
    // default namespace
    presetSource = 'internal';
    repo = 'default';
    presetName = str.slice(1);
  } else if (str.startsWith('@')) {
    // scoped namespace
    [, repo] = regEx(/(@.*?)(:|$)/).exec(str)!;
    str = str.slice(repo.length);
    if (!repo.includes('/')) {
      repo += '/renovate-config';
    }
    if (str === '') {
      presetName = 'default';
    } else {
      presetName = str.slice(1);
    }
  } else if (str.includes('//')) {
    // non-scoped namespace with a subdirectory preset

    // Validation
    if (str.includes(':')) {
      throw new Error(PRESET_PROHIBITED_SUBPRESET);
    }
    if (!nonScopedPresetWithSubdirRegex.test(str)) {
      throw new Error(PRESET_INVALID);
    }
    ({ repo, presetPath, presetName, tag } =
      nonScopedPresetWithSubdirRegex.exec(str)!.groups!);
  } else {
    ({ repo, presetName, tag } = gitPresetRegex.exec(str)!.groups!);

    if (presetSource === 'npm' && !repo.startsWith('renovate-config-')) {
      repo = `renovate-config-${repo}`;
    }
    if (!is.nonEmptyString(presetName)) {
      presetName = 'default';
    }
  }

  return {
    presetSource,
    presetPath,
    repo,
    presetName,
    tag,
    params,
  };
}
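Because the extracted `parsePreset` is a pure function, its behavior is easy to spot-check outside the spec file. A short sketch, with inputs drawn from the cases the new tests cover plus the newly registered `global` namespace:

import { parsePreset } from './parse';

// Subdirectory syntax splits into repo, path, and file name:
parsePreset('github>some/repo//somepath/somesubpath/somefile');
// => presetSource 'github', repo 'some/repo',
//    presetPath 'somepath/somesubpath', presetName 'somefile'

// With 'global' now in presetsPackages, 'global:safeEnv' resolves
// as an internal preset:
parsePreset('global:safeEnv');
// => presetSource 'internal', repo 'global', presetName 'safeEnv'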
@ -239,6 +239,7 @@ export interface RenovateConfig

  baseBranch?: string;
  defaultBranch?: string;
  branchList?: string[];
  cloneSubmodulesFilter?: string[];
  description?: string | string[];
  force?: RenovateConfig;
  errors?: ValidationMessage[];

@ -305,6 +306,9 @@ export interface RenovateConfig

  statusCheckNames?: Record<StatusCheckKey, string | null>;
  env?: UserEnv;
  logLevelRemap?: LogLevelRemap[];

  branchTopic?: string;
  additionalBranchPrefix?: string;
}

const CustomDatasourceFormats = ['json', 'plain', 'yaml', 'html'] as const;
17
lib/config/validation-helpers/utils.spec.ts
Normal file

@ -0,0 +1,17 @@

import { getParentName } from './utils';

describe('config/validation-helpers/utils', () => {
  describe('getParentName()', () => {
    it('ignores encrypted in root', () => {
      expect(getParentName('encrypted')).toBeEmptyString();
    });

    it('handles array types', () => {
      expect(getParentName('hostRules[1]')).toBe('hostRules');
    });

    it('handles encrypted within array types', () => {
      expect(getParentName('hostRules[0].encrypted')).toBe('hostRules');
    });
  });
});
138
lib/config/validation-helpers/utils.ts
Normal file

@ -0,0 +1,138 @@

import is from '@sindresorhus/is';
import { logger } from '../../logger';
import type {
  RegexManagerConfig,
  RegexManagerTemplates,
} from '../../modules/manager/custom/regex/types';
import { regEx } from '../../util/regex';
import type { ValidationMessage } from '../types';

export function getParentName(parentPath: string | undefined): string {
  return parentPath
    ? parentPath
        .replace(regEx(/\.?encrypted$/), '')
        .replace(regEx(/\[\d+\]$/), '')
        .split('.')
        .pop()!
    : '.';
}

export function validatePlainObject(
  val: Record<string, unknown>,
): true | string {
  for (const [key, value] of Object.entries(val)) {
    if (!is.string(value)) {
      return key;
    }
  }
  return true;
}

export function validateNumber(
  key: string,
  val: unknown,
  allowsNegative: boolean,
  currentPath?: string,
  subKey?: string,
): ValidationMessage[] {
  const errors: ValidationMessage[] = [];
  const path = `${currentPath}${subKey ? '.' + subKey : ''}`;
  if (is.number(val)) {
    if (val < 0 && !allowsNegative) {
      errors.push({
        topic: 'Configuration Error',
        message: `Configuration option \`${path}\` should be a positive integer. Found negative value instead.`,
      });
    }
  } else {
    errors.push({
      topic: 'Configuration Error',
      message: `Configuration option \`${path}\` should be an integer. Found: ${JSON.stringify(
        val,
      )} (${typeof val}).`,
    });
  }

  return errors;
}

/** An option is a "false global" if it has the same name as a global-only option
 * but is actually just a field of a non-global option (or of one of its children).
 * E.g. `token`: it is the global option used as the bot's token, and it can also
 * be the token used for a platform inside the hostRules configuration.
 */
export function isFalseGlobal(
  optionName: string,
  parentPath?: string,
): boolean {
  if (parentPath?.includes('hostRules')) {
    if (
      optionName === 'token' ||
      optionName === 'username' ||
      optionName === 'password'
    ) {
      return true;
    }
  }

  return false;
}

function hasField(
  customManager: Partial<RegexManagerConfig>,
  field: string,
): boolean {
  const templateField = `${field}Template` as keyof RegexManagerTemplates;
  return !!(
    customManager[templateField] ??
    customManager.matchStrings?.some((matchString) =>
      matchString.includes(`(?<${field}>`),
    )
  );
}

export function validateRegexManagerFields(
  customManager: Partial<RegexManagerConfig>,
  currentPath: string,
  errors: ValidationMessage[],
): void {
  if (is.nonEmptyArray(customManager.matchStrings)) {
    for (const matchString of customManager.matchStrings) {
      try {
        regEx(matchString);
      } catch (err) {
        logger.debug(
          { err },
          'customManager.matchStrings regEx validation error',
        );
        errors.push({
          topic: 'Configuration Error',
          message: `Invalid regExp for ${currentPath}: \`${matchString}\``,
        });
      }
    }
  } else {
    errors.push({
      topic: 'Configuration Error',
      message: `Each Custom Manager must contain a non-empty matchStrings array`,
    });
  }

  const mandatoryFields = ['currentValue', 'datasource'];
  for (const field of mandatoryFields) {
    if (!hasField(customManager, field)) {
      errors.push({
        topic: 'Configuration Error',
        message: `Regex Managers must contain ${field}Template configuration or regex group named ${field}`,
      });
    }
  }

  const nameFields = ['depName', 'packageName'];
  if (!nameFields.some((field) => hasField(customManager, field))) {
    errors.push({
      topic: 'Configuration Error',
      message: `Regex Managers must contain depName or packageName regex groups or templates`,
    });
  }
}
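The widened `validateNumber` signature moves the negative-integer policy out to the call site. Shown in isolation, the call pattern the updated validators below adopt:

// Callers look up the per-option policy, then pass it in explicitly:
const allowsNegative = optionAllowsNegativeIntegers.has(key);
errors.push(...validateNumber(key, val, allowsNegative, currentPath));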
@ -4,22 +4,6 @@ import type { RenovateConfig } from './types';

import * as configValidation from './validation';

describe('config/validation', () => {
  describe('getParentName()', () => {
    it('ignores encrypted in root', () => {
      expect(configValidation.getParentName('encrypted')).toBeEmptyString();
    });

    it('handles array types', () => {
      expect(configValidation.getParentName('hostRules[1]')).toBe('hostRules');
    });

    it('handles encrypted within array types', () => {
      expect(configValidation.getParentName('hostRules[0].encrypted')).toBe(
        'hostRules',
      );
    });
  });

  describe('validateConfig(config)', () => {
    it('returns deprecation warnings', async () => {
      const config = {
@ -1,11 +1,6 @@

import is from '@sindresorhus/is';
import { logger } from '../logger';
import { allManagersList, getManagerList } from '../modules/manager';
import { isCustomManager } from '../modules/manager/custom';
import type {
  RegexManagerConfig,
  RegexManagerTemplates,
} from '../modules/manager/custom/regex/types';
import type { CustomManager } from '../modules/manager/custom/types';
import type { HostRule } from '../types';
import { getExpression } from '../util/jsonata';

@ -39,6 +34,13 @@ import { allowedStatusCheckStrings } from './types';

import * as managerValidator from './validation-helpers/managers';
import * as matchBaseBranchesValidator from './validation-helpers/match-base-branches';
import * as regexOrGlobValidator from './validation-helpers/regex-glob-matchers';
import {
  getParentName,
  isFalseGlobal,
  validateNumber,
  validatePlainObject,
  validateRegexManagerFields,
} from './validation-helpers/utils';

const options = getOptions();

@ -84,42 +86,6 @@ function isIgnored(key: string): boolean {

  return ignoredNodes.includes(key);
}

function validatePlainObject(val: Record<string, unknown>): true | string {
  for (const [key, value] of Object.entries(val)) {
    if (!is.string(value)) {
      return key;
    }
  }
  return true;
}

function validateNumber(
  key: string,
  val: unknown,
  currentPath?: string,
  subKey?: string,
): ValidationMessage[] {
  const errors: ValidationMessage[] = [];
  const path = `${currentPath}${subKey ? '.' + subKey : ''}`;
  if (is.number(val)) {
    if (val < 0 && !optionAllowsNegativeIntegers.has(key)) {
      errors.push({
        topic: 'Configuration Error',
        message: `Configuration option \`${path}\` should be a positive integer. Found negative value instead.`,
      });
    }
  } else {
    errors.push({
      topic: 'Configuration Error',
      message: `Configuration option \`${path}\` should be an integer. Found: ${JSON.stringify(
        val,
      )} (${typeof val}).`,
    });
  }

  return errors;
}

function getUnsupportedEnabledManagers(enabledManagers: string[]): string[] {
  return enabledManagers.filter(
    (manager) => !allManagersList.includes(manager.replace('custom.', '')),

@ -186,16 +152,6 @@ function initOptions(): void {

  optionsInitialized = true;
}

export function getParentName(parentPath: string | undefined): string {
  return parentPath
    ? parentPath
        .replace(regEx(/\.?encrypted$/), '')
        .replace(regEx(/\[\d+\]$/), '')
        .split('.')
        .pop()!
    : '.';
}

export async function validateConfig(
  configType: 'global' | 'inherit' | 'repo',
  config: RenovateConfig,

@ -370,7 +326,8 @@ export async function validateConfig(

        });
      }
    } else if (type === 'integer') {
      errors.push(...validateNumber(key, val, currentPath));
      const allowsNegative = optionAllowsNegativeIntegers.has(key);
      errors.push(...validateNumber(key, val, allowsNegative, currentPath));
    } else if (type === 'array' && val) {
      if (is.array(val)) {
        for (const [subIndex, subval] of val.entries()) {

@ -865,65 +822,6 @@ export async function validateConfig(

  return { errors, warnings };
}

function hasField(
  customManager: Partial<RegexManagerConfig>,
  field: string,
): boolean {
  const templateField = `${field}Template` as keyof RegexManagerTemplates;
  return !!(
    customManager[templateField] ??
    customManager.matchStrings?.some((matchString) =>
      matchString.includes(`(?<${field}>`),
    )
  );
}

function validateRegexManagerFields(
  customManager: Partial<RegexManagerConfig>,
  currentPath: string,
  errors: ValidationMessage[],
): void {
  if (is.nonEmptyArray(customManager.matchStrings)) {
    for (const matchString of customManager.matchStrings) {
      try {
        regEx(matchString);
      } catch (err) {
        logger.debug(
          { err },
          'customManager.matchStrings regEx validation error',
        );
        errors.push({
          topic: 'Configuration Error',
          message: `Invalid regExp for ${currentPath}: \`${matchString}\``,
        });
      }
    }
  } else {
    errors.push({
      topic: 'Configuration Error',
      message: `Each Custom Manager must contain a non-empty matchStrings array`,
    });
  }

  const mandatoryFields = ['currentValue', 'datasource'];
  for (const field of mandatoryFields) {
    if (!hasField(customManager, field)) {
      errors.push({
        topic: 'Configuration Error',
        message: `Regex Managers must contain ${field}Template configuration or regex group named ${field}`,
      });
    }
  }

  const nameFields = ['depName', 'packageName'];
  if (!nameFields.some((field) => hasField(customManager, field))) {
    errors.push({
      topic: 'Configuration Error',
      message: `Regex Managers must contain depName or packageName regex groups or templates`,
    });
  }
}

/**
 * Basic validation for global config options
 */

@ -1013,7 +911,8 @@ async function validateGlobalConfig(

        });
      }
    } else if (type === 'integer') {
      warnings.push(...validateNumber(key, val, currentPath));
      const allowsNegative = optionAllowsNegativeIntegers.has(key);
      warnings.push(...validateNumber(key, val, allowsNegative, currentPath));
    } else if (type === 'boolean') {
      if (val !== true && val !== false) {
        warnings.push({

@ -1079,8 +978,15 @@ async function validateGlobalConfig(

      }
    } else if (key === 'cacheTtlOverride') {
      for (const [subKey, subValue] of Object.entries(val)) {
        const allowsNegative = optionAllowsNegativeIntegers.has(key);
        warnings.push(
          ...validateNumber(key, subValue, currentPath, subKey),
          ...validateNumber(
            key,
            subValue,
            allowsNegative,
            currentPath,
            subKey,
          ),
        );
      }
    } else {

@ -1101,22 +1007,3 @@ async function validateGlobalConfig(

    }
  }
}

/** An option is a false global if it has the same name as a global only option
 * but is actually just the field of a non global option or field an children of the non global option
 * eg. token: it's global option used as the bot's token as well and
 * also it can be the token used for a platform inside the hostRules configuration
 */
function isFalseGlobal(optionName: string, parentPath?: string): boolean {
  if (parentPath?.includes('hostRules')) {
    if (
      optionName === 'token' ||
      optionName === 'username' ||
      optionName === 'password'
    ) {
      return true;
    }
  }

  return false;
}
@@ -15,7 +15,7 @@
  "flake8": "https://flake8.pycqa.org/en/latest/release-notes/index.html",
  "django-storages": "https://github.com/jschneier/django-storages/blob/master/CHANGELOG.rst",
  "lxml": "https://git.launchpad.net/lxml/plain/CHANGES.txt",
  "mypy": "https://mypy-lang.blogspot.com/",
  "mypy": "https://mypy.readthedocs.io/en/latest/changelog.html",
  "phonenumbers": "https://github.com/daviddrysdale/python-phonenumbers/blob/dev/python/HISTORY.md",
  "pycountry": "https://github.com/flyingcircusio/pycountry/blob/master/HISTORY.txt",
  "django-debug-toolbar": "https://django-debug-toolbar.readthedocs.io/en/latest/changes.html",
@@ -55,10 +55,6 @@
    "https://github.com/awslabs/aws-sdk-rust"
  ],
  "awsappsync": "https://github.com/awslabs/aws-mobile-appsync-sdk-js",
  "axis2": [
    "https://gitbox.apache.org/repos/asf?p=axis-axis2-java-core.git;a=summary",
    "https://github.com/apache/axis-axis2-java-core"
  ],
  "azure-functions-dotnet-worker": "https://github.com/Azure/azure-functions-dotnet-worker",
  "azure azure-libraries-for-net": "https://github.com/Azure/azure-libraries-for-net",
  "azure azure-sdk-for-net": "https://github.com/Azure/azure-sdk-for-net",
@@ -281,6 +277,7 @@
  "embla-carousel": "https://github.com/davidjerleke/embla-carousel",
  "emojibase": "https://github.com/milesj/emojibase",
  "emotion": "https://github.com/emotion-js/emotion",
  "envelop": "https://github.com/n1ru4l/envelop",
  "eslint": "https://github.com/eslint/eslint",
  "eslint-config-globex": "https://github.com/GlobexDesignsInc/eslint-config-globex",
  "eslint-stylistic": "https://github.com/eslint-stylistic/eslint-stylistic",
@@ -307,9 +304,12 @@
  "go-cloud": "https://github.com/google/go-cloud",
  "google-api-dotnet-client": "https://github.com/googleapis/google-api-dotnet-client",
  "grafana": "https://github.com/grafana/grafana",
  "graphiql": "https://github.com/graphql/graphiql",
  "graphql-hive-gateway": "https://github.com/graphql-hive/gateway",
  "graphql-mesh": "https://github.com/Urigo/graphql-mesh",
  "graphql-modules": "https://github.com/Urigo/graphql-modules",
  "graphql-tools": "https://github.com/ardatan/graphql-tools",
  "graphql-yoga": "https://github.com/dotansimha/graphql-yoga",
  "graphqlcodegenerator": [
    "https://github.com/dotansimha/graphql-code-generator-community",
    "https://github.com/dotansimha/graphql-code-generator",
@@ -321,6 +321,7 @@
  "grpc-java": "https://github.com/grpc/grpc-java",
  "gstreamer-rust": "https://gitlab.freedesktop.org/gstreamer/gstreamer-rs",
  "guava": "https://github.com/google/guava",
  "happy-dom": "https://github.com/capricorn86/happy-dom",
  "Hangfire": "https://github.com/HangfireIO/Hangfire",
  "hickory-dns": "https://github.com/hickory-dns/hickory-dns",
  "infrastructure-ui": "https://github.com/instructure/instructure-ui",
@@ -328,9 +329,12 @@
  "istanbuljs": "https://github.com/istanbuljs/istanbuljs",
  "jackson": [
    "https://github.com/FasterXML/jackson",
    "https://github.com/FasterXML/jackson-annotations",
    "https://github.com/FasterXML/jackson-core",
    "https://github.com/FasterXML/jackson-databind",
    "https://github.com/FasterXML/jackson-dataformats-binary",
    "https://github.com/FasterXML/jackson-dataformats-text",
    "https://github.com/FasterXML/jackson-jaxrs-providers",
    "https://github.com/FasterXML/jackson-module-kotlin"
  ],
  "jasmine": "https://github.com/jasmine/jasmine",
@@ -357,7 +361,9 @@
  "lerna-lite": "https://github.com/lerna-lite/lerna-lite",
  "lexical": "https://github.com/facebook/lexical",
  "linguijs": "https://github.com/lingui/js-lingui",
  "linkifyjs": "https://github.com/nfrasser/linkifyjs",
  "log4j2": "https://github.com/apache/logging-log4j2",
  "logback": "https://github.com/qos-ch/logback",
  "loopback": [
    "https://github.com/strongloop/loopback-next",
    "https://github.com/loopbackio/loopback-next"
@@ -424,9 +430,13 @@
  "opentelemetry-erlang": "https://github.com/open-telemetry/opentelemetry-erlang",
  "opentelemetry-erlang-contrib": "https://github.com/open-telemetry/opentelemetry-erlang-contrib",
  "opentelemetry-go": "https://github.com/open-telemetry/opentelemetry-go",
  "opentelemetry-go-contrib": "https://github.com/open-telemetry/opentelemetry-go-contrib",
  "opentelemetry-java": "https://github.com/open-telemetry/opentelemetry-java",
  "opentelemetry-java-contrib": "https://github.com/open-telemetry/opentelemetry-java-contrib",
  "opentelemetry-js": "https://github.com/open-telemetry/opentelemetry-js",
  "opentelemetry-js-contrib": "https://github.com/open-telemetry/opentelemetry-js-contrib",
  "opentelemetry-rust": "https://github.com/open-telemetry/opentelemetry-rust",
  "opentelemetry-rust-contrib": "https://github.com/open-telemetry/opentelemetry-rust-contrib",
  "orleans": "https://github.com/dotnet/orleans",
  "panda-css": "https://github.com/chakra-ui/panda",
  "parcel": "https://github.com/parcel-bundler/parcel",
@@ -442,6 +452,7 @@
  "pollyjs": "https://github.com/Netflix/pollyjs",
  "pothos": "https://github.com/hayes/pothos",
  "pouchdb": "https://github.com/pouchdb/pouchdb",
  "powermock": "https://github.com/powermock/powermock",
  "prisma": "https://github.com/prisma/prisma",
  "prometheus-net": "https://github.com/prometheus-net/prometheus-net",
  "promster": "https://github.com/tdeekens/promster",
@@ -478,6 +489,7 @@
  "sanity": "https://github.com/sanity-io/sanity",
  "serilog-ui": "https://github.com/serilog-contrib/serilog-ui",
  "scaffdog": "https://github.com/scaffdog/scaffdog",
  "sea-orm": "https://github.com/SeaQL/sea-orm",
  "secretlint": "https://github.com/secretlint/secretlint",
  "sendgrid-nodejs": "https://github.com/sendgrid/sendgrid-nodejs",
  "sentry-dotnet": "https://github.com/getsentry/sentry-dotnet",
@@ -493,6 +505,7 @@
  "skiasharp": "https://github.com/mono/SkiaSharp",
  "slack-net": "https://github.com/soxtoby/SlackNet",
  "slf4j": "https://github.com/qos-ch/slf4j",
  "slim-message-bus": "https://github.com/zarusz/SlimMessageBus",
  "spectre-console": "https://github.com/spectreconsole/spectre.console",
  "springfox": "https://github.com/springfox/springfox",
  "steeltoe": "https://github.com/SteeltoeOSS/steeltoe",
@@ -540,6 +553,7 @@
  "unhead": "https://github.com/unjs/unhead",
  "unocss": "https://github.com/unocss/unocss",
  "uppy": "https://github.com/transloadit/uppy",
  "utoipa": "https://github.com/juhaku/utoipa",
  "vaadin-hilla": "https://github.com/vaadin/hilla",
  "vaadinWebComponents": "https://github.com/vaadin/web-components",
  "visx": "https://github.com/airbnb/visx",
@@ -581,12 +595,17 @@
  "apache-poi": "/^org.apache.poi:/",
  "aws-java-sdk": "/^com.amazonaws:aws-java-sdk-/",
  "aws-java-sdk-v2": "/^software.amazon.awssdk:/",
  "axis2": "/^org.apache.axis2:/",
  "babel6": "/^babel6$/",
  "clarity": ["/^@cds\\//", "/^@clr\\//"],
  "embroider": "/^@embroider\\//",
  "forge": "/^@forge\\//",
  "fullcalendar": "/^@fullcalendar\\//",
  "hotchocolate": "/^HotChocolate\\./",
  "oracle-database": [
    "/^com.oracle.database.jdbc:/",
    "/^com.oracle.database.nls:/"
  ],
  "prometheus-simpleclient": "/^io.prometheus:simpleclient/",
  "russh": ["/^russh$/", "/^russh-keys$/"],
  "spfx": ["/^@microsoft\\/sp-/", "/^@microsoft\\/eslint-.+-spfx$/"],
@@ -59,10 +59,12 @@ describe('instrumentation/index', () => {
      _registeredSpanProcessors: [
        {
          _exporter: {
            _transport: {
              _delegate: {
                _transport: {
                  _parameters: {
                    url: 'https://collector.example.com/v1/traces',
                  _transport: {
                    _parameters: {
                      url: 'https://collector.example.com/v1/traces',
                    },
                  },
                },
              },
@@ -88,10 +90,12 @@ describe('instrumentation/index', () => {
        { _exporter: {} },
        {
          _exporter: {
            _transport: {
              _delegate: {
                _transport: {
                  _parameters: {
                    url: 'https://collector.example.com/v1/traces',
                  _transport: {
                    _parameters: {
                      url: 'https://collector.example.com/v1/traces',
                    },
                  },
                },
              },
@@ -1,8 +1,10 @@
import type { WriteStream } from 'node:fs';
import bunyan from 'bunyan';
import fs from 'fs-extra';
import { partial } from '../../test/util';
import { add } from '../util/host-rules';
import { addSecretForSanitizing as addSecret } from '../util/sanitize';
import type { RenovateLogger } from './renovate-logger';
import {
  addMeta,
  addStream,
@@ -17,16 +19,38 @@ import {
  setMeta,
} from '.';

const initialContext = 'initial_context';

jest.unmock('.');
jest.mock('nanoid', () => ({
  nanoid: () => 'initial_context',
}));

const bunyanDebugSpy = jest.spyOn(bunyan.prototype, 'debug');

describe('logger/index', () => {
  it('inits', () => {
    expect(logger).toBeDefined();
  });

  it('uses an auto-generated log context', () => {
    logger.debug('');

    expect(bunyanDebugSpy).toHaveBeenCalledWith(
      { logContext: initialContext },
      '',
    );
  });

  it('sets and gets context', () => {
    setContext('123test');
    expect(getContext()).toBe('123test');
    const logContext = '123test';
    const msg = 'test';
    setContext(logContext);

    logger.debug(msg);

    expect(getContext()).toBe(logContext);
    expect(bunyanDebugSpy).toHaveBeenCalledWith({ logContext }, msg);
  });

  it('supports logging with metadata', () => {
@@ -41,16 +65,62 @@ describe('logger/index', () => {
    expect(() => logger.debug('some meta')).not.toThrow();
  });

  it('sets meta', () => {
    expect(() => setMeta({ any: 'test' })).not.toThrow();
  });
  describe('meta functions', () => {
    beforeEach(() => {
      setContext(initialContext);
    });

  it('adds meta', () => {
    expect(() => addMeta({ new: 'test' })).not.toThrow();
  });
    it('sets meta', () => {
      const logMeta = { foo: 'foo' };
      const meta = { bar: 'bar' };
      setMeta(meta);

  it('removes meta', () => {
    expect(() => removeMeta(['new'])).not.toThrow();
      logger.debug(logMeta, '');

      expect(bunyanDebugSpy).toHaveBeenCalledWith(
        { logContext: initialContext, ...meta, ...logMeta },
        '',
      );
      expect(bunyanDebugSpy).toHaveBeenCalledTimes(1);
    });

    it('adds meta', () => {
      const logMeta = { foo: 'foo' };
      const meta = { bar: 'bar' };
      addMeta(meta);

      logger.debug(logMeta, '');

      expect(bunyanDebugSpy).toHaveBeenCalledWith(
        { logContext: initialContext, ...meta, ...logMeta },
        '',
      );
      expect(bunyanDebugSpy).toHaveBeenCalledTimes(1);
    });

    it('removes meta', () => {
      const logMeta = { foo: 'foo' };
      const meta = { bar: 'bar' };
      setMeta(meta);

      logger.debug(logMeta, '');

      expect(bunyanDebugSpy).toHaveBeenCalledWith(
        { logContext: initialContext, ...meta, ...logMeta },
        '',
      );
      expect(bunyanDebugSpy).toHaveBeenCalledTimes(1);

      removeMeta(Object.keys(meta));

      logger.debug(logMeta, '');

      expect(bunyanDebugSpy).toHaveBeenCalledWith(
        { logContext: initialContext, ...logMeta },
        '',
      );
      expect(bunyanDebugSpy).toHaveBeenCalledTimes(2);
    });
  });

  it('sets level', () => {
@@ -59,15 +129,30 @@ describe('logger/index', () => {
    expect(logLevel()).toBe('debug');
  });

  it('should create a child logger', () => {
    const childLogger = (logger as RenovateLogger).childLogger();
    const loggerSpy = jest.spyOn(logger, 'debug');
    const childLoggerSpy = jest.spyOn(childLogger, 'debug');

    childLogger.debug('test');

    expect(loggerSpy).toHaveBeenCalledTimes(0);
    expect(childLoggerSpy).toHaveBeenCalledTimes(1);
    expect(childLoggerSpy).toHaveBeenCalledWith('test');
  });

  it('saves problems', () => {
    addSecret('p4$$w0rd');
    levels('stdout', 'fatal');
    logger.fatal('fatal error');
    logger.error('some meta');
    logger.error({ some: 'meta', password: 'super secret' });
    logger.error({ some: 'meta' }, 'message');
    logger.warn('a warning with a p4$$w0rd');
    logger.trace('ignored');
    logger.info('ignored');
    expect(getProblems()).toMatchObject([
      { msg: 'fatal error' },
      { msg: 'some meta' },
      { some: 'meta', password: '***********' },
      { some: 'meta', msg: 'message' },
@@ -6,9 +6,8 @@ import upath from 'upath';
import cmdSerializer from './cmd-serializer';
import configSerializer from './config-serializer';
import errSerializer from './err-serializer';
import { once, reset as onceReset } from './once';
import { RenovateStream } from './pretty-stdout';
import { getRemappedLevel } from './remap';
import { RenovateLogger } from './renovate-logger';
import type { BunyanRecord, Logger } from './types';
import {
  ProblemStream,
@@ -17,161 +16,120 @@ import {
  withSanitizer,
} from './utils';

let logContext: string = getEnv('LOG_CONTEXT') ?? nanoid();
let curMeta: Record<string, unknown> = {};

const problems = new ProblemStream();

let stdoutLevel = validateLogLevel(getEnv('LOG_LEVEL'), 'info');
const stdout: bunyan.Stream = {
  name: 'stdout',
  level: stdoutLevel,
  stream: process.stdout,
};

export function logLevel(): bunyan.LogLevelString {
  return stdoutLevel;
}

// istanbul ignore if: not testable
if (getEnv('LOG_FORMAT') !== 'json') {
  // TODO: typings (#9615)
  const prettyStdOut = new RenovateStream() as any;
  prettyStdOut.pipe(process.stdout);
  stdout.stream = prettyStdOut;
  stdout.type = 'raw';
export function createDefaultStreams(
  stdoutLevel: bunyan.LogLevelString,
  problems: ProblemStream,
  logFile: string | undefined,
): bunyan.Stream[] {
  const stdout: bunyan.Stream = {
    name: 'stdout',
    level: stdoutLevel,
    stream: process.stdout,
  };

  // istanbul ignore if: not testable
  if (getEnv('LOG_FORMAT') !== 'json') {
    // TODO: typings (#9615)
    const prettyStdOut = new RenovateStream() as any;
    prettyStdOut.pipe(process.stdout);
    stdout.stream = prettyStdOut;
    stdout.type = 'raw';
  }

  const problemsStream: bunyan.Stream = {
    name: 'problems',
    level: 'warn' as bunyan.LogLevel,
    stream: problems as any,
    type: 'raw',
  };

  // istanbul ignore next: not easily testable
  const logFileStream: bunyan.Stream | undefined = is.string(logFile)
    ? createLogFileStream(logFile)
    : undefined;

  return [stdout, problemsStream, logFileStream].filter(
    Boolean,
  ) as bunyan.Stream[];
}

const bunyanLogger = bunyan.createLogger({
  name: 'renovate',
  serializers: {
    body: configSerializer,
    cmd: cmdSerializer,
    config: configSerializer,
    migratedConfig: configSerializer,
    originalConfig: configSerializer,
    presetConfig: configSerializer,
    oldConfig: configSerializer,
    newConfig: configSerializer,
    err: errSerializer,
  },
  streams: [
    stdout,
    {
      name: 'problems',
      level: 'warn' as bunyan.LogLevel,
      stream: problems as any,
      type: 'raw',
    },
  ].map(withSanitizer),
});

const logFactory = (
  _level: bunyan.LogLevelString,
): ((p1: unknown, p2: unknown) => void) => {
  return (p1: any, p2: any): void => {
    let level = _level;
    if (p2) {
      // meta and msg provided
      const msg = p2;
      const meta: Record<string, unknown> = { logContext, ...curMeta, ...p1 };
      const remappedLevel = getRemappedLevel(msg);
      // istanbul ignore if: not testable
      if (remappedLevel) {
        meta.oldLevel = level;
        level = remappedLevel;
      }
      bunyanLogger[level](meta, msg);
    } else if (is.string(p1)) {
      // only message provided
      const msg = p1;
      const meta: Record<string, unknown> = { logContext, ...curMeta };
      const remappedLevel = getRemappedLevel(msg);
      // istanbul ignore if: not testable
      if (remappedLevel) {
        meta.oldLevel = level;
        level = remappedLevel;
      }
      bunyanLogger[level](meta, msg);
    } else {
      // only meta provided
      bunyanLogger[level]({ logContext, ...curMeta, ...p1 });
    }
  };
};

const loggerLevels: bunyan.LogLevelString[] = [
  'trace',
  'debug',
  'info',
  'warn',
  'error',
  'fatal',
];

export const logger: Logger = { once: { reset: onceReset } } as any;

loggerLevels.forEach((loggerLevel) => {
  logger[loggerLevel] = logFactory(loggerLevel) as never;

  const logOnceFn = (p1: any, p2: any): void => {
    once(() => {
      const logFn = logger[loggerLevel];
      if (is.undefined(p2)) {
        logFn(p1);
      } else {
        logFn(p1, p2);
      }
    }, logOnceFn);
  };
  logger.once[loggerLevel] = logOnceFn as never;
});

const logFile = getEnv('LOG_FILE');
// istanbul ignore if: not easily testable
if (is.string(logFile)) {
  // ensure log file directory exists
// istanbul ignore next: not easily testable
function createLogFileStream(logFile: string): bunyan.Stream {
  // Ensure log file directory exists
  const directoryName = upath.dirname(logFile);
  fs.ensureDirSync(directoryName);

  addStream({
  return {
    name: 'logfile',
    path: logFile,
    level: validateLogLevel(getEnv('LOG_FILE_LEVEL'), 'debug'),
  };
}

function serializedSanitizedLogger(streams: bunyan.Stream[]): bunyan {
  return bunyan.createLogger({
    name: 'renovate',
    serializers: {
      body: configSerializer,
      cmd: cmdSerializer,
      config: configSerializer,
      migratedConfig: configSerializer,
      originalConfig: configSerializer,
      presetConfig: configSerializer,
      oldConfig: configSerializer,
      newConfig: configSerializer,
      err: errSerializer,
    },
    streams: streams.map(withSanitizer),
  });
}

const defaultStreams = createDefaultStreams(
  stdoutLevel,
  problems,
  getEnv('LOG_FILE'),
);

const bunyanLogger = serializedSanitizedLogger(defaultStreams);
const logContext = getEnv('LOG_CONTEXT') ?? nanoid();
const loggerInternal = new RenovateLogger(bunyanLogger, logContext, {});

export const logger: Logger = loggerInternal;

export function setContext(value: string): void {
  logContext = value;
  loggerInternal.logContext = value;
}

export function getContext(): any {
  return logContext;
  return loggerInternal.logContext;
}

// setMeta overrides existing meta, may remove fields if no longer existing
export function setMeta(obj: Record<string, unknown>): void {
  curMeta = { ...obj };
  loggerInternal.setMeta(obj);
}

// addMeta overrides or adds fields but does not remove any
export function addMeta(obj: Record<string, unknown>): void {
  curMeta = { ...curMeta, ...obj };
  loggerInternal.addMeta(obj);
}

// removeMeta removes the provided fields from meta
export function removeMeta(fields: string[]): void {
  Object.keys(curMeta).forEach((key) => {
    if (fields.includes(key)) {
      delete curMeta[key];
    }
  });
  loggerInternal.removeMeta(fields);
}

export /* istanbul ignore next */ function addStream(
  stream: bunyan.Stream,
): void {
  bunyanLogger.addStream(withSanitizer(stream));
  loggerInternal.addStream(stream);
}

/**
156
lib/logger/renovate-logger.ts
Normal file
@@ -0,0 +1,156 @@
import is from '@sindresorhus/is';
import type * as bunyan from 'bunyan';
import { once, reset as onceReset } from './once';
import { getRemappedLevel } from './remap';
import type { Logger } from './types';
import { getMessage, toMeta, withSanitizer } from './utils';

const loggerLevels: bunyan.LogLevelString[] = [
  'trace',
  'debug',
  'info',
  'warn',
  'error',
  'fatal',
];

type LoggerFunction = (p1: string | Record<string, any>, p2?: string) => void;

export class RenovateLogger implements Logger {
  readonly logger: Logger = { once: { reset: onceReset } } as any;
  readonly once = this.logger.once;

  constructor(
    private readonly bunyanLogger: bunyan,
    private context: string,
    private meta: Record<string, unknown>,
  ) {
    for (const level of loggerLevels) {
      this.logger[level] = this.logFactory(level) as never;
      this.logger.once[level] = this.logOnceFn(level);
    }
  }

  trace(p1: string): void;
  trace(p1: Record<string, any>, p2?: string): void;
  trace(p1: string | Record<string, any>, p2?: string): void {
    this.log('trace', p1, p2);
  }

  debug(p1: string): void;
  debug(p1: Record<string, any>, p2?: string): void;
  debug(p1: string | Record<string, any>, p2?: string): void {
    this.log('debug', p1, p2);
  }

  info(p1: string): void;
  info(p1: Record<string, any>, p2?: string): void;
  info(p1: string | Record<string, any>, p2?: string): void {
    this.log('info', p1, p2);
  }

  warn(p1: string): void;
  warn(p1: Record<string, any>, p2?: string): void;
  warn(p1: string | Record<string, any>, p2?: string): void {
    this.log('warn', p1, p2);
  }

  error(p1: string): void;
  error(p1: Record<string, any>, p2?: string): void;
  error(p1: string | Record<string, any>, p2?: string): void {
    this.log('error', p1, p2);
  }

  fatal(p1: string): void;
  fatal(p1: Record<string, any>, p2?: string): void;
  fatal(p1: string | Record<string, any>, p2?: string): void {
    this.log('fatal', p1, p2);
  }

  addStream(stream: bunyan.Stream): void {
    this.bunyanLogger.addStream(withSanitizer(stream));
  }

  childLogger(): RenovateLogger {
    return new RenovateLogger(
      this.bunyanLogger.child({}),
      this.context,
      this.meta,
    );
  }

  get logContext(): string {
    return this.context;
  }

  set logContext(context: string) {
    this.context = context;
  }

  setMeta(obj: Record<string, unknown>): void {
    this.meta = { ...obj };
  }

  addMeta(obj: Record<string, unknown>): void {
    this.meta = { ...this.meta, ...obj };
  }

  removeMeta(fields: string[]): void {
    for (const key of Object.keys(this.meta)) {
      if (fields.includes(key)) {
        delete this.meta[key];
      }
    }
  }

  private logFactory(_level: bunyan.LogLevelString): LoggerFunction {
    return (p1: string | Record<string, any>, p2?: string): void => {
      const meta: Record<string, unknown> = {
        logContext: this.context,
        ...this.meta,
        ...toMeta(p1),
      };
      const msg = getMessage(p1, p2);
      let level = _level;

      if (is.string(msg)) {
        const remappedLevel = getRemappedLevel(msg);
        // istanbul ignore if: not easily testable
        if (remappedLevel) {
          meta.oldLevel = level;
          level = remappedLevel;
        }
        this.bunyanLogger[level](meta, msg);
      } else {
        this.bunyanLogger[level](meta);
      }
    };
  }

  private logOnceFn(level: bunyan.LogLevelString): LoggerFunction {
    const logOnceFn = (p1: string | Record<string, any>, p2?: string): void => {
      once(() => {
        const logFn = this[level].bind(this); // bind to the instance.
        if (is.string(p1)) {
          logFn(p1);
        } else {
          logFn(p1, p2);
        }
      }, logOnceFn);
    };
    return logOnceFn;
  }

  private log(
    level: bunyan.LogLevelString,
    p1: string | Record<string, any>,
    p2?: string,
  ): void {
    const logFn = this.logger[level];
    if (is.string(p1)) {
      logFn(p1);
    } else {
      logFn(p1, p2);
    }
  }
}
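A minimal usage sketch of the class above, wired the same way `lib/logger/index.ts` does elsewhere in this diff (the `bunyanLogger` instance and the context string here are illustrative):

```ts
// Sketch only: construct, attach meta, and log.
const log = new RenovateLogger(bunyanLogger, 'ctx-123', {});
log.addMeta({ repository: 'some/repo' });
log.info('repository started'); // emits { logContext: 'ctx-123', repository: 'some/repo' } 'repository started'
log.once.info('printed at most once per run'); // deduplicated via once()
```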
@@ -339,3 +339,16 @@ export function getEnv(key: string): string | undefined {
    .map((v) => v?.toLowerCase().trim())
    .find(is.nonEmptyStringAndNotWhitespace);
}

export function getMessage(
  p1: string | Record<string, any>,
  p2?: string,
): string | undefined {
  return is.string(p1) ? p1 : p2;
}

export function toMeta(
  p1: string | Record<string, any>,
): Record<string, unknown> {
  return is.object(p1) ? p1 : {};
}
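Taken together, these helpers normalize the logger's two call shapes; a quick sketch of the expected behavior (values illustrative):

```ts
getMessage('plain message'); // => 'plain message'
getMessage({ foo: 'bar' }, 'msg'); // => 'msg'
toMeta({ foo: 'bar' }); // => { foo: 'bar' }
toMeta('plain message'); // => {}
```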
@@ -38,6 +38,7 @@ import { GlasskubePackagesDatasource } from './glasskube-packages';
import { GoDatasource } from './go';
import { GolangVersionDatasource } from './golang-version';
import { GradleVersionDatasource } from './gradle-version';
import { HackageDatasource } from './hackage';
import { HelmDatasource } from './helm';
import { HermitDatasource } from './hermit';
import { HexDatasource } from './hex';
@@ -111,6 +112,7 @@ api.set(GlasskubePackagesDatasource.id, new GlasskubePackagesDatasource());
api.set(GoDatasource.id, new GoDatasource());
api.set(GolangVersionDatasource.id, new GolangVersionDatasource());
api.set(GradleVersionDatasource.id, new GradleVersionDatasource());
api.set(HackageDatasource.id, new HackageDatasource());
api.set(HelmDatasource.id, new HelmDatasource());
api.set(HermitDatasource.id, new HermitDatasource());
api.set(HexDatasource.id, new HexDatasource());
@@ -1,9 +1,16 @@
This datasource returns the database engine versions available for use on [AWS RDS](https://aws.amazon.com/rds/) via the AWS API.

Generally speaking, all publicly released database versions are available for use on RDS.
However, new versions may not be available on RDS for a few weeks or months after their release while AWS tests them.
In addition, AWS may pull existing versions if serious problems arise during their use.

**AWS API configuration**
<!-- prettier-ignore -->
!!! warning
    The default versioning of the `aws-rds` datasource is _not_ compatible with [AWS Aurora](https://docs.aws.amazon.com/AmazonRDS/latest/AuroraUserGuide/CHAP_AuroraOverview.html)!
    If you use AWS Aurora, you must set your own custom versioning.
    Scroll down to see an example.

### AWS API configuration

Since the datasource uses the AWS SDK for JavaScript, you can configure it like other AWS Tools.
You can use common AWS configuration options, for example:
@@ -14,9 +21,7 @@ You can use common AWS configuration options, for example:

Read the [AWS Developer Guide - Configuring the SDK for JavaScript](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/configuring-the-jssdk.html) for more information on these configuration options.

The minimal IAM privileges required for this datasource are:

```json
```json title="Minimal IAM privileges needed for this datasource"
{
  "Sid": "AllowDBEngineVersionLookup",
  "Effect": "Allow",
@@ -27,7 +32,7 @@ The minimal IAM privileges required for this datasource are:

Read the [AWS RDS IAM reference](https://docs.aws.amazon.com/service-authorization/latest/reference/list_amazonrds.html) for more information.

**Usage**
### Usage

Because Renovate has no manager for the AWS RDS datasource, you need to help Renovate by configuring the custom manager to identify the RDS dependencies you want updated.

@@ -53,16 +58,14 @@ For example:
[{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}]
```

Here's an example of using the custom manager to configure this datasource:

```json
```json title="Using a custom manager to configure this datasource"
{
  "customManagers": [
    {
      "customType": "regex",
      "fileMatch": ["\\.yaml$"],
      "matchStrings": [
        ".*amiFilter=(?<lookupName>.+?)[ ]*\n[ ]*(?<depName>[a-zA-Z0-9-_:]*)[ ]*?:[ ]*?[\"|']?(?<currentValue>[.\\d]+)[\"|']?.*"
        ".*rdsFilter=(?<lookupName>.+?)[ ]*\n[ ]*(?<depName>[a-zA-Z0-9-_:]*)[ ]*?:[ ]*?[\"|']?(?<currentValue>[.\\d]+)[\"|']?.*"
      ],
      "datasourceTemplate": "aws-rds"
    }
@@ -74,6 +77,33 @@ The configuration above matches every YAML file, and recognizes these lines:

```yaml
spec:
  # amiFilter=[{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}]
  # rdsFilter=[{"Name":"engine","Values":["mysql"]},{"Name":"engine-version","Values":["5.7"]}]
  engineVersion: 5.7.34
```

#### Using Terraform, `aws-rds` datasource and Aurora MySQL

Here is the Renovate configuration to use Terraform, `aws-rds` and Aurora MySQL:

```json
{
  "customManagers": [
    {
      "description": "Update RDS",
      "customType": "regex",
      "fileMatch": [".+\\.tf$"],
      "matchStrings": [
        "\\s*#\\s*renovate:\\s*rdsFilter=(?<lookupName>.+?) depName=(?<depName>.*) versioning=(?<versioning>.*)\\s*.*_version\\s*=\\s*\"(?<currentValue>.*)\""
      ],
      "datasourceTemplate": "aws-rds"
    }
  ]
}
```

The above configuration is an example of updating an AWS RDS version inside a Terraform file, using a custom manager.

```
# renovate:rdsFilter=[{"Name":"engine","Values":["aurora-mysql"]},{"Name":"engine-version","Values":["8.0"]}] depName=aurora-mysql versioning=loose
engine_version = "8.0.mysql_aurora.3.05.2"
```
@@ -13,7 +13,7 @@ describe('modules/datasource/github-runners/index', () => {
      releases: [
        { version: '16.04', isDeprecated: true },
        { version: '18.04', isDeprecated: true },
        { version: '20.04' },
        { version: '20.04', isDeprecated: true },
        { version: '22.04' },
        { version: '24.04' },
      ],
@@ -58,6 +58,7 @@ describe('modules/datasource/github-runners/index', () => {
        { version: '2016', isDeprecated: true },
        { version: '2019' },
        { version: '2022' },
        { version: '2025', isStable: false },
      ],
      sourceUrl: 'https://github.com/actions/runner-images',
    });

@@ -19,7 +19,7 @@ export class GithubRunnersDatasource extends Datasource {
    ubuntu: [
      { version: '24.04' },
      { version: '22.04' },
      { version: '20.04' },
      { version: '20.04', isDeprecated: true },
      { version: '18.04', isDeprecated: true },
      { version: '16.04', isDeprecated: true },
    ],
@@ -39,6 +39,7 @@ export class GithubRunnersDatasource extends Datasource {
      { version: '10.15', isDeprecated: true },
    ],
    windows: [
      { version: '2025', isStable: false },
      { version: '2022' },
      { version: '2019' },
      { version: '2016', isDeprecated: true },
@@ -494,7 +494,10 @@ describe('modules/datasource/go/releases-goproxy', () => {
      .get('.v2/@latest')
      .reply(200, { Version: 'v2.4.0' })
      .get('.v3/@v/list')
      .reply(200, ['v3.0.0', 'v3.0.1', ' \n'].join('\n'))
      .reply(
        200,
        ['v1.0.0', 'v2.0.0', 'v3.0.0', 'v3.0.1', 'v4.0.0', ' \n'].join('\n'),
      )
      .get('.v3/@v/v3.0.0.info')
      .reply(200, { Version: 'v3.0.0', Time: '2022-05-21T10:33:21Z' })
      .get('.v3/@v/v3.0.1.info')
@@ -602,8 +605,6 @@ describe('modules/datasource/go/releases-goproxy', () => {
      .get('/@v/list')
      .reply(200)
      .get('/@latest')
      .reply(404)
      .get('/v2/@v/list')
      .reply(404);

    const res = await datasource.getReleases({
@@ -621,9 +622,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
      .get('/@v/list')
      .reply(200)
      .get('/@latest')
      .reply(200, { Version: 'v0.0.0-20230905200255-921286631fa9' })
      .get('/v2/@v/list')
      .reply(404);
      .reply(200, { Version: 'v0.0.0-20230905200255-921286631fa9' });

    const res = await datasource.getReleases({
      packageName: 'github.com/google/btree',

@@ -213,9 +213,24 @@ export class GoProxyDatasource extends Datasource {
        major += 1; // v0 and v1 are the same module
      }

      let releases: Release[] = [];

      try {
        const res = await this.listVersions(baseUrl, pkg);
        const releases = await p.map(res, async (versionInfo) => {

        // Artifactory returns all versions in any major (past and future),
        // so starting from v2, we filter them in order to avoid the infinite loop
        const filteredReleases = res.filter(({ version }) => {
          if (major < 2) {
            return true;
          }

          return (
            version.split(regEx(/[^\d]+/)).find(is.truthy) === major.toString()
          );
        });

        releases = await p.map(filteredReleases, async (versionInfo) => {
          const { version, newDigest, releaseTimestamp } = versionInfo;

          if (releaseTimestamp) {
@@ -258,6 +273,10 @@ export class GoProxyDatasource extends Datasource {
          }
        }
      }

      if (!releases.length) {
        break;
      }
    }

    return result;
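As a quick check of the major-version filter added above, a minimal sketch with plain regex instead of Renovate's `regEx`/`is.truthy` helpers (version strings illustrative): `version.split(/[^\d]+/)` splits on runs of non-digits, so the first truthy segment is the major version.

```ts
// 'v3.0.1'.split(/[^\d]+/) => ['', '3', '0', '1']; find(Boolean) skips the leading ''
const major = 3;
const keep = (version: string): boolean =>
  major < 2 || version.split(/[^\d]+/).find(Boolean) === major.toString();

keep('v3.0.1'); // => true
keep('v4.0.0'); // => false (filtered out, avoiding the infinite loop)
```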
57
lib/modules/datasource/hackage/index.spec.ts
Normal file
@@ -0,0 +1,57 @@
import { getPkgReleases } from '..';
import * as httpMock from '../../../../test/http-mock';
import { HackageDatasource, versionToRelease } from './index';

const baseUrl = 'https://hackage.haskell.org/';

describe('modules/datasource/hackage/index', () => {
  describe('versionToRelease', () => {
    it('should make release with given version', () => {
      expect(
        versionToRelease('3.1.0', 'base', 'http://localhost').version,
      ).toBe('3.1.0');
    });
  });

  describe('getReleases', () => {
    it('return null with empty registryUrl', async () => {
      expect(
        await new HackageDatasource().getReleases({
          packageName: 'base',
          registryUrl: undefined,
        }),
      ).toBeNull();
    });

    it('returns null for 404', async () => {
      httpMock.scope(baseUrl).get('/package/base.json').reply(404);
      expect(
        await getPkgReleases({
          datasource: HackageDatasource.id,
          packageName: 'base',
        }),
      ).toBeNull();
    });

    it('returns release for 200', async () => {
      httpMock
        .scope(baseUrl)
        .get('/package/base.json')
        .reply(200, { '4.20.0.1': 'normal' });
      expect(
        await getPkgReleases({
          datasource: HackageDatasource.id,
          packageName: 'base',
        }),
      ).toEqual({
        registryUrl: baseUrl,
        releases: [
          {
            changelogUrl: baseUrl + 'package/base-4.20.0.1/changelog',
            version: '4.20.0.1',
          },
        ],
      });
    });
  });
});
54
lib/modules/datasource/hackage/index.ts
Normal file
@@ -0,0 +1,54 @@
import is from '@sindresorhus/is';
import { joinUrlParts } from '../../../util/url';
import * as pvpVersioning from '../../versioning/pvp';
import { Datasource } from '../datasource';
import type { GetReleasesConfig, Release, ReleaseResult } from '../types';
import { HackagePackageMetadata } from './schema';

export class HackageDatasource extends Datasource {
  static readonly id = 'hackage';

  constructor() {
    super(HackageDatasource.id);
  }

  override readonly defaultVersioning = pvpVersioning.id;
  override readonly customRegistrySupport = false;
  override readonly defaultRegistryUrls = ['https://hackage.haskell.org/'];

  async getReleases(config: GetReleasesConfig): Promise<ReleaseResult | null> {
    const { registryUrl, packageName } = config;
    if (!is.nonEmptyString(registryUrl)) {
      return null;
    }
    const massagedPackageName = encodeURIComponent(packageName);
    const url = joinUrlParts(
      registryUrl,
      'package',
      `${massagedPackageName}.json`,
    );
    const res = await this.http.getJson(url, HackagePackageMetadata);
    const keys = Object.keys(res.body);
    return {
      releases: keys.map((version) =>
        versionToRelease(version, packageName, registryUrl),
      ),
    };
  }
}

export function versionToRelease(
  version: string,
  packageName: string,
  registryUrl: string,
): Release {
  return {
    version,
    changelogUrl: joinUrlParts(
      registryUrl,
      'package',
      `${packageName}-${version}`,
      'changelog',
    ),
  };
}
7
lib/modules/datasource/hackage/readme.md
Normal file
@@ -0,0 +1,7 @@
This datasource uses
[the Hackage JSON API](https://hackage.haskell.org/api#package-info-json)
to fetch versions for published Haskell packages.

While not all versions use [PVP](https://pvp.haskell.org), the majority does.
This manager assumes a default versioning set to PVP.
Versioning can be overwritten using `packageRules`, e.g. with `matchDatasources`.
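For instance, a hedged sketch of such an override (`matchDatasources` and `versioning` are standard `packageRules` fields; the `loose` versioning value is illustrative):

```json
{
  "packageRules": [
    {
      "matchDatasources": ["hackage"],
      "versioning": "loose"
    }
  ]
}
```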
3
lib/modules/datasource/hackage/schema.ts
Normal file
@@ -0,0 +1,3 @@
import { z } from 'zod';

export const HackagePackageMetadata = z.record(z.string());
@@ -19,7 +19,9 @@
  "licenses": [
    "MIT"
  ],
  "links": {},
  "links": {
    "GitHub": "https://github.com/renovate_test/private_package"
  },
  "maintainers": []
},
"name": "private_package",

@@ -104,6 +104,7 @@ exports[`modules/datasource/hex/index getReleases processes a private repo with
      "version": "0.1.1",
    },
  ],
  "sourceUrl": "https://github.com/renovate_test/private_package",
}
`;

@@ -168,6 +168,7 @@ describe('modules/datasource/hex/index', () => {

    expect(result).toEqual({
      homepage: 'https://hex.pm/packages/renovate_test/private_package',
      sourceUrl: 'https://github.com/renovate_test/private_package',
      registryUrl: 'https://hex.pm',
      releases: [
        { releaseTimestamp: '2021-08-04T15:26:26.500Z', version: '0.1.0' },

@@ -8,9 +8,21 @@ export const HexRelease = z
    html_url: z.string().optional(),
    meta: z
      .object({
        links: z.object({
          Github: z.string(),
        }),
        links: z
          .record(z.string())
          .transform((links) =>
            Object.fromEntries(
              Object.entries(links).map(([key, value]) => [
                key.toLowerCase(),
                value,
              ]),
            ),
          )
          .pipe(
            z.object({
              github: z.string(),
            }),
          ),
      })
      .nullable()
      .catch(null),
@@ -53,8 +65,8 @@
    releaseResult.homepage = hexResponse.html_url;
  }

  if (hexResponse.meta?.links?.Github) {
    releaseResult.sourceUrl = hexResponse.meta.links.Github;
  if (hexResponse.meta?.links?.github) {
    releaseResult.sourceUrl = hexResponse.meta.links.github;
  }

  return releaseResult;
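The reshaped `links` handling can be exercised in isolation; a self-contained zod sketch of the lowercase-then-pipe pattern (the package URL is taken from the fixture above):

```ts
import { z } from 'zod';

// Lowercase all link keys, then require a `github` entry.
const Links = z
  .record(z.string())
  .transform((links) =>
    Object.fromEntries(
      Object.entries(links).map(([key, value]) => [key.toLowerCase(), value]),
    ),
  )
  .pipe(z.object({ github: z.string() }));

Links.parse({ GitHub: 'https://github.com/renovate_test/private_package' });
// => { github: 'https://github.com/renovate_test/private_package' }
```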
@@ -46,7 +46,10 @@ describe('modules/datasource/maven/s3', () => {
        Bucket: 'repobucket',
        Key: 'org/example/package/maven-metadata.xml',
      })
      .resolvesOnce({ Body: meta as never });
      .resolvesOnce({
        Body: meta as never,
        LastModified: new Date('2020-01-01T00:00Z'),
      });

    const res = await get('org.example:package', baseUrlS3);

@@ -89,7 +92,7 @@ describe('modules/datasource/maven/s3', () => {
        {
          failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
        },
        'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars',
        'Maven S3 lookup error: credentials provider error, check "AWS_ACCESS_KEY_ID" and "AWS_SECRET_ACCESS_KEY" variables',
      );
    });

@@ -108,7 +111,7 @@ describe('modules/datasource/maven/s3', () => {
        {
          failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
        },
        'Dependency lookup failed. Please a correct AWS_REGION env var',
        'Maven S3 lookup error: missing region, check "AWS_REGION" variable',
      );
    });

@@ -127,7 +130,7 @@ describe('modules/datasource/maven/s3', () => {
        {
          failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
        },
        'S3 url not found',
        'Maven S3 lookup error: object not found',
      );
    });

@@ -146,10 +149,23 @@ describe('modules/datasource/maven/s3', () => {
        {
          failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
        },
        'S3 url not found',
        'Maven S3 lookup error: object not found',
      );
    });

    it('returns null for Deleted marker', async () => {
      s3mock
        .on(GetObjectCommand, {
          Bucket: 'repobucket',
          Key: 'org/example/package/maven-metadata.xml',
        })
        .resolvesOnce({ DeleteMarker: true });

      const res = await get('org.example:package', baseUrlS3);

      expect(res).toBeNull();
    });

    it('returns null for unknown error', async () => {
      s3mock
        .on(GetObjectCommand, {
@@ -163,10 +179,10 @@ describe('modules/datasource/maven/s3', () => {
      expect(res).toBeNull();
      expect(logger.debug).toHaveBeenCalledWith(
        {
          err: expect.objectContaining({ message: 'Unknown error' }),
          failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
          message: 'Unknown error',
        },
        'Unknown S3 download error',
        'Maven S3 lookup error: unknown error',
      );
    });

@@ -178,9 +194,6 @@ describe('modules/datasource/maven/s3', () => {
      })
      .resolvesOnce({});
      expect(await get('org.example:package', baseUrlS3)).toBeNull();
      expect(logger.debug).toHaveBeenCalledWith(
        "Expecting Readable response type got 'undefined' type instead",
      );
    });
  });
});
@@ -1,4 +1,5 @@
import type { XmlDocument } from 'xmldoc';
import type { Result } from '../../../util/result';
import type { ReleaseResult } from '../types';

export interface MavenDependency {
@@ -19,3 +20,30 @@ export type DependencyInfo = Pick<
  ReleaseResult,
  'homepage' | 'sourceUrl' | 'packageScope'
>;

export interface MavenFetchSuccess<T = string> {
  isCacheable?: boolean;
  lastModified?: string;
  data: T;
}

export type MavenFetchError =
  | { type: 'invalid-url' }
  | { type: 'host-disabled' }
  | { type: 'not-found' }
  | { type: 'host-error' }
  | { type: 'permission-issue' }
  | { type: 'temporary-error' }
  | { type: 'maven-central-temporary-error'; err: Error }
  | { type: 'connection-error' }
  | { type: 'unsupported-host' }
  | { type: 'unsupported-format' }
  | { type: 'unsupported-protocol' }
  | { type: 'credentials-error' }
  | { type: 'missing-aws-region' }
  | { type: 'unknown'; err: Error };

export type MavenFetchResult<T = string> = Result<
  MavenFetchSuccess<T>,
  MavenFetchError
>;
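A hedged consumer sketch for these types, assuming the `{ ok, val }` / `{ ok, err }` shape that `Result.unwrap()` returns in the tests below (the helper name is hypothetical):

```ts
import type { MavenFetchResult } from './types';

function describeFetch(res: MavenFetchResult): string {
  const unwrapped = res.unwrap();
  if (unwrapped.ok) {
    // `val` is a MavenFetchSuccess
    return `fetched ${unwrapped.val.data.length} chars`;
  }
  // `err` is a MavenFetchError, narrowed by its `type` tag
  return unwrapped.err.type === 'not-found'
    ? 'metadata missing'
    : `lookup failed: ${unwrapped.err.type}`;
}
```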
@@ -2,6 +2,7 @@ import type Request from 'got/dist/source/core';
import { partial } from '../../../../test/util';
import { HOST_DISABLED } from '../../../constants/error-messages';
import { Http, HttpError } from '../../../util/http';
import type { MavenFetchError } from './types';
import {
  checkResource,
  downloadHttpProtocol,
@@ -55,9 +56,12 @@ describe('modules/datasource/maven/util', () => {
  });

  describe('downloadS3Protocol', () => {
    it('returns null for non-S3 URLs', async () => {
    it('fails for non-S3 URLs', async () => {
      const res = await downloadS3Protocol(new URL('http://not-s3.com/'));
      expect(res).toBeNull();
      expect(res.unwrap()).toEqual({
        ok: false,
        err: { type: 'invalid-url' } satisfies MavenFetchError,
      });
    });
  });

@@ -67,7 +71,10 @@ describe('modules/datasource/maven/util', () => {
        get: () => Promise.reject(httpError({ message: HOST_DISABLED })),
      });
      const res = await downloadHttpProtocol(http, 'some://');
      expect(res).toBeNull();
      expect(res.unwrap()).toEqual({
        ok: false,
        err: { type: 'host-disabled' } satisfies MavenFetchError,
      });
    });

    it('returns empty for host error', async () => {
@@ -75,7 +82,10 @@ describe('modules/datasource/maven/util', () => {
        get: () => Promise.reject(httpError({ code: 'ETIMEDOUT' })),
      });
      const res = await downloadHttpProtocol(http, 'some://');
      expect(res).toBeNull();
      expect(res.unwrap()).toEqual({
        ok: false,
        err: { type: 'host-error' } satisfies MavenFetchError,
      });
    });

    it('returns empty for temporary error', async () => {
@@ -83,7 +93,10 @@ describe('modules/datasource/maven/util', () => {
        get: () => Promise.reject(httpError({ code: 'ECONNRESET' })),
      });
      const res = await downloadHttpProtocol(http, 'some://');
      expect(res).toBeNull();
      expect(res.unwrap()).toEqual({
        ok: false,
        err: { type: 'temporary-error' } satisfies MavenFetchError,
      });
    });

    it('returns empty for connection error', async () => {
@@ -91,7 +104,10 @@ describe('modules/datasource/maven/util', () => {
        get: () => Promise.reject(httpError({ code: 'ECONNREFUSED' })),
      });
      const res = await downloadHttpProtocol(http, 'some://');
      expect(res).toBeNull();
      expect(res.unwrap()).toEqual({
        ok: false,
        err: { type: 'connection-error' } satisfies MavenFetchError,
      });
    });

    it('returns empty for unsupported error', async () => {
@@ -100,7 +116,10 @@ describe('modules/datasource/maven/util', () => {
        Promise.reject(httpError({ name: 'UnsupportedProtocolError' })),
      });
      const res = await downloadHttpProtocol(http, 'some://');
      expect(res).toBeNull();
      expect(res.unwrap()).toEqual({
        ok: false,
        err: { type: 'unsupported-host' } satisfies MavenFetchError,
      });
    });
  });
@@ -20,6 +20,8 @@ import type {
  DependencyInfo,
  HttpResourceCheckResult,
  MavenDependency,
  MavenFetchResult,
  MavenFetchSuccess,
  MavenXml,
} from './types';

@@ -69,121 +71,183 @@ export async function downloadHttpProtocol(
  http: Http,
  pkgUrl: URL | string,
  opts: HttpOptions = {},
): Promise<HttpResponse | null> {
): Promise<MavenFetchResult> {
  const url = pkgUrl.toString();
  const res = await Result.wrap(http.get(url, opts))
    .onError((err) => {
  const fetchResult = await Result.wrap<HttpResponse, Error>(
    http.get(url, opts),
  )
    .transform((res): MavenFetchSuccess => {
      const result: MavenFetchSuccess = { data: res.body };

      if (!res.authorization) {
        result.isCacheable = true;
      }

      const lastModified = normalizeDate(res?.headers?.['last-modified']);
      if (lastModified) {
        result.lastModified = lastModified;
      }

      return result;
    })
    .catch((err): MavenFetchResult => {
      // istanbul ignore next: never happens, needs for type narrowing
      if (!(err instanceof HttpError)) {
        return;
        return Result.err({ type: 'unknown', err });
      }

      const failedUrl = url;
      if (err.message === HOST_DISABLED) {
        logger.trace({ failedUrl }, 'Host disabled');
        return;
        return Result.err({ type: 'host-disabled' });
      }

      if (isNotFoundError(err)) {
        logger.trace({ failedUrl }, `Url not found`);
        return;
        return Result.err({ type: 'not-found' });
      }

      if (isHostError(err)) {
        logger.debug(`Cannot connect to host ${failedUrl}`);
        return;
        return Result.err({ type: 'host-error' });
      }

      if (isPermissionsIssue(err)) {
        logger.debug(
          `Dependency lookup unauthorized. Please add authentication with a hostRule for ${failedUrl}`,
        );
        return;
        return Result.err({ type: 'permission-issue' });
      }

      if (isTemporaryError(err)) {
        logger.debug({ failedUrl, err }, 'Temporary error');
        return;
        if (getHost(url) === getHost(MAVEN_REPO)) {
          return Result.err({ type: 'maven-central-temporary-error', err });
        } else {
          return Result.err({ type: 'temporary-error' });
        }
      }

      if (isConnectionError(err)) {
        logger.debug(`Connection refused to maven registry ${failedUrl}`);
        return;
        return Result.err({ type: 'connection-error' });
      }

      if (isUnsupportedHostError(err)) {
        logger.debug(`Unsupported host ${failedUrl}`);
        return;
        return Result.err({ type: 'unsupported-host' });
      }

      logger.info({ failedUrl, err }, 'Unknown HTTP download error');
    })
    .catch((err): Result<HttpResponse | 'silent-error', ExternalHostError> => {
      if (
        err instanceof HttpError &&
        isTemporaryError(err) &&
        getHost(url) === getHost(MAVEN_REPO)
      ) {
        return Result.err(new ExternalHostError(err));
      }
      return Result.err({ type: 'unknown', err });
    });

      return Result.ok('silent-error');
    })
    .unwrapOrThrow();

  if (res === 'silent-error') {
    return null;
  const { err } = fetchResult.unwrap();
  if (err?.type === 'maven-central-temporary-error') {
    throw new ExternalHostError(err.err);
  }

  return res;
  return fetchResult;
}

export async function downloadHttpContent(
  http: Http,
  pkgUrl: URL | string,
  opts: HttpOptions = {},
): Promise<string | null> {
  const fetchResult = await downloadHttpProtocol(http, pkgUrl, opts);
  return fetchResult.transform(({ data }) => data).unwrapOrNull();
}

function isS3NotFound(err: Error): boolean {
  return err.message === 'NotFound' || err.message === 'NoSuchKey';
}

export async function downloadS3Protocol(pkgUrl: URL): Promise<string | null> {
export async function downloadS3Protocol(
  pkgUrl: URL,
): Promise<MavenFetchResult> {
  logger.trace({ url: pkgUrl.toString() }, `Attempting to load S3 dependency`);
  try {
    const s3Url = parseS3Url(pkgUrl);
    if (s3Url === null) {
      return null;
    }
    const { Body: res } = await getS3Client().send(new GetObjectCommand(s3Url));
    if (res instanceof Readable) {
      return streamToString(res);
    }
    logger.debug(
      `Expecting Readable response type got '${typeof res}' type instead`,
    );
  } catch (err) {
    const failedUrl = pkgUrl.toString();
    if (err.name === 'CredentialsProviderError') {
      logger.debug(
        { failedUrl },
        'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars',
      );
    } else if (err.message === 'Region is missing') {
      logger.debug(
        { failedUrl },
        'Dependency lookup failed. Please a correct AWS_REGION env var',
      );
    } else if (isS3NotFound(err)) {
      logger.trace({ failedUrl }, `S3 url not found`);
    } else {
      logger.debug(
        { failedUrl, message: err.message },
        'Unknown S3 download error',
      );
    }

  const s3Url = parseS3Url(pkgUrl);
  if (!s3Url) {
    return Result.err({ type: 'invalid-url' });
  }
  return null;

  return await Result.wrap(() => {
    const command = new GetObjectCommand(s3Url);
    const client = getS3Client();
    return client.send(command);
  })
    .transform(
      async ({
        Body,
        LastModified,
        DeleteMarker,
      }): Promise<MavenFetchResult> => {
        if (DeleteMarker) {
          logger.trace(
            { failedUrl: pkgUrl.toString() },
            'Maven S3 lookup error: DeleteMarker encountered',
          );
          return Result.err({ type: 'not-found' });
        }

        if (!(Body instanceof Readable)) {
          logger.debug(
            { failedUrl: pkgUrl.toString() },
            'Maven S3 lookup error: unsupported Body type',
          );
          return Result.err({ type: 'unsupported-format' });
        }

        const data = await streamToString(Body);
        const result: MavenFetchSuccess = { data };

        const lastModified = normalizeDate(LastModified);
        if (lastModified) {
          result.lastModified = lastModified;
        }

        return Result.ok(result);
      },
    )
    .catch((err): MavenFetchResult => {
      if (!(err instanceof Error)) {
        return Result.err(err);
      }

      const failedUrl = pkgUrl.toString();

      if (err.name === 'CredentialsProviderError') {
        logger.debug(
          { failedUrl },
          'Maven S3 lookup error: credentials provider error, check "AWS_ACCESS_KEY_ID" and "AWS_SECRET_ACCESS_KEY" variables',
        );
        return Result.err({ type: 'credentials-error' });
      }

      if (err.message === 'Region is missing') {
        logger.debug(
          { failedUrl },
          'Maven S3 lookup error: missing region, check "AWS_REGION" variable',
        );
        return Result.err({ type: 'missing-aws-region' });
      }

      if (isS3NotFound(err)) {
        logger.trace({ failedUrl }, 'Maven S3 lookup error: object not found');
        return Result.err({ type: 'not-found' });
      }

      logger.debug({ failedUrl, err }, 'Maven S3 lookup error: unknown error');
      return Result.err({ type: 'unknown', err });
    });
}

export async function downloadArtifactRegistryProtocol(
  http: Http,
  pkgUrl: URL,
): Promise<HttpResponse | null> {
): Promise<MavenFetchResult> {
  const opts: HttpOptions = {};
  const host = pkgUrl.host;
  const path = pkgUrl.pathname;

@@ -312,32 +376,30 @@ export async function downloadMavenXml(
  const protocol = pkgUrl.protocol;

  if (protocol === 'http:' || protocol === 'https:') {
    const res = await downloadHttpProtocol(http, pkgUrl);
    const body = res?.body;
    if (body) {
      return {
        xml: new XmlDocument(body),
        isCacheable: !res.authorization,
      };
    }
    const rawResult = await downloadHttpProtocol(http, pkgUrl);
    const xmlResult = rawResult.transform(({ isCacheable, data }): MavenXml => {
      const xml = new XmlDocument(data);
      return { isCacheable, xml };
    });
    return xmlResult.unwrapOr({});
  }

  if (protocol === 'artifactregistry:') {
    const res = await downloadArtifactRegistryProtocol(http, pkgUrl);
    const body = res?.body;
    if (body) {
      return {
        xml: new XmlDocument(body),
        isCacheable: !res.authorization,
      };
    }
    const rawResult = await downloadArtifactRegistryProtocol(http, pkgUrl);
    const xmlResult = rawResult.transform(({ isCacheable, data }): MavenXml => {
      const xml = new XmlDocument(data);
      return { isCacheable, xml };
    });
    return xmlResult.unwrapOr({});
  }

  if (protocol === 's3:') {
    const res = await downloadS3Protocol(pkgUrl);
    if (res) {
      return { xml: new XmlDocument(res) };
    }
    const rawResult = await downloadS3Protocol(pkgUrl);
    const xmlResult = rawResult.transform(({ isCacheable, data }): MavenXml => {
      const xml = new XmlDocument(data);
      return { xml };
    });
    return xmlResult.unwrapOr({});
  }

  logger.debug(
@ -4,7 +4,7 @@ exports[`modules/datasource/orb/index getReleases processes homeUrl 1`] = `
|
|||
{
|
||||
"homepage": "https://google.com",
|
||||
"isPrivate": false,
|
||||
"registryUrl": "https://circleci.com/",
|
||||
"registryUrl": "https://circleci.com",
|
||||
"releases": [
|
||||
{
|
||||
"releaseTimestamp": "2018-12-11T05:28:14.080Z",
|
||||
|
@ -53,7 +53,7 @@ exports[`modules/datasource/orb/index getReleases processes real data 1`] = `
|
|||
{
|
||||
"homepage": "https://circleci.com/developer/orbs/orb/hyper-expanse/library-release-workflows",
|
||||
"isPrivate": false,
|
||||
"registryUrl": "https://circleci.com/",
|
||||
"registryUrl": "https://circleci.com",
|
||||
"releases": [
|
||||
{
|
||||
"releaseTimestamp": "2018-12-11T05:28:14.080Z",
|
||||
|
|
|
@ -92,5 +92,18 @@ describe('modules/datasource/orb/index', () => {
|
|||
expect(res).toMatchSnapshot();
|
||||
expect(res?.homepage).toBe('https://google.com');
|
||||
});
|
||||
|
||||
it('supports other registries', async () => {
|
||||
httpMock
|
||||
.scope('https://cci.internal.dev')
|
||||
.post('/graphql-unstable')
|
||||
.reply(200, orbData);
|
||||
const res = await getPkgReleases({
|
||||
datasource,
|
||||
packageName: 'hyper-expanse/library-release-workflows',
|
||||
registryUrls: ['https://cci.internal.dev'],
|
||||
});
|
||||
expect(res?.registryUrl).toBe('https://cci.internal.dev');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import { logger } from '../../../logger';
|
||||
import { cache } from '../../../util/cache/package/decorator';
|
||||
import { joinUrlParts } from '../../../util/url';
|
||||
import { Datasource } from '../datasource';
|
||||
import type { GetReleasesConfig, ReleaseResult } from '../types';
|
||||
import type { OrbResponse } from './types';
|
||||
|
@ -27,9 +28,10 @@ export class OrbDatasource extends Datasource {
|
|||
super(OrbDatasource.id);
|
||||
}
|
||||
|
||||
override readonly customRegistrySupport = false;
|
||||
override readonly customRegistrySupport = true;
|
||||
|
||||
override readonly defaultRegistryUrls = ['https://circleci.com/'];
|
||||
override readonly registryStrategy = 'hunt';
|
||||
|
||||
override readonly releaseTimestampSupport = true;
|
||||
override readonly releaseTimestampNote =
|
||||
|
@ -47,7 +49,7 @@ export class OrbDatasource extends Datasource {
|
|||
if (!registryUrl) {
|
||||
return null;
|
||||
}
|
||||
const url = `${registryUrl}graphql-unstable`;
|
||||
const url = joinUrlParts(registryUrl, 'graphql-unstable');
|
||||
const body = {
|
||||
query,
|
||||
variables: { packageName, maxVersions: MAX_VERSIONS },
|
||||
|
|
|
@ -149,9 +149,9 @@ describe('modules/datasource/sbt-package/index', () => {
|
|||
.get('/org/example/example_2.12/')
|
||||
.reply(200, `<a href='1.2.3/'>1.2.3/</a>`)
|
||||
.get('/org/example/example_2.12/1.2.3/example-1.2.3.pom')
|
||||
.reply(200, ``)
|
||||
.reply(404)
|
||||
.get('/org/example/example_2.12/1.2.3/example_2.12-1.2.3.pom')
|
||||
.reply(200, ``);
|
||||
.reply(404);
|
||||
|
||||
const res = await getPkgReleases({
|
||||
versioning: mavenVersioning.id,
|
||||
|
@ -267,7 +267,7 @@ describe('modules/datasource/sbt-package/index', () => {
|
|||
`,
|
||||
)
|
||||
.get('/org/example/example_2.13/1.2.3/example_2.13-1.2.3.pom')
|
||||
.reply(200);
|
||||
.reply(404);
|
||||
|
||||
const res = await getPkgReleases({
|
||||
versioning: mavenVersioning.id,
|
||||
|
|
|
@ -10,8 +10,7 @@ import * as ivyVersioning from '../../versioning/ivy';
|
|||
import { compare } from '../../versioning/maven/compare';
|
||||
import { MavenDatasource } from '../maven';
|
||||
import { MAVEN_REPO } from '../maven/common';
|
||||
import { downloadHttpProtocol } from '../maven/util';
|
||||
import { normalizeDate } from '../metadata';
|
||||
import { downloadHttpContent, downloadHttpProtocol } from '../maven/util';
|
||||
import type {
|
||||
GetReleasesConfig,
|
||||
PostprocessReleaseConfig,
|
||||
|
@ -88,8 +87,11 @@ export class SbtPackageDatasource extends MavenDatasource {
|
|||
let dependencyUrl: string | undefined;
|
||||
let packageUrls: string[] | undefined;
|
||||
for (const packageRootUrl of packageRootUrls) {
|
||||
const res = await downloadHttpProtocol(this.http, packageRootUrl);
|
||||
if (!res) {
|
||||
const packageRootContent = await downloadHttpContent(
|
||||
this.http,
|
||||
packageRootUrl,
|
||||
);
|
||||
if (!packageRootContent) {
|
||||
continue;
|
||||
}
|
||||
|
||||
|
@ -103,7 +105,7 @@ export class SbtPackageDatasource extends MavenDatasource {
|
|||
dependencyUrl = trimTrailingSlash(packageRootUrl);
|
||||
|
||||
const rootPath = new URL(packageRootUrl).pathname;
|
||||
const artifactSubdirs = extractPageLinks(res.body, (href) => {
|
||||
const artifactSubdirs = extractPageLinks(packageRootContent, (href) => {
|
||||
const path = href.replace(rootPath, '');
|
||||
|
||||
if (
|
||||
|
@ -149,15 +151,15 @@ export class SbtPackageDatasource extends MavenDatasource {
|
|||
|
||||
const allVersions = new Set<string>();
|
||||
for (const pkgUrl of packageUrls) {
|
||||
const res = await downloadHttpProtocol(this.http, pkgUrl);
|
||||
const packageContent = await downloadHttpContent(this.http, pkgUrl);
|
||||
// istanbul ignore if
|
||||
if (!res) {
|
||||
if (!packageContent) {
|
||||
invalidPackageUrls.add(pkgUrl);
|
||||
continue;
|
||||
}
|
||||
|
||||
const rootPath = new URL(pkgUrl).pathname;
|
||||
const versions = extractPageLinks(res.body, (href) => {
|
||||
const versions = extractPageLinks(packageContent, (href) => {
|
||||
const path = href.replace(rootPath, '');
|
||||
if (path.startsWith('.')) {
|
||||
return null;
|
||||
|
@ -275,20 +277,20 @@ export class SbtPackageDatasource extends MavenDatasource {
|
|||
}
|
||||
|
||||
const res = await downloadHttpProtocol(this.http, pomUrl);
|
||||
const content = res?.body;
|
||||
if (!content) {
|
||||
const { val } = res.unwrap();
|
||||
if (!val) {
|
||||
invalidPomFiles.add(pomUrl);
|
||||
continue;
|
||||
}
|
||||
|
||||
const result: PomInfo = {};
|
||||
|
||||
const releaseTimestamp = normalizeDate(res.headers['last-modified']);
|
||||
const releaseTimestamp = val.lastModified;
|
||||
if (releaseTimestamp) {
|
||||
result.releaseTimestamp = releaseTimestamp;
|
||||
}
|
||||
|
||||
const pomXml = new XmlDocument(content);
|
||||
const pomXml = new XmlDocument(val.data);
|
||||
|
||||
const homepage = pomXml.valueWithPath('url');
|
||||
if (homepage) {
|
||||
|
|
|
@ -7,7 +7,7 @@ import * as ivyVersioning from '../../versioning/ivy';
|
|||
import { compare } from '../../versioning/maven/compare';
|
||||
import { Datasource } from '../datasource';
|
||||
import { MAVEN_REPO } from '../maven/common';
|
||||
import { downloadHttpProtocol } from '../maven/util';
|
||||
import { downloadHttpContent } from '../maven/util';
|
||||
import { extractPageLinks, getLatestVersion } from '../sbt-package/util';
|
||||
import type {
|
||||
GetReleasesConfig,
|
||||
|
@ -43,8 +43,7 @@ export class SbtPluginDatasource extends Datasource {
|
|||
scalaVersion: string,
|
||||
): Promise<string[] | null> {
|
||||
const pkgUrl = ensureTrailingSlash(searchRoot);
|
||||
const res = await downloadHttpProtocol(this.http, pkgUrl);
|
||||
const indexContent = res?.body;
|
||||
const indexContent = await downloadHttpContent(this.http, pkgUrl);
|
||||
if (indexContent) {
|
||||
const rootPath = new URL(pkgUrl).pathname;
|
||||
let artifactSubdirs = extractPageLinks(indexContent, (href) => {
|
||||
|
@ -84,8 +83,7 @@ export class SbtPluginDatasource extends Datasource {
|
|||
const releases: string[] = [];
|
||||
for (const searchSubdir of artifactSubdirs) {
|
||||
const pkgUrl = ensureTrailingSlash(`${searchRoot}/${searchSubdir}`);
|
||||
const res = await downloadHttpProtocol(this.http, pkgUrl);
|
||||
const content = res?.body;
|
||||
const content = await downloadHttpContent(this.http, pkgUrl);
|
||||
if (content) {
|
||||
const rootPath = new URL(pkgUrl).pathname;
|
||||
const subdirReleases = extractPageLinks(content, (href) => {
|
||||
|
@ -133,8 +131,7 @@ export class SbtPluginDatasource extends Datasource {
|
|||
|
||||
for (const pomFileName of pomFileNames) {
|
||||
const pomUrl = `${searchRoot}/${artifactDir}/${version}/${pomFileName}`;
|
||||
const res = await downloadHttpProtocol(this.http, pomUrl);
|
||||
const content = res?.body;
|
||||
const content = await downloadHttpContent(this.http, pomUrl);
|
||||
if (content) {
|
||||
const pomXml = new XmlDocument(content);
|
||||
|
||||
|
@ -173,13 +170,16 @@ export class SbtPluginDatasource extends Datasource {
|
|||
|
||||
return href;
|
||||
};
|
||||
const res = await downloadHttpProtocol(
|
||||
const searchRootContent = await downloadHttpContent(
|
||||
this.http,
|
||||
ensureTrailingSlash(searchRoot),
|
||||
);
|
||||
if (res) {
|
||||
if (searchRootContent) {
|
||||
const releases: string[] = [];
|
||||
const scalaVersionItems = extractPageLinks(res.body, hrefFilterMap);
|
||||
const scalaVersionItems = extractPageLinks(
|
||||
searchRootContent,
|
||||
hrefFilterMap,
|
||||
);
|
||||
const scalaVersions = scalaVersionItems.map((x) =>
|
||||
x.replace(regEx(/^scala_/), ''),
|
||||
);
|
||||
|
@ -188,24 +188,22 @@ export class SbtPluginDatasource extends Datasource {
|
|||
: scalaVersions;
|
||||
for (const searchVersion of searchVersions) {
|
||||
const searchSubRoot = `${searchRoot}/scala_${searchVersion}`;
|
||||
const subRootRes = await downloadHttpProtocol(
|
||||
const subRootContent = await downloadHttpContent(
|
||||
this.http,
|
||||
ensureTrailingSlash(searchSubRoot),
|
||||
);
|
||||
if (subRootRes) {
|
||||
const { body: subRootContent } = subRootRes;
|
||||
if (subRootContent) {
|
||||
const sbtVersionItems = extractPageLinks(
|
||||
subRootContent,
|
||||
hrefFilterMap,
|
||||
);
|
||||
for (const sbtItem of sbtVersionItems) {
|
||||
const releasesRoot = `${searchSubRoot}/${sbtItem}`;
|
||||
const releaseIndexRes = await downloadHttpProtocol(
|
||||
const releasesIndexContent = await downloadHttpContent(
|
||||
this.http,
|
||||
ensureTrailingSlash(releasesRoot),
|
||||
);
|
||||
if (releaseIndexRes) {
|
||||
const { body: releasesIndexContent } = releaseIndexRes;
|
||||
if (releasesIndexContent) {
|
||||
const releasesParsed = extractPageLinks(
|
||||
releasesIndexContent,
|
||||
hrefFilterMap,
|
||||
|
|
|
@ -44,7 +44,7 @@ resources:
|
|||
- container: linux
|
||||
image: ubuntu:24.04
|
||||
- container: python
|
||||
image: python:3.13@sha256:bc78d3c007f86dbb87d711b8b082d9d564b8025487e780d24ccb8581d83ef8b0
|
||||
image: python:3.13@sha256:cea505b81701dd9e46b8dde96eaa8054c4bd2035dbb660edeb7af947ed38a0ad
|
||||
|
||||
stages:
|
||||
- stage: StageOne
|
||||
|
|
|
@ -4,6 +4,7 @@ import { Fixtures } from '../../../../test/fixtures';
|
|||
import { GlobalConfig } from '../../../config/global';
|
||||
import type { RepoGlobalConfig } from '../../../config/types';
|
||||
import { BazelDatasource } from '../../datasource/bazel';
|
||||
import { DockerDatasource } from '../../datasource/docker';
|
||||
import { GithubTagsDatasource } from '../../datasource/github-tags';
|
||||
import { MavenDatasource } from '../../datasource/maven';
|
||||
import * as parser from './parser';
|
||||
|
@ -290,6 +291,60 @@ describe('modules/manager/bazel-module/extract', () => {
|
|||
]);
|
||||
});
|
||||
|
||||
it('returns oci.pull dependencies', async () => {
|
||||
const input = codeBlock`
|
||||
oci.pull(
|
||||
name = "nginx_image",
|
||||
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
|
||||
image = "index.docker.io/library/nginx",
|
||||
platforms = ["linux/amd64"],
|
||||
tag = "1.27.1",
|
||||
)
|
||||
`;
|
||||
|
||||
const result = await extractPackageFile(input, 'MODULE.bazel');
|
||||
if (!result) {
|
||||
throw new Error('Expected a result.');
|
||||
}
|
||||
expect(result.deps).toEqual([
|
||||
{
|
||||
datasource: DockerDatasource.id,
|
||||
depType: 'oci_pull',
|
||||
depName: 'nginx_image',
|
||||
packageName: 'index.docker.io/library/nginx',
|
||||
currentValue: '1.27.1',
|
||||
currentDigest:
|
||||
'sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('returns oci.pull dependencies without tags', async () => {
|
||||
const input = codeBlock`
|
||||
oci.pull(
|
||||
name = "nginx_image",
|
||||
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
|
||||
image = "index.docker.io/library/nginx",
|
||||
platforms = ["linux/amd64"],
|
||||
)
|
||||
`;
|
||||
|
||||
const result = await extractPackageFile(input, 'MODULE.bazel');
|
||||
if (!result) {
|
||||
throw new Error('Expected a result.');
|
||||
}
|
||||
expect(result.deps).toEqual([
|
||||
{
|
||||
datasource: DockerDatasource.id,
|
||||
depType: 'oci_pull',
|
||||
depName: 'nginx_image',
|
||||
packageName: 'index.docker.io/library/nginx',
|
||||
currentDigest:
|
||||
'sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720',
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('returns maven.install and bazel_dep dependencies together', async () => {
|
||||
const input = codeBlock`
|
||||
bazel_dep(name = "bazel_jar_jar", version = "0.1.0")
|
||||
|
|
|
@ -7,6 +7,7 @@ import * as bazelrc from './bazelrc';
|
|||
import type { RecordFragment } from './fragments';
|
||||
import { parse } from './parser';
|
||||
import { RuleToMavenPackageDep, fillRegistryUrls } from './parser/maven';
|
||||
import { RuleToDockerPackageDep } from './parser/oci';
|
||||
import { RuleToBazelModulePackageDep } from './rules';
|
||||
import * as rules from './rules';
|
||||
|
||||
|
@ -18,11 +19,16 @@ export async function extractPackageFile(
|
|||
const records = parse(content);
|
||||
const pfc = await extractBazelPfc(records, packageFile);
|
||||
const mavenDeps = extractMavenDeps(records);
|
||||
const dockerDeps = LooseArray(RuleToDockerPackageDep).parse(records);
|
||||
|
||||
if (mavenDeps.length) {
|
||||
pfc.deps.push(...mavenDeps);
|
||||
}
|
||||
|
||||
if (dockerDeps.length) {
|
||||
pfc.deps.push(...dockerDeps);
|
||||
}
|
||||
|
||||
return pfc.deps.length ? pfc : null;
|
||||
} catch (err) {
|
||||
logger.debug({ err, packageFile }, 'Failed to parse bazel module file.');
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
import type { Category } from '../../../constants';
|
||||
import { BazelDatasource } from '../../datasource/bazel';
|
||||
import { DockerDatasource } from '../../datasource/docker';
|
||||
import { GithubTagsDatasource } from '../../datasource/github-tags';
|
||||
import { MavenDatasource } from '../../datasource/maven';
|
||||
import { extractPackageFile } from './extract';
|
||||
|
@ -10,11 +11,12 @@ export const url = 'https://bazel.build/external/module';
|
|||
export const categories: Category[] = ['bazel'];
|
||||
|
||||
export const defaultConfig = {
|
||||
fileMatch: ['(^|/)MODULE\\.bazel$'],
|
||||
fileMatch: ['(^|/|\\.)MODULE\\.bazel$'],
|
||||
};
|
||||
|
||||
export const supportedDatasources = [
|
||||
BazelDatasource.id,
|
||||
DockerDatasource.id,
|
||||
GithubTagsDatasource.id,
|
||||
MavenDatasource.id,
|
||||
];
|
||||
|
|
|
@ -286,5 +286,34 @@ describe('modules/manager/bazel-module/parser/index', () => {
|
|||
),
|
||||
]);
|
||||
});
|
||||
|
||||
it('finds oci.pull', () => {
|
||||
const input = codeBlock`
|
||||
oci.pull(
|
||||
name = "nginx_image",
|
||||
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
|
||||
image = "index.docker.io/library/nginx",
|
||||
platforms = ["linux/amd64"],
|
||||
tag = "1.27.1",
|
||||
)
|
||||
`;
|
||||
|
||||
const res = parse(input);
|
||||
expect(res).toEqual([
|
||||
fragments.record(
|
||||
{
|
||||
rule: fragments.string('oci_pull'),
|
||||
name: fragments.string('nginx_image'),
|
||||
digest: fragments.string(
|
||||
'sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720',
|
||||
),
|
||||
image: fragments.string('index.docker.io/library/nginx'),
|
||||
platforms: fragments.array([fragments.string('linux/amd64')], true),
|
||||
tag: fragments.string('1.27.1'),
|
||||
},
|
||||
true,
|
||||
),
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -3,8 +3,9 @@ import { Ctx } from '../context';
|
|||
import type { RecordFragment } from '../fragments';
|
||||
import { mavenRules } from './maven';
|
||||
import { moduleRules } from './module';
|
||||
import { ociRules } from './oci';
|
||||
|
||||
const rule = q.alt<Ctx>(moduleRules, mavenRules);
|
||||
const rule = q.alt<Ctx>(moduleRules, mavenRules, ociRules);
|
||||
|
||||
const query = q.tree<Ctx>({
|
||||
type: 'root-tree',
|
||||
|
|
41
lib/modules/manager/bazel-module/parser/oci.ts
Normal file
|
@ -0,0 +1,41 @@
|
|||
import { query as q } from 'good-enough-parser';
|
||||
import { z } from 'zod';
|
||||
import { DockerDatasource } from '../../../datasource/docker';
|
||||
import type { PackageDependency } from '../../types';
|
||||
import type { Ctx } from '../context';
|
||||
import { RecordFragmentSchema, StringFragmentSchema } from '../fragments';
|
||||
import { kvParams } from './common';
|
||||
|
||||
export const RuleToDockerPackageDep = RecordFragmentSchema.extend({
|
||||
children: z.object({
|
||||
rule: StringFragmentSchema.extend({
|
||||
value: z.literal('oci_pull'),
|
||||
}),
|
||||
name: StringFragmentSchema,
|
||||
image: StringFragmentSchema,
|
||||
tag: StringFragmentSchema.optional(),
|
||||
digest: StringFragmentSchema.optional(),
|
||||
}),
|
||||
}).transform(
|
||||
({ children: { rule, name, image, tag, digest } }): PackageDependency => ({
|
||||
datasource: DockerDatasource.id,
|
||||
depType: rule.value,
|
||||
depName: name.value,
|
||||
packageName: image.value,
|
||||
currentValue: tag?.value,
|
||||
currentDigest: digest?.value,
|
||||
}),
|
||||
);
|
||||
|
||||
export const ociRules = q
|
||||
.sym<Ctx>('oci')
|
||||
.op('.')
|
||||
.sym('pull', (ctx, token) => ctx.startRule('oci_pull'))
|
||||
.join(
|
||||
q.tree({
|
||||
type: 'wrapped-tree',
|
||||
maxDepth: 1,
|
||||
search: kvParams,
|
||||
postHandler: (ctx) => ctx.endRule(),
|
||||
}),
|
||||
);
|
|
@ -1,5 +1,7 @@
|
|||
The `bazel-module` manager can update [Bazel module (bzlmod)](https://bazel.build/external/module)-enabled workspaces.
|
||||
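For orientation, a minimal `MODULE.bazel` entry that this manager can pick up might look like the sketch below (the module name and version are illustrative):

```
# Renovate bumps the `version` attribute of bazel_dep entries.
bazel_dep(name = "bazel_jar_jar", version = "0.1.0")
```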
|
||||
### Maven
|
||||
|
||||
It also updates Maven artifacts initialized with [bzlmod](https://github.com/bazelbuild/rules_jvm_external/blob/master/docs/bzlmod.md). For simplicity, the name of the extension variable must match `maven*`. For example:
|
||||
|
||||
```
|
||||
|
@ -26,3 +28,21 @@ maven.artifact(
|
|||
version = "1.11.1",
|
||||
)
|
||||
```
|
||||
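For reference, here is a sketch of a fuller Maven setup under these conventions; the `use_extension` load path follows the rules_jvm_external bzlmod docs, and the artifact coordinates are illustrative:

```
maven = use_extension("@rules_jvm_external//:extensions.bzl", "maven")

maven.install(
    artifacts = [
        # group:artifact:version coordinates that Renovate can bump
        "org.seleniumhq.selenium:selenium-java:4.4.0",
    ],
)

use_repo(maven, "maven")
```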
|
||||
### Docker
|
||||
|
||||
Similarly, it updates Docker / OCI images pulled with [oci_pull](https://github.com/bazel-contrib/rules_oci/blob/main/docs/pull.md).
|
||||
|
||||
Note that the extension must be called `oci`:
|
||||
|
||||
```
|
||||
oci = use_extension("@rules_oci//oci:extensions.bzl", "oci")
|
||||
|
||||
oci.pull(
|
||||
name = "nginx_image",
|
||||
digest = "sha256:287ff321f9e3cde74b600cc26197424404157a72043226cbbf07ee8304a2c720",
|
||||
image = "index.docker.io/library/nginx",
|
||||
platforms = ["linux/amd64"],
|
||||
tag = "1.27.1",
|
||||
)
|
||||
```
|
||||
|
|
|
@ -41,101 +41,198 @@ describe('modules/manager/bun/artifacts', () => {
|
|||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('skips if cannot read lock file', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null if lock content unchanged', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns updated lock content', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{
|
||||
file: {
|
||||
path: 'bun.lockb',
|
||||
type: 'addition',
|
||||
contents: newLock,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('supports lockFileMaintenance', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
updateArtifact.config.updateType = 'lockFileMaintenance';
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{
|
||||
file: {
|
||||
path: 'bun.lockb',
|
||||
type: 'addition',
|
||||
contents: newLock,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('handles temporary error', async () => {
|
||||
const execError = new ExecError(TEMPORARY_ERROR, {
|
||||
cmd: '',
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
options: { encoding: 'utf8' },
|
||||
describe('when using .lockb lockfile format', () => {
|
||||
it('skips if cannot read lock file', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null if lock content unchanged', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns updated lock content', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{
|
||||
file: {
|
||||
path: 'bun.lockb',
|
||||
type: 'addition',
|
||||
contents: newLock,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('supports lockFileMaintenance', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
updateArtifact.config.updateType = 'lockFileMaintenance';
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{
|
||||
file: {
|
||||
path: 'bun.lockb',
|
||||
type: 'addition',
|
||||
contents: newLock,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('handles temporary error', async () => {
|
||||
const execError = new ExecError(TEMPORARY_ERROR, {
|
||||
cmd: '',
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
options: { encoding: 'utf8' },
|
||||
});
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
exec.mockRejectedValueOnce(execError);
|
||||
await expect(updateArtifacts(updateArtifact)).rejects.toThrow(
|
||||
TEMPORARY_ERROR,
|
||||
);
|
||||
});
|
||||
|
||||
it('handles full error', async () => {
|
||||
const execError = new ExecError('nope', {
|
||||
cmd: '',
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
options: { encoding: 'utf8' },
|
||||
});
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
exec.mockRejectedValueOnce(execError);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{ artifactError: { lockFile: 'bun.lockb', stderr: 'nope' } },
|
||||
]);
|
||||
});
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
exec.mockRejectedValueOnce(execError);
|
||||
await expect(updateArtifacts(updateArtifact)).rejects.toThrow(
|
||||
TEMPORARY_ERROR,
|
||||
);
|
||||
});
|
||||
|
||||
it('handles full error', async () => {
|
||||
const execError = new ExecError('nope', {
|
||||
cmd: '',
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
options: { encoding: 'utf8' },
|
||||
describe('when using .lock lockfile format', () => {
|
||||
it('skips if cannot read lock file', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lock'] },
|
||||
];
|
||||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns null if lock content unchanged', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lock'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toBeNull();
|
||||
});
|
||||
|
||||
it('returns updated lock content', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lock'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{
|
||||
file: {
|
||||
path: 'bun.lock',
|
||||
type: 'addition',
|
||||
contents: newLock,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('supports lockFileMaintenance', async () => {
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lock'] },
|
||||
];
|
||||
updateArtifact.config.updateType = 'lockFileMaintenance';
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{
|
||||
file: {
|
||||
path: 'bun.lock',
|
||||
type: 'addition',
|
||||
contents: newLock,
|
||||
},
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('handles temporary error', async () => {
|
||||
const execError = new ExecError(TEMPORARY_ERROR, {
|
||||
cmd: '',
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
options: { encoding: 'utf8' },
|
||||
});
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lock'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
exec.mockRejectedValueOnce(execError);
|
||||
await expect(updateArtifacts(updateArtifact)).rejects.toThrow(
|
||||
TEMPORARY_ERROR,
|
||||
);
|
||||
});
|
||||
|
||||
it('handles full error', async () => {
|
||||
const execError = new ExecError('nope', {
|
||||
cmd: '',
|
||||
stdout: '',
|
||||
stderr: '',
|
||||
options: { encoding: 'utf8' },
|
||||
});
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lock'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
exec.mockRejectedValueOnce(execError);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{ artifactError: { lockFile: 'bun.lock', stderr: 'nope' } },
|
||||
]);
|
||||
});
|
||||
updateArtifact.updatedDeps = [
|
||||
{ manager: 'bun', lockFiles: ['bun.lockb'] },
|
||||
];
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
exec.mockRejectedValueOnce(execError);
|
||||
expect(await updateArtifacts(updateArtifact)).toEqual([
|
||||
{ artifactError: { lockFile: 'bun.lockb', stderr: 'nope' } },
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('bun command execution', () => {
|
||||
it('check install options with configs', async () => {
|
||||
const lockfileFormats = ['bun.lockb', 'bun.lock'];
|
||||
const testCases = [
|
||||
{
|
||||
allowScripts: undefined,
|
||||
|
@ -184,38 +281,40 @@ describe('modules/manager/bun/artifacts', () => {
|
|||
},
|
||||
];
|
||||
|
||||
for (const testCase of testCases) {
|
||||
GlobalConfig.set({
|
||||
...globalConfig,
|
||||
allowScripts: testCase.allowScripts,
|
||||
});
|
||||
const updateArtifact: UpdateArtifact = {
|
||||
config: { ignoreScripts: testCase.ignoreScripts },
|
||||
newPackageFileContent: '',
|
||||
packageFileName: '',
|
||||
updatedDeps: [{ manager: 'bun', lockFiles: ['bun.lockb'] }],
|
||||
};
|
||||
for (const lockFile of lockfileFormats) {
|
||||
for (const testCase of testCases) {
|
||||
GlobalConfig.set({
|
||||
...globalConfig,
|
||||
allowScripts: testCase.allowScripts,
|
||||
});
|
||||
const updateArtifact: UpdateArtifact = {
|
||||
config: { ignoreScripts: testCase.ignoreScripts },
|
||||
newPackageFileContent: '',
|
||||
packageFileName: '',
|
||||
updatedDeps: [{ manager: 'bun', lockFiles: [lockFile] }],
|
||||
};
|
||||
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
const oldLock = Buffer.from('old');
|
||||
fs.readFile.mockResolvedValueOnce(oldLock as never);
|
||||
const newLock = Buffer.from('new');
|
||||
fs.readFile.mockResolvedValueOnce(newLock as never);
|
||||
|
||||
await updateArtifacts(updateArtifact);
|
||||
await updateArtifacts(updateArtifact);
|
||||
|
||||
expect(exec).toHaveBeenCalledWith(testCase.expectedCmd, {
|
||||
cwdFile: '',
|
||||
docker: {},
|
||||
toolConstraints: [
|
||||
{
|
||||
toolName: 'bun',
|
||||
},
|
||||
],
|
||||
userConfiguredEnv: undefined,
|
||||
});
|
||||
expect(exec).toHaveBeenCalledWith(testCase.expectedCmd, {
|
||||
cwdFile: '',
|
||||
docker: {},
|
||||
toolConstraints: [
|
||||
{
|
||||
toolName: 'bun',
|
||||
},
|
||||
],
|
||||
userConfiguredEnv: undefined,
|
||||
});
|
||||
|
||||
exec.mockClear();
|
||||
GlobalConfig.reset();
|
||||
exec.mockClear();
|
||||
GlobalConfig.reset();
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
|
|
@ -9,60 +9,120 @@ describe('modules/manager/bun/extract', () => {
|
|||
expect(await extractAllPackageFiles({}, ['package.json'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('ignores missing package.json file', async () => {
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
|
||||
});
|
||||
describe('when using the .lockb lockfile format', () => {
|
||||
it('ignores missing package.json file', async () => {
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('ignores invalid package.json file', async () => {
|
||||
(fs.readLocalFile as jest.Mock).mockResolvedValueOnce('invalid');
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
|
||||
});
|
||||
it('ignores invalid package.json file', async () => {
|
||||
(fs.readLocalFile as jest.Mock).mockResolvedValueOnce('invalid');
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles null response', async () => {
|
||||
fs.getSiblingFileName.mockReturnValueOnce('package.json');
|
||||
fs.readLocalFile.mockResolvedValueOnce(
|
||||
// This package.json returns null from the extractor
|
||||
JSON.stringify({
|
||||
_id: 1,
|
||||
_args: 1,
|
||||
_from: 1,
|
||||
}),
|
||||
);
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
|
||||
});
|
||||
it('handles null response', async () => {
|
||||
fs.getSiblingFileName.mockReturnValueOnce('package.json');
|
||||
fs.readLocalFile.mockResolvedValueOnce(
|
||||
// This package.json returns null from the extractor
|
||||
JSON.stringify({
|
||||
_id: 1,
|
||||
_args: 1,
|
||||
_from: 1,
|
||||
}),
|
||||
);
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('parses valid package.json file', async () => {
|
||||
fs.getSiblingFileName.mockReturnValueOnce('package.json');
|
||||
fs.readLocalFile.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
name: 'test',
|
||||
version: '0.0.1',
|
||||
dependencies: {
|
||||
dep1: '1.0.0',
|
||||
},
|
||||
}),
|
||||
);
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toMatchObject([
|
||||
{
|
||||
deps: [
|
||||
{
|
||||
currentValue: '1.0.0',
|
||||
datasource: 'npm',
|
||||
depName: 'dep1',
|
||||
depType: 'dependencies',
|
||||
prettyDepType: 'dependency',
|
||||
it('parses valid package.json file', async () => {
|
||||
fs.getSiblingFileName.mockReturnValueOnce('package.json');
|
||||
fs.readLocalFile.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
name: 'test',
|
||||
version: '0.0.1',
|
||||
dependencies: {
|
||||
dep1: '1.0.0',
|
||||
},
|
||||
],
|
||||
extractedConstraints: {},
|
||||
lockFiles: ['bun.lockb'],
|
||||
managerData: {
|
||||
hasPackageManager: false,
|
||||
packageJsonName: 'test',
|
||||
}),
|
||||
);
|
||||
expect(await extractAllPackageFiles({}, ['bun.lockb'])).toMatchObject([
|
||||
{
|
||||
deps: [
|
||||
{
|
||||
currentValue: '1.0.0',
|
||||
datasource: 'npm',
|
||||
depName: 'dep1',
|
||||
depType: 'dependencies',
|
||||
prettyDepType: 'dependency',
|
||||
},
|
||||
],
|
||||
extractedConstraints: {},
|
||||
lockFiles: ['bun.lockb'],
|
||||
managerData: {
|
||||
hasPackageManager: false,
|
||||
packageJsonName: 'test',
|
||||
},
|
||||
packageFile: 'package.json',
|
||||
packageFileVersion: '0.0.1',
|
||||
},
|
||||
packageFile: 'package.json',
|
||||
packageFileVersion: '0.0.1',
|
||||
},
|
||||
]);
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('when using the .lock lockfile format', () => {
|
||||
it('ignores missing package.json file', async () => {
|
||||
expect(await extractAllPackageFiles({}, ['bun.lock'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('ignores invalid package.json file', async () => {
|
||||
(fs.readLocalFile as jest.Mock).mockResolvedValueOnce('invalid');
|
||||
expect(await extractAllPackageFiles({}, ['bun.lock'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('handles null response', async () => {
|
||||
fs.getSiblingFileName.mockReturnValueOnce('package.json');
|
||||
fs.readLocalFile.mockResolvedValueOnce(
|
||||
// This package.json returns null from the extractor
|
||||
JSON.stringify({
|
||||
_id: 1,
|
||||
_args: 1,
|
||||
_from: 1,
|
||||
}),
|
||||
);
|
||||
expect(await extractAllPackageFiles({}, ['bun.lock'])).toEqual([]);
|
||||
});
|
||||
|
||||
it('parses valid package.json file', async () => {
|
||||
fs.getSiblingFileName.mockReturnValueOnce('package.json');
|
||||
fs.readLocalFile.mockResolvedValueOnce(
|
||||
JSON.stringify({
|
||||
name: 'test',
|
||||
version: '0.0.1',
|
||||
dependencies: {
|
||||
dep1: '1.0.0',
|
||||
},
|
||||
}),
|
||||
);
|
||||
expect(await extractAllPackageFiles({}, ['bun.lock'])).toMatchObject([
|
||||
{
|
||||
deps: [
|
||||
{
|
||||
currentValue: '1.0.0',
|
||||
datasource: 'npm',
|
||||
depName: 'dep1',
|
||||
depType: 'dependencies',
|
||||
prettyDepType: 'dependency',
|
||||
},
|
||||
],
|
||||
extractedConstraints: {},
|
||||
lockFiles: ['bun.lock'],
|
||||
managerData: {
|
||||
hasPackageManager: false,
|
||||
packageJsonName: 'test',
|
||||
},
|
||||
packageFile: 'package.json',
|
||||
packageFileVersion: '0.0.1',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
|
|
|
@ -18,7 +18,12 @@ export async function extractAllPackageFiles(
|
|||
): Promise<PackageFile[]> {
|
||||
const packageFiles: PackageFile<NpmManagerData>[] = [];
|
||||
for (const matchedFile of matchedFiles) {
|
||||
if (!matchesFileName(matchedFile, 'bun.lockb')) {
|
||||
if (
|
||||
!(
|
||||
matchesFileName(matchedFile, 'bun.lockb') ||
|
||||
matchesFileName(matchedFile, 'bun.lock')
|
||||
)
|
||||
) {
|
||||
logger.warn({ matchedFile }, 'Invalid bun lockfile match');
|
||||
continue;
|
||||
}
|
||||
|
|
|
@ -13,7 +13,7 @@ export const supersedesManagers = ['npm'];
|
|||
export const supportsLockFileMaintenance = true;
|
||||
|
||||
export const defaultConfig = {
|
||||
fileMatch: ['(^|/)bun\\.lockb$'],
|
||||
fileMatch: ['(^|/)bun\\.lockb?$'],
|
||||
digest: {
|
||||
prBodyDefinitions: {
|
||||
Change:
|
||||
|
|
|
@ -505,11 +505,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentDigest": "0b799ead604f900ed50685e9b2d469cd2befba5b",
|
||||
"datasource": "git-refs",
|
||||
"depName": "health_check",
|
||||
"managerData": {
|
||||
"lineNumber": 53,
|
||||
},
|
||||
"packageName": "https://github.com/ianheggie/health_check",
|
||||
"sourceUrl": "https://github.com/ianheggie/health_check",
|
||||
},
|
||||
{
|
||||
"currentValue": "'~> 4.3'",
|
||||
|
@ -539,12 +542,15 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
|
|||
},
|
||||
},
|
||||
{
|
||||
"currentDigest": "54b17ba8c7d8d20a16dfc65d1775241833219cf2",
|
||||
"currentValue": "'~> 0.6'",
|
||||
"datasource": "rubygems",
|
||||
"datasource": "git-refs",
|
||||
"depName": "http_parser.rb",
|
||||
"managerData": {
|
||||
"lineNumber": 57,
|
||||
},
|
||||
"packageName": "https://github.com/tmm1/http_parser.rb",
|
||||
"sourceUrl": "https://github.com/tmm1/http_parser.rb",
|
||||
},
|
||||
{
|
||||
"currentValue": "'~> 1.3'",
|
||||
|
@ -591,11 +597,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentDigest": "fd184883048b922b176939f851338d0a4971a532",
|
||||
"datasource": "git-refs",
|
||||
"depName": "nilsimsa",
|
||||
"managerData": {
|
||||
"lineNumber": 63,
|
||||
},
|
||||
"packageName": "https://github.com/witgo/nilsimsa",
|
||||
"sourceUrl": "https://github.com/witgo/nilsimsa",
|
||||
},
|
||||
{
|
||||
"currentValue": "'~> 1.10'",
|
||||
|
@ -660,11 +669,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentDigest": "58465d2e213991f8afb13b984854a49fcdcc980c",
|
||||
"datasource": "git-refs",
|
||||
"depName": "posix-spawn",
|
||||
"managerData": {
|
||||
"lineNumber": 71,
|
||||
},
|
||||
"packageName": "https://github.com/rtomayko/posix-spawn",
|
||||
"sourceUrl": "https://github.com/rtomayko/posix-spawn",
|
||||
},
|
||||
{
|
||||
"currentValue": "'~> 2.1'",
|
||||
|
@ -899,11 +911,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parse mastodon Gem
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentDigest": "e742697a0906e74e8bb777ef98137bc3955d981d",
|
||||
"datasource": "git-refs",
|
||||
"depName": "json-ld",
|
||||
"managerData": {
|
||||
"lineNumber": 99,
|
||||
},
|
||||
"packageName": "https://github.com/ruby-rdf/json-ld.git",
|
||||
"sourceUrl": "https://github.com/ruby-rdf/json-ld",
|
||||
},
|
||||
{
|
||||
"currentValue": "'~> 3.0'",
|
||||
|
@ -1494,11 +1509,13 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"datasource": "git-refs",
|
||||
"depName": "webpacker",
|
||||
"managerData": {
|
||||
"lineNumber": 16,
|
||||
},
|
||||
"packageName": "https://github.com/rails/webpacker",
|
||||
"sourceUrl": "https://github.com/rails/webpacker",
|
||||
},
|
||||
{
|
||||
"currentValue": ""~> 3.1.11"",
|
||||
|
@ -1681,7 +1698,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentValue": "update-pg",
|
||||
"datasource": "git-refs",
|
||||
"depName": "queue_classic",
|
||||
"depTypes": [
|
||||
"job",
|
||||
|
@ -1689,6 +1707,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
"managerData": {
|
||||
"lineNumber": 54,
|
||||
},
|
||||
"packageName": "https://github.com/rafaelfranca/queue_classic",
|
||||
"sourceUrl": "https://github.com/rafaelfranca/queue_classic",
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
|
@ -1791,7 +1811,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentValue": "close-race",
|
||||
"datasource": "git-refs",
|
||||
"depName": "websocket-client-simple",
|
||||
"depTypes": [
|
||||
"cable",
|
||||
|
@ -1799,6 +1820,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
"managerData": {
|
||||
"lineNumber": 71,
|
||||
},
|
||||
"packageName": "https://github.com/matthewd/websocket-client-simple",
|
||||
"sourceUrl": "https://github.com/matthewd/websocket-client-simple",
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
|
@ -2024,15 +2047,19 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentValue": "master",
|
||||
"datasource": "git-refs",
|
||||
"depName": "activerecord-jdbcsqlite3-adapter",
|
||||
"lockedVersion": "52.1-java",
|
||||
"managerData": {
|
||||
"lineNumber": 129,
|
||||
},
|
||||
"packageName": "https://github.com/jruby/activerecord-jdbc-adapter",
|
||||
"sourceUrl": "https://github.com/jruby/activerecord-jdbc-adapter",
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentValue": "master",
|
||||
"datasource": "git-refs",
|
||||
"depName": "activerecord-jdbcmysql-adapter",
|
||||
"depTypes": [
|
||||
"db",
|
||||
|
@ -2041,9 +2068,12 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
"managerData": {
|
||||
"lineNumber": 131,
|
||||
},
|
||||
"packageName": "https://github.com/jruby/activerecord-jdbc-adapter",
|
||||
"sourceUrl": "https://github.com/jruby/activerecord-jdbc-adapter",
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentValue": "master",
|
||||
"datasource": "git-refs",
|
||||
"depName": "activerecord-jdbcpostgresql-adapter",
|
||||
"depTypes": [
|
||||
"db",
|
||||
|
@ -2052,6 +2082,8 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
"managerData": {
|
||||
"lineNumber": 132,
|
||||
},
|
||||
"packageName": "https://github.com/jruby/activerecord-jdbc-adapter",
|
||||
"sourceUrl": "https://github.com/jruby/activerecord-jdbc-adapter",
|
||||
},
|
||||
{
|
||||
"currentValue": "">= 1.3.0"",
|
||||
|
@ -2104,11 +2136,14 @@ exports[`modules/manager/bundler/extract extractPackageFile() parses rails Gemfi
|
|||
},
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
"currentValue": "master",
|
||||
"datasource": "git-refs",
|
||||
"depName": "activerecord-oracle_enhanced-adapter",
|
||||
"managerData": {
|
||||
"lineNumber": 154,
|
||||
},
|
||||
"packageName": "https://github.com/rsim/oracle-enhanced",
|
||||
"sourceUrl": "https://github.com/rsim/oracle-enhanced",
|
||||
},
|
||||
{
|
||||
"datasource": "rubygems",
|
||||
|
|
|
@ -171,14 +171,22 @@ describe('modules/manager/bundler/extract', () => {
|
|||
it('parses inline source in Gemfile', async () => {
|
||||
const sourceInlineGemfile = codeBlock`
|
||||
baz = 'https://gems.baz.com'
|
||||
gem 'inline_gem'
|
||||
gem "inline_source_gem", source: 'https://gems.foo.com'
|
||||
gem 'inline_source_gem_with_version', "~> 1", source: 'https://gems.bar.com'
|
||||
gem 'inline_source_gem_with_variable_source', source: baz
|
||||
gem 'inline_source_gem_with_variable_source_and_require_after', source: baz, require: %w[inline_source_gem]
|
||||
gem "inline_source_gem_with_require_after", source: 'https://gems.foo.com', require: %w[inline_source_gem]
|
||||
gem "inline_source_gem_with_require_before", require: %w[inline_source_gem], source: 'https://gems.foo.com'
|
||||
gem "inline_source_gem_with_group_before", group: :production, source: 'https://gems.foo.com'
|
||||
`;
|
||||
fs.readLocalFile.mockResolvedValueOnce(sourceInlineGemfile);
|
||||
const res = await extractPackageFile(sourceInlineGemfile, 'Gemfile');
|
||||
expect(res).toMatchObject({
|
||||
deps: [
|
||||
{
|
||||
depName: 'inline_gem',
|
||||
},
|
||||
{
|
||||
depName: 'inline_source_gem',
|
||||
registryUrls: ['https://gems.foo.com'],
|
||||
|
@ -192,6 +200,104 @@ describe('modules/manager/bundler/extract', () => {
|
|||
depName: 'inline_source_gem_with_variable_source',
|
||||
registryUrls: ['https://gems.baz.com'],
|
||||
},
|
||||
{
|
||||
depName: 'inline_source_gem_with_variable_source_and_require_after',
|
||||
registryUrls: ['https://gems.baz.com'],
|
||||
},
|
||||
{
|
||||
depName: 'inline_source_gem_with_require_after',
|
||||
registryUrls: ['https://gems.foo.com'],
|
||||
},
|
||||
{
|
||||
depName: 'inline_source_gem_with_require_before',
|
||||
registryUrls: ['https://gems.foo.com'],
|
||||
},
|
||||
{
|
||||
depName: 'inline_source_gem_with_group_before',
|
||||
registryUrls: ['https://gems.foo.com'],
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('parses git refs in Gemfile', async () => {
|
||||
const gitRefGemfile = codeBlock`
|
||||
gem 'foo', git: 'https://github.com/foo/foo', ref: 'fd184883048b922b176939f851338d0a4971a532'
|
||||
gem 'bar', git: 'https://github.com/bar/bar', tag: 'v1.0.0'
|
||||
gem 'baz', github: 'baz/baz', branch: 'master'
|
||||
`;
|
||||
|
||||
fs.readLocalFile.mockResolvedValueOnce(gitRefGemfile);
|
||||
const res = await extractPackageFile(gitRefGemfile, 'Gemfile');
|
||||
expect(res).toMatchObject({
|
||||
deps: [
|
||||
{
|
||||
depName: 'foo',
|
||||
packageName: 'https://github.com/foo/foo',
|
||||
sourceUrl: 'https://github.com/foo/foo',
|
||||
currentDigest: 'fd184883048b922b176939f851338d0a4971a532',
|
||||
datasource: 'git-refs',
|
||||
},
|
||||
{
|
||||
depName: 'bar',
|
||||
packageName: 'https://github.com/bar/bar',
|
||||
sourceUrl: 'https://github.com/bar/bar',
|
||||
currentValue: 'v1.0.0',
|
||||
datasource: 'git-refs',
|
||||
},
|
||||
{
|
||||
depName: 'baz',
|
||||
packageName: 'https://github.com/baz/baz',
|
||||
sourceUrl: 'https://github.com/baz/baz',
|
||||
currentValue: 'master',
|
||||
datasource: 'git-refs',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('parses multiple current values Gemfile', async () => {
|
||||
const multipleValuesGemfile = codeBlock`
|
||||
gem 'gem_without_values'
|
||||
gem 'gem_with_one_value', ">= 3.0.5"
|
||||
gem 'gem_with_multiple_values', ">= 3.0.5", "< 3.2"
|
||||
`;
|
||||
fs.readLocalFile.mockResolvedValueOnce(multipleValuesGemfile);
|
||||
const res = await extractPackageFile(multipleValuesGemfile, 'Gemfile');
|
||||
expect(res).toMatchObject({
|
||||
deps: [
|
||||
{
|
||||
depName: 'gem_without_values',
|
||||
},
|
||||
{
|
||||
depName: 'gem_with_one_value',
|
||||
currentValue: '">= 3.0.5"',
|
||||
},
|
||||
{
|
||||
depName: 'gem_with_multiple_values',
|
||||
currentValue: '">= 3.0.5", "< 3.2"',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
||||
it('skips local gems in Gemfile', async () => {
|
||||
const pathGemfile = codeBlock`
|
||||
gem 'foo', path: 'vendor/foo'
|
||||
gem 'bar'
|
||||
`;
|
||||
|
||||
fs.readLocalFile.mockResolvedValueOnce(pathGemfile);
|
||||
const res = await extractPackageFile(pathGemfile, 'Gemfile');
|
||||
expect(res).toMatchObject({
|
||||
deps: [
|
||||
{
|
||||
depName: 'foo',
|
||||
skipReason: 'internal-package',
|
||||
},
|
||||
{
|
||||
depName: 'bar',
|
||||
},
|
||||
],
|
||||
});
|
||||
});
|
||||
|
|
|
@ -2,6 +2,7 @@ import is from '@sindresorhus/is';
|
|||
import { logger } from '../../../logger';
|
||||
import { readLocalFile } from '../../../util/fs';
|
||||
import { newlineRegex, regEx } from '../../../util/regex';
|
||||
import { GitRefsDatasource } from '../../datasource/git-refs';
|
||||
import { RubyVersionDatasource } from '../../datasource/ruby-version';
|
||||
import { RubygemsDatasource } from '../../datasource/rubygems';
|
||||
import type { PackageDependency, PackageFileContent } from '../types';
|
||||
|
@ -12,6 +13,20 @@ function formatContent(input: string): string {
|
|||
return input.replace(regEx(/^ {2}/), '') + '\n'; // remove leading whitespace and add a newline at the end
|
||||
}
|
||||
|
||||
const variableMatchRegex = regEx(
|
||||
`^(?<key>\\w+)\\s*=\\s*['"](?<value>[^'"]+)['"]`,
|
||||
);
|
||||
const gemMatchRegex = regEx(
|
||||
`^\\s*gem\\s+(['"])(?<depName>[^'"]+)(['"])(\\s*,\\s*(?<currentValue>(['"])[^'"]+['"](\\s*,\\s*['"][^'"]+['"])?))?`,
|
||||
);
|
||||
const sourceMatchRegex = regEx(
|
||||
`source:\\s*((?:['"](?<registryUrl>[^'"]+)['"])|(?<sourceName>\\w+))?`,
|
||||
);
|
||||
const gitRefsMatchRegex = regEx(
|
||||
`((git:\\s*['"](?<gitUrl>[^'"]+)['"])|(\\s*,\\s*github:\\s*['"](?<repoName>[^'"]+)['"]))(\\s*,\\s*branch:\\s*['"](?<branchName>[^'"]+)['"])?(\\s*,\\s*ref:\\s*['"](?<refName>[^'"]+)['"])?(\\s*,\\s*tag:\\s*['"](?<tagName>[^'"]+)['"])?`,
|
||||
);
|
||||
const pathMatchRegex = regEx(`path:\\s*['"](?<path>[^'"]+)['"]`);
|
||||
|
||||
export async function extractPackageFile(
|
||||
content: string,
|
||||
packageFile?: string,
|
||||
|
@ -114,9 +129,6 @@ export async function extractPackageFile(
|
|||
});
|
||||
}
|
||||
|
||||
const variableMatchRegex = regEx(
|
||||
`^(?<key>\\w+)\\s*=\\s*['"](?<value>[^'"]+)['"]`,
|
||||
);
|
||||
const variableMatch = variableMatchRegex.exec(line);
|
||||
if (variableMatch) {
|
||||
if (variableMatch.groups?.key) {
|
||||
|
@ -124,28 +136,56 @@ export async function extractPackageFile(
|
|||
}
|
||||
}
|
||||
|
||||
const gemMatchRegex = regEx(
|
||||
`^\\s*gem\\s+(['"])(?<depName>[^'"]+)(['"])(\\s*,\\s*(?<currentValue>(['"])[^'"]+['"](\\s*,\\s*['"][^'"]+['"])?))?(\\s*,\\s*source:\\s*(['"](?<registryUrl>[^'"]+)['"]|(?<sourceName>[^'"]+)))?`,
|
||||
);
|
||||
const gemMatch = gemMatchRegex.exec(line);
|
||||
const gemMatch = gemMatchRegex.exec(line)?.groups;
|
||||
|
||||
if (gemMatch) {
|
||||
const dep: PackageDependency = {
|
||||
depName: gemMatch.groups?.depName,
|
||||
depName: gemMatch.depName,
|
||||
managerData: { lineNumber },
|
||||
datasource: RubygemsDatasource.id,
|
||||
};
|
||||
if (gemMatch.groups?.currentValue) {
|
||||
const currentValue = gemMatch.groups.currentValue;
|
||||
|
||||
if (gemMatch.currentValue) {
|
||||
const currentValue = gemMatch.currentValue;
|
||||
dep.currentValue = currentValue;
|
||||
}
|
||||
if (gemMatch.groups?.registryUrl) {
|
||||
const registryUrl = gemMatch.groups.registryUrl;
|
||||
dep.registryUrls = [registryUrl];
|
||||
|
||||
const pathMatch = pathMatchRegex.exec(line)?.groups;
|
||||
if (pathMatch) {
|
||||
dep.skipReason = 'internal-package';
|
||||
}
|
||||
if (gemMatch.groups?.sourceName) {
|
||||
const registryUrl = variables[gemMatch.groups.sourceName];
|
||||
dep.registryUrls = [registryUrl];
|
||||
|
||||
const sourceMatch = sourceMatchRegex.exec(line)?.groups;
|
||||
if (sourceMatch) {
|
||||
if (sourceMatch.registryUrl) {
|
||||
dep.registryUrls = [sourceMatch.registryUrl];
|
||||
} else if (sourceMatch.sourceName) {
|
||||
dep.registryUrls = [variables[sourceMatch.sourceName]];
|
||||
}
|
||||
}
|
||||
|
||||
const gitRefsMatch = gitRefsMatchRegex.exec(line)?.groups;
|
||||
if (gitRefsMatch) {
|
||||
if (gitRefsMatch.gitUrl) {
|
||||
const gitUrl = gitRefsMatch.gitUrl;
|
||||
dep.packageName = gitUrl;
|
||||
|
||||
if (gitUrl.startsWith('https://')) {
|
||||
dep.sourceUrl = gitUrl.replace(/\.git$/, '');
|
||||
}
|
||||
} else if (gitRefsMatch.repoName) {
|
||||
dep.packageName = `https://github.com/${gitRefsMatch.repoName}`;
|
||||
dep.sourceUrl = dep.packageName;
|
||||
}
|
||||
if (gitRefsMatch.refName) {
|
||||
dep.currentDigest = gitRefsMatch.refName;
|
||||
} else if (gitRefsMatch.branchName) {
|
||||
dep.currentValue = gitRefsMatch.branchName;
|
||||
} else if (gitRefsMatch.tagName) {
|
||||
dep.currentValue = gitRefsMatch.tagName;
|
||||
}
|
||||
dep.datasource = GitRefsDatasource.id;
|
||||
}
|
||||
dep.datasource = RubygemsDatasource.id;
|
||||
res.deps.push(dep);
|
||||
}
|
||||
|
||||
|
|
|
@ -13,6 +13,33 @@ describe('modules/manager/circleci/extract', () => {
      expect(extractPackageFile('nothing here')).toBeNull();
    });

    it('handles registry alias', () => {
      const res = extractPackageFile(
        'executors:\n my-executor:\n docker:\n - image: quay.io/myName/myPackage:0.6.2',
        '',
        {
          registryAliases: {
            'quay.io': 'my-quay-mirror.registry.com',
            'index.docker.io': 'my-docker-mirror.registry.com',
          },
        },
      );
      expect(res).toEqual({
        deps: [
          {
            autoReplaceStringTemplate:
              'quay.io/myName/myPackage:{{#if newValue}}{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}',
            currentDigest: undefined,
            currentValue: '0.6.2',
            datasource: 'docker',
            depName: 'my-quay-mirror.registry.com/myName/myPackage',
            depType: 'docker',
            replaceString: 'quay.io/myName/myPackage:0.6.2',
          },
        ],
      });
    });

    it('extracts multiple image and resolves yaml anchors', () => {
      const res = extractPackageFile(file1);
      expect(res?.deps).toEqual([

@ -222,5 +249,65 @@ describe('modules/manager/circleci/extract', () => {
        },
      ]);
    });

    it('extracts orb definitions', () => {
      const res = extractPackageFile(codeBlock`
        version: 2.1

        orbs:
          myorb:
            orbs:
              python: circleci/python@2.1.1

            executors:
              python:
                docker:
                  - image: cimg/python:3.9

            jobs:
              test_image:
                docker:
                  - image: cimg/python:3.7
                steps:
                  - checkout

        workflows:
          Test:
            jobs:
              - myorb/test_image`);

      expect(res).toEqual({
        deps: [
          {
            currentValue: '2.1.1',
            datasource: 'orb',
            depName: 'python',
            depType: 'orb',
            packageName: 'circleci/python',
            versioning: 'npm',
          },
          {
            autoReplaceStringTemplate:
              '{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}',
            currentDigest: undefined,
            currentValue: '3.9',
            datasource: 'docker',
            depName: 'cimg/python',
            depType: 'docker',
            replaceString: 'cimg/python:3.9',
          },
          {
            autoReplaceStringTemplate:
              '{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}',
            currentDigest: undefined,
            currentValue: '3.7',
            datasource: 'docker',
            depName: 'cimg/python',
            depType: 'docker',
            replaceString: 'cimg/python:3.7',
          },
        ],
      });
    });
  });
});
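The registry-alias test above pins down the intended behavior: the matched registry host is rewritten for resolution (`depName` points at the mirror), while `replaceString` and `autoReplaceStringTemplate` keep the original host so the config file is written back unchanged. A rough sketch of that substitution, assuming a simplified, hypothetical `applyAliases` helper (the real logic lives inside `getDep` in the dockerfile manager):

// Hypothetical helper, for illustration only.
function applyAliases(
  image: string,
  aliases: Record<string, string>,
): string {
  for (const [from, to] of Object.entries(aliases)) {
    // Rewrite only when the alias matches a full host prefix.
    if (image.startsWith(`${from}/`)) {
      return `${to}/${image.slice(from.length + 1)}`;
    }
  }
  return image;
}

applyAliases('quay.io/myName/myPackage:0.6.2', {
  'quay.io': 'my-quay-mirror.registry.com',
});
// => 'my-quay-mirror.registry.com/myName/myPackage:0.6.2'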
@ -4,20 +4,21 @@ import { parseSingleYaml } from '../../../util/yaml';
import { OrbDatasource } from '../../datasource/orb';
import * as npmVersioning from '../../versioning/npm';
import { getDep } from '../dockerfile/extract';
import type { PackageDependency, PackageFileContent } from '../types';
import { CircleCiFile, type CircleCiJob } from './schema';
import type {
  ExtractConfig,
  PackageDependency,
  PackageFileContent,
} from '../types';
import { CircleCiFile, type CircleCiJob, type CircleCiOrb } from './schema';

export function extractPackageFile(
  content: string,
  packageFile?: string,
): PackageFileContent | null {
function extractDefinition(
  definition: CircleCiOrb | CircleCiFile,
  config?: ExtractConfig,
): PackageDependency[] {
  const deps: PackageDependency[] = [];
  try {
    const parsed = parseSingleYaml(content, {
      customSchema: CircleCiFile,
    });

    for (const [key, orb] of Object.entries(parsed.orbs ?? {})) {
  for (const [key, orb] of Object.entries(definition.orbs ?? {})) {
    if (typeof orb === 'string') {
      const [packageName, currentValue] = orb.split('@');

      deps.push({

@ -28,25 +29,44 @@ export function extractPackageFile(
        versioning: npmVersioning.id,
        datasource: OrbDatasource.id,
      });
    } else {
      deps.push(...extractDefinition(orb, config));
    }
  }

    // extract environments
    const environments: CircleCiJob[] = [
      Object.values(parsed.executors ?? {}),
      Object.values(parsed.jobs ?? {}),
    ].flat();
    for (const job of environments) {
      for (const dockerElement of coerceArray(job.docker)) {
        deps.push({
          ...getDep(dockerElement.image),
          depType: 'docker',
        });
      }
  // extract environments
  const environments: CircleCiJob[] = [
    Object.values(definition.executors ?? {}),
    Object.values(definition.jobs ?? {}),
  ].flat();
  for (const job of environments) {
    for (const dockerElement of coerceArray(job.docker)) {
      deps.push({
        ...getDep(dockerElement.image, true, config?.registryAliases),
        depType: 'docker',
      });
    }
  }

  return deps;
}

export function extractPackageFile(
  content: string,
  packageFile?: string,
  config?: ExtractConfig,
): PackageFileContent | null {
  const deps: PackageDependency[] = [];
  try {
    const parsed = parseSingleYaml(content, {
      customSchema: CircleCiFile,
    });

    deps.push(...extractDefinition(parsed, config));

    for (const alias of coerceArray(parsed.aliases)) {
      deps.push({
        ...getDep(alias.image),
        ...getDep(alias.image, true, config?.registryAliases),
        depType: 'docker',
      });
    }
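A hedged usage sketch of the refactored entry point (not part of the diff): `extractDefinition` now recurses into inline orbs, and the optional `ExtractConfig` threads `registryAliases` down to every `getDep` call. The content and config below are invented to mirror the tests earlier in this commit:

// Illustrative call; output shape inferred from the tests above.
const res = extractPackageFile(
  'jobs:\n  build:\n    docker:\n      - image: quay.io/myName/myPackage:0.6.2',
  '.circleci/config.yml',
  { registryAliases: { 'quay.io': 'my-quay-mirror.registry.com' } },
);
// res?.deps[0].depName === 'my-quay-mirror.registry.com/myName/myPackage'
// res?.deps[0].replaceString === 'quay.io/myName/myPackage:0.6.2'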
@ -4,14 +4,31 @@ export const CircleCiDocker = z.object({
  image: z.string(),
});

export type CircleCiJob = z.infer<typeof CircleCiJob>;
export const CircleCiJob = z.object({
  docker: z.array(CircleCiDocker).optional(),
});
export type CircleCiJob = z.infer<typeof CircleCiJob>;

const baseOrb = z.object({
  executors: z.record(z.string(), CircleCiJob).optional(),
  jobs: z.record(z.string(), CircleCiJob).optional(),
});

type Orb = z.infer<typeof baseOrb> & {
  orbs?: Record<string, string | Orb>;
};

export const CircleCiOrb: z.ZodType<Orb> = baseOrb.extend({
  orbs: z.lazy(() =>
    z.record(z.string(), z.union([z.string(), CircleCiOrb])).optional(),
  ),
});
export type CircleCiOrb = z.infer<typeof CircleCiOrb>;

export const CircleCiFile = z.object({
  aliases: z.array(CircleCiDocker).optional(),
  executors: z.record(z.string(), CircleCiJob).optional(),
  jobs: z.record(z.string(), CircleCiJob).optional(),
  orbs: z.record(z.string()).optional(),
  orbs: z.record(z.string(), z.union([z.string(), CircleCiOrb])).optional(),
});
export type CircleCiFile = z.infer<typeof CircleCiFile>;
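The interesting part of this schema change is the self-reference: zod cannot infer a recursive type, so `Orb` is declared by hand and `z.lazy` defers evaluation of `CircleCiOrb` until parse time. A minimal standalone sketch of the same pattern, with simplified field names and an invented input:

import { z } from 'zod';

// Recursive schema pattern: declare the TS type manually, then use
// z.lazy so the schema can reference itself before it is fully defined.
type Node = { children?: Record<string, string | Node> };

const Node: z.ZodType<Node> = z.object({
  children: z
    .lazy(() => z.record(z.string(), z.union([z.string(), Node])))
    .optional(),
});

// Parses arbitrarily nested definitions, like orbs inside orbs:
Node.parse({
  children: { inner: { children: { leaf: 'circleci/python@2.1.1' } } },
});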
@ -2,13 +2,13 @@ import is from '@sindresorhus/is';
import type { Category } from '../../../../constants';
import type {
  ExtractConfig,
  MaybePromise,
  PackageDependency,
  PackageFileContent,
  Result,
} from '../../types';
import { validMatchFields } from '../utils';
import { handleAny, handleCombination, handleRecursive } from './strategies';
import type { RegexManagerConfig, RegexManagerTemplates } from './types';
import { validMatchFields } from './utils';

export const categories: Category[] = ['custom'];

@ -22,7 +22,7 @@ export function extractPackageFile(
  content: string,
  packageFile: string,
  config: ExtractConfig,
): Result<PackageFileContent | null> {
): MaybePromise<PackageFileContent | null> {
  let deps: PackageDependency[];
  switch (config.matchStringsStrategy) {
    default:
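The return type loosens from `Result<...>` to `MaybePromise<...>`, which lets this extractor stay synchronous while fitting an interface that also admits async extractors. Assuming Renovate's alias is the conventional union (not shown in this diff), the shape and its effect on callers look roughly like this:

// Presumed definition; the actual alias lives in ../../types.
type MaybePromise<T> = T | Promise<T>;

// Callers normalize with await, which is a no-op for plain values.
async function run(
  extract: () => MaybePromise<string | null>,
): Promise<string | null> {
  return await extract();
}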
@ -1,11 +1,10 @@
import is from '@sindresorhus/is';
import { logger } from '../../../../logger';
import { regEx } from '../../../../util/regex';
import type { PackageDependency } from '../../types';
import { checkIsValidDependency } from '../utils';
import type { RecursionParameter, RegexManagerConfig } from './types';
import {
  createDependency,
  isValidDependency,
  mergeExtractionTemplate,
  mergeGroups,
  regexMatchAll,

@ -32,7 +31,7 @@ export function handleAny(
    )
    .filter(is.truthy)
    .filter((dep: PackageDependency) =>
      checkIsValidDependency(dep, packageFile),
      checkIsValidDependency(dep, packageFile, 'regex'),
    );
}

@ -61,7 +60,7 @@ export function handleCombination(
  return [createDependency(extraction, config)]
    .filter(is.truthy)
    .filter((dep: PackageDependency) =>
      checkIsValidDependency(dep, packageFile),
      checkIsValidDependency(dep, packageFile, 'regex'),
    );
}

@ -84,7 +83,7 @@ export function handleRecursive(
    })
    .filter(is.truthy)
    .filter((dep: PackageDependency) =>
      checkIsValidDependency(dep, packageFile),
      checkIsValidDependency(dep, packageFile, 'regex'),
    );
}

@ -116,23 +115,3 @@ function processRecursive(parameters: RecursionParameter): PackageDependency[] {
    });
  });
}

function checkIsValidDependency(
  dep: PackageDependency,
  packageFile: string,
): boolean {
  const isValid = isValidDependency(dep);
  if (!isValid) {
    const meta = {
      packageDependency: dep,
      packageFile,
    };
    logger.trace(
      meta,
      'Discovered a package dependency by matching regex, but it did not pass validation. Discarding',
    );
    return isValid;
  }

  return isValid;
}
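The private `checkIsValidDependency` deleted above is replaced by a shared helper imported from `../utils`, and the call sites now pass a manager name (`'regex'`). The shared implementation is not shown in this diff; presumably it mirrors the removed function with the manager name added to the log metadata, roughly:

// Hedged reconstruction of the shared helper in ../utils; imports of
// logger, isValidDependency, and PackageDependency mirror the removed block.
export function checkIsValidDependency(
  dep: PackageDependency,
  packageFile: string,
  manager: string,
): boolean {
  const isValid = isValidDependency(dep);
  if (!isValid) {
    logger.trace(
      { packageDependency: dep, packageFile, manager },
      'Discovered a package dependency, but it did not pass validation. Discarding',
    );
  }
  return isValid;
}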