chore(deps): update dependency prettier to v2.3.0 (#10012)

Co-authored-by: Renovate Bot <bot@renovateapp.com>
Co-authored-by: Rhys Arkins <rhys@arkins.net>
Co-authored-by: Michael Kriese <michael.kriese@visualon.de>
renovate[bot] 2021-05-17 08:06:24 +00:00 committed by GitHub
parent 9e08eaa1dc
commit b8e36daa87
76 changed files with 263 additions and 301 deletions

View file

@@ -86,9 +86,8 @@ function addChildrenArrayInParents() {
 function createSchemaForChildConfigs() {
   for (const option of options) {
     if (option.parent) {
-      properties[option.parent].items.allOf[0].properties[
-        option.name
-      ] = createSingleConfig(option);
+      properties[option.parent].items.allOf[0].properties[option.name] =
+        createSingleConfig(option);
     }
   }
 }

View file

@@ -147,10 +147,7 @@ describe(getName(), () => {
     const configParser = await import('./index');
     const config = configParser.mergeChildConfig(parentConfig, childConfig);
     expect(config.packageRules.map((rule) => rule.a)).toMatchObject([
-      1,
-      2,
-      3,
-      4,
+      1, 2, 3, 4,
     ]);
   });
   it('merges constraints', async () => {

View file

@@ -298,9 +298,8 @@ describe(getName(), () => {
       enabled: true,
       separateMinorPatch: true,
     };
-    const { isMigrated, migratedConfig } = configMigration.migrateConfig(
-      config
-    );
+    const { isMigrated, migratedConfig } =
+      configMigration.migrateConfig(config);
     expect(isMigrated).toBe(false);
     expect(migratedConfig).toMatchObject(config);
   });

View file

@@ -67,7 +67,8 @@ export function migrateConfig(
       delete migratedConfig.pathRules;
     } else if (key === 'suppressNotifications') {
       if (is.nonEmptyArray(val) && val.includes('prEditNotification')) {
-        migratedConfig.suppressNotifications = migratedConfig.suppressNotifications.filter(
-          (item) => item !== 'prEditNotification'
-        );
+        migratedConfig.suppressNotifications =
+          migratedConfig.suppressNotifications.filter(
+            (item) => item !== 'prEditNotification'
+          );
       }
@@ -127,8 +128,10 @@ export function migrateConfig(
         )
           ? migratedConfig.packageRules
           : [];
-        const payload = migrateConfig(packageFile as RenovateConfig, key)
-          .migratedConfig;
+        const payload = migrateConfig(
+          packageFile as RenovateConfig,
+          key
+        ).migratedConfig;
         for (const subrule of payload.packageRules || []) {
           subrule.paths = [(packageFile as any).packageFile];
           migratedConfig.packageRules.push(subrule);
@@ -152,8 +155,10 @@ export function migrateConfig(
       migratedConfig.packageRules = is.array(migratedConfig.packageRules)
         ? migratedConfig.packageRules
         : [];
-      const depTypePackageRule = migrateConfig(val as RenovateConfig, key)
-        .migratedConfig;
+      const depTypePackageRule = migrateConfig(
+        val as RenovateConfig,
+        key
+      ).migratedConfig;
       depTypePackageRule.depTypeList = [key];
       delete depTypePackageRule.packageRules;
       migratedConfig.packageRules.push(depTypePackageRule);

View file

@@ -175,13 +175,8 @@ export async function getPreset(
   if (newPreset === null) {
     return {};
   }
-  const {
-    presetSource,
-    packageName,
-    presetPath,
-    presetName,
-    params,
-  } = parsePreset(preset);
+  const { presetSource, packageName, presetPath, presetName, params } =
+    parsePreset(preset);
   let presetConfig = await presetSources[presetSource].getPreset({
     packageName,
     presetPath,

View file

@@ -123,9 +123,8 @@ export type PostUpgradeTasks = {
   executionMode: ExecutionMode;
 };

-type UpdateConfig<
-  T extends RenovateSharedConfig = RenovateSharedConfig
-> = Partial<Record<UpdateType, T>>;
+type UpdateConfig<T extends RenovateSharedConfig = RenovateSharedConfig> =
+  Partial<Record<UpdateType, T>>;

 export type RenovateRepository =
   | string

View file

@@ -263,9 +263,8 @@ export async function validateConfig(
             }
             if (tzRe.test(subval)) {
               const [, timezone] = tzRe.exec(subval);
-              const [validTimezone, errorMessage] = hasValidTimezone(
-                timezone
-              );
+              const [validTimezone, errorMessage] =
+                hasValidTimezone(timezone);
               if (!validTimezone) {
                 errors.push({
                   topic: 'Configuration Error',
@@ -314,9 +313,9 @@ export async function validateConfig(
             errors.push(
               ...managerValidator.check({ resolvedRule, currentPath })
             );
-            const selectorLength = Object.keys(
-              resolvedRule
-            ).filter((ruleKey) => selectors.includes(ruleKey)).length;
+            const selectorLength = Object.keys(resolvedRule).filter(
+              (ruleKey) => selectors.includes(ruleKey)
+            ).length;
             if (!selectorLength) {
               const message = `${currentPath}[${subIndex}]: Each packageRule must contain at least one match* or exclude* selector. Rule: ${JSON.stringify(
                 packageRule

View file

@@ -13,7 +13,8 @@ jest.mock('@aws-sdk/client-ecr');
 jest.mock('../../util/host-rules');

 type ECR = _AWS.ECR;
-type GetAuthorizationTokenCommandOutput = _AWS.GetAuthorizationTokenCommandOutput;
+type GetAuthorizationTokenCommandOutput =
+  _AWS.GetAuthorizationTokenCommandOutput;
 const AWS = mocked(_AWS);

 const baseUrl = 'https://index.docker.io/v2';
@@ -437,8 +438,7 @@ describe(getName(), () => {
           200,
           { tags },
           {
-            link:
-              '<https://api.github.com/user/9287/repos?page=3&per_page=100>; rel="next", ',
+            link: '<https://api.github.com/user/9287/repos?page=3&per_page=100>; rel="next", ',
           }
         )
         .get('/')

View file

@@ -114,10 +114,8 @@ async function filterMissingArtifacts(
 ): Promise<Release[]> {
   const cacheNamespace = 'datasource-maven-metadata';
   const cacheKey = `${repoUrl}${dependency.dependencyUrl}`;
-  let artifactsInfo: ArtifactsInfo | null = await packageCache.get<ArtifactsInfo>(
-    cacheNamespace,
-    cacheKey
-  );
+  let artifactsInfo: ArtifactsInfo | null =
+    await packageCache.get<ArtifactsInfo>(cacheNamespace, cacheKey);

   if (!isValidArtifactsInfo(artifactsInfo, versions)) {
     const queue = versions
@@ -130,7 +128,9 @@ async function filterMissingArtifacts(
         return [version, artifactUrl];
       })
       .filter(([_, artifactUrl]) => Boolean(artifactUrl))
-      .map(([version, artifactUrl]) => (): Promise<ArtifactInfoResult> =>
-        getArtifactInfo(version, artifactUrl)
+      .map(
+        ([version, artifactUrl]) =>
+          (): Promise<ArtifactInfoResult> =>
+            getArtifactInfo(version, artifactUrl)
       );
     const results = await pAll(queue, { concurrency: 5 });

View file

@@ -11,7 +11,8 @@ import { id as datasource, getNpmrc, resetCache, setNpmrc } from '.';
 jest.mock('registry-auth-token');
 jest.mock('delay');

-const registryAuthToken: jest.Mock<_registryAuthToken.NpmCredentials> = _registryAuthToken as never;
+const registryAuthToken: jest.Mock<_registryAuthToken.NpmCredentials> =
+  _registryAuthToken as never;
 let npmResponse: any;

 describe(getName(), () => {

View file

@@ -12,9 +12,10 @@ export const defaultRegistryUrls = [v3.getDefaultFeed()];
 export const defaultVersioning = nugetVersioning.id;
 export const registryStrategy = 'merge';

-export function parseRegistryUrl(
-  registryUrl: string
-): { feedUrl: string; protocolVersion: number } {
+export function parseRegistryUrl(registryUrl: string): {
+  feedUrl: string;
+  protocolVersion: number;
+} {
   try {
     const parsedUrl = urlApi.parse(registryUrl);
     let protocolVersion = 2;

View file

@@ -115,9 +115,9 @@ export async function getReleases(
   const url = `${baseUrl}/${pkgName.toLowerCase()}/index.json`;
   const packageRegistration = await http.getJson<PackageRegistration>(url);
   const catalogPages = packageRegistration.body.items || [];
-  const catalogPagesQueue = catalogPages.map((page) => (): Promise<
-    CatalogEntry[]
-  > => getCatalogEntry(page));
+  const catalogPagesQueue = catalogPages.map(
+    (page) => (): Promise<CatalogEntry[]> => getCatalogEntry(page)
+  );
   const catalogEntries = (
     await pAll(catalogPagesQueue, { concurrency: 5 })
   ).flat();

View file

@@ -143,8 +143,8 @@ async function getAllPackages(regUrl: string): Promise<AllPackages | null> {
     providerPackages,
   } = registryMeta;
   if (files) {
-    const queue = files.map((file) => (): Promise<PackagistFile> =>
-      getPackagistFile(regUrl, file)
+    const queue = files.map(
+      (file) => (): Promise<PackagistFile> => getPackagistFile(regUrl, file)
     );
     const resolvedFiles = await pAll(queue, { concurrency: 5 });
     for (const res of resolvedFiles) {

View file

@@ -93,7 +93,8 @@ async function requestGithub<T = unknown>(
     return null;
   }

-const githubRegex = /^https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+?)(\.git|\/.*)?$/;
+const githubRegex =
+  /^https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+?)(\.git|\/.*)?$/;

 async function getReleasesFromGithub(
   lookupName: string,

View file

@@ -54,10 +54,9 @@ const bunyanLogger = bunyan.createLogger({
   ].map(withSanitizer),
 });

-const logFactory = (level: bunyan.LogLevelString): any => (
-  p1: any,
-  p2: any
-): void => {
+const logFactory =
+  (level: bunyan.LogLevelString): any =>
+  (p1: any, p2: any): void => {
   if (p2) {
     // meta and msg provided
     bunyanLogger[level]({ logContext, ...curMeta, ...p1 }, p2);

View file

@@ -2,5 +2,7 @@ export const newBlockRegEx = /^\s*-\s*((\w+):\s*(.*))$/;
 export const blockLineRegEx = /^\s*((\w+):\s*(.*))$/;
 export const galaxyDepRegex = /[\w-]+\.[\w-]+/;
 export const dependencyRegex = /^dependencies:/;
-export const galaxyRegEx = /^\s+(?<lookupName>[\w.]+):\s*["'](?<version>.+)["']\s*/;
-export const nameMatchRegex = /(?<source>((git\+)?(?:(git|ssh|https?):\/\/)?(.*@)?(?<hostname>[\w.-]+)(?:(:\d+)?\/|:))(?<depName>[\w./-]+)(?:\.git)?)(,(?<version>[\w.]*))?/;
+export const galaxyRegEx =
+  /^\s+(?<lookupName>[\w.]+):\s*["'](?<version>.+)["']\s*/;
+export const nameMatchRegex =
+  /(?<source>((git\+)?(?:(git|ssh|https?):\/\/)?(.*@)?(?<hostname>[\w.-]+)(?:(:\d+)?\/|:))(?<depName>[\w./-]+)(?:\.git)?)(,(?<version>[\w.]*))?/;

View file

@@ -1,3 +1,4 @@
-export const keyValueExtractionRegex = /^\s*(?<key>[^\s]+):\s+"?(?<value>[^"\s]+)"?\s*$/;
+export const keyValueExtractionRegex =
+  /^\s*(?<key>[^\s]+):\s+"?(?<value>[^"\s]+)"?\s*$/;
 // looks for `apiVersion: argoproj.io/
 export const fileTestRegex = /\s*apiVersion:\s*argoproj.io\/\s*/;

View file

@@ -256,7 +256,8 @@ export function extractPackageFile(
       dep.datasource = datasourceGo.id;
       dep.lookupName = importpath;
       if (remote) {
-        const remoteMatch = /https:\/\/github\.com(?:.*\/)(([a-zA-Z]+)([-])?([a-zA-Z]+))/.exec(
-          remote
-        );
+        const remoteMatch =
+          /https:\/\/github\.com(?:.*\/)(([a-zA-Z]+)([-])?([a-zA-Z]+))/.exec(
+            remote
+          );
         if (remoteMatch && remoteMatch[0].length === remote.length) {

View file

@@ -21,9 +21,8 @@ export function extractPackageFile(content: string): PackageFile | null {
       } else if (isPluginsSection) {
         logger.debug(`serviceImageLine: "${line}"`);
         const { currentIndent } = /^(?<currentIndent>\s*)/.exec(line).groups;
-        const depLineMatch = /^\s+(?:-\s+)?(?<depName>[^#]+)#(?<currentValue>[^:]+)/.exec(
-          line
-        );
+        const depLineMatch =
+          /^\s+(?:-\s+)?(?<depName>[^#]+)#(?<currentValue>[^:]+)/.exec(line);
         if (currentIndent.length <= pluginsIndent.length) {
           isPluginsSection = false;
           pluginsIndent = '';

View file

@@ -72,12 +72,8 @@ function buildBundleHostVariable(hostRule: HostRule): Record<string, string> {
 export async function updateArtifacts(
   updateArtifact: UpdateArtifact
 ): Promise<UpdateArtifactsResult[] | null> {
-  const {
-    packageFileName,
-    updatedDeps,
-    newPackageFileContent,
-    config,
-  } = updateArtifact;
+  const { packageFileName, updatedDeps, newPackageFileContent, config } =
+    updateArtifact;
   const { constraints = {} } = config;
   logger.debug(`bundler.updateArtifacts(${packageFileName})`);
   const existingError = memCache.get<string>('bundlerArtifactsError');

View file

@@ -38,7 +38,8 @@ export async function extractPackageFile(
     if (rubyMatch) {
       res.constraints = { ruby: rubyMatch[1] };
     }
-    const gemMatchRegex = /^\s*gem\s+(['"])(?<depName>[^'"]+)\1(\s*,\s*(?<currentValue>(['"])[^'"]+\5(\s*,\s*\5[^'"]+\5)?))?/;
+    const gemMatchRegex =
+      /^\s*gem\s+(['"])(?<depName>[^'"]+)\1(\s*,\s*(?<currentValue>(['"])[^'"]+\5(\s*,\s*\5[^'"]+\5)?))?/;
     const gemMatch = gemMatchRegex.exec(line);
     if (gemMatch) {
       const dep: PackageDependency = {

View file

@@ -1,7 +1,8 @@
 import * as datasourceCdnjs from '../../datasource/cdnjs';
 import type { PackageDependency, PackageFile } from '../types';

-export const cloudflareUrlRegex = /\/\/cdnjs\.cloudflare\.com\/ajax\/libs\/(?<depName>[^/]+?)\/(?<currentValue>[^/]+?)\/(?<asset>[-/_.a-zA-Z0-9]+)/;
+export const cloudflareUrlRegex =
+  /\/\/cdnjs\.cloudflare\.com\/ajax\/libs\/(?<depName>[^/]+?)\/(?<currentValue>[^/]+?)\/(?<asset>[-/_.a-zA-Z0-9]+)/;

 export function extractPackageFile(content: string): PackageFile {
   const deps: PackageDependency[] = [];

View file

@@ -48,9 +48,8 @@ export function parseLine(line: string): ParsedLine {
 export function gitDep(parsedLine: ParsedLine): PackageDependency | null {
   const { depName, git, tag } = parsedLine;
   if (git?.startsWith('https://github.com/')) {
-    const githubMatch = /https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+)/.exec(
-      git
-    );
+    const githubMatch =
+      /https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+)/.exec(git);
     const { account, repo } = githubMatch?.groups || {};
     if (account && repo) {
       return {

View file

@@ -39,13 +39,15 @@ describe(getName(), () => {
       expect(res).toMatchSnapshot();
     });
     it('handles comments', () => {
-      const res = extractPackageFile('# some comment\n# another\n\nFROM node\n')
-        .deps;
+      const res = extractPackageFile(
+        '# some comment\n# another\n\nFROM node\n'
+      ).deps;
       expect(res).toMatchSnapshot();
     });
     it('handles custom hosts', () => {
-      const res = extractPackageFile('FROM registry2.something.info/node:8\n')
-        .deps;
+      const res = extractPackageFile(
+        'FROM registry2.something.info/node:8\n'
+      ).deps;
       expect(res).toMatchSnapshot();
     });
     it('handles custom hosts and suffix', () => {

View file

@@ -23,7 +23,8 @@ export function extractPackageFile(content: string): PackageFile | null {
       continue; // eslint-disable-line no-continue
     }

-    const tagMatch = /^\s+-?\s+?uses: (?<depName>[\w-]+\/[\w-]+)(?<path>.*)?@(?<currentValue>.+?)\s*?$/.exec(
-      line
-    );
+    const tagMatch =
+      /^\s+-?\s+?uses: (?<depName>[\w-]+\/[\w-]+)(?<path>.*)?@(?<currentValue>.+?)\s*?$/.exec(
+        line
+      );
     if (tagMatch?.groups) {

View file

@@ -61,7 +61,8 @@ export function extractPackageFile(content: string): PackageFile | null {
         foundImage = false;
         const serviceImageLine = skipCommentLines(lines, lineNumber + 1);
         logger.trace(`serviceImageLine: "${serviceImageLine.line}"`);
-        const serviceImageMatch = /^\s*-\s*(?:name:\s*)?'?"?([^\s'"]+)'?"?\s*$/.exec(
-          serviceImageLine.line
-        );
+        const serviceImageMatch =
+          /^\s*-\s*(?:name:\s*)?'?"?([^\s'"]+)'?"?\s*$/.exec(
+            serviceImageLine.line
+          );
         if (serviceImageMatch) {

View file

@@ -45,9 +45,8 @@ export function extractPackageFile(content: string): PackageFile | null {
       if (line.startsWith('go ') && validRange(line.replace('go ', ''))) {
         constraints.go = line.replace('go ', '^');
       }
-      const replaceMatch = /^replace\s+[^\s]+[\s]+[=][>]\s+([^\s]+)\s+([^\s]+)/.exec(
-        line
-      );
+      const replaceMatch =
+        /^replace\s+[^\s]+[\s]+[=][>]\s+([^\s]+)\s+([^\s]+)/.exec(line);
       if (replaceMatch) {
         const dep = getDep(lineNumber, replaceMatch, 'replace');
         deps.push(dep);

View file

@@ -4,14 +4,12 @@ import { extractAllPackageFiles } from '.';
 jest.mock('../../util/fs');

 function mockFs(files: Record<string, string>): void {
-  fs.readLocalFile.mockImplementation(
-    (fileName: string): Promise<string> => {
-      const content = files?.[fileName];
-      return typeof content === 'string'
-        ? Promise.resolve(content)
-        : Promise.reject(`File not found: ${fileName}`);
-    }
-  );
+  fs.readLocalFile.mockImplementation((fileName: string): Promise<string> => {
+    const content = files?.[fileName];
+    return typeof content === 'string'
+      ? Promise.resolve(content)
+      : Promise.reject(`File not found: ${fileName}`);
+  });
 }

 describe(getName(), () => {

View file

@@ -58,11 +58,11 @@ export async function extractAllPackageFiles(
       extractedDeps.push(...deps);
     } else if (isGradleFile(packageFile)) {
       const vars = getVars(registry, dir);
-      const { deps, urls, vars: gradleVars } = parseGradle(
-        content,
-        vars,
-        packageFile
-      );
+      const {
+        deps,
+        urls,
+        vars: gradleVars,
+      } = parseGradle(content, vars, packageFile);
       urls.forEach((url) => {
         if (!registryUrls.includes(url)) {
           registryUrls.push(url);

View file

@@ -84,7 +84,8 @@ const lexer = moo.states({
     [TokenType.DoubleQuotedFinish]: { match: '"', pop: 1 },
     variable: {
       // Supported: ${foo}, $foo, ${ foo.bar.baz }, $foo.bar.baz
-      match: /\${\s*[a-zA-Z_][a-zA-Z0-9_]*(?:\s*\.\s*[a-zA-Z_][a-zA-Z0-9_]*)*\s*}|\$[a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*/,
+      match:
+        /\${\s*[a-zA-Z_][a-zA-Z0-9_]*(?:\s*\.\s*[a-zA-Z_][a-zA-Z0-9_]*)*\s*}|\$[a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*/,
       value: (x: string): string =>
         x.replace(/^\${?\s*/, '').replace(/\s*}$/, ''),
     },

View file

@@ -247,8 +247,7 @@ describe(getName(), () => {
           'user-agent': 'https://github.com/renovatebot/renovate',
         },
         method: 'GET',
-        url:
-          'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+        url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
       },
     ]);
   });
@@ -282,8 +281,7 @@ describe(getName(), () => {
           'user-agent': 'https://github.com/renovatebot/renovate',
         },
         method: 'GET',
-        url:
-          'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+        url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
       },
     ]);
   });

View file

@@ -163,8 +163,7 @@ describe(getName(), () => {
           'user-agent': 'https://github.com/renovatebot/renovate',
         },
         method: 'GET',
-        url:
-          'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+        url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
       },
     ]);
   });
@@ -198,8 +197,7 @@ describe(getName(), () => {
           'user-agent': 'https://github.com/renovatebot/renovate',
         },
         method: 'GET',
-        url:
-          'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+        url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
       },
     ]);
   });

View file

@@ -208,9 +208,8 @@ export function collectVersionVariables(
   }

   if (!dep.currentValue) {
-    const dependencyLiteralRegex = dependencyStringLiteralExpressionFormatMatch(
-      dependency
-    );
+    const dependencyLiteralRegex =
+      dependencyStringLiteralExpressionFormatMatch(dependency);
     const currentValue = dependencyLiteralRegex.exec(buildGradleContent)?.[1];
     if (currentValue) {
       dep.currentValue = currentValue;
@@ -264,9 +263,8 @@ function updateLocalVariables(
   const match = regex.exec(buildGradleContent);
   if (match) {
     const variableDefinitionRegex = variableDefinitionFormatMatch(match[1]);
-    const variableDefinitionMatch = variableDefinitionRegex.exec(
-      buildGradleContent
-    );
+    const variableDefinitionMatch =
+      variableDefinitionRegex.exec(buildGradleContent);
     if (variableDefinitionMatch) {
       return buildGradleContent.replace(
         variableDefinitionMatch[0],

View file

@@ -28,8 +28,7 @@ describe(getName(), () => {
        repoName: 'aide',
        sha256:
          '0f2b7cecc70c1a27d35c06c98804fcdb9f326630de5d035afc447122186010b7',
-        url:
-          'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
+        url: 'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
      },
      newValue: 'v0.17.7',
    };
@@ -55,8 +54,7 @@ describe(getName(), () => {
        repoName: 'bazel-watcher',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -84,8 +82,7 @@ describe(getName(), () => {
        repoName: 'bazel-watcher',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -136,8 +133,7 @@ describe(getName(), () => {
        repoName: 'invalid/repo/name',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -167,8 +163,7 @@ describe(getName(), () => {
        repoName: 'wrong-version/archive/v10.2.3.tar.gz',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -207,8 +202,7 @@ describe(getName(), () => {
        repoName: 'bazel-watcher',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -242,8 +236,7 @@ describe(getName(), () => {
        repoName: 'bazel-watcher',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -278,8 +271,7 @@ describe(getName(), () => {
        repoName: 'bazel-watcher',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -313,8 +305,7 @@ describe(getName(), () => {
        repoName: 'bazel-watcher',
        sha256:
          '26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
-        url:
-          'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+        url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
      },
      newValue: 'v0.9.3',
    };
@@ -341,8 +332,7 @@ describe(getName(), () => {
        repoName: 'aide',
        sha256:
          '0f2b7cecc70c1a27d35c06c98804fcdb9f326630de5d035afc447122186010b7',
-        url:
-          'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
+        url: 'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
      },
      newValue: 'v0.17.7',
    };

View file

@@ -70,7 +70,8 @@ function extractYaml(content: string): PackageDependency[] {
 function extractText(content: string): PackageDependency[] {
   const deps: PackageDependency[] = [];
-  const regex = /^\s*(?<depName>[\d\w-]+):(?<currentValue>[^#\s]+)[#\s]*(?<comment>.*)$/;
+  const regex =
+    /^\s*(?<depName>[\d\w-]+):(?<currentValue>[^#\s]+)[#\s]*(?<comment>.*)$/;

   for (const line of content.split('\n')) {
     const match = regex.exec(line);

View file

@@ -9,7 +9,8 @@ import type { Image, Kustomize } from './types';
 // URL specifications should follow the hashicorp URL format
 // https://github.com/hashicorp/go-getter#url-format
-const gitUrl = /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^/\s]+\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/;
+const gitUrl =
+  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^/\s]+\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/;

 export function extractBase(base: string): PackageDependency | null {
   const match = gitUrl.exec(base);

View file

@@ -5,7 +5,8 @@ import { getSiblingFileName, localPathExists } from '../../util/fs';
 import type { PackageDependency, PackageFile } from '../types';

 const depSectionRegExp = /defp\s+deps.*do/g;
-const depMatchRegExp = /{:(\w+),\s*([^:"]+)?:?\s*"([^"]+)",?\s*(organization: "(.*)")?.*}/gm;
+const depMatchRegExp =
+  /{:(\w+),\s*([^:"]+)?:?\s*"([^"]+)",?\s*(organization: "(.*)")?.*}/gm;

 export async function extractPackageFile(
   content: string,

View file

@@ -31,9 +31,8 @@ export function detectMonorepos(
     const { lernaJsonFile } = managerData;
     const packages = yarnWorkspacesPackages || lernaPackages;
     if (packages?.length) {
-      const internalPackagePatterns = (is.array(packages)
-        ? packages
-        : [packages]
+      const internalPackagePatterns = (
+        is.array(packages) ? packages : [packages]
       ).map((pattern) => getSiblingFileName(packageFile, pattern));
       const internalPackageFiles = packageFiles.filter((sp) =>
         matchesAnyPattern(

View file

@@ -40,8 +40,9 @@ export async function getNodeConstraint(
   let lockfileVersion = 1;
   try {
     const lockFileName = getSiblingFileName(packageFile, 'package-lock.json');
-    lockfileVersion = JSON.parse(await readLocalFile(lockFileName, 'utf8'))
-      .lockfileVersion;
+    lockfileVersion = JSON.parse(
+      await readLocalFile(lockFileName, 'utf8')
+    ).lockfileVersion;
   } catch (err) {
     // do nothing
   }

View file

@@ -4,13 +4,8 @@ import type { RangeStrategy } from '../../types';
 import type { RangeConfig } from '../types';

 export function getRangeStrategy(config: RangeConfig): RangeStrategy {
-  const {
-    depType,
-    depName,
-    packageJsonType,
-    currentValue,
-    rangeStrategy,
-  } = config;
+  const { depType, depName, packageJsonType, currentValue, rangeStrategy } =
+    config;
   const isComplexRange = parseRange(currentValue).length > 1;
   if (rangeStrategy === 'bump' && isComplexRange) {
     logger.debug(

View file

@@ -23,15 +23,12 @@ jest.mock('./util');

 const exec: jest.Mock<typeof _exec> = _exec as any;
 const env = mocked(_env);
-const getConfiguredRegistries: jest.Mock<
-  typeof _getConfiguredRegistries
-> = _getConfiguredRegistries as any;
-const getDefaultRegistries: jest.Mock<
-  typeof _getDefaultRegistries
-> = _getDefaultRegistries as any;
-const getRandomString: jest.Mock<
-  typeof _getRandomString
-> = _getRandomString as any;
+const getConfiguredRegistries: jest.Mock<typeof _getConfiguredRegistries> =
+  _getConfiguredRegistries as any;
+const getDefaultRegistries: jest.Mock<typeof _getDefaultRegistries> =
+  _getDefaultRegistries as any;
+const getRandomString: jest.Mock<typeof _getRandomString> =
+  _getRandomString as any;
 const hostRules = mocked(_hostRules);

 const config = {

View file

@@ -18,7 +18,8 @@ import { getConfiguredRegistries } from './util';
  * The update of the right boundary does not make sense regarding to the lowest version restore rule,
  * so we don't include it in the extracting regexp
  */
-const checkVersion = /^\s*(?:[[])?(?:(?<currentValue>[^"(,[\]]+)\s*(?:,\s*[)\]]|])?)\s*$/;
+const checkVersion =
+  /^\s*(?:[[])?(?:(?<currentValue>[^"(,[\]]+)\s*(?:,\s*[)\]]|])?)\s*$/;
 const elemNames = new Set([
   'PackageReference',
   'PackageVersion',

View file

@@ -42,14 +42,20 @@ describe(getName(), () => {
       expect(res.deps).toHaveLength(3);
     });
     it('extracts multiple dependencies', () => {
-      const res = extractPackageFile(requirements2, 'unused_file_name', config)
-        .deps;
+      const res = extractPackageFile(
+        requirements2,
+        'unused_file_name',
+        config
+      ).deps;
       expect(res).toMatchSnapshot();
       expect(res).toHaveLength(5);
     });
     it('handles comments and commands', () => {
-      const res = extractPackageFile(requirements3, 'unused_file_name', config)
-        .deps;
+      const res = extractPackageFile(
+        requirements3,
+        'unused_file_name',
+        config
+      ).deps;
       expect(res).toMatchSnapshot();
       expect(res).toHaveLength(5);
     });

View file

@@ -24,7 +24,8 @@ import {
   getTerraformDependencyType,
 } from './util';

-const dependencyBlockExtractionRegex = /^\s*(?<type>[a-z_]+)\s+("(?<lookupName>[^"]+)"\s+)?("(?<terraformName>[^"]+)"\s+)?{\s*$/;
+const dependencyBlockExtractionRegex =
+  /^\s*(?<type>[a-z_]+)\s+("(?<lookupName>[^"]+)"\s+)?("(?<terraformName>[^"]+)"\s+)?{\s*$/;
 const contentCheckList = [
   'module "',
   'provider "',

View file

@@ -8,8 +8,10 @@ import { TerraformDependencyTypes } from './common';
 import { extractTerraformProvider } from './providers';
 import type { ExtractionResult } from './types';

-export const githubRefMatchRegex = /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
-export const gitTagsRefMatchRegex = /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
+export const githubRefMatchRegex =
+  /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
+export const gitTagsRefMatchRegex =
+  /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
 const hostnameMatchRegex = /^(?<hostname>([\w|\d]+\.)+[\w|\d]+)/;

 export function extractTerraformModule(

View file

@@ -7,7 +7,8 @@ import { TerraformDependencyTypes } from './common';
 import type { ExtractionResult } from './types';
 import { keyValueExtractionRegex } from './util';

-export const sourceExtractionRegex = /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;
+export const sourceExtractionRegex =
+  /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;

 export function extractTerraformProvider(
   startingLine: number,

View file

@@ -1,7 +1,9 @@
 import { TerraformDependencyTypes } from './common';

-export const keyValueExtractionRegex = /^\s*(?<key>[^\s]+)\s+=\s+"(?<value>[^"]+)"\s*$/;
-export const resourceTypeExtractionRegex = /^\s*resource\s+"(?<type>[^\s]+)"\s+"(?<name>[^"]+)"\s*{/;
+export const keyValueExtractionRegex =
+  /^\s*(?<key>[^\s]+)\s+=\s+"(?<value>[^"]+)"\s*$/;
+export const resourceTypeExtractionRegex =
+  /^\s*resource\s+"(?<type>[^\s]+)"\s+"(?<name>[^"]+)"\s*{/;

 export function getTerraformDependencyType(
   value: string

View file

@@ -8,8 +8,10 @@ import { TerragruntDependencyTypes } from './common';
 import { extractTerragruntProvider } from './providers';
 import type { ExtractionResult } from './types';

-export const githubRefMatchRegex = /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
-export const gitTagsRefMatchRegex = /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
+export const githubRefMatchRegex =
+  /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
+export const gitTagsRefMatchRegex =
+  /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
 const hostnameMatchRegex = /^(?<hostname>([\w|\d]+\.)+[\w|\d]+)/;

 export function extractTerragruntModule(

View file

@@ -3,7 +3,8 @@ import { TerragruntDependencyTypes } from './common';
 import type { ExtractionResult } from './types';
 import { keyValueExtractionRegex } from './util';

-export const sourceExtractionRegex = /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;
+export const sourceExtractionRegex =
+  /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;

 function extractBracesContent(content): number {
   const stack = [];

View file

@@ -162,9 +162,10 @@ export function max4000Chars(str: string): string {
   return str;
 }

-export function getProjectAndRepo(
-  str: string
-): { project: string; repo: string } {
+export function getProjectAndRepo(str: string): {
+  project: string;
+  repo: string;
+} {
   logger.trace(`getProjectAndRepo(${str})`);
   const strSplit = str.split(`/`);
   if (strSplit.length === 1) {

View file

@@ -312,11 +312,9 @@ function matchesState(state: string, desiredState: string): boolean {
 // TODO: coverage (#9624)
 // istanbul ignore next
-const isRelevantPr = (
-  branchName: string,
-  prTitle: string | null | undefined,
-  state: string
-) => (p: Pr): boolean =>
-  p.sourceBranch === branchName &&
-  (!prTitle || p.title === prTitle) &&
-  matchesState(p.state, state);
+const isRelevantPr =
+  (branchName: string, prTitle: string | null | undefined, state: string) =>
+  (p: Pr): boolean =>
+    p.sourceBranch === branchName &&
+    (!prTitle || p.title === prTitle) &&
+    matchesState(p.state, state);

View file

@@ -22,14 +22,12 @@ describe('accumulateValues()', () => {
       .get('/some-url?pagelen=10')
       .reply(200, {
         values: range(10),
-        next:
-          'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=9&role=contributor',
+        next: 'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=9&role=contributor',
       })
       .get('/2.0/repositories/?pagelen=10&after=9&role=contributor')
      .reply(200, {
         values: range(10),
-        next:
-          'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=19&role=contributor',
+        next: 'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=19&role=contributor',
       })
       .get('/2.0/repositories/?pagelen=10&after=19&role=contributor')
       .reply(200, {

View file

@@ -1691,8 +1691,7 @@ export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
     logger.debug({ err }, 'Error retrieving vulnerability alerts');
     logger.warn(
       {
-        url:
-          'https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts',
+        url: 'https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts',
       },
       'Cannot access vulnerability alerts. Please ensure permissions have been granted.'
     );

View file

@@ -93,7 +93,7 @@ export function find(search: HostRuleSearch): HostRule {
     logger.warn({ search }, 'Invalid hostRules search');
     return {};
   }
-  let res = ({} as any) as HostRule;
+  let res = {} as any as HostRule;
   // First, apply empty rule matches
   hostRules
     .filter((rule) => isEmptyRule(rule))

View file

@@ -205,8 +205,7 @@ describe(getName(), () => {
         authorization: 'auth',
       },
       hostname: 'store123.blob.core.windows.net',
-      href:
-        'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
+      href: 'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
     });

     removeAuthorization(opts);
@@ -214,8 +213,7 @@ describe(getName(), () => {
     expect(opts).toEqual({
       headers: {},
       hostname: 'store123.blob.core.windows.net',
-      href:
-        'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
+      href: 'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
     });
   });

View file

@@ -31,13 +31,11 @@ describe(getName(), () => {
       .scope(gitlabApiHost)
       .get('/api/v4/some-url')
       .reply(200, ['a'], {
-        link:
-          '<https://gitlab.com/api/v4/some-url&page=2>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
+        link: '<https://gitlab.com/api/v4/some-url&page=2>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
       })
       .get('/api/v4/some-url&page=2')
       .reply(200, ['b', 'c'], {
-        link:
-          '<https://gitlab.com/api/v4/some-url&page=3>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
+        link: '<https://gitlab.com/api/v4/some-url&page=3>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
       })
       .get('/api/v4/some-url&page=3')
       .reply(200, ['d']);

View file

@@ -239,7 +239,8 @@ export function parsePrefixRange(input: string): PrefixRange | null {
   return null;
 }

-const mavenBasedRangeRegex = /^(?<leftBoundStr>[[\](]\s*)(?<leftVal>[-._+a-zA-Z0-9]*?)(?<separator>\s*,\s*)(?<rightVal>[-._+a-zA-Z0-9]*?)(?<rightBoundStr>\s*[[\])])$/;
+const mavenBasedRangeRegex =
+  /^(?<leftBoundStr>[[\](]\s*)(?<leftVal>[-._+a-zA-Z0-9]*?)(?<separator>\s*,\s*)(?<rightVal>[-._+a-zA-Z0-9]*?)(?<rightBoundStr>\s*[[\])])$/;

 export function parseMavenBasedRange(input: string): MavenBasedRange | null {
   if (!input) {

View file

@@ -124,7 +124,8 @@ export const create = ({
 export abstract class GenericVersioningApi<
   T extends GenericVersion = GenericVersion
-> implements VersioningApi {
+> implements VersioningApi
+{
   private _getSection(version: string, index: number): number {
     const parsed = this._parse(version);
     return parsed && parsed.release.length > index

View file

@@ -10,7 +10,8 @@ export interface Range {
 }

 const parse = (range: string): Range => {
-  const regExp = /^(?<operator>[^\d\s]+)?(?<delimiter>\s*)(?<version>[0-9a-zA-Z-.]+)$/;
+  const regExp =
+    /^(?<operator>[^\d\s]+)?(?<delimiter>\s*)(?<version>[0-9a-zA-Z-.]+)$/;

   const value = (range || '').trim();

View file

@@ -22,10 +22,8 @@ export async function postUpgradeCommandsExecutor(
 ): Promise<PostUpgradeCommandsExecutionResult> {
   let updatedArtifacts = [...(config.updatedArtifacts || [])];
   const artifactErrors = [...(config.artifactErrors || [])];
-  const {
-    allowedPostUpgradeCommands,
-    allowPostUpgradeCommandTemplating,
-  } = getAdminConfig();
+  const { allowedPostUpgradeCommands, allowPostUpgradeCommandTemplating } =
+    getAdminConfig();

   for (const upgrade of filteredUpgradeCommands) {
     addMeta({ dep: upgrade.depName });
@@ -184,10 +182,8 @@ export default async function executePostUpgradeCommands(
       postUpgradeTasks.executionMode === 'update'
   );

-  const {
-    updatedArtifacts,
-    artifactErrors,
-  } = await postUpgradeCommandsExecutor(updateUpgradeCommands, config);
+  const { updatedArtifacts, artifactErrors } =
+    await postUpgradeCommandsExecutor(updateUpgradeCommands, config);
   return postUpgradeCommandsExecutor(branchUpgradeCommands, {
     ...config,
     updatedArtifacts,

View file

@@ -1065,14 +1065,18 @@ describe(getName(), () => {
       });
       expect(exec.exec).toHaveBeenCalledTimes(2);
       expect(
-        (commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
-          (f) => f.name === 'modified_file'
-        ).contents as Buffer).toString()
+        (
+          commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
+            (f) => f.name === 'modified_file'
+          ).contents as Buffer
+        ).toString()
       ).toBe('modified file content again');
       expect(
-        (commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
-          (f) => f.name === 'deleted_then_created_file'
-        ).contents as Buffer).toString()
+        (
+          commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
+            (f) => f.name === 'deleted_then_created_file'
+          ).contents as Buffer
+        ).toString()
       ).toBe('this file was once deleted');
       expect(
         commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
@@ -1191,9 +1195,11 @@ describe(getName(), () => {
       });
       expect(exec.exec).toHaveBeenCalledTimes(1);
       expect(
-        (commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
-          (f) => f.name === 'modified_file'
-        ).contents as Buffer).toString()
+        (
+          commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
+            (f) => f.name === 'modified_file'
+          ).contents as Buffer
+        ).toString()
       ).toBe('modified file content');
     });
   });

View file

@@ -101,8 +101,7 @@ describe(getName(), () => {
        { tag_name: `v1.0.0` },
        {
          tag_name: `v1.0.1`,
-          body:
-            'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
+          body: 'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
        },
      ]);
@@ -123,8 +122,7 @@ describe(getName(), () => {
        { tag_name: `v1.0.0` },
        {
          tag_name: `v1.0.1`,
-          body:
-            'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
+          body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
        },
      ]);
      const res = await getReleaseList(
@@ -145,8 +143,7 @@ describe(getName(), () => {
        { tag_name: `v1.0.0` },
        {
          tag_name: `v1.0.1`,
-          body:
-            'some body #123, [#124](https://my.custom.domain/some/yet-other-repository/issues/124)',
+          body: 'some body #123, [#124](https://my.custom.domain/some/yet-other-repository/issues/124)',
        },
      ]);
      const res = await getReleaseList(
@@ -183,8 +180,7 @@ describe(getName(), () => {
        { tag_name: `${prefix}1.0.0` },
        {
          tag_name: `${prefix}1.0.1`,
-          body:
-            'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
+          body: 'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
        },
      ]);
      const res = await getReleaseNotes(
@@ -208,8 +204,7 @@ describe(getName(), () => {
        { tag_name: `${prefix}1.0.0` },
        {
          tag_name: `${prefix}1.0.1`,
-          body:
-            'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
+          body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
        },
      ]);

View file

@@ -26,13 +26,8 @@ function matchesUnstable(
 export async function getInRangeReleases(
   config: BranchUpgradeConfig
 ): Promise<Release[] | null> {
-  const {
-    versioning,
-    currentVersion,
-    newVersion,
-    depName,
-    datasource,
-  } = config;
+  const { versioning, currentVersion, newVersion, depName, datasource } =
+    config;
   // istanbul ignore if
   if (!isGetPkgReleasesConfig(config)) {
     return null;

View file

@@ -81,12 +81,10 @@ function setupGitlabChangelogMock() {
         },
       ],
       releaseNotes: {
-        url:
-          'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
+        url: 'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
       },
       compare: {
-        url:
-          'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
+        url: 'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
       },
     },
   ],
@@ -176,8 +174,7 @@ describe(getName(), () => {
   const existingPr: Pr = {
     displayNumber: 'Existing PR',
     title: 'Update dependency dummy to v1.1.0',
-    body:
-      'Some body<!-- Reviewable:start -->something<!-- Reviewable:end -->\n\n',
+    body: 'Some body<!-- Reviewable:start -->something<!-- Reviewable:end -->\n\n',
   } as never;

   beforeEach(() => {
     jest.resetAllMocks();

View file

@@ -407,9 +407,9 @@ export async function ensurePr(
         targetBranch: config.baseBranch,
         prTitle,
         prBody,
-        labels: [
-          ...new Set([...config.labels, ...config.addLabels]),
-        ].map((label) => template.compile(label, config)),
+        labels: [...new Set([...config.labels, ...config.addLabels])].map(
+          (label) => template.compile(label, config)
+        ),
         platformOptions: getPlatformPrOptions(config),
         draftPR: config.draftPR,
       });

View file

@@ -49,8 +49,9 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
   let configFileParsed;
   if (configFileName === 'package.json') {
     // We already know it parses
-    configFileParsed = JSON.parse(await readLocalFile('package.json', 'utf8'))
-      .renovate;
+    configFileParsed = JSON.parse(
+      await readLocalFile('package.json', 'utf8')
+    ).renovate;
     logger.debug({ config: configFileParsed }, 'package.json>renovate config');
   } else {
     let rawFileContents = await readLocalFile(configFileName, 'utf8');

View file

@@ -115,9 +115,8 @@ export async function detectVulnerabilityAlerts(
         combinedAlerts[fileName] ||= {};
         combinedAlerts[fileName][datasource] ||= {};
         combinedAlerts[fileName][datasource][depName] ||= {};
-        combinedAlerts[fileName][datasource][depName][
-          vulnerableRequirements
-        ] ||= {
+        combinedAlerts[fileName][datasource][depName][vulnerableRequirements] ||=
+          {
           advisories: [],
         };
         const alertDetails =
View file

@@ -41,7 +41,8 @@ export async function createOnboardingBranch(
     } ${configFile}`;
   }

-  const commitMessage = `${commitMessagePrefix} ${onboardingCommitMessage}`.trim();
+  const commitMessage =
+    `${commitMessagePrefix} ${onboardingCommitMessage}`.trim();

   // istanbul ignore if
   if (getAdminConfig().dryRun) {

View file

@@ -57,7 +57,8 @@ async function fetchManagerPackagerFileUpdates(
   const { packageFile } = pFile;
   const packageFileConfig = mergeChildConfig(managerConfig, pFile);
   const { manager } = packageFileConfig;
-  const queue = pFile.deps.map((dep) => (): Promise<PackageDependency> =>
-    fetchDepUpdates(packageFileConfig, dep)
+  const queue = pFile.deps.map(
+    (dep) => (): Promise<PackageDependency> =>
+      fetchDepUpdates(packageFileConfig, dep)
   );
   logger.trace(
@@ -75,7 +76,8 @@ async function fetchManagerUpdates(
   manager: string
 ): Promise<void> {
   const managerConfig = getManagerConfig(config, manager);
-  const queue = packageFiles[manager].map((pFile) => (): Promise<void> =>
-    fetchManagerPackagerFileUpdates(config, managerConfig, pFile)
+  const queue = packageFiles[manager].map(
+    (pFile) => (): Promise<void> =>
+      fetchManagerPackagerFileUpdates(config, managerConfig, pFile)
   );
   logger.trace(

View file

@@ -12,11 +12,8 @@ export function getBucket(
   newVersion: string,
   versioning: allVersioning.VersioningApi
 ): string {
-  const {
-    separateMajorMinor,
-    separateMultipleMajor,
-    separateMinorPatch,
-  } = config;
+  const { separateMajorMinor, separateMultipleMajor, separateMinorPatch } =
+    config;
   if (!separateMajorMinor) {
     return 'latest';
   }

View file

@@ -15,12 +15,8 @@ export function filterVersions(
   latestVersion: string,
   releases: Release[]
 ): Release[] {
-  const {
-    ignoreUnstable,
-    ignoreDeprecated,
-    respectLatest,
-    allowedVersions,
-  } = config;
+  const { ignoreUnstable, ignoreDeprecated, respectLatest, allowedVersions } =
+    config;
   let versioning;
   function isVersionStable(version: string): boolean {
     if (!versioning.isStable(version)) {

View file

@@ -50,13 +50,8 @@ export async function branchifyUpgrades(
       branchUpgrades[branchName] = branchUpgrades[branchName]
         .reverse()
         .filter((upgrade) => {
-          const {
-            manager,
-            packageFile,
-            depName,
-            currentValue,
-            newValue,
-          } = upgrade;
+          const { manager, packageFile, depName, currentValue, newValue } =
+            upgrade;
           const upgradeKey = `${packageFile}:${depName}:${currentValue}`;
           const previousNewValue = seenUpdates[upgradeKey];
           if (previousNewValue && previousNewValue !== newValue) {

View file

@@ -35,13 +35,8 @@ function getTableValues(
   if (!upgrade.commitBodyTable) {
     return null;
   }
-  const {
-    datasource,
-    lookupName,
-    depName,
-    currentVersion,
-    newVersion,
-  } = upgrade;
+  const { datasource, lookupName, depName, currentVersion, newVersion } =
+    upgrade;
   const name = lookupName || depName;
   if (datasource && name && currentVersion && newVersion) {
     return [datasource, name, currentVersion, newVersion];

View file

@@ -252,7 +252,7 @@
     "mockdate": "3.0.5",
     "nock": "13.0.11",
     "npm-run-all": "4.1.5",
-    "prettier": "2.2.1",
+    "prettier": "2.3.0",
     "pretty-quick": "3.1.0",
     "rimraf": "3.0.2",
     "semantic-release": "17.4.2",

View file

@@ -53,7 +53,7 @@ function getCallerFileName(): string | null {
   try {
     const err = new Error();

-    const stack = (err.stack as unknown) as NodeJS.CallSite[];
+    const stack = err.stack as unknown as NodeJS.CallSite[];

     let currentFile = null;
     for (const frame of stack) {
View file

@@ -7619,10 +7619,10 @@ prelude-ls@~1.1.2:
   resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
   integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=

-prettier@2.2.1:
-  version "2.2.1"
-  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5"
-  integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==
+prettier@2.3.0:
+  version "2.3.0"
+  resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18"
+  integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w==

 pretty-bytes@^5.1.0:
   version "5.6.0"