Mirror of https://github.com/renovatebot/renovate.git
Synced 2025-01-11 06:26:26 +00:00

chore(deps): update dependency prettier to v2.3.0 (#10012)

Co-authored-by: Renovate Bot <bot@renovateapp.com>
Co-authored-by: Rhys Arkins <rhys@arkins.net>
Co-authored-by: Michael Kriese <michael.kriese@visualon.de>

Parent commit: 9e08eaa1dc
This commit: b8e36daa87

76 changed files with 263 additions and 301 deletions
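Nearly all of the changes below are mechanical reformatting from the Prettier 2.2.1 → 2.3.0 bump; only package.json and yarn.lock change the dependency itself. The dominant pattern visible in the hunks is that long assignments now break after the `=` instead of wrapping the right-hand side's call arguments, long object values such as `url: '…'` stay on one line, chained `as` casts lose their extra parentheses, and curried arrow functions are reflowed. A minimal sketch of the main wrapping change, using hypothetical names that are not from this repository:

```typescript
// Hypothetical helper, only here to make the assignment long enough to wrap.
function buildConfigurationForRepository(repoName: string): { name: string } {
  return { name: repoName };
}

// Prettier 2.2.1 wrapped the call arguments of an over-long assignment:
//
//   const migratedConfigurationForRepository = buildConfigurationForRepository(
//     'renovatebot/renovate'
//   );
//
// Prettier 2.3.0 breaks after the `=` instead and keeps the call on one line:
const migratedConfigurationForRepository =
  buildConfigurationForRepository('renovatebot/renovate');

console.log(migratedConfigurationForRepository.name);
```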
@@ -86,9 +86,8 @@ function addChildrenArrayInParents() {
function createSchemaForChildConfigs() {
for (const option of options) {
if (option.parent) {
- properties[option.parent].items.allOf[0].properties[
- option.name
- ] = createSingleConfig(option);
+ properties[option.parent].items.allOf[0].properties[option.name] =
+ createSingleConfig(option);
}
}
}

@@ -147,10 +147,7 @@ describe(getName(), () => {
const configParser = await import('./index');
const config = configParser.mergeChildConfig(parentConfig, childConfig);
expect(config.packageRules.map((rule) => rule.a)).toMatchObject([
- 1,
- 2,
- 3,
- 4,
+ 1, 2, 3, 4,
]);
});
it('merges constraints', async () => {

@@ -298,9 +298,8 @@ describe(getName(), () => {
enabled: true,
separateMinorPatch: true,
};
- const { isMigrated, migratedConfig } = configMigration.migrateConfig(
- config
- );
+ const { isMigrated, migratedConfig } =
+ configMigration.migrateConfig(config);
expect(isMigrated).toBe(false);
expect(migratedConfig).toMatchObject(config);
});

@@ -67,7 +67,8 @@ export function migrateConfig(
delete migratedConfig.pathRules;
} else if (key === 'suppressNotifications') {
if (is.nonEmptyArray(val) && val.includes('prEditNotification')) {
- migratedConfig.suppressNotifications = migratedConfig.suppressNotifications.filter(
+ migratedConfig.suppressNotifications =
+ migratedConfig.suppressNotifications.filter(
(item) => item !== 'prEditNotification'
);
}

@@ -127,8 +128,10 @@ export function migrateConfig(
)
? migratedConfig.packageRules
: [];
- const payload = migrateConfig(packageFile as RenovateConfig, key)
- .migratedConfig;
+ const payload = migrateConfig(
+ packageFile as RenovateConfig,
+ key
+ ).migratedConfig;
for (const subrule of payload.packageRules || []) {
subrule.paths = [(packageFile as any).packageFile];
migratedConfig.packageRules.push(subrule);

@@ -152,8 +155,10 @@ export function migrateConfig(
migratedConfig.packageRules = is.array(migratedConfig.packageRules)
? migratedConfig.packageRules
: [];
- const depTypePackageRule = migrateConfig(val as RenovateConfig, key)
- .migratedConfig;
+ const depTypePackageRule = migrateConfig(
+ val as RenovateConfig,
+ key
+ ).migratedConfig;
depTypePackageRule.depTypeList = [key];
delete depTypePackageRule.packageRules;
migratedConfig.packageRules.push(depTypePackageRule);
@@ -175,13 +175,8 @@ export async function getPreset(
if (newPreset === null) {
return {};
}
- const {
- presetSource,
- packageName,
- presetPath,
- presetName,
- params,
- } = parsePreset(preset);
+ const { presetSource, packageName, presetPath, presetName, params } =
+ parsePreset(preset);
let presetConfig = await presetSources[presetSource].getPreset({
packageName,
presetPath,

@@ -123,9 +123,8 @@ export type PostUpgradeTasks = {
executionMode: ExecutionMode;
};

- type UpdateConfig<
- T extends RenovateSharedConfig = RenovateSharedConfig
- > = Partial<Record<UpdateType, T>>;
+ type UpdateConfig<T extends RenovateSharedConfig = RenovateSharedConfig> =
+ Partial<Record<UpdateType, T>>;

export type RenovateRepository =
| string

@@ -263,9 +263,8 @@ export async function validateConfig(
}
if (tzRe.test(subval)) {
const [, timezone] = tzRe.exec(subval);
- const [validTimezone, errorMessage] = hasValidTimezone(
- timezone
- );
+ const [validTimezone, errorMessage] =
+ hasValidTimezone(timezone);
if (!validTimezone) {
errors.push({
topic: 'Configuration Error',

@@ -314,9 +313,9 @@ export async function validateConfig(
errors.push(
...managerValidator.check({ resolvedRule, currentPath })
);
- const selectorLength = Object.keys(
- resolvedRule
- ).filter((ruleKey) => selectors.includes(ruleKey)).length;
+ const selectorLength = Object.keys(resolvedRule).filter(
+ (ruleKey) => selectors.includes(ruleKey)
+ ).length;
if (!selectorLength) {
const message = `${currentPath}[${subIndex}]: Each packageRule must contain at least one match* or exclude* selector. Rule: ${JSON.stringify(
packageRule
@@ -13,7 +13,8 @@ jest.mock('@aws-sdk/client-ecr');
jest.mock('../../util/host-rules');

type ECR = _AWS.ECR;
- type GetAuthorizationTokenCommandOutput = _AWS.GetAuthorizationTokenCommandOutput;
+ type GetAuthorizationTokenCommandOutput =
+ _AWS.GetAuthorizationTokenCommandOutput;
const AWS = mocked(_AWS);

const baseUrl = 'https://index.docker.io/v2';

@@ -437,8 +438,7 @@ describe(getName(), () => {
200,
{ tags },
{
- link:
- '<https://api.github.com/user/9287/repos?page=3&per_page=100>; rel="next", ',
+ link: '<https://api.github.com/user/9287/repos?page=3&per_page=100>; rel="next", ',
}
)
.get('/')

@@ -114,10 +114,8 @@ async function filterMissingArtifacts(
): Promise<Release[]> {
const cacheNamespace = 'datasource-maven-metadata';
const cacheKey = `${repoUrl}${dependency.dependencyUrl}`;
- let artifactsInfo: ArtifactsInfo | null = await packageCache.get<ArtifactsInfo>(
- cacheNamespace,
- cacheKey
- );
+ let artifactsInfo: ArtifactsInfo | null =
+ await packageCache.get<ArtifactsInfo>(cacheNamespace, cacheKey);

if (!isValidArtifactsInfo(artifactsInfo, versions)) {
const queue = versions

@@ -130,7 +128,9 @@ async function filterMissingArtifacts(
return [version, artifactUrl];
})
.filter(([_, artifactUrl]) => Boolean(artifactUrl))
- .map(([version, artifactUrl]) => (): Promise<ArtifactInfoResult> =>
+ .map(
+ ([version, artifactUrl]) =>
+ (): Promise<ArtifactInfoResult> =>
getArtifactInfo(version, artifactUrl)
);
const results = await pAll(queue, { concurrency: 5 });
@@ -11,7 +11,8 @@ import { id as datasource, getNpmrc, resetCache, setNpmrc } from '.';
jest.mock('registry-auth-token');
jest.mock('delay');

- const registryAuthToken: jest.Mock<_registryAuthToken.NpmCredentials> = _registryAuthToken as never;
+ const registryAuthToken: jest.Mock<_registryAuthToken.NpmCredentials> =
+ _registryAuthToken as never;
let npmResponse: any;

describe(getName(), () => {

@@ -12,9 +12,10 @@ export const defaultRegistryUrls = [v3.getDefaultFeed()];
export const defaultVersioning = nugetVersioning.id;
export const registryStrategy = 'merge';

- export function parseRegistryUrl(
- registryUrl: string
- ): { feedUrl: string; protocolVersion: number } {
+ export function parseRegistryUrl(registryUrl: string): {
+ feedUrl: string;
+ protocolVersion: number;
+ } {
try {
const parsedUrl = urlApi.parse(registryUrl);
let protocolVersion = 2;

@@ -115,9 +115,9 @@ export async function getReleases(
const url = `${baseUrl}/${pkgName.toLowerCase()}/index.json`;
const packageRegistration = await http.getJson<PackageRegistration>(url);
const catalogPages = packageRegistration.body.items || [];
- const catalogPagesQueue = catalogPages.map((page) => (): Promise<
- CatalogEntry[]
- > => getCatalogEntry(page));
+ const catalogPagesQueue = catalogPages.map(
+ (page) => (): Promise<CatalogEntry[]> => getCatalogEntry(page)
+ );
const catalogEntries = (
await pAll(catalogPagesQueue, { concurrency: 5 })
).flat();

@@ -143,8 +143,8 @@ async function getAllPackages(regUrl: string): Promise<AllPackages | null> {
providerPackages,
} = registryMeta;
if (files) {
- const queue = files.map((file) => (): Promise<PackagistFile> =>
- getPackagistFile(regUrl, file)
+ const queue = files.map(
+ (file) => (): Promise<PackagistFile> => getPackagistFile(regUrl, file)
);
const resolvedFiles = await pAll(queue, { concurrency: 5 });
for (const res of resolvedFiles) {
@@ -93,7 +93,8 @@ async function requestGithub<T = unknown>(
return null;
}

- const githubRegex = /^https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+?)(\.git|\/.*)?$/;
+ const githubRegex =
+ /^https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+?)(\.git|\/.*)?$/;

async function getReleasesFromGithub(
lookupName: string,

@@ -54,10 +54,9 @@ const bunyanLogger = bunyan.createLogger({
].map(withSanitizer),
});

- const logFactory = (level: bunyan.LogLevelString): any => (
- p1: any,
- p2: any
- ): void => {
+ const logFactory =
+ (level: bunyan.LogLevelString): any =>
+ (p1: any, p2: any): void => {
if (p2) {
// meta and msg provided
bunyanLogger[level]({ logContext, ...curMeta, ...p1 }, p2);

@@ -68,7 +67,7 @@ const logFactory = (level: bunyan.LogLevelString): any => (
// only meta provided
bunyanLogger[level]({ logContext, ...curMeta, ...p1 });
}
- };
+ };

const loggerLevels: bunyan.LogLevelString[] = [
'trace',

@@ -2,5 +2,7 @@ export const newBlockRegEx = /^\s*-\s*((\w+):\s*(.*))$/;
export const blockLineRegEx = /^\s*((\w+):\s*(.*))$/;
export const galaxyDepRegex = /[\w-]+\.[\w-]+/;
export const dependencyRegex = /^dependencies:/;
- export const galaxyRegEx = /^\s+(?<lookupName>[\w.]+):\s*["'](?<version>.+)["']\s*/;
- export const nameMatchRegex = /(?<source>((git\+)?(?:(git|ssh|https?):\/\/)?(.*@)?(?<hostname>[\w.-]+)(?:(:\d+)?\/|:))(?<depName>[\w./-]+)(?:\.git)?)(,(?<version>[\w.]*))?/;
+ export const galaxyRegEx =
+ /^\s+(?<lookupName>[\w.]+):\s*["'](?<version>.+)["']\s*/;
+ export const nameMatchRegex =
+ /(?<source>((git\+)?(?:(git|ssh|https?):\/\/)?(.*@)?(?<hostname>[\w.-]+)(?:(:\d+)?\/|:))(?<depName>[\w./-]+)(?:\.git)?)(,(?<version>[\w.]*))?/;

@@ -1,3 +1,4 @@
- export const keyValueExtractionRegex = /^\s*(?<key>[^\s]+):\s+"?(?<value>[^"\s]+)"?\s*$/;
+ export const keyValueExtractionRegex =
+ /^\s*(?<key>[^\s]+):\s+"?(?<value>[^"\s]+)"?\s*$/;
// looks for `apiVersion: argoproj.io/
export const fileTestRegex = /\s*apiVersion:\s*argoproj.io\/\s*/;
@@ -256,7 +256,8 @@ export function extractPackageFile(
dep.datasource = datasourceGo.id;
dep.lookupName = importpath;
if (remote) {
- const remoteMatch = /https:\/\/github\.com(?:.*\/)(([a-zA-Z]+)([-])?([a-zA-Z]+))/.exec(
+ const remoteMatch =
+ /https:\/\/github\.com(?:.*\/)(([a-zA-Z]+)([-])?([a-zA-Z]+))/.exec(
remote
);
if (remoteMatch && remoteMatch[0].length === remote.length) {

@@ -21,9 +21,8 @@ export function extractPackageFile(content: string): PackageFile | null {
} else if (isPluginsSection) {
logger.debug(`serviceImageLine: "${line}"`);
const { currentIndent } = /^(?<currentIndent>\s*)/.exec(line).groups;
- const depLineMatch = /^\s+(?:-\s+)?(?<depName>[^#]+)#(?<currentValue>[^:]+)/.exec(
- line
- );
+ const depLineMatch =
+ /^\s+(?:-\s+)?(?<depName>[^#]+)#(?<currentValue>[^:]+)/.exec(line);
if (currentIndent.length <= pluginsIndent.length) {
isPluginsSection = false;
pluginsIndent = '';

@@ -72,12 +72,8 @@ function buildBundleHostVariable(hostRule: HostRule): Record<string, string> {
export async function updateArtifacts(
updateArtifact: UpdateArtifact
): Promise<UpdateArtifactsResult[] | null> {
- const {
- packageFileName,
- updatedDeps,
- newPackageFileContent,
- config,
- } = updateArtifact;
+ const { packageFileName, updatedDeps, newPackageFileContent, config } =
+ updateArtifact;
const { constraints = {} } = config;
logger.debug(`bundler.updateArtifacts(${packageFileName})`);
const existingError = memCache.get<string>('bundlerArtifactsError');

@@ -38,7 +38,8 @@ export async function extractPackageFile(
if (rubyMatch) {
res.constraints = { ruby: rubyMatch[1] };
}
- const gemMatchRegex = /^\s*gem\s+(['"])(?<depName>[^'"]+)\1(\s*,\s*(?<currentValue>(['"])[^'"]+\5(\s*,\s*\5[^'"]+\5)?))?/;
+ const gemMatchRegex =
+ /^\s*gem\s+(['"])(?<depName>[^'"]+)\1(\s*,\s*(?<currentValue>(['"])[^'"]+\5(\s*,\s*\5[^'"]+\5)?))?/;
const gemMatch = gemMatchRegex.exec(line);
if (gemMatch) {
const dep: PackageDependency = {
@@ -1,7 +1,8 @@
import * as datasourceCdnjs from '../../datasource/cdnjs';
import type { PackageDependency, PackageFile } from '../types';

- export const cloudflareUrlRegex = /\/\/cdnjs\.cloudflare\.com\/ajax\/libs\/(?<depName>[^/]+?)\/(?<currentValue>[^/]+?)\/(?<asset>[-/_.a-zA-Z0-9]+)/;
+ export const cloudflareUrlRegex =
+ /\/\/cdnjs\.cloudflare\.com\/ajax\/libs\/(?<depName>[^/]+?)\/(?<currentValue>[^/]+?)\/(?<asset>[-/_.a-zA-Z0-9]+)/;

export function extractPackageFile(content: string): PackageFile {
const deps: PackageDependency[] = [];

@@ -48,9 +48,8 @@ export function parseLine(line: string): ParsedLine {
export function gitDep(parsedLine: ParsedLine): PackageDependency | null {
const { depName, git, tag } = parsedLine;
if (git?.startsWith('https://github.com/')) {
- const githubMatch = /https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+)/.exec(
- git
- );
+ const githubMatch =
+ /https:\/\/github\.com\/(?<account>[^/]+)\/(?<repo>[^/]+)/.exec(git);
const { account, repo } = githubMatch?.groups || {};
if (account && repo) {
return {

@@ -39,13 +39,15 @@ describe(getName(), () => {
expect(res).toMatchSnapshot();
});
it('handles comments', () => {
- const res = extractPackageFile('# some comment\n# another\n\nFROM node\n')
- .deps;
+ const res = extractPackageFile(
+ '# some comment\n# another\n\nFROM node\n'
+ ).deps;
expect(res).toMatchSnapshot();
});
it('handles custom hosts', () => {
- const res = extractPackageFile('FROM registry2.something.info/node:8\n')
- .deps;
+ const res = extractPackageFile(
+ 'FROM registry2.something.info/node:8\n'
+ ).deps;
expect(res).toMatchSnapshot();
});
it('handles custom hosts and suffix', () => {
@@ -23,7 +23,8 @@ export function extractPackageFile(content: string): PackageFile | null {
continue; // eslint-disable-line no-continue
}

- const tagMatch = /^\s+-?\s+?uses: (?<depName>[\w-]+\/[\w-]+)(?<path>.*)?@(?<currentValue>.+?)\s*?$/.exec(
+ const tagMatch =
+ /^\s+-?\s+?uses: (?<depName>[\w-]+\/[\w-]+)(?<path>.*)?@(?<currentValue>.+?)\s*?$/.exec(
line
);
if (tagMatch?.groups) {

@@ -61,7 +61,8 @@ export function extractPackageFile(content: string): PackageFile | null {
foundImage = false;
const serviceImageLine = skipCommentLines(lines, lineNumber + 1);
logger.trace(`serviceImageLine: "${serviceImageLine.line}"`);
- const serviceImageMatch = /^\s*-\s*(?:name:\s*)?'?"?([^\s'"]+)'?"?\s*$/.exec(
+ const serviceImageMatch =
+ /^\s*-\s*(?:name:\s*)?'?"?([^\s'"]+)'?"?\s*$/.exec(
serviceImageLine.line
);
if (serviceImageMatch) {

@@ -45,9 +45,8 @@ export function extractPackageFile(content: string): PackageFile | null {
if (line.startsWith('go ') && validRange(line.replace('go ', ''))) {
constraints.go = line.replace('go ', '^');
}
- const replaceMatch = /^replace\s+[^\s]+[\s]+[=][>]\s+([^\s]+)\s+([^\s]+)/.exec(
- line
- );
+ const replaceMatch =
+ /^replace\s+[^\s]+[\s]+[=][>]\s+([^\s]+)\s+([^\s]+)/.exec(line);
if (replaceMatch) {
const dep = getDep(lineNumber, replaceMatch, 'replace');
deps.push(dep);

@@ -4,14 +4,12 @@ import { extractAllPackageFiles } from '.';
jest.mock('../../util/fs');

function mockFs(files: Record<string, string>): void {
- fs.readLocalFile.mockImplementation(
- (fileName: string): Promise<string> => {
+ fs.readLocalFile.mockImplementation((fileName: string): Promise<string> => {
const content = files?.[fileName];
return typeof content === 'string'
? Promise.resolve(content)
: Promise.reject(`File not found: ${fileName}`);
- }
- );
+ });
}

describe(getName(), () => {
@@ -58,11 +58,11 @@ export async function extractAllPackageFiles(
extractedDeps.push(...deps);
} else if (isGradleFile(packageFile)) {
const vars = getVars(registry, dir);
- const { deps, urls, vars: gradleVars } = parseGradle(
- content,
- vars,
- packageFile
- );
+ const {
+ deps,
+ urls,
+ vars: gradleVars,
+ } = parseGradle(content, vars, packageFile);
urls.forEach((url) => {
if (!registryUrls.includes(url)) {
registryUrls.push(url);

@@ -84,7 +84,8 @@ const lexer = moo.states({
[TokenType.DoubleQuotedFinish]: { match: '"', pop: 1 },
variable: {
// Supported: ${foo}, $foo, ${ foo.bar.baz }, $foo.bar.baz
- match: /\${\s*[a-zA-Z_][a-zA-Z0-9_]*(?:\s*\.\s*[a-zA-Z_][a-zA-Z0-9_]*)*\s*}|\$[a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*/,
+ match:
+ /\${\s*[a-zA-Z_][a-zA-Z0-9_]*(?:\s*\.\s*[a-zA-Z_][a-zA-Z0-9_]*)*\s*}|\$[a-zA-Z_][a-zA-Z0-9_]*(?:\.[a-zA-Z_][a-zA-Z0-9_]*)*/,
value: (x: string): string =>
x.replace(/^\${?\s*/, '').replace(/\s*}$/, ''),
},

@@ -247,8 +247,7 @@ describe(getName(), () => {
'user-agent': 'https://github.com/renovatebot/renovate',
},
method: 'GET',
- url:
- 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+ url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
},
]);
});

@@ -282,8 +281,7 @@ describe(getName(), () => {
'user-agent': 'https://github.com/renovatebot/renovate',
},
method: 'GET',
- url:
- 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+ url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
},
]);
});

@@ -163,8 +163,7 @@ describe(getName(), () => {
'user-agent': 'https://github.com/renovatebot/renovate',
},
method: 'GET',
- url:
- 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+ url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
},
]);
});

@@ -198,8 +197,7 @@ describe(getName(), () => {
'user-agent': 'https://github.com/renovatebot/renovate',
},
method: 'GET',
- url:
- 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
+ url: 'https://services.gradle.org/distributions/gradle-6.3-bin.zip.sha256',
},
]);
});
@@ -208,9 +208,8 @@ export function collectVersionVariables(
}

if (!dep.currentValue) {
- const dependencyLiteralRegex = dependencyStringLiteralExpressionFormatMatch(
- dependency
- );
+ const dependencyLiteralRegex =
+ dependencyStringLiteralExpressionFormatMatch(dependency);
const currentValue = dependencyLiteralRegex.exec(buildGradleContent)?.[1];
if (currentValue) {
dep.currentValue = currentValue;

@@ -264,9 +263,8 @@ function updateLocalVariables(
const match = regex.exec(buildGradleContent);
if (match) {
const variableDefinitionRegex = variableDefinitionFormatMatch(match[1]);
- const variableDefinitionMatch = variableDefinitionRegex.exec(
- buildGradleContent
- );
+ const variableDefinitionMatch =
+ variableDefinitionRegex.exec(buildGradleContent);
if (variableDefinitionMatch) {
return buildGradleContent.replace(
variableDefinitionMatch[0],
@@ -28,8 +28,7 @@ describe(getName(), () => {
repoName: 'aide',
sha256:
'0f2b7cecc70c1a27d35c06c98804fcdb9f326630de5d035afc447122186010b7',
- url:
- 'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
+ url: 'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
},
newValue: 'v0.17.7',
};

@@ -55,8 +54,7 @@ describe(getName(), () => {
repoName: 'bazel-watcher',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -84,8 +82,7 @@ describe(getName(), () => {
repoName: 'bazel-watcher',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -136,8 +133,7 @@ describe(getName(), () => {
repoName: 'invalid/repo/name',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -167,8 +163,7 @@ describe(getName(), () => {
repoName: 'wrong-version/archive/v10.2.3.tar.gz',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -207,8 +202,7 @@ describe(getName(), () => {
repoName: 'bazel-watcher',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -242,8 +236,7 @@ describe(getName(), () => {
repoName: 'bazel-watcher',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -278,8 +271,7 @@ describe(getName(), () => {
repoName: 'bazel-watcher',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -313,8 +305,7 @@ describe(getName(), () => {
repoName: 'bazel-watcher',
sha256:
'26f5125218fad2741d3caf937b02296d803900e5f153f5b1f733f15391b9f9b4',
- url:
- 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
+ url: 'https://github.com/bazelbuild/bazel-watcher/archive/v0.8.2.tar.gz',
},
newValue: 'v0.9.3',
};

@@ -341,8 +332,7 @@ describe(getName(), () => {
repoName: 'aide',
sha256:
'0f2b7cecc70c1a27d35c06c98804fcdb9f326630de5d035afc447122186010b7',
- url:
- 'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
+ url: 'https://github.com/aide/aide/releases/download/v0.16.1/aide-0.16.1.tar.gz',
},
newValue: 'v0.17.7',
};
@@ -70,7 +70,8 @@ function extractYaml(content: string): PackageDependency[] {

function extractText(content: string): PackageDependency[] {
const deps: PackageDependency[] = [];
- const regex = /^\s*(?<depName>[\d\w-]+):(?<currentValue>[^#\s]+)[#\s]*(?<comment>.*)$/;
+ const regex =
+ /^\s*(?<depName>[\d\w-]+):(?<currentValue>[^#\s]+)[#\s]*(?<comment>.*)$/;

for (const line of content.split('\n')) {
const match = regex.exec(line);

@@ -9,7 +9,8 @@ import type { Image, Kustomize } from './types';

// URL specifications should follow the hashicorp URL format
// https://github.com/hashicorp/go-getter#url-format
- const gitUrl = /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^/\s]+\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/;
+ const gitUrl =
+ /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^/\s]+\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/;

export function extractBase(base: string): PackageDependency | null {
const match = gitUrl.exec(base);

@@ -5,7 +5,8 @@ import { getSiblingFileName, localPathExists } from '../../util/fs';
import type { PackageDependency, PackageFile } from '../types';

const depSectionRegExp = /defp\s+deps.*do/g;
- const depMatchRegExp = /{:(\w+),\s*([^:"]+)?:?\s*"([^"]+)",?\s*(organization: "(.*)")?.*}/gm;
+ const depMatchRegExp =
+ /{:(\w+),\s*([^:"]+)?:?\s*"([^"]+)",?\s*(organization: "(.*)")?.*}/gm;

export async function extractPackageFile(
content: string,

@@ -31,9 +31,8 @@ export function detectMonorepos(
const { lernaJsonFile } = managerData;
const packages = yarnWorkspacesPackages || lernaPackages;
if (packages?.length) {
- const internalPackagePatterns = (is.array(packages)
- ? packages
- : [packages]
+ const internalPackagePatterns = (
+ is.array(packages) ? packages : [packages]
).map((pattern) => getSiblingFileName(packageFile, pattern));
const internalPackageFiles = packageFiles.filter((sp) =>
matchesAnyPattern(
@@ -40,8 +40,9 @@ export async function getNodeConstraint(
let lockfileVersion = 1;
try {
const lockFileName = getSiblingFileName(packageFile, 'package-lock.json');
- lockfileVersion = JSON.parse(await readLocalFile(lockFileName, 'utf8'))
- .lockfileVersion;
+ lockfileVersion = JSON.parse(
+ await readLocalFile(lockFileName, 'utf8')
+ ).lockfileVersion;
} catch (err) {
// do nothing
}

@@ -4,13 +4,8 @@ import type { RangeStrategy } from '../../types';
import type { RangeConfig } from '../types';

export function getRangeStrategy(config: RangeConfig): RangeStrategy {
- const {
- depType,
- depName,
- packageJsonType,
- currentValue,
- rangeStrategy,
- } = config;
+ const { depType, depName, packageJsonType, currentValue, rangeStrategy } =
+ config;
const isComplexRange = parseRange(currentValue).length > 1;
if (rangeStrategy === 'bump' && isComplexRange) {
logger.debug(

@@ -23,15 +23,12 @@ jest.mock('./util');

const exec: jest.Mock<typeof _exec> = _exec as any;
const env = mocked(_env);
- const getConfiguredRegistries: jest.Mock<
- typeof _getConfiguredRegistries
- > = _getConfiguredRegistries as any;
- const getDefaultRegistries: jest.Mock<
- typeof _getDefaultRegistries
- > = _getDefaultRegistries as any;
- const getRandomString: jest.Mock<
- typeof _getRandomString
- > = _getRandomString as any;
+ const getConfiguredRegistries: jest.Mock<typeof _getConfiguredRegistries> =
+ _getConfiguredRegistries as any;
+ const getDefaultRegistries: jest.Mock<typeof _getDefaultRegistries> =
+ _getDefaultRegistries as any;
+ const getRandomString: jest.Mock<typeof _getRandomString> =
+ _getRandomString as any;
const hostRules = mocked(_hostRules);

const config = {
@@ -18,7 +18,8 @@ import { getConfiguredRegistries } from './util';
* The update of the right boundary does not make sense regarding to the lowest version restore rule,
* so we don't include it in the extracting regexp
*/
- const checkVersion = /^\s*(?:[[])?(?:(?<currentValue>[^"(,[\]]+)\s*(?:,\s*[)\]]|])?)\s*$/;
+ const checkVersion =
+ /^\s*(?:[[])?(?:(?<currentValue>[^"(,[\]]+)\s*(?:,\s*[)\]]|])?)\s*$/;
const elemNames = new Set([
'PackageReference',
'PackageVersion',

@@ -42,14 +42,20 @@ describe(getName(), () => {
expect(res.deps).toHaveLength(3);
});
it('extracts multiple dependencies', () => {
- const res = extractPackageFile(requirements2, 'unused_file_name', config)
- .deps;
+ const res = extractPackageFile(
+ requirements2,
+ 'unused_file_name',
+ config
+ ).deps;
expect(res).toMatchSnapshot();
expect(res).toHaveLength(5);
});
it('handles comments and commands', () => {
- const res = extractPackageFile(requirements3, 'unused_file_name', config)
- .deps;
+ const res = extractPackageFile(
+ requirements3,
+ 'unused_file_name',
+ config
+ ).deps;
expect(res).toMatchSnapshot();
expect(res).toHaveLength(5);
});

@@ -24,7 +24,8 @@ import {
getTerraformDependencyType,
} from './util';

- const dependencyBlockExtractionRegex = /^\s*(?<type>[a-z_]+)\s+("(?<lookupName>[^"]+)"\s+)?("(?<terraformName>[^"]+)"\s+)?{\s*$/;
+ const dependencyBlockExtractionRegex =
+ /^\s*(?<type>[a-z_]+)\s+("(?<lookupName>[^"]+)"\s+)?("(?<terraformName>[^"]+)"\s+)?{\s*$/;
const contentCheckList = [
'module "',
'provider "',
@@ -8,8 +8,10 @@ import { TerraformDependencyTypes } from './common';
import { extractTerraformProvider } from './providers';
import type { ExtractionResult } from './types';

- export const githubRefMatchRegex = /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
- export const gitTagsRefMatchRegex = /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
+ export const githubRefMatchRegex =
+ /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
+ export const gitTagsRefMatchRegex =
+ /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
const hostnameMatchRegex = /^(?<hostname>([\w|\d]+\.)+[\w|\d]+)/;

export function extractTerraformModule(

@@ -7,7 +7,8 @@ import { TerraformDependencyTypes } from './common';
import type { ExtractionResult } from './types';
import { keyValueExtractionRegex } from './util';

- export const sourceExtractionRegex = /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;
+ export const sourceExtractionRegex =
+ /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;

export function extractTerraformProvider(
startingLine: number,

@@ -1,7 +1,9 @@
import { TerraformDependencyTypes } from './common';

- export const keyValueExtractionRegex = /^\s*(?<key>[^\s]+)\s+=\s+"(?<value>[^"]+)"\s*$/;
- export const resourceTypeExtractionRegex = /^\s*resource\s+"(?<type>[^\s]+)"\s+"(?<name>[^"]+)"\s*{/;
+ export const keyValueExtractionRegex =
+ /^\s*(?<key>[^\s]+)\s+=\s+"(?<value>[^"]+)"\s*$/;
+ export const resourceTypeExtractionRegex =
+ /^\s*resource\s+"(?<type>[^\s]+)"\s+"(?<name>[^"]+)"\s*{/;

export function getTerraformDependencyType(
value: string

@@ -8,8 +8,10 @@ import { TerragruntDependencyTypes } from './common';
import { extractTerragruntProvider } from './providers';
import type { ExtractionResult } from './types';

- export const githubRefMatchRegex = /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
- export const gitTagsRefMatchRegex = /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
+ export const githubRefMatchRegex =
+ /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?ref=(?<tag>.*)$/i;
+ export const gitTagsRefMatchRegex =
+ /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?ref=(?<tag>.*)$/;
const hostnameMatchRegex = /^(?<hostname>([\w|\d]+\.)+[\w|\d]+)/;

export function extractTerragruntModule(

@@ -3,7 +3,8 @@ import { TerragruntDependencyTypes } from './common';
import type { ExtractionResult } from './types';
import { keyValueExtractionRegex } from './util';

- export const sourceExtractionRegex = /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;
+ export const sourceExtractionRegex =
+ /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/;

function extractBracesContent(content): number {
const stack = [];
@@ -162,9 +162,10 @@ export function max4000Chars(str: string): string {
return str;
}

- export function getProjectAndRepo(
- str: string
- ): { project: string; repo: string } {
+ export function getProjectAndRepo(str: string): {
+ project: string;
+ repo: string;
+ } {
logger.trace(`getProjectAndRepo(${str})`);
const strSplit = str.split(`/`);
if (strSplit.length === 1) {

@@ -312,11 +312,9 @@ function matchesState(state: string, desiredState: string): boolean {

// TODO: coverage (#9624)
// istanbul ignore next
- const isRelevantPr = (
- branchName: string,
- prTitle: string | null | undefined,
- state: string
- ) => (p: Pr): boolean =>
+ const isRelevantPr =
+ (branchName: string, prTitle: string | null | undefined, state: string) =>
+ (p: Pr): boolean =>
p.sourceBranch === branchName &&
(!prTitle || p.title === prTitle) &&
matchesState(p.state, state);

@@ -22,14 +22,12 @@ describe('accumulateValues()', () => {
.get('/some-url?pagelen=10')
.reply(200, {
values: range(10),
- next:
- 'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=9&role=contributor',
+ next: 'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=9&role=contributor',
})
.get('/2.0/repositories/?pagelen=10&after=9&role=contributor')
.reply(200, {
values: range(10),
- next:
- 'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=19&role=contributor',
+ next: 'https://api.bitbucket.org/2.0/repositories/?pagelen=10&after=19&role=contributor',
})
.get('/2.0/repositories/?pagelen=10&after=19&role=contributor')
.reply(200, {
@@ -1691,8 +1691,7 @@ export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
logger.debug({ err }, 'Error retrieving vulnerability alerts');
logger.warn(
{
- url:
- 'https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts',
+ url: 'https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts',
},
'Cannot access vulnerability alerts. Please ensure permissions have been granted.'
);

@@ -93,7 +93,7 @@ export function find(search: HostRuleSearch): HostRule {
logger.warn({ search }, 'Invalid hostRules search');
return {};
}
- let res = ({} as any) as HostRule;
+ let res = {} as any as HostRule;
// First, apply empty rule matches
hostRules
.filter((rule) => isEmptyRule(rule))

@@ -205,8 +205,7 @@ describe(getName(), () => {
authorization: 'auth',
},
hostname: 'store123.blob.core.windows.net',
- href:
- 'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
+ href: 'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
});

removeAuthorization(opts);

@@ -214,8 +213,7 @@ describe(getName(), () => {
expect(opts).toEqual({
headers: {},
hostname: 'store123.blob.core.windows.net',
- href:
- 'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
+ href: 'https://<store>.blob.core.windows.net/<some id>//docker/registry/v2/blobs',
});
});

@@ -31,13 +31,11 @@ describe(getName(), () => {
.scope(gitlabApiHost)
.get('/api/v4/some-url')
.reply(200, ['a'], {
- link:
- '<https://gitlab.com/api/v4/some-url&page=2>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
+ link: '<https://gitlab.com/api/v4/some-url&page=2>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
})
.get('/api/v4/some-url&page=2')
.reply(200, ['b', 'c'], {
- link:
- '<https://gitlab.com/api/v4/some-url&page=3>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
+ link: '<https://gitlab.com/api/v4/some-url&page=3>; rel="next", <https://gitlab.com/api/v4/some-url&page=3>; rel="last"',
})
.get('/api/v4/some-url&page=3')
.reply(200, ['d']);
@@ -239,7 +239,8 @@ export function parsePrefixRange(input: string): PrefixRange | null {
return null;
}

- const mavenBasedRangeRegex = /^(?<leftBoundStr>[[\](]\s*)(?<leftVal>[-._+a-zA-Z0-9]*?)(?<separator>\s*,\s*)(?<rightVal>[-._+a-zA-Z0-9]*?)(?<rightBoundStr>\s*[[\])])$/;
+ const mavenBasedRangeRegex =
+ /^(?<leftBoundStr>[[\](]\s*)(?<leftVal>[-._+a-zA-Z0-9]*?)(?<separator>\s*,\s*)(?<rightVal>[-._+a-zA-Z0-9]*?)(?<rightBoundStr>\s*[[\])])$/;

export function parseMavenBasedRange(input: string): MavenBasedRange | null {
if (!input) {

@@ -124,7 +124,8 @@ export const create = ({

export abstract class GenericVersioningApi<
T extends GenericVersion = GenericVersion
- > implements VersioningApi {
+ > implements VersioningApi
+ {
private _getSection(version: string, index: number): number {
const parsed = this._parse(version);
return parsed && parsed.release.length > index

@@ -10,7 +10,8 @@ export interface Range {
}

const parse = (range: string): Range => {
- const regExp = /^(?<operator>[^\d\s]+)?(?<delimiter>\s*)(?<version>[0-9a-zA-Z-.]+)$/;
+ const regExp =
+ /^(?<operator>[^\d\s]+)?(?<delimiter>\s*)(?<version>[0-9a-zA-Z-.]+)$/;

const value = (range || '').trim();

@@ -22,10 +22,8 @@ export async function postUpgradeCommandsExecutor(
): Promise<PostUpgradeCommandsExecutionResult> {
let updatedArtifacts = [...(config.updatedArtifacts || [])];
const artifactErrors = [...(config.artifactErrors || [])];
- const {
- allowedPostUpgradeCommands,
- allowPostUpgradeCommandTemplating,
- } = getAdminConfig();
+ const { allowedPostUpgradeCommands, allowPostUpgradeCommandTemplating } =
+ getAdminConfig();

for (const upgrade of filteredUpgradeCommands) {
addMeta({ dep: upgrade.depName });

@@ -184,10 +182,8 @@ export default async function executePostUpgradeCommands(
postUpgradeTasks.executionMode === 'update'
);

- const {
- updatedArtifacts,
- artifactErrors,
- } = await postUpgradeCommandsExecutor(updateUpgradeCommands, config);
+ const { updatedArtifacts, artifactErrors } =
+ await postUpgradeCommandsExecutor(updateUpgradeCommands, config);
return postUpgradeCommandsExecutor(branchUpgradeCommands, {
...config,
updatedArtifacts,
@@ -1065,14 +1065,18 @@ describe(getName(), () => {
});
expect(exec.exec).toHaveBeenCalledTimes(2);
expect(
- (commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
+ (
+ commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
(f) => f.name === 'modified_file'
- ).contents as Buffer).toString()
+ ).contents as Buffer
+ ).toString()
).toBe('modified file content again');
expect(
- (commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
+ (
+ commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
(f) => f.name === 'deleted_then_created_file'
- ).contents as Buffer).toString()
+ ).contents as Buffer
+ ).toString()
).toBe('this file was once deleted');
expect(
commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(

@@ -1191,9 +1195,11 @@ describe(getName(), () => {
});
expect(exec.exec).toHaveBeenCalledTimes(1);
expect(
- (commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
+ (
+ commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts.find(
(f) => f.name === 'modified_file'
- ).contents as Buffer).toString()
+ ).contents as Buffer
+ ).toString()
).toBe('modified file content');
});
});
@@ -101,8 +101,7 @@ describe(getName(), () => {
{ tag_name: `v1.0.0` },
{
tag_name: `v1.0.1`,
- body:
- 'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
+ body: 'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
},
]);

@@ -123,8 +122,7 @@ describe(getName(), () => {
{ tag_name: `v1.0.0` },
{
tag_name: `v1.0.1`,
- body:
- 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
+ body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
},
]);
const res = await getReleaseList(

@@ -145,8 +143,7 @@ describe(getName(), () => {
{ tag_name: `v1.0.0` },
{
tag_name: `v1.0.1`,
- body:
- 'some body #123, [#124](https://my.custom.domain/some/yet-other-repository/issues/124)',
+ body: 'some body #123, [#124](https://my.custom.domain/some/yet-other-repository/issues/124)',
},
]);
const res = await getReleaseList(

@@ -183,8 +180,7 @@ describe(getName(), () => {
{ tag_name: `${prefix}1.0.0` },
{
tag_name: `${prefix}1.0.1`,
- body:
- 'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
+ body: 'some body #123, [#124](https://github.com/some/yet-other-repository/issues/124)',
},
]);
const res = await getReleaseNotes(

@@ -208,8 +204,7 @@ describe(getName(), () => {
{ tag_name: `${prefix}1.0.0` },
{
tag_name: `${prefix}1.0.1`,
- body:
- 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
+ body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
},
]);
@@ -26,13 +26,8 @@ function matchesUnstable(
export async function getInRangeReleases(
config: BranchUpgradeConfig
): Promise<Release[] | null> {
- const {
- versioning,
- currentVersion,
- newVersion,
- depName,
- datasource,
- } = config;
+ const { versioning, currentVersion, newVersion, depName, datasource } =
+ config;
// istanbul ignore if
if (!isGetPkgReleasesConfig(config)) {
return null;

@@ -81,12 +81,10 @@ function setupGitlabChangelogMock() {
},
],
releaseNotes: {
- url:
- 'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
+ url: 'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
},
compare: {
- url:
- 'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
+ url: 'https://gitlab.com/renovateapp/gitlabdummy/compare/v1.0.0...v1.1.0',
},
},
],

@@ -176,8 +174,7 @@ describe(getName(), () => {
const existingPr: Pr = {
displayNumber: 'Existing PR',
title: 'Update dependency dummy to v1.1.0',
- body:
- 'Some body<!-- Reviewable:start -->something<!-- Reviewable:end -->\n\n',
+ body: 'Some body<!-- Reviewable:start -->something<!-- Reviewable:end -->\n\n',
} as never;
beforeEach(() => {
jest.resetAllMocks();
@@ -407,9 +407,9 @@ export async function ensurePr(
targetBranch: config.baseBranch,
prTitle,
prBody,
- labels: [
- ...new Set([...config.labels, ...config.addLabels]),
- ].map((label) => template.compile(label, config)),
+ labels: [...new Set([...config.labels, ...config.addLabels])].map(
+ (label) => template.compile(label, config)
+ ),
platformOptions: getPlatformPrOptions(config),
draftPR: config.draftPR,
});

@@ -49,8 +49,9 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
let configFileParsed;
if (configFileName === 'package.json') {
// We already know it parses
- configFileParsed = JSON.parse(await readLocalFile('package.json', 'utf8'))
- .renovate;
+ configFileParsed = JSON.parse(
+ await readLocalFile('package.json', 'utf8')
+ ).renovate;
logger.debug({ config: configFileParsed }, 'package.json>renovate config');
} else {
let rawFileContents = await readLocalFile(configFileName, 'utf8');

@@ -115,9 +115,8 @@ export async function detectVulnerabilityAlerts(
combinedAlerts[fileName] ||= {};
combinedAlerts[fileName][datasource] ||= {};
combinedAlerts[fileName][datasource][depName] ||= {};
- combinedAlerts[fileName][datasource][depName][
- vulnerableRequirements
- ] ||= {
+ combinedAlerts[fileName][datasource][depName][vulnerableRequirements] ||=
+ {
advisories: [],
};
const alertDetails =

@@ -41,7 +41,8 @@ export async function createOnboardingBranch(
} ${configFile}`;
}

- const commitMessage = `${commitMessagePrefix} ${onboardingCommitMessage}`.trim();
+ const commitMessage =
+ `${commitMessagePrefix} ${onboardingCommitMessage}`.trim();

// istanbul ignore if
if (getAdminConfig().dryRun) {
@@ -57,7 +57,8 @@ async function fetchManagerPackagerFileUpdates(
const { packageFile } = pFile;
const packageFileConfig = mergeChildConfig(managerConfig, pFile);
const { manager } = packageFileConfig;
- const queue = pFile.deps.map((dep) => (): Promise<PackageDependency> =>
+ const queue = pFile.deps.map(
+ (dep) => (): Promise<PackageDependency> =>
fetchDepUpdates(packageFileConfig, dep)
);
logger.trace(

@@ -75,7 +76,8 @@ async function fetchManagerUpdates(
manager: string
): Promise<void> {
const managerConfig = getManagerConfig(config, manager);
- const queue = packageFiles[manager].map((pFile) => (): Promise<void> =>
+ const queue = packageFiles[manager].map(
+ (pFile) => (): Promise<void> =>
fetchManagerPackagerFileUpdates(config, managerConfig, pFile)
);
logger.trace(
@@ -12,11 +12,8 @@ export function getBucket(
newVersion: string,
versioning: allVersioning.VersioningApi
): string {
- const {
- separateMajorMinor,
- separateMultipleMajor,
- separateMinorPatch,
- } = config;
+ const { separateMajorMinor, separateMultipleMajor, separateMinorPatch } =
+ config;
if (!separateMajorMinor) {
return 'latest';
}

@@ -15,12 +15,8 @@ export function filterVersions(
latestVersion: string,
releases: Release[]
): Release[] {
- const {
- ignoreUnstable,
- ignoreDeprecated,
- respectLatest,
- allowedVersions,
- } = config;
+ const { ignoreUnstable, ignoreDeprecated, respectLatest, allowedVersions } =
+ config;
let versioning;
function isVersionStable(version: string): boolean {
if (!versioning.isStable(version)) {

@@ -50,13 +50,8 @@ export async function branchifyUpgrades(
branchUpgrades[branchName] = branchUpgrades[branchName]
.reverse()
.filter((upgrade) => {
- const {
- manager,
- packageFile,
- depName,
- currentValue,
- newValue,
- } = upgrade;
+ const { manager, packageFile, depName, currentValue, newValue } =
+ upgrade;
const upgradeKey = `${packageFile}:${depName}:${currentValue}`;
const previousNewValue = seenUpdates[upgradeKey];
if (previousNewValue && previousNewValue !== newValue) {
@@ -35,13 +35,8 @@ function getTableValues(
if (!upgrade.commitBodyTable) {
return null;
}
- const {
- datasource,
- lookupName,
- depName,
- currentVersion,
- newVersion,
- } = upgrade;
+ const { datasource, lookupName, depName, currentVersion, newVersion } =
+ upgrade;
const name = lookupName || depName;
if (datasource && name && currentVersion && newVersion) {
return [datasource, name, currentVersion, newVersion];

@@ -252,7 +252,7 @@
"mockdate": "3.0.5",
"nock": "13.0.11",
"npm-run-all": "4.1.5",
- "prettier": "2.2.1",
+ "prettier": "2.3.0",
"pretty-quick": "3.1.0",
"rimraf": "3.0.2",
"semantic-release": "17.4.2",
@@ -53,7 +53,7 @@ function getCallerFileName(): string | null {
try {
const err = new Error();

- const stack = (err.stack as unknown) as NodeJS.CallSite[];
+ const stack = err.stack as unknown as NodeJS.CallSite[];

let currentFile = null;
for (const frame of stack) {

@@ -7619,10 +7619,10 @@ prelude-ls@~1.1.2:
resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54"
integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ=

- prettier@2.2.1:
- version "2.2.1"
- resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.2.1.tgz#795a1a78dd52f073da0cd42b21f9c91381923ff5"
- integrity sha512-PqyhM2yCjg/oKkFPtTGUojv7gnZAoG80ttl45O6x2Ug/rMJw4wcc9k6aaf2hibP7BGVCCM33gZoGjyvt9mm16Q==
+ prettier@2.3.0:
+ version "2.3.0"
+ resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.3.0.tgz#b6a5bf1284026ae640f17f7ff5658a7567fc0d18"
+ integrity sha512-kXtO4s0Lz/DW/IJ9QdWhAf7/NmPWQXkFr/r/WkR3vyI+0v8amTDxiaQSLzs8NBlytfLWX/7uQUMIW677yLKl4w==

pretty-bytes@^5.1.0:
version "5.6.0"