Mirror of https://github.com/renovatebot/renovate.git (synced 2025-01-12 06:56:24 +00:00)
feat: refactor dependency extraction (#1912)
Rewrite of dependency extraction, particularly for npm. Paves the way for easier addition of new package managers. Closes #1882.
This commit is contained in: parent bfec4a759a, commit ecdcd9df4f
99 changed files with 2812 additions and 3064 deletions
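The diff below gives every package manager a uniform module surface: lib/manager/index.js requires each manager by name and wraps the functions listed in its managerFunctions array. As a rough sketch of what adding a new manager would then involve (the manager name 'dummy' and all function bodies here are hypothetical; only the exported names come from the diff itself):

// lib/manager/dummy/index.js - hypothetical manager module under the refactored interface
function extractDependencies(content, packageFile) {
  // parse the file content and return { deps: [...] }, or null if nothing found
  return null;
}

function getPackageUpdates(config) {
  // look up available upgrades for one extracted dependency
  return [];
}

function updateDependency(fileContent, upgrade) {
  // return the updated file content, or null if the update failed
  return fileContent;
}

module.exports = { extractDependencies, getPackageUpdates, updateDependency };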
@@ -9,10 +9,27 @@ const envParser = require('./env');
const { getPlatformApi } = require('../platform');
const { resolveConfigPresets } = require('./presets');
const { get, getLanguageList, getManagerList } = require('../manager');

exports.parseConfigs = parseConfigs;
exports.mergeChildConfig = mergeChildConfig;
exports.filterConfig = filterConfig;
exports.getManagerConfig = getManagerConfig;

function getManagerConfig(config, manager) {
  let managerConfig = config;
  const language = get(manager, 'language');
  if (language) {
    managerConfig = mergeChildConfig(managerConfig, config[language]);
  }
  managerConfig = mergeChildConfig(managerConfig, config[manager]);
  for (const i of getLanguageList().concat(getManagerList())) {
    delete managerConfig[i];
  }
  managerConfig.language = language;
  managerConfig.manager = manager;
  return managerConfig;
}

async function parseConfigs(env, argv) {
  logger.debug('Parsing configs');
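getManagerConfig flattens the per-language and per-manager blocks of the global config into a single object. For example (hypothetical values; npm belonging to the 'node' language matches the languageList defined in the next file):

// config = { node: { pinVersions: true }, npm: { fileMatch: ['(^|/)package.json$'] }, ... }
// getManagerConfig(config, 'npm') merges config.node, then config.npm, into the base config,
// deletes all per-language/per-manager keys, and sets { language: 'node', manager: 'npm' }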
@@ -1,9 +1,3 @@
const minimatch = require('minimatch');
const pAll = require('p-all');
const { mergeChildConfig } = require('../config');
const { checkMonorepos } = require('../manager/npm/monorepos');

const managers = {};
const managerList = [
  'bazel',
  'buildkite',
@@ -16,191 +10,37 @@ const managerList = [
  'pip_requirements',
  'travis',
];
const managers = {};
for (const manager of managerList) {
  // eslint-disable-next-line global-require,import/no-dynamic-require
  managers[manager] = require(`./${manager}`);
}

const languageList = ['node', 'python'];

const get = (manager, name) => managers[manager][name];
const getLanguageList = () => languageList;
const getManagerList = () => managerList;

module.exports = {
  detectPackageFiles,
  extractDependencies,
  getPackageUpdates,
  getUpdatedPackageFiles,
  resolvePackageFiles,
  get,
  getLanguageList,
  getManagerList,
};

async function detectPackageFiles(config) {
  logger.debug('detectPackageFiles()');
  logger.trace({ config });
  let packageFiles = [];
  let fileList = await platform.getFileList();
  if (config.includePaths && config.includePaths.length) {
    fileList = fileList.filter(file =>
      config.includePaths.some(
        includePath => file === includePath || minimatch(file, includePath)
      )
    );
  }
  if (config.ignorePaths && config.ignorePaths.length) {
    fileList = fileList.filter(
      file =>
        !config.ignorePaths.some(
          ignorePath => file.includes(ignorePath) || minimatch(file, ignorePath)
        )
    );
  }
  for (const manager of managerList) {
    logger.debug(`Detecting package files (${manager})`);
    const { language } = managers[manager];
    // Check if the user has a whitelist of managers
    if (
      config.enabledManagers &&
      config.enabledManagers.length &&
      !(
        config.enabledManagers.includes(manager) ||
        config.enabledManagers.includes(language)
      )
    ) {
      logger.debug(manager + ' is not on the enabledManagers list');
      continue; // eslint-disable-line no-continue
    }
    // Check if the manager is manually disabled
    if (config[manager].enabled === false) {
      logger.debug(manager + ' is disabled');
      continue; // eslint-disable-line no-continue
    }
    // Check if the parent is manually disabled
    if (language && config[language].enabled === false) {
      logger.debug(manager + ' language is disabled');
      continue; // eslint-disable-line no-continue
    }
    const files = [];
    let allfiles = [];
    for (const fileMatch of config[manager].fileMatch) {
      logger.debug(`Using ${manager} file match: ${fileMatch}`);
      allfiles = allfiles.concat(
        fileList.filter(file => file.match(new RegExp(fileMatch)))
      );
    }
    logger.debug(`Found ${allfiles.length} files`);
    for (const file of allfiles) {
      const { contentPattern } = managers[manager];
      if (contentPattern) {
        const content = await platform.getFile(file);
        if (content && content.match(contentPattern)) {
          files.push(file);
        }
      } else {
        files.push(file);
      }
    }
    if (files.length) {
      logger.info({ manager, files }, `Detected package files`);
      packageFiles = packageFiles.concat(
        files.map(packageFile => ({ packageFile, manager }))
      );
    }
  }
  logger.trace({ packageFiles }, 'All detected package files');
  return packageFiles;
}
const managerFunctions = [
  'extractDependencies',
  'postExtract',
  'getPackageUpdates',
  'updateDependency',
  'supportsLockFileMaintenance',
];

function extractDependencies(packageContent, config) {
  logger.debug('manager.extractDependencies()');
  return managers[config.manager].extractDependencies(packageContent, config);
for (const f of managerFunctions) {
  module.exports[f] = (manager, ...params) => {
    if (managers[manager][f]) {
      return managers[manager][f](...params);
    }

function getPackageUpdates(config) {
  logger.trace({ config }, 'manager.getPackageUpdates()');
  const { manager } = config;
  if (!managerList.includes(manager)) {
    throw new Error('Unsupported package manager');
  }
  return managers[manager].getPackageUpdates(config);
}

async function getUpdatedPackageFiles(config) {
  logger.debug('manager.getUpdatedPackageFiles()');
  logger.trace({ config });
  const updatedPackageFiles = {};

  for (const upgrade of config.upgrades) {
    const { manager } = upgrade;
    if (upgrade.type !== 'lockFileMaintenance') {
      const existingContent =
        updatedPackageFiles[upgrade.packageFile] ||
        (await platform.getFile(upgrade.packageFile, config.parentBranch));
      let newContent = existingContent;
      newContent = await managers[manager].updateDependency(
        existingContent,
        upgrade
      );
      if (!newContent) {
        if (config.parentBranch) {
          logger.info('Rebasing branch after error updating content');
          return getUpdatedPackageFiles({
            ...config,
            parentBranch: undefined,
          });
        }
        throw new Error('Error updating branch content and cannot rebase');
      }
      if (newContent !== existingContent) {
        if (config.parentBranch) {
          // This ensures it's always 1 commit from Renovate
          logger.info('Need to update package file so will rebase first');
          return getUpdatedPackageFiles({
            ...config,
            parentBranch: undefined,
          });
        }
        logger.debug('Updating packageFile content');
        updatedPackageFiles[upgrade.packageFile] = newContent;
      }
    }
  }
  return {
    parentBranch: config.parentBranch, // Need to overwrite original config
    updatedPackageFiles: Object.keys(updatedPackageFiles).map(packageFile => ({
      name: packageFile,
      contents: updatedPackageFiles[packageFile],
    })),
    return null;
  };
}

async function resolvePackageFiles(config) {
  logger.debug('manager.resolvePackageFile()');
  logger.trace({ config });
  const allPackageFiles = await detectPackageFiles(config);
  logger.debug({ allPackageFiles }, 'allPackageFiles');
  async function resolvePackageFile(p) {
    let packageFile = p;
    const { manager } = packageFile;
    if (managers[manager].resolvePackageFile) {
      return managers[manager].resolvePackageFile(config, packageFile);
    }
    const { language } = managers[manager];
    const languageConfig = language ? config[language] : {};
    const managerConfig = mergeChildConfig(languageConfig, config[manager]);
    packageFile = mergeChildConfig(managerConfig, packageFile);
    logger.debug(
      `Resolving packageFile ${JSON.stringify(packageFile.packageFile)}`
    );
    packageFile.content = await platform.getFile(packageFile.packageFile);
    return packageFile;
  }
  let queue = allPackageFiles.map(p => () => resolvePackageFile(p));
  // limit to 100 maximum package files if no global value set
  const maxPackageFiles = config.global.maxPackageFiles || 100;
  // istanbul ignore if
  if (queue.length > maxPackageFiles) {
    logger.warn(`packageFile queue length is ${queue.length}`);
    queue = queue.slice(0, maxPackageFiles);
  }
  // retrieve with concurrency of 5
  const packageFiles = (await pAll(queue, { concurrency: 5 })).filter(
    p => p !== null
  );
  logger.debug('Checking against path rules');
  return checkMonorepos({ ...config, packageFiles });
}
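With this facade in place, callers no longer require manager modules directly; they either call the generated wrappers with the manager name as the first argument, or look a function up via get. A minimal sketch based on the call sites later in this diff (manager-files.js and get-updated.js); assume it runs inside an async function:

const { get, extractDependencies } = require('../manager');
// dispatches to lib/manager/npm
const res = await extractDependencies('npm', content, 'package.json');
// or fetch a single manager function by name
const updateDependency = get('npm', 'updateDependency');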
@@ -1,62 +0,0 @@
module.exports = {
  extractDependencies,
};

function extractDependencies(packageJson, config) {
  const {
    depType,
    packageLockParsed,
    npmShrinkwrapParsed,
    yarnLockParsed,
  } = config;
  const depNames = packageJson[depType]
    ? Object.keys(packageJson[depType])
    : [];
  const deps = depNames
    .map(depName => {
      const currentVersion = packageJson[depType][depName]
        ? `${packageJson[depType][depName]}`.trim().replace(/^=/, '')
        : undefined;
      let lockedVersion;
      try {
        const lockFile = packageLockParsed || npmShrinkwrapParsed;
        if (lockFile) {
          if (lockFile.dependencies[depName]) {
            lockedVersion = lockFile.dependencies[depName].version;
            if (lockedVersion !== currentVersion) {
              logger.debug(
                { currentVersion, lockedVersion },
                'Found locked version'
              );
            }
          } else {
            logger.debug({ currentVersion }, 'Found no locked version');
          }
        } else if (yarnLockParsed && yarnLockParsed.object) {
          const key = `${depName}@${currentVersion}`;
          const lockEntry = yarnLockParsed.object[key];
          if (lockEntry) {
            lockedVersion = lockEntry.version;
            if (lockedVersion !== currentVersion) {
              logger.debug(
                { currentVersion, lockedVersion },
                'Found locked version'
              );
            }
          } else {
            logger.debug({ currentVersion }, 'Found no locked version');
          }
        }
      } catch (err) {
        logger.debug({ currentVersion }, 'Could not find locked version');
      }
      return {
        depType,
        depName,
        currentVersion,
        lockedVersion,
      };
    })
    .filter(dep => dep.currentVersion);
  return { deps };
}
lib/manager/npm/extract/index.js (new file)
@@ -0,0 +1,108 @@
const path = require('path');
const upath = require('upath');
const { getLockedVersions } = require('./locked-versions');
const { detectMonorepos } = require('./monorepo');

module.exports = {
  extractDependencies,
  postExtract,
};

async function extractDependencies(content, packageFile) {
  logger.debug({ content, packageFile });
  const deps = [];
  let packageJson;
  try {
    packageJson = JSON.parse(content);
  } catch (err) {
    logger.info({ packageFile }, 'Invalid JSON');
    return null;
  }
  const packageJsonName = packageJson.name;
  const packageJsonVersion = packageJson.version;
  const yarnWorkspacesPackages = packageJson.workspaces;

  const lockFiles = {
    yarnLock: 'yarn.lock',
    packageLock: 'package-lock.json',
    shrinkwrapJson: 'npm-shrinkwrap.json',
    pnpmShrinkwrap: 'shrinkwrap.yaml',
  };

  for (const [key, val] of Object.entries(lockFiles)) {
    const filePath = upath.join(path.dirname(packageFile), val);
    if (await platform.getFile(filePath)) {
      lockFiles[key] = filePath;
    } else {
      lockFiles[key] = undefined;
    }
  }
  lockFiles.npmLock = lockFiles.packageLock || lockFiles.shrinkwrapJson;
  delete lockFiles.packageLock;
  delete lockFiles.shrinkwrapJson;

  let npmrc = await platform.getFile(
    upath.join(path.dirname(packageFile), '.npmrc')
  );
  if (!npmrc) {
    npmrc = undefined;
  }

  let lernaDir;
  let lernaPackages;
  let lernaClient;
  const lernaJson = JSON.parse(
    await platform.getFile(upath.join(path.dirname(packageFile), 'lerna.json'))
  );
  if (lernaJson) {
    lernaDir = path.dirname(packageFile);
    lernaPackages = lernaJson.packages;
    lernaClient = lernaJson.npmClient;
  }

  const depTypes = [
    'dependencies',
    'devDependencies',
    'optionalDependencies',
    'peerDependencies',
    'engines',
  ];
  for (const depType of depTypes) {
    if (packageJson[depType]) {
      try {
        for (const [depName, version] of Object.entries(packageJson[depType])) {
          deps.push({
            depName,
            depType,
            currentVersion: version.trim().replace(/^=/, ''),
          });
        }
      } catch (err) /* istanbul ignore next */ {
        logger.info(
          { packageFile, depType, err, message: err.message },
          'Error parsing package.json'
        );
        return null;
      }
    }
  }
  if (!(deps.length || lernaDir || yarnWorkspacesPackages)) {
    return null;
  }
  return {
    deps,
    packageJsonName,
    packageJsonVersion,
    npmrc,
    ...lockFiles,
    lernaDir,
    lernaClient,
    lernaPackages,
    yarnWorkspacesPackages,
  };
}

async function postExtract(packageFiles) {
  await detectMonorepos(packageFiles);
  await getLockedVersions(packageFiles);
}
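For orientation, a package.json of { "name": "demo", "dependencies": { "lodash": "4.17.5" } } with no lock files alongside it would come out of this extractDependencies roughly as follows (illustrative shape inferred from the return statement above, not captured output):

// {
//   deps: [{ depName: 'lodash', depType: 'dependencies', currentVersion: '4.17.5' }],
//   packageJsonName: 'demo',
//   packageJsonVersion: undefined,
//   npmrc: undefined,
//   yarnLock: undefined,
//   npmLock: undefined,
//   pnpmShrinkwrap: undefined,
//   lernaDir: undefined,
//   lernaClient: undefined,
//   lernaPackages: undefined,
//   yarnWorkspacesPackages: undefined,
// }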
lib/manager/npm/extract/locked-versions.js (new file)
@@ -0,0 +1,36 @@
const { getNpmLock } = require('./npm');
const { getYarnLock } = require('./yarn');

module.exports = {
  getLockedVersions,
};

async function getLockedVersions(packageFiles) {
  const lockFileCache = {};
  logger.debug('Finding locked versions');
  for (const packageFile of packageFiles) {
    const { yarnLock, npmLock, pnpmShrinkwrap } = packageFile;
    if (yarnLock) {
      logger.debug('Found yarnLock');
      if (!lockFileCache[yarnLock]) {
        logger.debug('Retrieving/parsing ' + yarnLock);
        lockFileCache[yarnLock] = await getYarnLock(yarnLock);
      }
      for (const dep of packageFile.deps) {
        dep.lockedVersion =
          lockFileCache[yarnLock][`${dep.depName}@${dep.currentVersion}`];
      }
    } else if (npmLock) {
      logger.debug({ npmLock }, 'npm lockfile');
      if (!lockFileCache[npmLock]) {
        logger.debug('Retrieving/parsing ' + npmLock);
        lockFileCache[npmLock] = await getNpmLock(npmLock);
      }
      for (const dep of packageFile.deps) {
        dep.lockedVersion = lockFileCache[npmLock][dep.depName];
      }
    } else if (pnpmShrinkwrap) {
      logger.info('TODO: implement shrinkwrap.yaml parsing of lockVersion');
    }
  }
}
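The cache above is keyed by lock file path, and each cached entry maps a lookup key to an installed version: name@range for yarn, bare name for npm. Hypothetical contents for illustration:

// lockFileCache['yarn.lock'] => { 'lodash@4.17.5': '4.17.5', 'chalk@^2.0.0': '2.4.2' }
// lockFileCache['package-lock.json'] => { lodash: '4.17.5', chalk: '2.4.2' }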
lib/manager/npm/extract/monorepo.js (new file)
@@ -0,0 +1,56 @@
const minimatch = require('minimatch');
const path = require('path');
const upath = require('upath');

module.exports = {
  detectMonorepos,
};

function matchesAnyPattern(val, patterns) {
  return patterns.some(pattern => minimatch(val, pattern));
}

function detectMonorepos(packageFiles) {
  logger.debug('Detecting Lerna and Yarn Workspaces');
  for (const p of packageFiles) {
    const {
      packageFile,
      npmLock,
      yarnLock,
      lernaDir,
      lernaClient,
      lernaPackages,
      yarnWorkspacesPackages,
    } = p;
    const basePath = path.dirname(packageFile);
    const packages =
      lernaClient === 'yarn' && yarnWorkspacesPackages
        ? yarnWorkspacesPackages
        : lernaPackages;
    if (packages && packages.length) {
      logger.debug(
        { packageFile },
        'Found monorepo packages with base path ' + basePath
      );
      const subPackagePatterns = packages.map(pattern =>
        upath.join(basePath, pattern)
      );
      const subPackages = packageFiles.filter(sp =>
        matchesAnyPattern(path.dirname(sp.packageFile), subPackagePatterns)
      );
      const subPackageNames = subPackages
        .map(sp => sp.packageJsonName)
        .filter(Boolean);
      // add all names to main package.json
      p.monorepoPackages = subPackageNames;
      for (const subPackage of subPackages) {
        subPackage.monorepoPackages = subPackageNames.filter(
          name => name !== subPackage.packageJsonName
        );
        subPackage.lernaDir = lernaDir;
        subPackage.yarnLock = subPackage.yarnLock || yarnLock;
        subPackage.npmLock = subPackage.npmLock || npmLock;
      }
    }
  }
}
lib/manager/npm/extract/npm.js (new file)
@@ -0,0 +1,22 @@
module.exports = {
  getNpmLock,
};

async function getNpmLock(filePath) {
  const lockRaw = await platform.getFile(filePath);
  try {
    const lockParsed = JSON.parse(lockRaw);
    const lockFile = {};
    for (const [entry, val] of Object.entries(lockParsed.dependencies)) {
      logger.trace({ entry, version: val.version });
      lockFile[entry] = val.version;
    }
    return lockFile;
  } catch (err) {
    logger.info(
      { filePath, err, message: err.message },
      'Warning: Exception parsing npm lock file'
    );
    return {};
  }
}
lib/manager/npm/extract/yarn.js (new file)
@@ -0,0 +1,32 @@
const yarnLockParser = require('@yarnpkg/lockfile');

module.exports = {
  getYarnLock,
};

async function getYarnLock(filePath) {
  const yarnLockRaw = await platform.getFile(filePath);
  try {
    const yarnLockParsed = yarnLockParser.parse(yarnLockRaw);
    // istanbul ignore if
    if (yarnLockParsed.type !== 'success') {
      logger.info(
        { filePath, parseType: yarnLockParsed.type },
        'Error parsing yarn.lock - not success'
      );
      return {};
    }
    const lockFile = {};
    for (const [entry, val] of Object.entries(yarnLockParsed.object)) {
      logger.trace({ entry, version: val.version });
      lockFile[entry] = val.version;
    }
    return lockFile;
  } catch (err) {
    logger.info(
      { filePath, err, message: err.message },
      'Warning: Exception parsing yarn.lock'
    );
    return {};
  }
}
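@yarnpkg/lockfile's parse() returns { type, object }, where object is keyed by name@range; the loop above flattens each entry down to its version. A hypothetical entry for illustration:

// yarnLockParsed.object['minimatch@^3.0.4'] => { version: '3.0.4', resolved: '...' }
// so lockFile['minimatch@^3.0.4'] === '3.0.4'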
@@ -1,11 +1,11 @@
const { extractDependencies } = require('./extract');
const { extractDependencies, postExtract } = require('./extract');
const { getPackageUpdates } = require('./package');
const { resolvePackageFile } = require('./resolve');
const { updateDependency } = require('./update');

module.exports = {
  extractDependencies,
  postExtract,
  getPackageUpdates,
  resolvePackageFile,
  updateDependency,
  supportsLockFileMaintenance: true,
};
@@ -1,90 +0,0 @@
const minimatch = require('minimatch');
const path = require('path');
const upath = require('upath');

module.exports = {
  checkMonorepos,
};

async function checkMonorepos(config) {
  const monorepoPackages = [];
  logger.debug('checkMonorepos()');
  logger.trace({ config });
  // yarn workspaces
  let foundWorkspaces = false;
  for (const packageFile of config.packageFiles) {
    if (
      packageFile.packageFile &&
      packageFile.packageFile.endsWith('package.json') &&
      packageFile.content.workspaces
    ) {
      foundWorkspaces = true;
      packageFile.workspaces = true;
      const workspaceDir = path.dirname(packageFile.packageFile);
      const { workspaces } = packageFile.content;
      if (workspaces.length) {
        logger.info(
          { packageFile: packageFile.packageFile, workspaces },
          'Found yarn workspaces'
        );
        for (const workspace of workspaces) {
          const basePath = upath.join(workspaceDir, workspace);
          logger.debug(`basePath=${basePath}`);
          for (const innerPackageFile of config.packageFiles) {
            if (
              minimatch(path.dirname(innerPackageFile.packageFile), basePath)
            ) {
              logger.debug(`Matched ${innerPackageFile.packageFile}`);
              const depName = innerPackageFile.content.name;
              monorepoPackages.push(depName);
              innerPackageFile.workspaceDir = workspaceDir;
            }
          }
        }
      }
    }
  }
  if (foundWorkspaces) {
    logger.debug('Ignoring any lerna and returning workspaces');
    return { ...config, workspaces: true, monorepoPackages };
  }
  // lerna
  let lernaJson;
  try {
    logger.debug('Checking for lerna.json');
    lernaJson = JSON.parse(await platform.getFile('lerna.json'));
  } catch (err) {
    logger.info('Error parsing lerna.json');
  }
  if (!lernaJson) {
    logger.debug('No lerna.json found');
    return { ...config, monorepoPackages };
  }
  let lernaLockFile;
  // istanbul ignore else
  if (await platform.getFile('package-lock.json')) {
    logger.debug('lerna has a package-lock.json');
    lernaLockFile = 'npm';
  } else if (
    lernaJson.npmClient === 'yarn' &&
    (await platform.getFile('yarn.lock'))
  ) {
    logger.debug('lerna has non-workspaces yarn');
    lernaLockFile = 'yarn';
  }
  if (lernaJson && lernaJson.packages) {
    logger.debug({ lernaJson }, 'Found lerna config');
    for (const packageGlob of lernaJson.packages) {
      for (const packageFile of config.packageFiles) {
        if (minimatch(path.dirname(packageFile.packageFile), packageGlob)) {
          const depName = packageFile.content.name;
          if (!monorepoPackages.includes(depName)) {
            monorepoPackages.push(depName);
          }
          packageFile.lerna = true;
        }
      }
    }
  }
  return { ...config, lernaLockFile, monorepoPackages };
}
@@ -7,173 +7,83 @@ const yarn = require('./yarn');
const pnpm = require('./pnpm');

module.exports = {
  hasPackageLock,
  hasNpmShrinkwrap,
  hasYarnLock,
  hasShrinkwrapYaml,
  determineLockFileDirs,
  writeExistingFiles,
  writeUpdatedPackageFiles,
  getUpdatedLockFiles,
  getAdditionalFiles,
};

function hasPackageLock(config, packageFile) {
  logger.trace(
    { packageFiles: config.packageFiles, packageFile },
    'hasPackageLock'
  );
  for (const p of config.packageFiles) {
    if (p.packageFile === packageFile) {
      if (p.packageLock) {
        return true;
      }
      return false;
    }
  }
  throw new Error(`hasPackageLock cannot find ${packageFile}`);
}
// Strips empty values, deduplicates, and returns the directories from filenames
// istanbul ignore next
const getDirs = arr =>
  Array.from(new Set(arr.filter(Boolean).map(path.dirname)));

function hasNpmShrinkwrap(config, packageFile) {
  logger.trace(
    { packageFiles: config.packageFiles, packageFile },
    'hasNpmShrinkwrap'
  );
  for (const p of config.packageFiles) {
    if (p.packageFile === packageFile) {
      if (p.npmShrinkwrap) {
        return true;
      }
      return false;
    }
  }
  throw new Error(`hasNpmShrinkwrap cannot find ${packageFile}`);
}

function hasYarnLock(config, packageFile) {
  logger.trace(
    { packageFiles: config.packageFiles, packageFile },
    'hasYarnLock'
  );
  for (const p of config.packageFiles) {
    if (p.packageFile === packageFile) {
      if (p.yarnLock) {
        return true;
      }
      return false;
    }
  }
  throw new Error(`hasYarnLock cannot find ${packageFile}`);
}

function hasShrinkwrapYaml(config, packageFile) {
  logger.trace(
    { packageFiles: config.packageFiles, packageFile },
    'hasShrinkwrapYaml'
  );
  for (const p of config.packageFiles) {
    if (p.packageFile === packageFile) {
      if (p.shrinkwrapYaml) {
        return true;
      }
      return false;
    }
  }
  throw new Error(`hasShrinkwrapYaml cannot find ${packageFile}`);
}

function determineLockFileDirs(config) {
  const packageLockFileDirs = [];
  const npmShrinkwrapDirs = [];
  const yarnLockFileDirs = [];
  const shrinkwrapYamlDirs = [];
// istanbul ignore next
function determineLockFileDirs(config, packageFiles) {
  const npmLockDirs = [];
  const yarnLockDirs = [];
  const pnpmShrinkwrapDirs = [];
  const lernaDirs = [];

  for (const upgrade of config.upgrades) {
    if (upgrade.type === 'lockFileMaintenance') {
      // Return every directory that contains a lockfile
      for (const packageFile of config.packageFiles) {
        const dirname = path.dirname(packageFile.packageFile);
        if (packageFile.yarnLock) {
          yarnLockFileDirs.push(dirname);
        }
        if (packageFile.packageLock) {
          packageLockFileDirs.push(dirname);
        }
        if (packageFile.npmShrinkwrap) {
          npmShrinkwrapDirs.push(dirname);
        }
        if (packageFile.shrinkwrapYaml) {
          shrinkwrapYamlDirs.push(dirname);
      // TODO: support lerna
      // Return every directory that contains a lockfile
      for (const packageFile of packageFiles.npm) {
        if (packageFile.lernaDir) {
          lernaDirs.push(packageFile.lernaDir);
        } else {
          yarnLockDirs.push(packageFile.yarnLock);
          npmLockDirs.push(packageFile.npmLock);
          pnpmShrinkwrapDirs.push(packageFile.pnpmShrinkwrap);
        }
      }
      return {
        packageLockFileDirs,
        npmShrinkwrapDirs,
        yarnLockFileDirs,
        shrinkwrapYamlDirs,
        yarnLockDirs: getDirs(yarnLockDirs),
        npmLockDirs: getDirs(npmLockDirs),
        pnpmShrinkwrapDirs: getDirs(pnpmShrinkwrapDirs),
        lernaDirs: getDirs(lernaDirs),
      };
    }
  }

  for (const packageFile of config.updatedPackageFiles) {
    if (
      module.exports.hasYarnLock(config, packageFile.name) &&
      !config.lernaLockFile
    ) {
      yarnLockFileDirs.push(path.dirname(packageFile.name));
  function getPackageFile(fileName) {
    logger.trace('Looking for packageFile: ' + fileName);
    for (const packageFile of packageFiles.npm) {
      if (packageFile.packageFile === fileName) {
        logger.trace({ packageFile }, 'Found packageFile');
        return packageFile;
      }
    if (
      module.exports.hasPackageLock(config, packageFile.name) &&
      !config.lernaLockFile
    ) {
      packageLockFileDirs.push(path.dirname(packageFile.name));
    }
    if (
      module.exports.hasNpmShrinkwrap(config, packageFile.name) &&
      !config.lernaLockFile
    ) {
      npmShrinkwrapDirs.push(path.dirname(packageFile.name));
    }
    if (module.exports.hasShrinkwrapYaml(config, packageFile.name)) {
      shrinkwrapYamlDirs.push(path.dirname(packageFile.name));
      logger.trace('No match');
    }
    return {};
  }

  if (
    config.updatedPackageFiles &&
    config.updatedPackageFiles.length &&
    config.lernaLockFile
  ) {
    lernaDirs.push('.');
  }

  // If yarn workspaces are in use, then we need to generate yarn.lock from the workspaces dir
  if (
    config.updatedPackageFiles &&
    config.updatedPackageFiles.length &&
    config.workspaceDir
  ) {
    const updatedPackageFileNames = config.updatedPackageFiles.map(p => p.name);
    for (const packageFile of config.packageFiles) {
      if (
        updatedPackageFileNames.includes(packageFile.packageFile) &&
        packageFile.workspaceDir &&
        !yarnLockFileDirs.includes(packageFile.workspaceDir)
      )
        yarnLockFileDirs.push(packageFile.workspaceDir);
  for (const p of config.updatedPackageFiles) {
    logger.debug(`Checking ${p.name} for lock files`);
    const packageFile = getPackageFile(p.name);
    // lerna first
    if (packageFile.lernaDir) {
      logger.debug(`${packageFile.packageFile} has lerna lock file`);
      lernaDirs.push(packageFile.lernaDir);
    } else {
      // push full lock file names and convert them later
      yarnLockDirs.push(packageFile.yarnLock);
      npmLockDirs.push(packageFile.npmLock);
      pnpmShrinkwrapDirs.push(packageFile.pnpmShrinkwrap);
    }
  }

  return {
    yarnLockFileDirs,
    packageLockFileDirs,
    npmShrinkwrapDirs,
    shrinkwrapYamlDirs,
    lernaDirs,
    yarnLockDirs: getDirs(yarnLockDirs),
    npmLockDirs: getDirs(npmLockDirs),
    pnpmShrinkwrapDirs: getDirs(pnpmShrinkwrapDirs),
    lernaDirs: getDirs(lernaDirs),
  };
}

async function writeExistingFiles(config) {
// istanbul ignore next
async function writeExistingFiles(config, packageFiles) {
  const lernaJson = await platform.getFile('lerna.json');
  if (lernaJson) {
    logger.debug(`Writing repo lerna.json (${config.tmpDir.path})`);
@@ -193,12 +103,10 @@ async function writeExistingFiles(config) {
      config.yarnrc
    );
  }
  if (!config.packageFiles) {
  if (!packageFiles.npm) {
    return;
  }
  const npmFiles = config.packageFiles.filter(p =>
    p.packageFile.endsWith('package.json')
  );
  const npmFiles = packageFiles.npm;
  logger.debug(
    { packageFiles: npmFiles.map(n => n.packageFile) },
    'Writing package.json files'
@@ -210,7 +118,9 @@
    );
    logger.trace(`Writing package.json to ${basedir}`);
    // Massage the file to eliminate yarn errors
    const massagedFile = { ...packageFile.content };
    const massagedFile = JSON.parse(
      await platform.getFile(packageFile.packageFile)
    );
    if (massagedFile.name) {
      massagedFile.name = massagedFile.name.replace(/[{}]/g, '');
    }
@@ -309,7 +219,10 @@
    if (packageFile.yarnLock && config.type !== 'lockFileMaintenance') {
      logger.debug(`Writing yarn.lock to ${basedir}`);
      const yarnLock = await platform.getFile(packageFile.yarnLock);
      await fs.outputFile(upath.join(basedir, 'yarn.lock'), yarnLock);
      await fs.outputFile(
        upath.join(config.tmpDir.path, packageFile.yarnLock),
        yarnLock
      );
    } else {
      logger.trace(`Removing ${basedir}/yarn.lock`);
      await fs.remove(upath.join(basedir, 'yarn.lock'));
@@ -318,12 +231,12 @@
    const pnpmBug992 = true;
    // istanbul ignore next
    if (
      packageFile.shrinkwrapYaml &&
      packageFile.pnpmShrinkwrap &&
      config.type !== 'lockFileMaintenance' &&
      !pnpmBug992
    ) {
      logger.debug(`Writing shrinkwrap.yaml to ${basedir}`);
      const shrinkwrap = await platform.getFile(packageFile.shrinkwrapYaml);
      const shrinkwrap = await platform.getFile(packageFile.pnpmShrinkwrap);
      await fs.outputFile(upath.join(basedir, 'shrinkwrap.yaml'), shrinkwrap);
    } else {
      await fs.remove(upath.join(basedir, 'shrinkwrap.yaml'));
@@ -331,6 +244,7 @@
  }
}

// istanbul ignore next
function listLocalLibs(dependencies) {
  logger.trace(`listLocalLibs (${dependencies})`);
  const toCopy = [];
@@ -350,6 +264,7 @@ function listLocalLibs(dependencies) {
  return toCopy;
}

// istanbul ignore next
async function writeUpdatedPackageFiles(config) {
  logger.trace({ config }, 'writeUpdatedPackageFiles');
  logger.debug('Writing any updated package files');
@@ -375,8 +290,9 @@ async function writeUpdatedPackageFiles(config) {
  }
}

async function getUpdatedLockFiles(config) {
  logger.trace({ config }, 'getUpdatedLockFiles');
// istanbul ignore next
async function getAdditionalFiles(config, packageFiles) {
  logger.trace({ config }, 'getAdditionalFiles');
  logger.debug('Getting updated lock files');
  const lockFileErrors = [];
  const updatedLockFiles = [];
@@ -392,10 +308,10 @@ async function getUpdatedLockFiles(config) {
    logger.debug('Skipping lockFileMaintenance update');
    return { lockFileErrors, updatedLockFiles };
  }
  const dirs = module.exports.determineLockFileDirs(config);
  const dirs = module.exports.determineLockFileDirs(config, packageFiles);
  logger.debug({ dirs }, 'lock file dirs');
  await module.exports.writeExistingFiles(config);
  await module.exports.writeUpdatedPackageFiles(config);
  await module.exports.writeExistingFiles(config, packageFiles);
  await module.exports.writeUpdatedPackageFiles(config, packageFiles);

  const env =
    config.global && config.global.exposeEnv
@@ -403,7 +319,7 @@
      : { HOME: process.env.HOME, PATH: process.env.PATH };
  env.NODE_ENV = 'dev';

  for (const lockFileDir of dirs.packageLockFileDirs) {
  for (const lockFileDir of dirs.npmLockDirs) {
    logger.debug(`Generating package-lock.json for ${lockFileDir}`);
    const lockFileName = upath.join(lockFileDir, 'package-lock.json');
    const res = await npm.generateLockFile(
@@ -455,7 +371,7 @@
  }

  // istanbul ignore next
  for (const lockFileDir of dirs.npmShrinkwrapDirs) {
  for (const lockFileDir of dirs.pnpmShrinkwrapDirs) {
    logger.debug(`Generating npm-shrinkwrap.json for ${lockFileDir}`);
    const lockFileName = upath.join(lockFileDir, 'npm-shrinkwrap.json');
    const res = await npm.generateLockFile(
@@ -506,7 +422,7 @@
  }
  }

  for (const lockFileDir of dirs.yarnLockFileDirs) {
  for (const lockFileDir of dirs.yarnLockDirs) {
    logger.debug(`Generating yarn.lock for ${lockFileDir}`);
    const lockFileName = upath.join(lockFileDir, 'yarn.lock');
    const res = await yarn.generateLockFile(
@@ -558,7 +474,7 @@
  }
  }

  for (const lockFileDir of dirs.shrinkwrapYamlDirs) {
  for (const lockFileDir of dirs.pnpmShrinkwrapDirs) {
    logger.debug(`Generating shrinkwrap.yaml for ${lockFileDir}`);
    const lockFileName = upath.join(lockFileDir, 'shrinkwrap.yaml');
    const res = await pnpm.generateLockFile(
@@ -639,7 +555,7 @@
        stderr: res.stderr,
      });
    } else {
      for (const packageFile of config.packageFiles) {
      for (const packageFile of packageFiles.npm) {
        const baseDir = path.dirname(packageFile.packageFile);
        const filename = upath.join(baseDir, lockFile);
        logger.debug('Checking for ' + filename);
@@ -1,137 +0,0 @@
const path = require('path');
const upath = require('upath');
const configParser = require('../../config');

module.exports = {
  resolvePackageFile,
};

async function resolvePackageFile(config, inputFile) {
  const packageFile = configParser.mergeChildConfig(config.npm, inputFile);
  logger.debug(
    `Resolving packageFile ${JSON.stringify(packageFile.packageFile)}`
  );

  const pFileRaw = await platform.getFile(packageFile.packageFile);
  // istanbul ignore if
  if (!pFileRaw) {
    logger.info(
      { packageFile: packageFile.packageFile },
      'Cannot find package.json'
    );
    config.errors.push({
      depName: packageFile.packageFile,
      message: 'Cannot find package.json',
    });
    return null;
  }
  try {
    packageFile.content = JSON.parse(pFileRaw);
  } catch (err) {
    logger.info(
      { packageFile: packageFile.packageFile },
      'Cannot parse package.json'
    );
    if (config.repoIsOnboarded) {
      const error = new Error('config-validation');
      error.configFile = packageFile.packageFile;
      error.validationError = 'Cannot parse package.json';
      error.validationMessage =
        'This package.json contains invalid JSON and cannot be parsed. Please fix it, or add it to your "ignorePaths" array in your renovate config so that Renovate can continue.';
      throw error;
    }
    config.errors.push({
      depName: packageFile.packageFile,
      message:
        "Cannot parse package.json (invalid JSON). Please fix the contents or add the file/path to the `ignorePaths` array in Renovate's config",
    });
    return null;
  }

  if (
    inputFile.packageFile.includes('package.json') &&
    inputFile.packageFile !== 'package.json' &&
    packageFile.content.renovate !== undefined
  ) {
    const error = new Error('config-validation');
    error.configFile = packageFile.packageFile;
    error.validationError = 'package.json configuration error';
    error.validationMessage =
      'Nested package.json must not contain renovate configuration';
    throw error;
  }

  if (!config.ignoreNpmrcFile) {
    packageFile.npmrc = await platform.getFile(
      upath.join(path.dirname(packageFile.packageFile), '.npmrc')
    );
  }
  if (packageFile.npmrc) {
    logger.info({ packageFile: packageFile.packageFile }, 'Found .npmrc');
    if (packageFile.npmrc.match(/\${NPM_TOKEN}/) && !config.global.exposeEnv) {
      logger.info('Stripping NPM_TOKEN from .npmrc');
      packageFile.npmrc = packageFile.npmrc
        .replace(/(^|\n).*?\${NPM_TOKEN}.*?(\n|$)/g, '')
        .trim();
      if (packageFile.npmrc === '') {
        logger.info('Removing empty .npmrc');
        delete packageFile.npmrc;
      }
    }
  } else {
    delete packageFile.npmrc;
  }
  packageFile.yarnrc = await platform.getFile(
    upath.join(path.dirname(packageFile.packageFile), '.yarnrc')
  );
  if (packageFile.yarnrc) {
    logger.info({ packageFile: packageFile.packageFile }, 'Found .yarnrc');
  } else {
    delete packageFile.yarnrc;
  }
  // Detect if lock files are used
  const yarnLockFileName = upath.join(
    path.dirname(packageFile.packageFile),
    'yarn.lock'
  );
  const fileList = await platform.getFileList();
  if (fileList.includes(yarnLockFileName)) {
    logger.debug({ packageFile: packageFile.packageFile }, 'Found yarn.lock');
    packageFile.yarnLock = yarnLockFileName;
  }
  const packageLockFileName = upath.join(
    path.dirname(packageFile.packageFile),
    'package-lock.json'
  );
  if (fileList.includes(packageLockFileName)) {
    logger.debug(
      { packageFile: packageFile.packageFile },
      'Found package-lock.json'
    );
    packageFile.packageLock = packageLockFileName;
  }
  const npmShrinkwrapFileName = upath.join(
    path.dirname(packageFile.packageFile),
    'npm-shrinkwrap.json'
  );
  if (fileList.includes(npmShrinkwrapFileName)) {
    logger.info(
      { packageFile: packageFile.packageFile },
      'Found npm-shrinkwrap.json'
    );
    packageFile.npmShrinkwrap = npmShrinkwrapFileName;
  }
  const shrinkwrapFileName = upath.join(
    path.dirname(packageFile.packageFile),
    'shrinkwrap.yaml'
  );
  if (fileList.includes(shrinkwrapFileName)) {
    logger.debug(
      { packageFile: packageFile.packageFile },
      'Found shrinkwrap.yaml'
    );
    packageFile.shrinkwrapYaml = shrinkwrapFileName;
  }
  packageFile.currentPackageJsonVersion = packageFile.content.version;
  return packageFile;
}
@@ -1273,10 +1273,14 @@ async function createBlob(fileContents) {

// Return the commit SHA for a branch
async function getBranchCommit(branchName) {
  try {
    const res = await get(
      `repos/${config.repository}/git/refs/heads/${branchName}`
    );
    return res.body.object.sha;
  } catch (err) /* istanbul ignore next */ {
    return null;
  }
}

async function getCommitDetails(commit) {
lib/workers/branch/get-updated.js (new file)
@@ -0,0 +1,52 @@
const { get } = require('../../manager');

module.exports = {
  getUpdatedPackageFiles,
};

async function getUpdatedPackageFiles(config) {
  logger.debug('manager.getUpdatedPackageFiles()');
  logger.trace({ config });
  const updatedPackageFiles = {};

  for (const upgrade of config.upgrades) {
    const { manager } = upgrade;
    if (upgrade.type !== 'lockFileMaintenance') {
      const existingContent =
        updatedPackageFiles[upgrade.packageFile] ||
        (await platform.getFile(upgrade.packageFile, config.parentBranch));
      let newContent = existingContent;
      const updateDependency = get(manager, 'updateDependency');
      newContent = await updateDependency(existingContent, upgrade);
      if (!newContent) {
        if (config.parentBranch) {
          logger.info('Rebasing branch after error updating content');
          return getUpdatedPackageFiles({
            ...config,
            parentBranch: undefined,
          });
        }
        throw new Error('Error updating branch content and cannot rebase');
      }
      if (newContent !== existingContent) {
        if (config.parentBranch) {
          // This ensures it's always 1 commit from Renovate
          logger.info('Need to update package file so will rebase first');
          return getUpdatedPackageFiles({
            ...config,
            parentBranch: undefined,
          });
        }
        logger.debug('Updating packageFile content');
        updatedPackageFiles[upgrade.packageFile] = newContent;
      }
    }
  }
  return {
    parentBranch: config.parentBranch, // Need to overwrite original config
    updatedPackageFiles: Object.keys(updatedPackageFiles).map(packageFile => ({
      name: packageFile,
      contents: updatedPackageFiles[packageFile],
    })),
  };
}
@@ -1,6 +1,6 @@
const schedule = require('./schedule');
const { getUpdatedPackageFiles } = require('../../manager');
const { getUpdatedLockFiles } = require('./lock-files');
const { getUpdatedPackageFiles } = require('./get-updated');
const { getAdditionalFiles } = require('../../manager/npm/post-update');
const { commitFilesToBranch } = require('./commit');
const { getParentBranch } = require('./parent');
const { tryBranchAutomerge } = require('./automerge');
@@ -14,7 +14,8 @@ module.exports = {
  processBranch,
};

async function processBranch(branchConfig) {
async function processBranch(branchConfig, packageFiles) {
  logger.debug(`processBranch with ${branchConfig.upgrades.length} upgrades`);
  const config = { ...branchConfig };
  const dependencies = config.upgrades
    .map(upgrade => upgrade.depName)
@@ -142,7 +143,7 @@ async function processBranch(branchConfig) {
    } else {
      logger.debug('No package files need updating');
    }
    Object.assign(config, await getUpdatedLockFiles(config));
    Object.assign(config, await getAdditionalFiles(config, packageFiles));
    if (config.updatedLockFiles && config.updatedLockFiles.length) {
      logger.debug(
        { updatedLockFiles: config.updatedLockFiles.map(f => f.name) },
@@ -1,63 +0,0 @@
const configParser = require('../../config');
const pkgWorker = require('./package');
const { extractDependencies } = require('../../manager');
const { applyPackageRules } = require('../../util/package-rules');

module.exports = {
  renovateDepType,
  getDepConfig,
};

async function renovateDepType(packageContent, config) {
  logger.setMeta({
    repository: config.repository,
    packageFile: config.packageFile,
    depType: config.depType,
  });
  logger.debug('renovateDepType()');
  logger.trace({ config });
  if (config.enabled === false) {
    logger.debug('depType is disabled');
    return [];
  }
  const res = await extractDependencies(packageContent, config);
  let deps;
  if (res) {
    ({ deps } = res);
  } else {
    deps = [];
  }
  if (config.lerna || config.workspaces || config.workspaceDir) {
    deps = deps.filter(
      dependency => config.monorepoPackages.indexOf(dependency.depName) === -1
    );
  }
  deps = deps.filter(
    dependency => config.ignoreDeps.indexOf(dependency.depName) === -1
  );
  logger.debug(`filtered deps length is ${deps.length}`);
  logger.debug({ deps }, `filtered deps`);
  // Obtain full config for each dependency
  const depConfigs = deps.map(dep => module.exports.getDepConfig(config, dep));
  logger.trace({ config: depConfigs }, `depConfigs`);
  // renovateDepType can return more than one upgrade each
  const pkgWorkers = depConfigs.map(depConfig =>
    pkgWorker.renovatePackage(depConfig)
  );
  // Use Promise.all to execute npm queries in parallel
  const allUpgrades = await Promise.all(pkgWorkers);
  logger.trace({ config: allUpgrades }, `allUpgrades`);
  // Squash arrays into one
  const combinedUpgrades = [].concat(...allUpgrades);
  logger.trace({ config: combinedUpgrades }, `combinedUpgrades`);
  return combinedUpgrades;
}

function getDepConfig(depTypeConfig, dep) {
  let depConfig = configParser.mergeChildConfig(depTypeConfig, dep);
  // Apply any matching package rules
  if (depConfig.packageRules) {
    depConfig = applyPackageRules(depConfig);
  }
  return configParser.filterConfig(depConfig, 'package');
}
@@ -1,159 +0,0 @@
const yarnLockParser = require('@yarnpkg/lockfile');
const configParser = require('../../config');
const depTypeWorker = require('./dep-type');
const npmApi = require('../../datasource/npm');
const upath = require('upath');

module.exports = {
  mightBeABrowserLibrary,
  renovatePackageFile,
  renovatePackageJson,
};

function mightBeABrowserLibrary(packageJson) {
  // return true unless we're sure it's not a browser library
  if (packageJson.private === true) {
    // it's not published
    return false;
  }
  if (packageJson.main === undefined) {
    // it can't be required
    return false;
  }
  // TODO: how can we know if it's a node.js library only, and not browser?
  // Otherwise play it safe and return true
  return true;
}

async function renovatePackageFile(config) {
  logger.setMeta({
    repository: config.repository,
    packageFile: config.packageFile,
  });
  logger.debug('renovatePackageFile()');
  const { manager } = config;
  if (config.enabled === false) {
    logger.info('packageFile is disabled');
    return [];
  }
  if (manager === 'npm') {
    return renovatePackageJson(config);
  }
  const content = await platform.getFile(config.packageFile);
  return depTypeWorker.renovateDepType(content, config);
}

async function renovatePackageJson(input) {
  const config = { ...input };
  if (config.npmrc) {
    logger.debug('Setting .npmrc');
    npmApi.setNpmrc(
      config.npmrc,
      config.global ? config.global.exposeEnv : false
    );
  }
  let upgrades = [];
  logger.info(`Processing package file`);

  let { yarnLock } = config;
  if (!yarnLock && config.workspaceDir) {
    yarnLock = upath.join(config.workspaceDir, 'yarn.lock');
    if (await platform.getFile(yarnLock)) {
      logger.debug({ yarnLock }, 'Using workspaces yarn.lock');
    } else {
      logger.debug('Yarn workspaces has no yarn.lock');
      yarnLock = undefined;
    }
  }
  if (yarnLock) {
    try {
      config.yarnLockParsed = yarnLockParser.parse(
        await platform.getFile(yarnLock)
      );
      if (config.yarnLockParsed.type !== 'success') {
        logger.info(
          { type: config.yarnLockParsed.type },
          'Error parsing yarn.lock - not success'
        );
        delete config.yarnLockParsed;
      }
      logger.trace({ yarnLockParsed: config.yarnLockParsed });
    } catch (err) {
      logger.info({ yarnLock }, 'Warning: Exception parsing yarn.lock');
    }
  } else if (config.packageLock) {
    try {
      config.packageLockParsed = JSON.parse(
        await platform.getFile(config.packageLock)
      );
      logger.trace({ packageLockParsed: config.packageLockParsed });
    } catch (err) {
      logger.warn(
        { packageLock: config.packageLock },
        'Could not parse package-lock.json'
      );
    }
  } else if (config.npmShrinkwrap) {
    try {
      config.npmShrinkwrapParsed = JSON.parse(
        await platform.getFile(config.npmShrinkwrap)
      );
      logger.trace({ npmShrinkwrapParsed: config.npmShrinkwrapParsed });
    } catch (err) {
      logger.warn(
        { npmShrinkwrap: config.npmShrinkwrap },
        'Could not parse npm-shrinkwrap.json'
      );
    }
  }

  const depTypes = [
    'dependencies',
    'devDependencies',
    'optionalDependencies',
    'peerDependencies',
    'engines',
  ];
  const depTypeConfigs = depTypes.map(depType => {
    const depTypeConfig = { ...config, depType };
    // Always pin devDependencies
    // Pin dependencies if we're pretty sure it's not a browser library
    if (
      depTypeConfig.pinVersions === null &&
      !depTypeConfig.upgradeInRange &&
      (depType === 'devDependencies' ||
        (depType === 'dependencies' && !mightBeABrowserLibrary(config.content)))
    ) {
      logger.debug({ depType }, 'Autodetecting pinVersions = true');
      depTypeConfig.pinVersions = true;
    }
    logger.trace({ config: depTypeConfig }, 'depTypeConfig');
    return configParser.filterConfig(depTypeConfig, 'depType');
  });
  logger.trace({ config: depTypeConfigs }, `depTypeConfigs`);
  for (const depTypeConfig of depTypeConfigs) {
    upgrades = upgrades.concat(
      await depTypeWorker.renovateDepType(config.content, depTypeConfig)
    );
  }
  if (
    config.lockFileMaintenance.enabled &&
    (config.yarnLock || config.packageLock)
  ) {
    logger.debug('lockFileMaintenance enabled');
    // Maintain lock files
    const lockFileMaintenanceConf = configParser.mergeChildConfig(
      config,
      config.lockFileMaintenance
    );
    lockFileMaintenanceConf.type = 'lockFileMaintenance';
    logger.trace(
      { config: lockFileMaintenanceConf },
      `lockFileMaintenanceConf`
    );
    upgrades.push(configParser.filterConfig(lockFileMaintenanceConf, 'branch'));
  }

  logger.info('Finished processing package file');
  return upgrades;
}
@@ -1,38 +0,0 @@
const configParser = require('../../config');
const { getPackageUpdates } = require('../../manager');

module.exports = {
  renovatePackage,
};

// Returns all results for a given dependency config
async function renovatePackage(config) {
  // These are done in parallel so we don't setMeta to avoid conflicts
  logger.trace(
    { dependency: config.depName, config },
    `renovatePackage(${config.depName})`
  );
  if (config.enabled === false) {
    logger.debug('package is disabled');
    return [];
  }
  const results = await getPackageUpdates(config);
  if (results.length) {
    logger.debug(
      { dependency: config.depName, results },
      `${config.depName} lookup results`
    );
  }
  // Flatten the result on top of config, add repositoryUrl
  return (
    results
      // combine upgrade fields with existing config
      .map(res => configParser.mergeChildConfig(config, res))
      // type can be major, minor, patch, pin, digest
      .map(res => configParser.mergeChildConfig(res, res[res.type]))
      // allow types to be disabled
      .filter(res => res.enabled)
      // strip unnecessary fields for next stage
      .map(res => configParser.filterConfig(res, 'branch'))
  );
}
lib/workers/repository/extract/file-match.js (new file)
@@ -0,0 +1,41 @@
const minimatch = require('minimatch');

module.exports = {
  getIncludedFiles,
  filterIgnoredFiles,
  getMatchingFiles,
};

function getIncludedFiles(fileList, includePaths) {
  if (!(includePaths && includePaths.length)) {
    return fileList;
  }
  return fileList.filter(file =>
    includePaths.some(
      includePath => file === includePath || minimatch(file, includePath)
    )
  );
}

function filterIgnoredFiles(fileList, ignorePaths) {
  if (!(ignorePaths && ignorePaths.length)) {
    return fileList;
  }
  return fileList.filter(
    file =>
      !ignorePaths.some(
        ignorePath => file.includes(ignorePath) || minimatch(file, ignorePath)
      )
  );
}

function getMatchingFiles(fileList, manager, fileMatch) {
  let matchedFiles = [];
  for (const match of fileMatch) {
    logger.debug(`Using file match: ${match} for manager ${manager}`);
    matchedFiles = matchedFiles.concat(
      fileList.filter(file => file.match(new RegExp(match)))
    );
  }
  return matchedFiles;
}
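Note that fileMatch entries are treated as regular expressions tested against each path in the repo file list, not as globs. For example (hypothetical file list; the pattern shown is npm's default fileMatch):

getMatchingFiles(['package.json', 'backend/package.json', 'readme.md'], 'npm', ['(^|/)package.json$']);
// => ['package.json', 'backend/package.json']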
23
lib/workers/repository/extract/index.js
Normal file
23
lib/workers/repository/extract/index.js
Normal file
|
@@ -0,0 +1,23 @@
const { getManagerList } = require('../../../manager');
const { getManagerConfig } = require('../../../config');
const { getManagerPackageFiles } = require('./manager-files');

module.exports = {
extractAllDependencies,
};

async function extractAllDependencies(config) {
const extractions = {};
let fileCount = 0;
for (const manager of getManagerList()) {
const managerConfig = getManagerConfig(config, manager);
managerConfig.manager = manager;
const packageFiles = await getManagerPackageFiles(config, managerConfig);
if (packageFiles.length) {
fileCount += packageFiles.length;
extractions[manager] = packageFiles;
}
}
logger.debug(`Found ${fileCount} package file(s)`);
return extractions;
}
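The result is keyed by manager and contains only managers that matched at least one file; a hypothetical shape (field values are illustrative):

// {
//   npm: [{ packageFile: 'package.json', manager: 'npm', deps: [...] }],
//   docker: [{ packageFile: 'Dockerfile', manager: 'docker', deps: [...] }],
// }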
55 lib/workers/repository/extract/manager-files.js Normal file
@@ -0,0 +1,55 @@
module.exports = {
getManagerPackageFiles,
};

const { extractDependencies, postExtract } = require('../../../manager');

const {
getIncludedFiles,
filterIgnoredFiles,
getMatchingFiles,
} = require('./file-match');

async function getManagerPackageFiles(config, managerConfig) {
const { manager, enabled, includePaths, ignorePaths } = managerConfig;
logger.debug(`getPackageFiles(${manager})`);
if (!enabled) {
logger.debug(`${manager} is disabled`);
return [];
}
if (
config.enabledManagers.length &&
!config.enabledManagers.includes(manager)
) {
logger.debug(`${manager} is not in enabledManagers list`);
return [];
}
let fileList = await platform.getFileList();
fileList = getIncludedFiles(fileList, includePaths);
fileList = filterIgnoredFiles(fileList, ignorePaths);
const matchedFiles = getMatchingFiles(
fileList,
manager,
config[manager].fileMatch
);
if (matchedFiles.length) {
logger.debug(
{ matchedFiles },
`Matched ${matchedFiles.length} file(s) for manager ${manager}`
);
}
const packageFiles = [];
for (const packageFile of matchedFiles) {
const content = await platform.getFile(packageFile);
const res = await extractDependencies(manager, content, packageFile);
if (res) {
packageFiles.push({
packageFile,
manager,
...res,
});
}
}
await postExtract(manager, packageFiles);
return packageFiles;
}
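From the loop above, each manager's extractDependencies is expected to return null when a file cannot be parsed, or an object whose fields (including a deps array) are spread into the packageFiles entry. A hypothetical npm-style result, for illustration only:

// extractDependencies(manager, content, packageFile) might resolve to:
// { deps: [{ depName: 'chalk', currentVersion: '^2.4.1', depType: 'dependencies' }] }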
@@ -9,21 +9,18 @@ module.exports = {
renovateRepository,
};

// istanbul ignore next
async function renovateRepository(repoConfig) {
let config = { ...repoConfig };
logger.setMeta({ repository: config.repository });
logger.info('Renovating repository');
-logger.trace({ config }, 'renovateRepository()');
+logger.trace({ config });
try {
config = await initRepo(config);
-let res;
-let branches;
-let branchList;
-let packageFiles;
-({ res, branches, branchList, packageFiles } = await processRepo(config)); // eslint-disable-line prefer-const
-if (!config.repoIsOnboarded) {
-res = await ensureOnboardingPr(config, packageFiles, branches);
-}
+const { res, branches, branchList, packageFiles } = await processRepo(
+config
+);
+await ensureOnboardingPr(config, packageFiles, branches);
await finaliseRepo(config, branchList);
return processResult(config, res);
} catch (err) /* istanbul ignore next */ {
@@ -1,4 +1,4 @@
-const { detectPackageFiles } = require('../../../../manager');
+const { extractAllDependencies } = require('../../extract');
const { createOnboardingBranch } = require('./create');
const { rebaseOnboardingBranch } = require('./rebase');
const { isOnboarded, onboardingPrExists } = require('./check');

@@ -20,7 +20,7 @@ async function checkOnboardingBranch(config) {
await rebaseOnboardingBranch(config);
} else {
logger.debug('Onboarding PR does not exist');
-if ((await detectPackageFiles(config)).length === 0) {
+if (Object.entries(await extractAllDependencies(config)).length === 0) {
throw new Error('no-package-files');
}
logger.info('Need to create onboarding PR');
@@ -4,6 +4,9 @@ const { getBaseBranchDesc } = require('./base-branch');
const { getPrList } = require('./pr-list');

async function ensureOnboardingPr(config, packageFiles, branches) {
+if (config.repoIsOnboarded) {
+return;
+}
logger.debug('ensureOnboardingPr()');
logger.trace({ config });
const onboardingBranch = `renovate/configure`;
@@ -27,14 +30,17 @@ You can post questions in [our Config Help repository](https://github.com/renova
---
`;
let prBody = prTemplate;
-if (packageFiles && packageFiles.length) {
+if (packageFiles && Object.entries(packageFiles).length) {
+let files = [];
+for (const [manager, managerFiles] of Object.entries(packageFiles)) {
+files = files.concat(
+managerFiles.map(file => ` * \`${file.packageFile}\` (${manager})`)
+);
+}
prBody =
prBody.replace(
'{{PACKAGE FILES}}',
-'## Detected Package Files\n\n' +
-packageFiles
-.map(packageFile => ` * \`${packageFile.packageFile}\``)
-.join('\n')
+'## Detected Package Files\n\n' + files.join('\n')
) + '\n';
} else {
prBody = prBody.replace('{{PACKAGE FILES}}\n', '');
@@ -62,12 +68,12 @@ You can post questions in [our Config Help repository](https://github.com/renova
// Check if existing PR needs updating
if (existingPr.title === onboardingPrTitle && existingPr.body === prBody) {
logger.info(`${existingPr.displayNumber} does not need updating`);
-return 'onboarding';
+return;
}
// PR must need updating
await platform.updatePr(existingPr.number, onboardingPrTitle, prBody);
logger.info(`Updated ${existingPr.displayNumber}`);
-return 'onboarding';
+return;
}
logger.info('Creating onboarding PR');
const labels = [];

@@ -80,7 +86,6 @@ You can post questions in [our Config Help repository](https://github.com/renova
useDefaultBranch
);
logger.info({ pr: pr.displayNumber }, 'Created onboarding PR');
-return 'onboarding';
}

module.exports = {
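With packageFiles now grouped by manager, a hypothetical input of { npm: [{ packageFile: 'package.json' }], docker: [{ packageFile: 'Dockerfile' }] } would render the onboarding PR section as:

 * `package.json` (npm)
 * `Dockerfile` (docker)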
@@ -1,21 +1,24 @@
-const { determineUpdates } = require('../updates');
const { writeUpdates } = require('./write');
const { sortBranches } = require('./sort');
-const { resolvePackageFiles } = require('../../../manager');
+const { fetchUpdates } = require('./fetch');
+const { branchifyUpgrades } = require('../updates/branchify');
+const { extractAllDependencies } = require('../extract');

module.exports = {
extractAndUpdate,
};

-async function extractAndUpdate(input) {
-let config = await resolvePackageFiles(input);
-config = await determineUpdates(config);
-const { branches, branchList, packageFiles } = config;
+async function extractAndUpdate(config) {
+logger.debug('extractAndUpdate()');
+const packageFiles = await extractAllDependencies(config);
+logger.debug({ packageFiles }, 'packageFiles');
+await fetchUpdates(config, packageFiles);
+logger.debug({ packageFiles }, 'packageFiles with updates');
+const { branches, branchList } = branchifyUpgrades(config, packageFiles);
sortBranches(branches);
let res;
if (config.repoIsOnboarded) {
-res = await writeUpdates(config);
+res = await writeUpdates(config, packageFiles, branches);
}
logger.setMeta({ repository: config.repository });
return { res, branches, branchList, packageFiles };
}
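In effect one data structure now flows through the three stages; a hypothetical trace (field values are illustrative, not from this commit):

// after extractAllDependencies(config):
// { npm: [{ packageFile: 'package.json', deps: [{ depName: 'chalk', currentVersion: '^2.0.0' }] }] }
// after fetchUpdates(config, packageFiles), which mutates deps in place:
// deps[0].updates === [{ type: 'minor', newVersion: '2.4.1' }]
// after branchifyUpgrades(config, packageFiles):
// branches === [{ branchName: 'renovate/chalk-2.x', upgrades: [...] }]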
69 lib/workers/repository/process/fetch.js Normal file
@@ -0,0 +1,69 @@
const pAll = require('p-all');

const { getPackageUpdates } = require('../../../manager');
const { mergeChildConfig } = require('../../../config');
const { applyPackageRules } = require('../../../util/package-rules');
const { getManagerConfig } = require('../../../config');

module.exports = {
fetchUpdates,
};

async function fetchDepUpdates(packageFileConfig, dep) {
/* eslint-disable no-param-reassign */
const { manager, packageFile } = packageFileConfig;
const { depName, currentVersion } = dep;
let depConfig = mergeChildConfig(packageFileConfig, dep);
depConfig = applyPackageRules(depConfig);
dep.updates = [];
if (depConfig.ignoreDeps.includes(depName)) {
logger.debug({ depName: dep.depName }, 'Dependency is ignored');
dep.skipReason = 'ignored';
} else if (
depConfig.monorepoPackages &&
depConfig.monorepoPackages.includes(depName)
) {
logger.debug(
{ depName: dep.depName },
'Dependency is ignored as part of monorepo'
);
dep.skipReason = 'monorepo';
} else if (depConfig.enabled === false) {
logger.debug({ depName: dep.depName }, 'Dependency is disabled');
dep.skipReason = 'disabled';
} else {
dep.updates = await getPackageUpdates(manager, depConfig);
logger.debug({
packageFile,
manager,
depName,
currentVersion,
updates: dep.updates,
});
}
/* eslint-enable no-param-reassign */
}

async function fetchManagerPackagerFileUpdates(config, managerConfig, pFile) {
const packageFileConfig = mergeChildConfig(managerConfig, pFile);
const queue = pFile.deps.map(dep => () =>
fetchDepUpdates(packageFileConfig, dep)
);
await pAll(queue, { concurrency: 10 });
}

async function fetchManagerUpdates(config, packageFiles, manager) {
const managerConfig = getManagerConfig(config, manager);
const queue = packageFiles[manager].map(pFile => () =>
fetchManagerPackagerFileUpdates(config, managerConfig, pFile)
);
await pAll(queue, { concurrency: 5 });
}

async function fetchUpdates(config, packageFiles) {
logger.debug(`manager.fetchUpdates()`);
const allManagerJobs = Object.keys(packageFiles).map(manager =>
fetchManagerUpdates(config, packageFiles, manager)
);
await Promise.all(allManagerJobs);
}
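p-all takes an array of promise-returning functions and runs at most `concurrency` of them at a time, which is why the code above wraps each call in a thunk (`() => fetchDepUpdates(...)`) rather than starting it immediately. A standalone sketch of the same pattern, where lookupVersions is a hypothetical async function:

const pAll = require('p-all');
const names = ['chalk', 'lodash', 'express'];
// each queue entry is a thunk; no work starts until pAll invokes it
const queue = names.map(name => () => lookupVersions(name));
pAll(queue, { concurrency: 2 }).then(results => {
// at most two lookups were ever in flight at once
});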
@@ -6,6 +6,7 @@ module.exports = {
};

async function processRepo(config) {
+logger.debug('processRepo()');
if (config.baseBranches && config.baseBranches.length) {
logger.info({ baseBranches: config.baseBranches }, 'baseBranches');
let res;

@@ -24,5 +25,6 @@ async function processRepo(config) {
}
return { res, branches, branchList };
}
+logger.debug('No baseBranches');
return extractAndUpdate(config);
}
57 lib/workers/repository/process/limits.js Normal file
@@ -0,0 +1,57 @@
const moment = require('moment');

module.exports = {
getPrHourlyRemaining,
getConcurrentPrsRemaining,
getPrsRemaining,
};

async function getPrHourlyRemaining(config) {
if (config.prHourlyLimit) {
const prList = await platform.getPrList();
const currentHourStart = moment({
hour: moment().hour(),
});
try {
const soFarThisHour = prList.filter(
pr =>
pr.branchName !== 'renovate/configure' &&
moment(pr.createdAt).isAfter(currentHourStart)
).length;
const prsRemaining = config.prHourlyLimit - soFarThisHour;
logger.info(`PR hourly limit remaining: ${prsRemaining}`);
return prsRemaining;
} catch (err) {
logger.error('Error checking PRs created per hour');
}
}
return 99;
}

async function getConcurrentPrsRemaining(config, branches) {
if (config.prConcurrentLimit) {
logger.debug(`Enforcing prConcurrentLimit (${config.prConcurrentLimit})`);
let currentlyOpen = 0;
for (const branch of branches) {
if (await platform.branchExists(branch.branchName)) {
currentlyOpen += 1;
}
}
logger.debug(`${currentlyOpen} PRs are currently open`);
const concurrentRemaining = config.prConcurrentLimit - currentlyOpen;
logger.info(`PR concurrent limit remaining: ${concurrentRemaining}`);
return concurrentRemaining;
}
return 99;
}

async function getPrsRemaining(config, branches) {
const hourlyRemaining = await module.exports.getPrHourlyRemaining(config);
const concurrentRemaining = await module.exports.getConcurrentPrsRemaining(
config,
branches
);
return hourlyRemaining < concurrentRemaining
? hourlyRemaining
: concurrentRemaining;
}
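getPrsRemaining is simply the minimum of the two budgets. As a worked example: with prHourlyLimit set to 2 and two PRs already created this hour, and prConcurrentLimit set to 10 with three branches currently open, it returns min(2 - 2, 10 - 3) = 0, so the branch worker below sees prHourlyLimitReached and creates no further PRs this hour.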
@@ -1,61 +1,32 @@
-const moment = require('moment');
const tmp = require('tmp-promise');

const branchWorker = require('../../branch');
+const { getPrsRemaining } = require('./limits');

module.exports = {
writeUpdates,
};

-async function writeUpdates(config) {
-let { branches } = config;
+async function writeUpdates(config, packageFiles, allBranches) {
+let branches = allBranches;
logger.info(`Processing ${branches.length} branch(es)`);
if (!config.mirrorMode && branches.some(upg => upg.isPin)) {
branches = branches.filter(upg => upg.isPin);
logger.info(`Processing ${branches.length} "pin" PRs first`);
}
const tmpDir = await tmp.dir({ unsafeCleanup: true });
-let prsRemaining = 99;
-if (config.prHourlyLimit) {
-const prList = await platform.getPrList();
-const currentHourStart = moment({
-hour: moment().hour(),
-});
-try {
-prsRemaining =
-config.prHourlyLimit -
-prList.filter(
-pr =>
-pr.branchName !== 'renovate/configure' &&
-moment(pr.createdAt).isAfter(currentHourStart)
-).length;
-logger.info(`PR hourly limit remaining: ${prsRemaining}`);
-} catch (err) {
-logger.error('Error checking PRs created per hour');
-}
-}
-if (config.prConcurrentLimit) {
-logger.debug(`Enforcing prConcurrentLimit (${config.prConcurrentLimit})`);
-let currentlyOpen = 0;
-for (const branch of branches) {
-if (await platform.branchExists(branch.branchName)) {
-currentlyOpen += 1;
-}
-}
-logger.debug(`${currentlyOpen} PRs are currently open`);
-const concurrentRemaining = config.prConcurrentLimit - currentlyOpen;
-logger.info(`PR concurrent limit remaining: ${concurrentRemaining}`);
-prsRemaining =
-prsRemaining < concurrentRemaining ? prsRemaining : concurrentRemaining;
-}
+let prsRemaining = await getPrsRemaining(config, branches);
try {
// eslint-disable-next-line no-param-reassign
for (const branch of branches) {
-const res = await branchWorker.processBranch({
+const res = await branchWorker.processBranch(
+{
...branch,
tmpDir,
prHourlyLimitReached: prsRemaining <= 0,
-});
+},
+packageFiles
+);
if (res === 'pr-closed' || res === 'automerged') {
// Stop processing other branches because base branch has been changed
return res;
@@ -31,11 +31,7 @@ function processResult(config, result) {
status = 'enabled';
} else {
status = 'onboarding';
-if (result === 'onboarding') {
-res = 'done';
-} else {
res = result;
-}
}
return { res, status };
}
@@ -3,6 +3,7 @@ const slugify = require('slugify');
const cleanGitRef = require('clean-git-ref').clean;

const { generateBranchConfig } = require('./generate');
+const { flattenUpdates } = require('./flatten');

/**
* Clean git branch name

@@ -19,15 +20,18 @@ function cleanBranchName(branchName) {
.replace(/\s/g, ''); // whitespace
}

-function branchifyUpgrades(config) {
+function branchifyUpgrades(config, packageFiles) {
logger.debug('branchifyUpgrades');
-logger.trace({ config });
+const updates = flattenUpdates(config, packageFiles);
+logger.debug(`${updates.length} updates found`);
+logger.debug({ updates });
+logger.debug({ upgradeNames: updates.map(u => u.depName) });
const errors = [];
const warnings = [];
const branchUpgrades = {};
const branches = [];
-for (const upg of config.upgrades) {
-const update = { ...upg };
+for (const u of updates) {
+const update = { ...u };
// Split out errors and warnings first
if (update.type === 'error') {
errors.push(update);

@@ -81,12 +85,10 @@ function branchifyUpgrades(config) {
? branches.map(upgrade => upgrade.branchName)
: config.branchList;
return {
-...config,
errors: config.errors.concat(errors),
warnings: config.warnings.concat(warnings),
branches,
branchList,
-upgrades: null,
};
}
@@ -1,46 +0,0 @@
const packageFileWorker = require('../../package-file');
const { mergeChildConfig, filterConfig } = require('../../../config');
const { detectSemanticCommits } = require('./semantic');

async function determineRepoUpgrades(config) {
logger.debug('determineRepoUpgrades()');
logger.trace({ config });
let upgrades = [];
logger.debug(`Found ${config.packageFiles.length} package files`);
// Iterate through repositories sequentially
for (const packageFile of config.packageFiles) {
logger.setMeta({
repository: config.repository,
packageFile: packageFile.packageFile,
});
logger.debug('Getting packageFile config');
logger.trace({ fullPackageFile: packageFile });
let packageFileConfig = mergeChildConfig(config, packageFile);
packageFileConfig = filterConfig(packageFileConfig, 'packageFile');
upgrades = upgrades.concat(
await packageFileWorker.renovatePackageFile(packageFileConfig)
);
}
let semanticCommits;
if (upgrades.length) {
semanticCommits = await detectSemanticCommits(config);
}
// Sanitize depNames
upgrades = upgrades.map(upgrade => ({
...upgrade,
semanticCommits,
depNameSanitized: upgrade.depName
? upgrade.depName
.replace('@types/', '')
.replace('@', '')
.replace('/', '-')
.replace(/\s+/g, '-')
.toLowerCase()
: undefined,
}));

logger.debug('returning upgrades');
return { ...config, upgrades };
}

module.exports = { determineRepoUpgrades };
68 lib/workers/repository/updates/flatten.js Normal file
@@ -0,0 +1,68 @@
const {
getManagerConfig,
mergeChildConfig,
filterConfig,
} = require('../../../config');
const { applyPackageRules } = require('../../../util/package-rules');
const { get } = require('../../../manager');

module.exports = {
flattenUpdates,
};

function flattenUpdates(config, packageFiles) {
const updates = [];
for (const [manager, files] of Object.entries(packageFiles)) {
logger.debug(`flatten manager=${manager}`);
const managerConfig = getManagerConfig(config, manager);
logger.debug('Got manager config');
for (const packageFile of files) {
logger.debug('packageFile');
const packageFileConfig = mergeChildConfig(managerConfig, packageFile);
for (const dep of packageFile.deps) {
logger.debug('dep ' + dep.depName);
let depConfig = mergeChildConfig(packageFileConfig, dep);
logger.debug('got depConfig');
delete depConfig.deps;
depConfig = applyPackageRules(depConfig);
logger.debug('got depConfig with rules');
for (const update of dep.updates) {
logger.debug('update');
let updateConfig = mergeChildConfig(depConfig, update);
delete updateConfig.updates;
// apply major/minor/patch/pin/digest
updateConfig = mergeChildConfig(
updateConfig,
updateConfig[updateConfig.type]
);
updateConfig.depNameSanitized = updateConfig.depName
? updateConfig.depName
.replace('@types/', '')
.replace('@', '')
.replace('/', '-')
.replace(/\s+/g, '-')
.toLowerCase()
: undefined;
delete updateConfig.repoIsOnboarded;
delete updateConfig.renovateJsonPresent;
updates.push(updateConfig);
}
logger.debug('Done dep');
}
logger.debug('Done packageFile');
}
logger.debug({ managerConfig });
if (
get(manager, 'supportsLockFileMaintenance') &&
managerConfig.lockFileMaintenance.enabled
) {
const lockFileConfig = mergeChildConfig(
managerConfig,
managerConfig.lockFileMaintenance
);
lockFileConfig.type = 'lockFileMaintenance';
updates.push(lockFileConfig);
}
}
return updates.map(update => filterConfig(update, 'branch'));
}
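The depNameSanitized chain is deterministic string cleanup; two worked examples using the same chain as above (inputs are illustrative):

const sanitize = depName =>
depName
.replace('@types/', '')
.replace('@', '')
.replace('/', '-')
.replace(/\s+/g, '-')
.toLowerCase();
sanitize('@types/lodash'); // 'lodash'
sanitize('@angular/core'); // 'angular-core'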
@@ -1,19 +0,0 @@
const { determineRepoUpgrades } = require('./determine');
const { branchifyUpgrades } = require('./branchify');

module.exports = {
determineUpdates,
};

async function determineUpdates(input) {
let config = { ...input };
logger.debug('determineUpdates()');
logger.trace({ config });
config = await determineRepoUpgrades(config);
await platform.ensureIssueClosing(
'Action Required: Fix Renovate Configuration'
);
config = branchifyUpgrades(config);
logger.debug('Finished determining upgrades');
return config;
}
57 test/_fixtures/npm/plocktest1/package-lock.json generated Normal file
@@ -0,0 +1,57 @@
{
"name": "plocktest1",
"version": "1.0.0",
"lockfileVersion": 1,
"requires": true,
"dependencies": {
"ansi-styles": {
"version": "3.2.1",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
"integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
"requires": {
"color-convert": "1.9.1"
}
},
"chalk": {
"version": "2.4.1",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.1.tgz",
"integrity": "sha512-ObN6h1v2fTJSmUXoS3nMQ92LbDK9be4TV+6G+omQlGJFdcUX5heKi1LZ1YnRMIgwTLEj3E24bT6tYni50rlCfQ==",
"requires": {
"ansi-styles": "3.2.1",
"escape-string-regexp": "1.0.5",
"supports-color": "5.4.0"
}
},
"color-convert": {
"version": "1.9.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.1.tgz",
"integrity": "sha512-mjGanIiwQJskCC18rPR6OmrZ6fm2Lc7PeGFYwCmy5J34wC6F1PzdGL6xeMfmgicfYcNLGuVFA3WzXtIDCQSZxQ==",
"requires": {
"color-name": "1.1.3"
}
},
"color-name": {
"version": "1.1.3",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
"integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
},
"escape-string-regexp": {
"version": "1.0.5",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz",
"integrity": "sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ="
},
"has-flag": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
"integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0="
},
"supports-color": {
"version": "5.4.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.4.0.tgz",
"integrity": "sha512-zjaXglF5nnWpsq470jSv6P9DwPvgLkuapYmfDm3JWOm0vkNTVF2tI4UrN2r6jH1qM/uc/WtxYY1hYoA2dOKj5w==",
"requires": {
"has-flag": "3.0.0"
}
}
}
}
15 test/_fixtures/npm/plocktest1/package.json Normal file
@@ -0,0 +1,15 @@
{
"name": "plocktest1",
"version": "1.0.0",
"description": "",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1"
},
"keywords": [],
"author": "",
"license": "ISC",
"dependencies": {
"chalk": "^2.4.1"
}
}
41 test/_fixtures/npm/plocktest1/yarn.lock Normal file
@@ -0,0 +1,41 @@
# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
# yarn lockfile v1


ansi-styles@^3.2.1:
version "3.2.1"
resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d"
dependencies:
color-convert "^1.9.0"

chalk@^2.4.1:
version "2.4.1"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e"
dependencies:
ansi-styles "^3.2.1"
escape-string-regexp "^1.0.5"
supports-color "^5.3.0"

color-convert@^1.9.0:
version "1.9.1"
resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed"
dependencies:
color-name "^1.1.1"

color-name@^1.1.1:
version "1.1.3"
resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25"

escape-string-regexp@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4"

has-flag@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"

supports-color@^5.3.0:
version "5.4.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.4.0.tgz#1c6b337402c2137605efe19f10fec390f6faab54"
dependencies:
has-flag "^3.0.0"
@@ -1,95 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager detectPackageFiles(config) adds package files to object 1`] = `
Array [
Object {
"manager": "npm",
"packageFile": "package.json",
},
Object {
"manager": "npm",
"packageFile": "backend/package.json",
},
]
`;

exports[`manager detectPackageFiles(config) finds .nvmrc files 1`] = `
Array [
Object {
"manager": "nvm",
"packageFile": ".nvmrc",
},
]
`;

exports[`manager detectPackageFiles(config) finds .travis.yml files 1`] = `
Array [
Object {
"manager": "travis",
"packageFile": ".travis.yml",
},
]
`;

exports[`manager detectPackageFiles(config) finds Dockerfiles 1`] = `
Array [
Object {
"manager": "docker",
"packageFile": "Dockerfile",
},
Object {
"manager": "docker",
"packageFile": "other/Dockerfile",
},
Object {
"manager": "docker",
"packageFile": "another/Dockerfile",
},
]
`;

exports[`manager detectPackageFiles(config) finds WORKSPACE files 1`] = `
Array [
Object {
"manager": "bazel",
"packageFile": "WORKSPACE",
},
Object {
"manager": "bazel",
"packageFile": "other/WORKSPACE",
},
]
`;

exports[`manager detectPackageFiles(config) finds meteor package files 1`] = `
Array [
Object {
"manager": "meteor",
"packageFile": "modules/something/package.js",
},
]
`;

exports[`manager detectPackageFiles(config) ignores node modules 1`] = `
Array [
Object {
"manager": "npm",
"packageFile": "package.json",
},
]
`;

exports[`manager detectPackageFiles(config) ignores node modules 2`] = `undefined`;

exports[`manager detectPackageFiles(config) ignores node modules 3`] = `undefined`;

exports[`manager detectPackageFiles(config) skips meteor package files with no json 1`] = `Array []`;

exports[`manager detectPackageFiles(config) uses includePaths 1`] = `
Array [
Object {
"manager": "npm",
"packageFile": "package.json",
},
]
`;
@@ -1,123 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/resolve resolvePackageFiles() clears npmrc and yarnrc fields 1`] = `
Array [
Object {
"content": Object {
"name": "something",
"renovate": Object {
"automerge": true,
},
"version": "1.0.0",
},
"currentPackageJsonVersion": "1.0.0",
"fileMatch": Array [
"(^|/)package.json$",
],
"manager": "npm",
"packageFile": "package.json",
},
]
`;

exports[`manager/resolve resolvePackageFiles() detect package.json and adds error if cannot parse (onboarding) 1`] = `Array []`;

exports[`manager/resolve resolvePackageFiles() detect package.json and throws error if cannot parse (onboarded) 1`] = `[Error: config-validation]`;

exports[`manager/resolve resolvePackageFiles() detects accompanying files 1`] = `
Array [
Object {
"content": Object {
"name": "package.json",
"version": "0.0.1",
},
"currentPackageJsonVersion": "0.0.1",
"fileMatch": Array [
"(^|/)package.json$",
],
"manager": "npm",
"npmShrinkwrap": "npm-shrinkwrap.json",
"npmrc": "npmrc",
"packageFile": "package.json",
"packageLock": "package-lock.json",
"shrinkwrapYaml": "shrinkwrap.yaml",
"yarnLock": "yarn.lock",
"yarnrc": "yarnrc",
},
]
`;

exports[`manager/resolve resolvePackageFiles() resolves docker 1`] = `
Array [
Object {
"commitMessageTopic": "{{{depName}}} Docker tag",
"content": "# comment
FROM node:8
",
"digest": Object {
"branchTopic": "{{{depNameSanitized}}}-{{{currentTag}}}",
"commitMessageExtra": "to {{newDigestShort}}",
"commitMessageTopic": "{{{depName}}}:{{{currentTag}}} Docker digest",
"group": Object {
"commitMessageTopic": "{{{groupName}}}",
"prBody": "This Pull Request updates Dockerfiles to the latest image digests. For details on Renovate's Docker support, please visit https://renovatebot.com/docs/language-support/docker\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#each upgrades as |upgrade|}}\\n- {{#if repositoryUrl}}[{{upgrade.depName}}]({{upgrade.repositoryUrl}}){{else}}\`{{{depName}}}\`{{/if}}: \`{{upgrade.newDigest}}\`\\n{{/each}}\\n\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"prBody": "This Pull Request updates Docker base image \`{{{depName}}}:{{{currentTag}}}\` to the latest digest (\`{{{newDigest}}}\`). For details on Renovate's Docker support, please visit https://renovatebot.com/docs/language-support/docker\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"fileMatch": Array [
"(^|/)Dockerfile$",
],
"group": Object {
"commitMessageTopic": "{{{groupName}}} Docker tags",
"prBody": "This Pull Request updates Dockerfiles to use image digests.\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#each upgrades as |upgrade|}}\\n- {{#if repositoryUrl}}[{{upgrade.depName}}]({{upgrade.repositoryUrl}}){{else}}\`{{{depName}}}\`{{/if}}: \`{{upgrade.newDigest}}\`\\n{{/each}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"major": Object {
"enabled": false,
},
"manager": "docker",
"managerBranchPrefix": "docker-",
"packageFile": "Dockerfile",
"pin": Object {
"commitMessageExtra": "",
"group": Object {
"branchTopic": "digests-pin",
"commitMessageTopic": "{{{groupName}}}",
"prBody": "This Pull Request pins Dockerfiles to use image digests. For details on Renovate's Docker support, please visit https://renovatebot.com/docs/language-support/docker\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#each upgrades as |upgrade|}}\\n- {{#if repositoryUrl}}[{{upgrade.depName}}]({{upgrade.repositoryUrl}}){{else}}\`{{{depName}}}\`{{/if}}: \`{{upgrade.newDigest}}\`\\n{{/each}}\\n\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"groupName": "Docker digests",
"prBody": "This Pull Request pins Docker base image \`{{{depName}}}:{{{currentTag}}}\` to use a digest (\`{{{newDigest}}}\`).\\nThis digest will then be kept updated via Pull Requests whenever the image is updated on the Docker registry. For details on Renovate's Docker support, please visit https://renovatebot.com/docs/language-support/docker\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"prBody": "This Pull Request updates Docker base image \`{{{depName}}}\` from tag \`{{{currentTag}}}\` to new tag \`{{{newTag}}}\`. For details on Renovate's Docker support, please visit https://renovatebot.com/docs/language-support/docker\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
]
`;

exports[`manager/resolve resolvePackageFiles() resolves package files without own resolve 1`] = `
Array [
Object {
"content": "git_repository(\\n",
"fileMatch": Array [
"(^|/)WORKSPACE$",
],
"manager": "bazel",
"packageFile": "WORKSPACE",
},
]
`;

exports[`manager/resolve resolvePackageFiles() strips npmrc with NPM_TOKEN 1`] = `
Array [
Object {
"content": Object {
"name": "package.json",
"version": "0.0.1",
},
"currentPackageJsonVersion": "0.0.1",
"fileMatch": Array [
"(^|/)package.json$",
],
"manager": "npm",
"packageFile": "package.json",
},
]
`;
@@ -6,6 +6,10 @@ describe('lib/manager/docker/extract', () => {
beforeEach(() => {
config = {};
});
+it('handles no FROM', () => {
+const res = extractDependencies('no from!', config);
+expect(res).toBe(null);
+});
it('handles naked dep', () => {
const res = extractDependencies('FROM node\n', config).deps;
expect(res).toMatchSnapshot();
@@ -1,202 +1,27 @@
-const defaultConfig = require('../../lib/config/defaults').getConfig();
const manager = require('../../lib/manager');
-const npm = require('../../lib/manager/npm');
-const meteor = require('../../lib/manager/meteor');
-const docker = require('../../lib/manager/docker');
-const node = require('../../lib/manager/travis');
-const bazel = require('../../lib/manager/bazel');

-const path = require('path');
-const fs = require('fs-extra');

-const { getUpdatedPackageFiles } = manager;

describe('manager', () => {
-describe('detectPackageFiles(config)', () => {
-let config;
-beforeEach(() => {
-config = {
-...JSON.parse(JSON.stringify(defaultConfig)),
-warnings: [],
-};
-});
-it('skips if not in enabledManagers list', async () => {
-platform.getFileList.mockReturnValueOnce([
-'package.json',
-'backend/package.json',
-]);
-config.enabledManagers = ['docker'];
-const res = await manager.detectPackageFiles(config);
-expect(res).toHaveLength(0);
-});
-it('skips if language is disabled', async () => {
-platform.getFileList.mockReturnValueOnce([
-'package.json',
-'.circleci/config.yml',
-]);
-config.docker.enabled = false;
-const res = await manager.detectPackageFiles(config);
-expect(res).toHaveLength(1);
-});
-it('adds package files to object', async () => {
-platform.getFileList.mockReturnValueOnce([
-'package.json',
-'backend/package.json',
-]);
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(2);
-});
-it('finds meteor package files', async () => {
-config.meteor.enabled = true;
-platform.getFileList.mockReturnValueOnce([
-'modules/something/package.js',
-]); // meteor
-platform.getFile.mockReturnValueOnce('Npm.depends( {} )');
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(1);
-});
-it('skips meteor package files with no json', async () => {
-config.meteor.enabled = true;
-platform.getFileList.mockReturnValueOnce([
-'modules/something/package.js',
-]); // meteor
-platform.getFile.mockReturnValueOnce('Npm.depends(packages)');
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(0);
-});
-it('finds Dockerfiles', async () => {
-platform.getFileList.mockReturnValueOnce([
-'Dockerfile',
-'other/Dockerfile',
-'another/Dockerfile',
-]);
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(3);
-});
-it('finds .travis.yml files', async () => {
-config.travis.enabled = true;
-platform.getFileList.mockReturnValueOnce([
-'.travis.yml',
-'other/.travis.yml',
-]);
-platform.getFile.mockReturnValueOnce('sudo: true\nnode_js:\n -8\n');
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(1);
-});
-it('finds .nvmrc files', async () => {
-config.travis.enabled = true;
-platform.getFileList.mockReturnValueOnce(['.nvmrc', 'other/.nvmrc']);
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(1);
-});
-it('finds WORKSPACE files', async () => {
-config.bazel.enabled = true;
-platform.getFileList.mockReturnValueOnce([
-'WORKSPACE',
-'other/WORKSPACE',
-'empty/WORKSPACE',
-]);
-platform.getFile.mockReturnValueOnce('\n\ngit_repository(\n\n)\n');
-platform.getFile.mockReturnValueOnce(
-await fs.readFile(
-path.resolve('test/_fixtures/bazel/WORKSPACE1'),
-'utf8'
-)
-);
-platform.getFile.mockReturnValueOnce('foo');
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(2);
-});
-it('ignores node modules', async () => {
-platform.getFileList.mockReturnValueOnce([
-'package.json',
-'node_modules/backend/package.json',
-]);
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(1);
-expect(res.foundIgnoredPaths).toMatchSnapshot();
-expect(res.warnings).toMatchSnapshot();
-});
-it('uses includePaths', async () => {
-platform.getFileList.mockReturnValueOnce([
-'package.json',
-'backend/package.json',
-]);
-config.includePaths = ['package.json'];
-const res = await manager.detectPackageFiles(config);
-expect(res).toMatchSnapshot();
-expect(res).toHaveLength(1);
+describe('get()', () => {
+it('gets something', () => {
+expect(manager.get('docker', 'extractDependencies')).not.toBe(null);
+});
+});
-describe('getUpdatedPackageFiles', () => {
-let config;
-beforeEach(() => {
-config = {
-...defaultConfig,
-parentBranch: 'some-branch',
-};
-npm.updateDependency = jest.fn();
-docker.updateDependency = jest.fn();
-meteor.updateDependency = jest.fn();
-node.updateDependency = jest.fn();
-bazel.updateDependency = jest.fn();
+describe('getLanguageList()', () => {
+it('gets', () => {
+expect(manager.getLanguageList()).not.toBe(null);
+});
-it('returns empty if lock file maintenance', async () => {
-config.upgrades = [{ type: 'lockFileMaintenance' }];
-const res = await getUpdatedPackageFiles(config);
-expect(res.updatedPackageFiles).toHaveLength(0);
-});
-it('recurses if updateDependency error', async () => {
-config.parentBranch = 'some-branch';
-config.canRebase = true;
-config.upgrades = [{ packageFile: 'package.json', manager: 'npm' }];
-npm.updateDependency.mockReturnValueOnce(null);
-npm.updateDependency.mockReturnValueOnce('some content');
-const res = await getUpdatedPackageFiles(config);
-expect(res.updatedPackageFiles).toHaveLength(1);
+describe('getManagerList()', () => {
+it('gets', () => {
+expect(manager.getManagerList()).not.toBe(null);
+});
-it('errors if cannot rebase', async () => {
-config.upgrades = [{ packageFile: 'package.json', manager: 'npm' }];
-let e;
-try {
-await getUpdatedPackageFiles(config);
-} catch (err) {
-e = err;
-}
-expect(e).toBeDefined();
-});
-it('returns updated files', async () => {
-config.parentBranch = 'some-branch';
-config.canRebase = true;
-config.upgrades = [
-{ packageFile: 'package.json', manager: 'npm' },
-{ packageFile: 'Dockerfile', manager: 'docker' },
-{ packageFile: 'packages/foo/package.js', manager: 'meteor' },
-{ packageFile: '.travis.yml', manager: 'travis' },
-{ packageFile: 'WORKSPACE', manager: 'bazel' },
-];
-platform.getFile.mockReturnValueOnce('old content 1');
-platform.getFile.mockReturnValueOnce('old content 1');
-platform.getFile.mockReturnValueOnce('old content 2');
-platform.getFile.mockReturnValueOnce('old content 3');
-platform.getFile.mockReturnValueOnce('old travis');
-platform.getFile.mockReturnValueOnce('old WORKSPACE');
-npm.updateDependency.mockReturnValueOnce('new content 1');
-npm.updateDependency.mockReturnValueOnce('new content 1+');
-docker.updateDependency.mockReturnValueOnce('new content 2');
-meteor.updateDependency.mockReturnValueOnce('old content 3');
-node.updateDependency.mockReturnValueOnce('old travis');
-bazel.updateDependency.mockReturnValueOnce('old WORKSPACE');
-const res = await getUpdatedPackageFiles(config);
-expect(res.updatedPackageFiles).toHaveLength(2);
+describe('postExtract()', () => {
+it('returns null', () => {
+expect(manager.postExtract('docker', [])).toBe(null);
+});
+it('returns postExtract', () => {
+expect(manager.postExtract('npm', [])).not.toBe(null);
+});
+});
});
32 test/manager/meteor/__snapshots__/extract.spec.js.snap Normal file
@@ -0,0 +1,32 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`lib/manager/meteor/extract extractDependencies() returns results 1`] = `
Object {
"deps": Array [
Object {
"currentVersion": "0.2.0",
"depName": "xml2js",
},
Object {
"currentVersion": "0.6.0",
"depName": "xml-crypto",
},
Object {
"currentVersion": "0.1.19",
"depName": "xmldom",
},
Object {
"currentVersion": "2.7.10",
"depName": "connect",
},
Object {
"currentVersion": "2.6.4",
"depName": "xmlbuilder",
},
Object {
"currentVersion": "0.2.0",
"depName": "querystring",
},
],
}
`;
30 test/manager/meteor/extract.spec.js Normal file
@@ -0,0 +1,30 @@
const fs = require('fs');
const path = require('path');
const { extractDependencies } = require('../../../lib/manager/meteor/extract');

function readFixture(fixture) {
return fs.readFileSync(
path.resolve(__dirname, `../../_fixtures/meteor/${fixture}`),
'utf8'
);
}

const input01Content = readFixture('package-1.js');

describe('lib/manager/meteor/extract', () => {
describe('extractDependencies()', () => {
let config;
beforeEach(() => {
config = {};
});
it('returns empty if fails to parse', () => {
const res = extractDependencies('blahhhhh:foo:@what\n', config);
expect(res).toBe(null);
});
it('returns results', () => {
const res = extractDependencies(input01Content, config);
expect(res).toMatchSnapshot();
expect(res.deps).toHaveLength(6);
});
});
});
@@ -1,42 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/extract .extractDependencies(npmExtract, depType) each element contains non-null depType, depName, currentVersion 1`] = `
Array [
Object {
"currentVersion": "6.5.0",
"depName": "autoprefixer",
"depType": "dependencies",
"lockedVersion": undefined,
},
Object {
"currentVersion": "~1.6.0",
"depName": "bower",
"depType": "dependencies",
"lockedVersion": undefined,
},
Object {
"currentVersion": "13.1.0",
"depName": "browserify",
"depType": "dependencies",
"lockedVersion": undefined,
},
Object {
"currentVersion": "0.9.2",
"depName": "browserify-css",
"depType": "dependencies",
"lockedVersion": undefined,
},
Object {
"currentVersion": "0.22.0",
"depName": "cheerio",
"depType": "dependencies",
"lockedVersion": undefined,
},
Object {
"currentVersion": "1.21.0",
"depName": "config",
"depType": "dependencies",
"lockedVersion": undefined,
},
]
`;
@@ -1,24 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/monorepo checkMonorepos adds lerna packages 1`] = `
Array [
"@a/b",
"@a/c",
]
`;

exports[`manager/npm/monorepo checkMonorepos adds nested yarn workspaces 1`] = `
Array [
"@a/b",
"@a/c",
]
`;

exports[`manager/npm/monorepo checkMonorepos adds yarn workspaces 1`] = `
Array [
"@a/b",
"@a/c",
]
`;

exports[`manager/npm/monorepo checkMonorepos skips if no lerna packages 1`] = `Array []`;
@ -1,123 +0,0 @@
|
|||
const fs = require('fs');
|
||||
const path = require('path');
|
||||
const npmExtract = require('../../../lib/manager/npm/extract');
|
||||
|
||||
function readFixture(fixture) {
|
||||
return fs.readFileSync(
|
||||
path.resolve(__dirname, `../../_fixtures/package-json/${fixture}`),
|
||||
'utf8'
|
||||
);
|
||||
}
|
||||
|
||||
const input01Content = readFixture('inputs/01.json');
|
||||
const input02Content = readFixture('inputs/02.json');
|
||||
|
||||
describe('manager/npm/extract', () => {
|
||||
describe('.extractDependencies(npmExtract, depType)', () => {
|
||||
it('returns an array of correct length (dependencies)', () => {
|
||||
const config = {
|
||||
depType: 'dependencies',
|
||||
};
|
||||
      const extractedDependencies = npmExtract.extractDependencies(
        JSON.parse(input01Content),
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(6);
    });
    it('returns an array of correct length (devDependencies)', () => {
      const config = {
        depType: 'devDependencies',
      };
      const extractedDependencies = npmExtract.extractDependencies(
        JSON.parse(input01Content),
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(4);
    });
    it('each element contains non-null depType, depName, currentVersion', () => {
      const config = {
        depType: 'dependencies',
      };
      const extractedDependencies = npmExtract.extractDependencies(
        JSON.parse(input01Content),
        config
      ).deps;
      expect(extractedDependencies).toMatchSnapshot();
      extractedDependencies
        .every(dep => dep.depType && dep.depName && dep.currentVersion)
        .should.eql(true);
    });
    it('supports null devDependencies indirect', () => {
      const config = {
        depType: 'dependencies',
      };
      const extractedDependencies = npmExtract.extractDependencies(
        JSON.parse(input02Content),
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(6);
    });
    it('supports null', () => {
      const config = {
        depType: 'fooDpendencies',
      };
      const extractedDependencies = npmExtract.extractDependencies(
        JSON.parse(input02Content),
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(0);
    });
    it('finds a locked version in package-lock.json', () => {
      const packageLockParsed = {
        dependencies: { chalk: { version: '2.0.1' } },
      };
      const config = {
        depType: 'dependencies',
        packageLockParsed,
      };
      const extractedDependencies = npmExtract.extractDependencies(
        { dependencies: { chalk: '^2.0.0', foo: '^1.0.0' } },
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(2);
      expect(extractedDependencies[0].lockedVersion).toBeDefined();
      expect(extractedDependencies[1].lockedVersion).toBeUndefined();
    });
    it('finds a locked version in yarn.lock', () => {
      const yarnLockParsed = {
        object: { 'chalk@^2.0.0': { version: '2.0.1' } },
      };
      const config = {
        depType: 'dependencies',
        yarnLockParsed,
      };
      const extractedDependencies = npmExtract.extractDependencies(
        { dependencies: { chalk: '^2.0.0', foo: '^1.0.0' } },
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(2);
      expect(extractedDependencies[0].lockedVersion).toBeDefined();
      expect(extractedDependencies[1].lockedVersion).toBeUndefined();
    });
    it('handles lock error', () => {
      const config = {
        depType: 'dependencies',
        packageLockParsed: true,
      };
      const extractedDependencies = npmExtract.extractDependencies(
        { dependencies: { chalk: '^2.0.0', foo: '^1.0.0' } },
        config
      ).deps;
      extractedDependencies.should.be.instanceof(Array);
      extractedDependencies.should.have.length(2);
      expect(extractedDependencies[0].lockedVersion).toBeUndefined();
      expect(extractedDependencies[1].lockedVersion).toBeUndefined();
    });
  });
});
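Note: the three locked-version cases above pin down the lookup shapes the extractor must support: package-lock.json data is keyed by bare package name, yarn.lock data by "name@range", and a malformed lock must degrade to undefined rather than throw. A minimal sketch of that lookup, using the same packageLockParsed/yarnLockParsed config fields as the tests (the helper name getLockedVersion is illustrative, not the repo's actual function):

// Illustrative helper (not the repo's actual function): resolve a dep's
// locked version from whichever parsed lock file the config carries.
function getLockedVersion(config, depName, currentVersion) {
  try {
    if (config.packageLockParsed) {
      // package-lock.json keys dependencies by bare name
      return config.packageLockParsed.dependencies[depName].version;
    }
    if (config.yarnLockParsed) {
      // yarn.lock keys entries by "name@range"
      return config.yarnLockParsed.object[`${depName}@${currentVersion}`]
        .version;
    }
  } catch (err) {
    // covers malformed locks, e.g. packageLockParsed === true in the test
  }
  return undefined;
}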
test/manager/npm/extract/__snapshots__/index.spec.js.snap (new file, 202 lines)
@@ -0,0 +1,202 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/extract .extractDependencies() finds a lock file 1`] = `
Object {
  "deps": Array [
    Object {
      "currentVersion": "6.5.0",
      "depName": "autoprefixer",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "~1.6.0",
      "depName": "bower",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "13.1.0",
      "depName": "browserify",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "0.9.2",
      "depName": "browserify-css",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "0.22.0",
      "depName": "cheerio",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "1.21.0",
      "depName": "config",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "^1.5.8",
      "depName": "angular",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "1.5.8",
      "depName": "angular-touch",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "1.5.8",
      "depName": "angular-sanitize",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "4.0.0-beta.1",
      "depName": "@angular/core",
      "depType": "devDependencies",
    },
  ],
  "lernaClient": undefined,
  "lernaDir": undefined,
  "lernaPackages": undefined,
  "npmLock": undefined,
  "npmrc": undefined,
  "packageJsonName": "renovate",
  "packageJsonVersion": "1.0.0",
  "pnpmShrinkwrap": undefined,
  "yarnLock": "yarn.lock",
  "yarnWorkspacesPackages": undefined,
}
`;

exports[`manager/npm/extract .extractDependencies() finds lerna 1`] = `
Object {
  "deps": Array [
    Object {
      "currentVersion": "6.5.0",
      "depName": "autoprefixer",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "~1.6.0",
      "depName": "bower",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "13.1.0",
      "depName": "browserify",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "0.9.2",
      "depName": "browserify-css",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "0.22.0",
      "depName": "cheerio",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "1.21.0",
      "depName": "config",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "^1.5.8",
      "depName": "angular",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "1.5.8",
      "depName": "angular-touch",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "1.5.8",
      "depName": "angular-sanitize",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "4.0.0-beta.1",
      "depName": "@angular/core",
      "depType": "devDependencies",
    },
  ],
  "lernaClient": undefined,
  "lernaDir": ".",
  "lernaPackages": undefined,
  "npmLock": undefined,
  "npmrc": undefined,
  "packageJsonName": "renovate",
  "packageJsonVersion": "1.0.0",
  "pnpmShrinkwrap": undefined,
  "yarnLock": undefined,
  "yarnWorkspacesPackages": undefined,
}
`;

exports[`manager/npm/extract .extractDependencies() returns an array of dependencies 1`] = `
Object {
  "deps": Array [
    Object {
      "currentVersion": "6.5.0",
      "depName": "autoprefixer",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "~1.6.0",
      "depName": "bower",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "13.1.0",
      "depName": "browserify",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "0.9.2",
      "depName": "browserify-css",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "0.22.0",
      "depName": "cheerio",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "1.21.0",
      "depName": "config",
      "depType": "dependencies",
    },
    Object {
      "currentVersion": "^1.5.8",
      "depName": "angular",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "1.5.8",
      "depName": "angular-touch",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "1.5.8",
      "depName": "angular-sanitize",
      "depType": "devDependencies",
    },
    Object {
      "currentVersion": "4.0.0-beta.1",
      "depName": "@angular/core",
      "depType": "devDependencies",
    },
  ],
  "lernaClient": undefined,
  "lernaDir": undefined,
  "lernaPackages": undefined,
  "npmLock": undefined,
  "npmrc": undefined,
  "packageJsonName": "renovate",
  "packageJsonVersion": "1.0.0",
  "pnpmShrinkwrap": undefined,
  "yarnLock": undefined,
  "yarnWorkspacesPackages": undefined,
}
`;
test/manager/npm/extract/__snapshots__/locked-versions.spec.js.snap (new file, 60 lines)
@@ -0,0 +1,60 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/extract/locked-versions .getLockedVersions() ignores pnpm 1`] = `
Array [
  Object {
    "deps": Array [
      Object {
        "currentVersion": "1.0.0",
        "depName": "a",
      },
      Object {
        "currentVersion": "2.0.0",
        "depName": "b",
      },
    ],
    "pnpmShrinkwrap": "shrinkwrap.yaml",
  },
]
`;

exports[`manager/npm/extract/locked-versions .getLockedVersions() uses package-lock.json 1`] = `
Array [
  Object {
    "deps": Array [
      Object {
        "currentVersion": "1.0.0",
        "depName": "a",
        "lockedVersion": "1.0.0",
      },
      Object {
        "currentVersion": "2.0.0",
        "depName": "b",
        "lockedVersion": "2.0.0",
      },
    ],
    "npmLock": "package-lock.json",
  },
]
`;

exports[`manager/npm/extract/locked-versions .getLockedVersions() uses yarn.lock 1`] = `
Array [
  Object {
    "deps": Array [
      Object {
        "currentVersion": "1.0.0",
        "depName": "a",
        "lockedVersion": "1.0.0",
      },
      Object {
        "currentVersion": "2.0.0",
        "depName": "b",
        "lockedVersion": "2.0.0",
      },
    ],
    "npmLock": "package-lock.json",
    "yarnLock": "yarn.lock",
  },
]
`;
test/manager/npm/extract/__snapshots__/monorepo.spec.js.snap (new file, 69 lines)
@@ -0,0 +1,69 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/extract .extractDependencies() uses lerna package settings 1`] = `
Array [
  Object {
    "lernaDir": ".",
    "lernaPackages": Array [
      "packages/*",
    ],
    "packageFile": "package.json",
  },
  Object {
    "lernaDir": ".",
    "monorepoPackages": Array [
      "@org/b",
    ],
    "npmLock": undefined,
    "packageFile": "packages/a/package.json",
    "packageJsonName": "@org/a",
    "yarnLock": undefined,
  },
  Object {
    "lernaDir": ".",
    "monorepoPackages": Array [
      "@org/a",
    ],
    "npmLock": undefined,
    "packageFile": "packages/b/package.json",
    "packageJsonName": "@org/b",
    "yarnLock": undefined,
  },
]
`;

exports[`manager/npm/extract .extractDependencies() uses yarn workspaces package settings 1`] = `
Array [
  Object {
    "lernaClient": "yarn",
    "lernaDir": ".",
    "lernaPackages": Array [
      "oldpackages/*",
    ],
    "packageFile": "package.json",
    "yarnWorkspacesPackages": Array [
      "packages/*",
    ],
  },
  Object {
    "lernaDir": ".",
    "monorepoPackages": Array [
      "@org/b",
    ],
    "npmLock": undefined,
    "packageFile": "packages/a/package.json",
    "packageJsonName": "@org/a",
    "yarnLock": undefined,
  },
  Object {
    "lernaDir": ".",
    "monorepoPackages": Array [
      "@org/a",
    ],
    "npmLock": undefined,
    "packageFile": "packages/b/package.json",
    "packageJsonName": "@org/b",
    "yarnLock": undefined,
  },
]
`;
test/manager/npm/extract/__snapshots__/npm.spec.js.snap (new file, 13 lines)
@@ -0,0 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/extract/npm .getNpmLock() extracts 1`] = `
Object {
  "ansi-styles": "3.2.1",
  "chalk": "2.4.1",
  "color-convert": "1.9.1",
  "color-name": "1.1.3",
  "escape-string-regexp": "1.0.5",
  "has-flag": "3.0.0",
  "supports-color": "5.4.0",
}
`;
test/manager/npm/extract/__snapshots__/yarn.spec.js.snap (new file, 13 lines)
@@ -0,0 +1,13 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/extract/yarn .getYarnLock() extracts 1`] = `
Object {
  "ansi-styles@^3.2.1": "3.2.1",
  "chalk@^2.4.1": "2.4.1",
  "color-convert@^1.9.0": "1.9.1",
  "color-name@^1.1.1": "1.1.3",
  "escape-string-regexp@^1.0.5": "1.0.5",
  "has-flag@^3.0.0": "3.0.0",
  "supports-color@^5.3.0": "5.4.0",
}
`;
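The two snapshots above make the lock-file key shapes explicit: getNpmLock() flattens package-lock.json to bare-name keys, while getYarnLock() keys each entry by the "name@range" specifier it satisfies. A hedged sketch of the yarn-side flattening, assuming the @yarnpkg/lockfile parser, whose parse() returns an { object } keyed exactly as in the snapshot:

const yarnLockParser = require('@yarnpkg/lockfile');

// Assumption: this mirrors the tested output shape; function body is a sketch.
function flattenYarnLock(yarnLockRaw) {
  const { object } = yarnLockParser.parse(yarnLockRaw);
  const lockedVersions = {};
  for (const entry of Object.keys(object)) {
    // "chalk@^2.4.1" -> "2.4.1"
    lockedVersions[entry] = object[entry].version;
  }
  return lockedVersions;
}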
test/manager/npm/extract/index.spec.js (new file, 76 lines)
@@ -0,0 +1,76 @@
const fs = require('fs');
const path = require('path');
const npmExtract = require('../../../../lib/manager/npm/extract');

function readFixture(fixture) {
  return fs.readFileSync(
    path.resolve(__dirname, `../../../_fixtures/package-json/${fixture}`),
    'utf8'
  );
}

const input01Content = readFixture('inputs/01.json');

describe('manager/npm/extract', () => {
  describe('.extractDependencies()', () => {
    beforeEach(() => {
      platform.getFile.mockReturnValue(null);
    });
    it('returns null if cannot parse', async () => {
      const res = await npmExtract.extractDependencies(
        'not json',
        'package.json'
      );
      expect(res).toBe(null);
    });
    it('returns null if no deps', async () => {
      const res = await npmExtract.extractDependencies('{}', 'package.json');
      expect(res).toBe(null);
    });
    it('handles invalid', async () => {
      const res = await npmExtract.extractDependencies(
        '{"dependencies": true, "devDependencies": []}',
        'package.json'
      );
      expect(res).toBe(null);
    });
    it('returns an array of dependencies', async () => {
      const res = await npmExtract.extractDependencies(
        input01Content,
        'package.json'
      );
      expect(res).toMatchSnapshot();
    });
    it('finds a lock file', async () => {
      platform.getFile = jest.fn(fileName => {
        if (fileName === 'yarn.lock') {
          return '# yarn.lock';
        }
        return null;
      });
      const res = await npmExtract.extractDependencies(
        input01Content,
        'package.json'
      );
      expect(res).toMatchSnapshot();
    });
    it('finds lerna', async () => {
      platform.getFile = jest.fn(fileName => {
        if (fileName === 'lerna.json') {
          return '{}';
        }
        return null;
      });
      const res = await npmExtract.extractDependencies(
        input01Content,
        'package.json'
      );
      expect(res).toMatchSnapshot();
    });
  });
  describe('.postExtract()', () => {
    it('runs', async () => {
      await npmExtract.postExtract([]);
    });
  });
});
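These tests fix the refactored extractor contract: extractDependencies(content, fileName) takes raw file content plus the file name, discovers sibling files (yarn.lock, lerna.json) through the global platform API, and returns null for unparseable or dependency-free manifests. A minimal caller sketch under those assumptions (not repo source):

// Sketch of a caller under the contract asserted above.
async function extractOnePackageFile(packageFile) {
  const content = await platform.getFile(packageFile);
  const res = await npmExtract.extractDependencies(content, packageFile);
  if (!res) {
    logger.debug(`No dependencies extracted from ${packageFile}`);
    return null;
  }
  return { packageFile, ...res };
}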
test/manager/npm/extract/locked-versions.spec.js (new file, 82 lines)
@@ -0,0 +1,82 @@
const {
  getLockedVersions,
} = require('../../../../lib/manager/npm/extract/locked-versions');

const npm = require('../../../../lib/manager/npm/extract/npm');
const yarn = require('../../../../lib/manager/npm/extract/yarn');

jest.mock('../../../../lib/manager/npm/extract/npm');
jest.mock('../../../../lib/manager/npm/extract/yarn');

describe('manager/npm/extract/locked-versions', () => {
  describe('.getLockedVersions()', () => {
    it('uses yarn.lock', async () => {
      yarn.getYarnLock.mockReturnValue({
        'a@1.0.0': '1.0.0',
        'b@2.0.0': '2.0.0',
        'c@2.0.0': '3.0.0',
      });
      const packageFiles = [
        {
          npmLock: 'package-lock.json',
          yarnLock: 'yarn.lock',
          deps: [
            {
              depName: 'a',
              currentVersion: '1.0.0',
            },
            {
              depName: 'b',
              currentVersion: '2.0.0',
            },
          ],
        },
      ];
      await getLockedVersions(packageFiles);
      expect(packageFiles).toMatchSnapshot();
    });
    it('uses package-lock.json', async () => {
      npm.getNpmLock.mockReturnValue({
        a: '1.0.0',
        b: '2.0.0',
        c: '3.0.0',
      });
      const packageFiles = [
        {
          npmLock: 'package-lock.json',
          deps: [
            {
              depName: 'a',
              currentVersion: '1.0.0',
            },
            {
              depName: 'b',
              currentVersion: '2.0.0',
            },
          ],
        },
      ];
      await getLockedVersions(packageFiles);
      expect(packageFiles).toMatchSnapshot();
    });
    it('ignores pnpm', async () => {
      const packageFiles = [
        {
          pnpmShrinkwrap: 'shrinkwrap.yaml',
          deps: [
            {
              depName: 'a',
              currentVersion: '1.0.0',
            },
            {
              depName: 'b',
              currentVersion: '2.0.0',
            },
          ],
        },
      ];
      await getLockedVersions(packageFiles);
      expect(packageFiles).toMatchSnapshot();
    });
  });
});
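Read together with its snapshot file earlier, getLockedVersions() is being tested as an in-place decorator: yarn locks take precedence over npm locks when both are present, yarn lookups use "depName@currentVersion" keys while npm lookups use bare depName, and pnpm shrinkwraps are skipped. A rough sketch of that behaviour, not the repo's exact source:

// Rough sketch of the tested behaviour; mutates packageFiles in place.
async function getLockedVersionsSketch(packageFiles) {
  for (const packageFile of packageFiles) {
    if (packageFile.yarnLock) {
      const lockFile = await yarn.getYarnLock(packageFile.yarnLock);
      for (const dep of packageFile.deps) {
        dep.lockedVersion = lockFile[`${dep.depName}@${dep.currentVersion}`];
      }
    } else if (packageFile.npmLock) {
      const lockFile = await npm.getNpmLock(packageFile.npmLock);
      for (const dep of packageFile.deps) {
        dep.lockedVersion = lockFile[dep.depName];
      }
    }
    // pnpmShrinkwrap is intentionally left alone ("ignores pnpm" test)
  }
}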
test/manager/npm/extract/monorepo.spec.js (new file, 52 lines)
@@ -0,0 +1,52 @@
const {
  detectMonorepos,
} = require('../../../../lib/manager/npm/extract/monorepo');

describe('manager/npm/extract', () => {
  describe('.extractDependencies()', () => {
    it('uses lerna package settings', async () => {
      const packageFiles = [
        {
          packageFile: 'package.json',
          lernaDir: '.',
          lernaPackages: ['packages/*'],
        },
        {
          packageFile: 'packages/a/package.json',
          packageJsonName: '@org/a',
        },
        {
          packageFile: 'packages/b/package.json',
          packageJsonName: '@org/b',
        },
      ];
      await detectMonorepos(packageFiles);
      expect(packageFiles).toMatchSnapshot();
      expect(packageFiles[1].lernaDir).toEqual('.');
      expect(packageFiles[1].monorepoPackages).toEqual(['@org/b']);
    });
    it('uses yarn workspaces package settings', async () => {
      const packageFiles = [
        {
          packageFile: 'package.json',
          lernaDir: '.',
          lernaPackages: ['oldpackages/*'],
          lernaClient: 'yarn',
          yarnWorkspacesPackages: ['packages/*'],
        },
        {
          packageFile: 'packages/a/package.json',
          packageJsonName: '@org/a',
        },
        {
          packageFile: 'packages/b/package.json',
          packageJsonName: '@org/b',
        },
      ];
      await detectMonorepos(packageFiles);
      expect(packageFiles).toMatchSnapshot();
      expect(packageFiles[1].lernaDir).toEqual('.');
      expect(packageFiles[1].monorepoPackages).toEqual(['@org/b']);
    });
  });
});
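Both cases assert the same propagation: for each package file matched by the root's workspace globs, with yarnWorkspacesPackages taking precedence over lernaPackages, detectMonorepos() copies down the root's lernaDir and attaches the names of the other member packages as monorepoPackages. A sketch of the matching, assuming the minimatch globbing already used elsewhere in lib/manager:

const minimatch = require('minimatch');
const path = require('path');

// Sketch only: tag workspace members relative to one monorepo root.
function tagMembers(root, packageFiles) {
  const globs = root.yarnWorkspacesPackages || root.lernaPackages || [];
  const members = packageFiles.filter(p =>
    globs.some(glob => minimatch(path.dirname(p.packageFile), glob))
  );
  const names = members.map(p => p.packageJsonName);
  for (const member of members) {
    member.lernaDir = root.lernaDir;
    member.monorepoPackages = names.filter(n => n !== member.packageJsonName);
  }
}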
test/manager/npm/extract/npm.spec.js (new file, 21 lines)
@@ -0,0 +1,21 @@
const fs = require('fs');
const { getNpmLock } = require('../../../../lib/manager/npm/extract/npm');

describe('manager/npm/extract/npm', () => {
  describe('.getNpmLock()', () => {
    it('returns empty if failed to parse', async () => {
      platform.getFile.mockReturnValueOnce('abcd');
      const res = await getNpmLock('package.json');
      expect(Object.keys(res)).toHaveLength(0);
    });
    it('extracts', async () => {
      const plocktest1Lock = fs.readFileSync(
        'test/_fixtures/npm/plocktest1/package-lock.json'
      );
      platform.getFile.mockReturnValueOnce(plocktest1Lock);
      const res = await getNpmLock('package.json');
      expect(res).toMatchSnapshot();
      expect(Object.keys(res)).toHaveLength(7);
    });
  });
});
test/manager/npm/extract/yarn.spec.js (new file, 22 lines)
@@ -0,0 +1,22 @@
const fs = require('fs');
const { getYarnLock } = require('../../../../lib/manager/npm/extract/yarn');

describe('manager/npm/extract/yarn', () => {
  describe('.getYarnLock()', () => {
    it('returns empty if exception parsing', async () => {
      platform.getFile.mockReturnValueOnce('abcd');
      const res = await getYarnLock('package.json');
      expect(Object.keys(res)).toHaveLength(0);
    });
    it('extracts', async () => {
      const plocktest1Lock = fs.readFileSync(
        'test/_fixtures/npm/plocktest1/yarn.lock',
        'utf8'
      );
      platform.getFile.mockReturnValueOnce(plocktest1Lock);
      const res = await getYarnLock('package.json');
      expect(res).toMatchSnapshot();
      expect(Object.keys(res)).toHaveLength(7);
    });
  });
});
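Both lock readers share one failure contract: garbage input such as 'abcd' must yield an empty object, never a throw, so a broken lock file cannot abort extraction of the manifest itself. A sketch of that defensive wrapper for the npm side (function body illustrative, not repo source):

// Any parse failure degrades to {} so extraction can continue.
async function getNpmLockSketch(filePath) {
  const lockRaw = await platform.getFile(filePath);
  try {
    const lockParsed = JSON.parse(lockRaw);
    const lockFile = {};
    for (const depName of Object.keys(lockParsed.dependencies || {})) {
      lockFile[depName] = lockParsed.dependencies[depName].version;
    }
    return lockFile;
  } catch (err) {
    logger.info({ filePath }, 'Warning: unparseable lock file');
    return {};
  }
}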
@@ -1,80 +0,0 @@
const { checkMonorepos } = require('../../../lib/manager/npm/monorepos');

let config;
beforeEach(() => {
  jest.resetAllMocks();
  config = { ...require('../../_fixtures/config') };
  config.errors = [];
  config.warnings = [];
});

describe('manager/npm/monorepo', () => {
  describe('checkMonorepos', () => {
    it('adds yarn workspaces', async () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          content: { workspaces: ['packages/*'] },
        },
        {
          packageFile: 'packages/something/package.json',
          content: { name: '@a/b' },
        },
        {
          packageFile: 'packages/something-else/package.json',
          content: { name: '@a/c' },
        },
      ];
      const res = await checkMonorepos(config);
      expect(res.monorepoPackages).toMatchSnapshot();
    });
    it('adds nested yarn workspaces', async () => {
      config.packageFiles = [
        {
          packageFile: 'frontend/package.json',
          content: { workspaces: ['packages/*'] },
        },
        {
          packageFile: 'frontend/packages/something/package.json',
          content: { name: '@a/b' },
        },
        {
          packageFile: 'frontend/packages/something-else/package.json',
          content: { name: '@a/c' },
        },
      ];
      const res = await checkMonorepos(config);
      expect(res.monorepoPackages).toMatchSnapshot();
    });
    it('adds lerna packages', async () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          content: {},
        },
        {
          packageFile: 'packages/something/package.json',
          content: { name: '@a/b' },
        },
        {
          packageFile: 'packages/something-else/package.json',
          content: { name: '@a/c' },
        },
      ];
      platform.getFile.mockReturnValue('{ "packages": ["packages/*"] }');
      const res = await checkMonorepos(config);
      expect(res.monorepoPackages).toMatchSnapshot();
    });
    it('skips if no lerna packages', async () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          content: {},
        },
      ];
      platform.getFile.mockReturnValue(null);
      const res = await checkMonorepos(config);
      expect(res.monorepoPackages).toMatchSnapshot();
    });
  });
});
@@ -1,131 +0,0 @@
const manager = require('../../lib/manager');

const { resolvePackageFiles } = manager;

let config;
beforeEach(() => {
  jest.resetAllMocks();
  config = { ...require('../_fixtures/config') };
  config.global = {};
  config.errors = [];
  config.warnings = [];
});

describe('manager/resolve', () => {
  describe('resolvePackageFiles()', () => {
    beforeEach(() => {
      manager.detectPackageFiles = jest.fn();
    });
    it('detect package.json and adds error if cannot parse (onboarding)', async () => {
      manager.detectPackageFiles.mockReturnValueOnce([
        { packageFile: 'package.json', manager: 'npm' },
      ]);
      platform.getFileList.mockReturnValueOnce(['package.json']);
      platform.getFile.mockReturnValueOnce('not json');
      const res = await resolvePackageFiles(config);
      expect(res.packageFiles).toMatchSnapshot();
      expect(res.errors).toHaveLength(1);
    });
    it('detect package.json and throws error if cannot parse (onboarded)', async () => {
      manager.detectPackageFiles.mockReturnValueOnce([
        { packageFile: 'package.json', manager: 'npm' },
      ]);
      platform.getFileList.mockReturnValueOnce(['package.json']);
      platform.getFile.mockReturnValueOnce('not json');
      config.repoIsOnboarded = true;
      let e;
      try {
        await resolvePackageFiles(config);
      } catch (err) {
        e = err;
      }
      expect(e).toBeDefined();
      expect(e).toMatchSnapshot();
    });
    it('clears npmrc and yarnrc fields', async () => {
      manager.detectPackageFiles.mockReturnValueOnce([
        { packageFile: 'package.json', manager: 'npm' },
      ]);
      const pJson = {
        name: 'something',
        version: '1.0.0',
        renovate: {
          automerge: true,
        },
      };
      platform.getFile.mockReturnValueOnce(JSON.stringify(pJson));
      platform.getFileList.mockReturnValueOnce(['package.json']);
      platform.getFileList.mockReturnValueOnce(['package.json']);
      const res = await resolvePackageFiles(config);
      expect(res.packageFiles).toMatchSnapshot();
      expect(res.warnings).toHaveLength(0);
    });
    it('detects accompanying files', async () => {
      manager.detectPackageFiles.mockReturnValueOnce([
        { packageFile: 'package.json', manager: 'npm' },
      ]);
      platform.getFileList.mockReturnValue([
        'package.json',
        'yarn.lock',
        'package-lock.json',
        'npm-shrinkwrap.json',
        'shrinkwrap.yaml',
      ]);
      platform.getFile.mockReturnValueOnce(
        '{"name": "package.json", "version": "0.0.1"}'
      );
      platform.getFile.mockReturnValueOnce('npmrc');
      platform.getFile.mockReturnValueOnce('yarnrc');
      const res = await resolvePackageFiles(config);
      expect(res.packageFiles).toMatchSnapshot();
      expect(res.warnings).toHaveLength(0);
    });
    it('resolves docker', async () => {
      platform.getFileList.mockReturnValue(['Dockerfile']);
      platform.getFile.mockReturnValue('# comment\nFROM node:8\n'); // Dockerfile
      const res = await resolvePackageFiles(config);
      expect(res.packageFiles).toMatchSnapshot();
      expect(res.packageFiles).toHaveLength(1);
      expect(res.warnings).toHaveLength(0);
    });
    it('resolves package files without own resolve', async () => {
      platform.getFileList.mockReturnValue(['WORKSPACE']);
      platform.getFile.mockReturnValue('git_repository(\n'); // WORKSPACE
      const res = await resolvePackageFiles(config);
      expect(res.packageFiles).toMatchSnapshot();
      expect(res.packageFiles).toHaveLength(1);
      expect(res.warnings).toHaveLength(0);
    });
    it('strips npmrc with NPM_TOKEN', async () => {
      manager.detectPackageFiles.mockReturnValueOnce([
        { packageFile: 'package.json', manager: 'npm' },
      ]);
      platform.getFileList.mockReturnValue(['package.json', '.npmrc']);
      platform.getFile.mockReturnValueOnce(
        '{"name": "package.json", "version": "0.0.1"}'
      );
      platform.getFile.mockReturnValueOnce(
        '//registry.npmjs.org/:_authToken=${NPM_TOKEN}' // eslint-disable-line
      );
      const res = await resolvePackageFiles(config);
      expect(res.packageFiles).toMatchSnapshot();
      expect(res.warnings).toHaveLength(0);
    });
    it('checks if renovate config in nested package.json throws an error', async () => {
      manager.detectPackageFiles.mockReturnValueOnce([
        { packageFile: 'package.json', manager: 'npm' },
      ]);
      platform.getFileList.mockReturnValue(['test/package.json']);
      platform.getFile.mockReturnValueOnce(
        '{"name": "test/package.json", "version": "0.0.1", "renovate":{"enabled": true}}'
      );
      let e;
      try {
        await resolvePackageFiles(config);
      } catch (err) {
        e = err;
      }
      expect(e).toEqual(new Error('config-validation'));
    });
  });
});
test/manager/travis/__snapshots__/extract.spec.js.snap (new file, 15 lines)
@@ -0,0 +1,15 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`lib/manager/travis/extract extractDependencies() returns results 1`] = `
Object {
  "deps": Array [
    Object {
      "currentVersion": Array [
        6,
        8,
      ],
      "depName": "node",
    },
  ],
}
`;
@@ -10,5 +10,10 @@ describe('lib/manager/travis/extract', () => {
      const res = extractDependencies('blahhhhh:foo:@what\n', config);
      expect(res).toBe(null);
    });
    it('returns results', () => {
      const res = extractDependencies('node_js:\n - 6\n - 8\n', config);
      expect(res).toMatchSnapshot();
      expect(res.deps).toHaveLength(1);
    });
  });
});
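The travis manager shows how small an extractor can be under the refactored interface: it reads the node_js block of .travis.yml and emits a single 'node' dep whose currentVersion is the whole list of majors, or null when no such block exists. A sketch of that shape; the use of js-yaml here is an assumption, not something this diff confirms:

const yaml = require('js-yaml');

// Sketch of a travis-style extractor matching the test and snapshot above.
function extractTravisDependencies(content) {
  let doc;
  try {
    doc = yaml.safeLoad(content);
  } catch (err) {
    return null; // matches the 'blahhhhh:foo:@what' null case
  }
  if (!doc || !Array.isArray(doc.node_js)) {
    return null;
  }
  return {
    deps: [
      {
        depName: 'node',
        currentVersion: doc.node_js, // e.g. [6, 8]
      },
    ],
  };
}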
test/util/package-rules.spec.js (new file, 241 lines)
@@ -0,0 +1,241 @@
const { applyPackageRules } = require('../../lib/util/package-rules');

describe('applyPackageRules()', () => {
  const config1 = {
    foo: 'bar',

    packageRules: [
      {
        packageNames: ['a', 'b'],
        x: 2,
      },
      {
        packagePatterns: ['a', 'b'],
        excludePackageNames: ['aa'],
        excludePackagePatterns: ['d'],
        y: 2,
      },
    ],
  };
  it('applies both rules for a', () => {
    const dep = {
      depName: 'a',
    };
    const res = applyPackageRules({ ...config1, ...dep });
    expect(res.x).toBe(2);
    expect(res.y).toBe(2);
  });
  it('applies both rules for b', () => {
    const dep = {
      depName: 'b',
    };
    const res = applyPackageRules({ ...config1, ...dep });
    expect(res.x).toBe(2);
    expect(res.y).toBe(2);
  });
  it('applies the second rule', () => {
    const dep = {
      depName: 'abc',
    };
    const res = applyPackageRules({ ...config1, ...dep });
    expect(res.x).toBeUndefined();
    expect(res.y).toBe(2);
  });
  it('applies the second second rule', () => {
    const dep = {
      depName: 'bc',
    };
    const res = applyPackageRules({ ...config1, ...dep });
    expect(res.x).toBeUndefined();
    expect(res.y).toBe(2);
  });
  it('excludes package name', () => {
    const dep = {
      depName: 'aa',
    };
    const res = applyPackageRules({ ...config1, ...dep });
    expect(res.x).toBeUndefined();
    expect(res.y).toBeUndefined();
  });
  it('excludes package pattern', () => {
    const dep = {
      depName: 'bcd',
    };
    const res = applyPackageRules({ ...config1, ...dep });
    expect(res.x).toBeUndefined();
    expect(res.y).toBeUndefined();
  });
  it('matches anything if missing inclusive rules', () => {
    const config = {
      packageRules: [
        {
          excludePackageNames: ['foo'],
          x: 1,
        },
      ],
    };
    const res1 = applyPackageRules({
      ...config,
      depName: 'foo',
    });
    expect(res1.x).toBeUndefined();
    const res2 = applyPackageRules({
      ...config,
      depName: 'bar',
    });
    expect(res2.x).toBeDefined();
  });
  it('supports inclusive or', () => {
    const config = {
      packageRules: [
        {
          packageNames: ['neutrino'],
          packagePatterns: ['^@neutrino\\/'],
          x: 1,
        },
      ],
    };
    const res1 = applyPackageRules({ ...config, depName: 'neutrino' });
    expect(res1.x).toBeDefined();
    const res2 = applyPackageRules({
      ...config,
      depName: '@neutrino/something',
    });
    expect(res2.x).toBeDefined();
  });
  it('filters depType', () => {
    const config = {
      packageRules: [
        {
          depTypeList: ['dependencies', 'peerDependencies'],
          packageNames: ['a'],
          x: 1,
        },
      ],
    };
    const dep = {
      depType: 'dependencies',
      depName: 'a',
    };
    const res = applyPackageRules({ ...config, ...dep });
    expect(res.x).toBe(1);
  });
  it('filters naked depType', () => {
    const config = {
      packageRules: [
        {
          depTypeList: ['dependencies', 'peerDependencies'],
          x: 1,
        },
      ],
    };
    const dep = {
      depType: 'dependencies',
      depName: 'a',
    };
    const res = applyPackageRules({ ...config, ...dep });
    expect(res.x).toBe(1);
  });
  it('filters depType', () => {
    const config = {
      packageRules: [
        {
          depTypeList: ['dependencies', 'peerDependencies'],
          packageNames: ['a'],
          x: 1,
        },
      ],
    };
    const dep = {
      depType: 'devDependencies',
      depName: 'a',
    };
    const res = applyPackageRules({ ...config, ...dep });
    expect(res.x).toBeUndefined();
  });
  it('checks if matchCurrentVersion selector is valid and satisfies the condition on range overlap', () => {
    const config = {
      packageRules: [
        {
          packageNames: ['test'],
          matchCurrentVersion: '<= 2.0.0',
          x: 1,
        },
      ],
    };
    const res1 = applyPackageRules({
      ...config,
      ...{
        depName: 'test',
        currentVersion: '^1.0.0',
      },
    });
    expect(res1.x).toBeDefined();
  });
  it('checks if matchCurrentVersion selector is valid and satisfies the condition on pinned to range overlap', () => {
    const config = {
      packageRules: [
        {
          packageNames: ['test'],
          matchCurrentVersion: '>= 2.0.0',
          x: 1,
        },
      ],
    };
    const res1 = applyPackageRules({
      ...config,
      ...{
        depName: 'test',
        currentVersion: '2.4.6',
      },
    });
    expect(res1.x).toBeDefined();
  });
  it('checks if matchCurrentVersion selector works with static values', () => {
    const config = {
      packageRules: [
        {
          packageNames: ['test'],
          matchCurrentVersion: '4.6.0',
          x: 1,
        },
      ],
    };
    const res1 = applyPackageRules({
      ...config,
      ...{
        depName: 'test',
        currentVersion: '4.6.0',
      },
    });
    expect(res1.x).toBeDefined();
  });
  it('matches paths', () => {
    const config = {
      packageFile: 'examples/foo/package.json',
      packageRules: [
        {
          paths: ['examples/**', 'lib/'],
          x: 1,
        },
      ],
    };
    const res1 = applyPackageRules({
      ...config,
      depName: 'test',
    });
    expect(res1.x).toBeDefined();
    config.packageFile = 'package.json';
    const res2 = applyPackageRules({
      ...config,
      depName: 'test',
    });
    expect(res2.x).toBeUndefined();
    config.packageFile = 'lib/a/package.json';
    const res3 = applyPackageRules({
      ...config,
      depName: 'test',
    });
    expect(res3.x).toBeDefined();
  });
});
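Taken together, the tests define applyPackageRules() per rule: packageNames and packagePatterns form an inclusive-or, a rule with no inclusive matchers matches everything, exclusions veto, and depTypeList, matchCurrentVersion, and paths add further filters; each matching rule's extra keys are merged onto the config in order. A condensed sketch of the name-matching core only (illustrative, not repo source):

// packageNames and packagePatterns are inclusive-or; excludes veto a match.
function matchesPackage(rule, depName) {
  const {
    packageNames = [],
    packagePatterns = [],
    excludePackageNames = [],
    excludePackagePatterns = [],
  } = rule;
  // No inclusive matchers at all means "match anything"
  let positiveMatch =
    packageNames.length === 0 && packagePatterns.length === 0;
  if (packageNames.includes(depName)) {
    positiveMatch = true;
  }
  if (packagePatterns.some(pattern => new RegExp(pattern).test(depName))) {
    positiveMatch = true;
  }
  if (excludePackageNames.includes(depName)) {
    return false;
  }
  if (
    excludePackagePatterns.some(pattern => new RegExp(pattern).test(depName))
  ) {
    return false;
  }
  return positiveMatch;
}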
test/workers/branch/__snapshots__/get-updated.spec.js.snap (new file, 20 lines)
@@ -0,0 +1,20 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/branch/get-updated getUpdatedPackageFiles() handles content change 1`] = `
Object {
  "parentBranch": undefined,
  "updatedPackageFiles": Array [
    Object {
      "contents": "some new content",
      "name": "undefined",
    },
  ],
}
`;

exports[`workers/branch/get-updated getUpdatedPackageFiles() handles empty 1`] = `
Object {
  "parentBranch": undefined,
  "updatedPackageFiles": Array [],
}
`;
@@ -1,107 +0,0 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/branch/lock-files determineLockFileDirs returns all directories if lock file maintenance 1`] = `
Object {
  "npmShrinkwrapDirs": Array [
    "leftend",
  ],
  "packageLockFileDirs": Array [
    "backend",
  ],
  "shrinkwrapYamlDirs": Array [
    "frontend",
  ],
  "yarnLockFileDirs": Array [
    ".",
  ],
}
`;

exports[`workers/branch/lock-files determineLockFileDirs returns directories from updated package files 1`] = `
Object {
  "lernaDirs": Array [],
  "npmShrinkwrapDirs": Array [
    "leftend",
  ],
  "packageLockFileDirs": Array [
    "backend",
  ],
  "shrinkwrapYamlDirs": Array [
    "frontend",
  ],
  "yarnLockFileDirs": Array [
    ".",
  ],
}
`;

exports[`workers/branch/lock-files determineLockFileDirs returns root directory if using lerna package lock 1`] = `
Object {
  "lernaDirs": Array [
    ".",
  ],
  "npmShrinkwrapDirs": Array [],
  "packageLockFileDirs": Array [],
  "shrinkwrapYamlDirs": Array [],
  "yarnLockFileDirs": Array [],
}
`;

exports[`workers/branch/lock-files determineLockFileDirs returns root directory if using yarn workspaces 1`] = `
Object {
  "lernaDirs": Array [],
  "npmShrinkwrapDirs": Array [],
  "packageLockFileDirs": Array [],
  "shrinkwrapYamlDirs": Array [],
  "yarnLockFileDirs": Array [
    ".",
  ],
}
`;

exports[`workers/branch/lock-files getUpdatedLockFiles returns no error and empty lockfiles if lock file maintenance exists 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`workers/branch/lock-files getUpdatedLockFiles returns no error and empty lockfiles if none updated 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`workers/branch/lock-files getUpdatedLockFiles returns no error and empty lockfiles if updateLockFiles false 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`workers/branch/lock-files getUpdatedLockFiles tries lerna npm 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`workers/branch/lock-files getUpdatedLockFiles tries lerna yarn 1`] = `
Object {
  "lockFileErrors": Array [
    Object {
      "lockFile": "yarn.lock",
      "stderr": undefined,
    },
  ],
  "updatedLockFiles": Array [],
}
`;

exports[`workers/branch/lock-files getUpdatedLockFiles tries multiple lock files 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;
test/workers/branch/get-updated.spec.js (new file, 44 lines)
@@ -0,0 +1,44 @@
const npm = require('../../../lib/manager/npm');
const {
  getUpdatedPackageFiles,
} = require('../../../lib/workers/branch/get-updated');
const defaultConfig = require('../../../lib/config/defaults').getConfig();

describe('workers/branch/get-updated', () => {
  describe('getUpdatedPackageFiles()', () => {
    let config;
    beforeEach(() => {
      config = {
        ...defaultConfig,
        upgrades: [],
      };
      npm.updateDependency = jest.fn();
    });
    it('handles empty', async () => {
      const res = await getUpdatedPackageFiles(config);
      expect(res).toMatchSnapshot();
    });
    it('handles null content', async () => {
      config.parentBranch = 'some-branch';
      config.upgrades.push({
        manager: 'npm',
      });
      let e;
      try {
        await getUpdatedPackageFiles(config);
      } catch (err) {
        e = err;
      }
      expect(e).toBeDefined();
    });
    it('handles content change', async () => {
      config.parentBranch = 'some-branch';
      config.upgrades.push({
        manager: 'npm',
      });
      npm.updateDependency.mockReturnValue('some new content');
      const res = await getUpdatedPackageFiles(config);
      expect(res).toMatchSnapshot();
    });
  });
});
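The three cases bound the dispatcher: no upgrades yields an empty result, a manager whose updateDependency returns changed content contributes an updatedPackageFiles entry, and a null update while reusing an existing parentBranch is an error so the worker can retry from the base branch. A sketch of that loop, assuming the get(manager, 'updateDependency') accessor exported by lib/manager; it also explains the snapshot's "name": "undefined", since the test's upgrade carries no packageFile:

const { get } = require('../../manager'); // manager registry accessor

// Sketch of the dispatch the tests exercise; error message illustrative.
async function getUpdatedPackageFilesSketch(config) {
  const updatedFileContents = {};
  for (const upgrade of config.upgrades) {
    const existingContent =
      updatedFileContents[upgrade.packageFile] ||
      (await platform.getFile(upgrade.packageFile, config.parentBranch));
    const updateDependency = get(upgrade.manager, 'updateDependency');
    const newContent = updateDependency(existingContent, upgrade);
    if (!newContent && config.parentBranch) {
      // Stale branch: caller retries with parentBranch unset
      throw new Error('Update failed - retrying without parentBranch');
    }
    if (newContent && newContent !== existingContent) {
      updatedFileContents[upgrade.packageFile] = newContent;
    }
  }
  return {
    parentBranch: config.parentBranch,
    updatedPackageFiles: Object.keys(updatedFileContents).map(name => ({
      name, // the string "undefined" when the upgrade had no packageFile
      contents: updatedFileContents[name],
    })),
  };
}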
@@ -4,18 +4,18 @@ const defaultConfig = require('../../../lib/config/defaults').getConfig();
const schedule = require('../../../lib/workers/branch/schedule');
const checkExisting = require('../../../lib/workers/branch/check-existing');
const parent = require('../../../lib/workers/branch/parent');
const manager = require('../../../lib/manager');
const lockFiles = require('../../../lib/workers/branch/lock-files');
const npmPostExtract = require('../../../lib/manager/npm/post-update');
const commit = require('../../../lib/workers/branch/commit');
const statusChecks = require('../../../lib/workers/branch/status-checks');
const automerge = require('../../../lib/workers/branch/automerge');
const prWorker = require('../../../lib/workers/pr');
const getUpdated = require('../../../lib/workers/branch/get-updated');

jest.mock('../../../lib/manager');
jest.mock('../../../lib/workers/branch/get-updated');
jest.mock('../../../lib/workers/branch/schedule');
jest.mock('../../../lib/workers/branch/check-existing');
jest.mock('../../../lib/workers/branch/parent');
jest.mock('../../../lib/workers/branch/lock-files');
jest.mock('../../../lib/manager/npm/post-update');
jest.mock('../../../lib/workers/branch/status-checks');
jest.mock('../../../lib/workers/branch/automerge');
jest.mock('../../../lib/workers/pr');

@@ -126,24 +126,24 @@ describe('workers/branch', () => {
    expect(res).not.toEqual('pr-edited');
  });
  it('returns if pr creation limit exceeded', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [],
    });
    platform.branchExists.mockReturnValueOnce(false);
    platform.branchExists.mockReturnValue(false);
    config.prHourlyLimitReached = true;
    expect(await branchWorker.processBranch(config)).toEqual(
      'pr-hourly-limit-reached'
    );
  });
  it('returns if no work', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [],
    });

@@ -151,10 +151,10 @@ describe('workers/branch', () => {
    expect(await branchWorker.processBranch(config)).toEqual('no-work');
  });
  it('returns if branch automerged', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [{}],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [{}],
    });

@@ -166,10 +166,10 @@ describe('workers/branch', () => {
    expect(prWorker.ensurePr.mock.calls).toHaveLength(0);
  });
  it('ensures PR and tries automerge', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [{}],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [{}],
    });

@@ -183,10 +183,10 @@ describe('workers/branch', () => {
    expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(1);
  });
  it('ensures PR and adds lock file error comment', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [{}],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [{}],
    });

@@ -202,10 +202,10 @@ describe('workers/branch', () => {
    expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
  });
  it('ensures PR and adds lock file error comment recreate closed', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [{}],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [{}],
    });

@@ -222,26 +222,26 @@ describe('workers/branch', () => {
    expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
  });
  it('swallows branch errors', async () => {
    manager.getUpdatedPackageFiles.mockImplementationOnce(() => {
    getUpdated.getUpdatedPackageFiles.mockImplementationOnce(() => {
      throw new Error('some error');
    });
    await branchWorker.processBranch(config);
  });
  it('throws and swallows branch errors', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [{}],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: true,
      updatedLockFiles: [{}],
    });
    await branchWorker.processBranch(config);
  });
  it('swallows pr errors', async () => {
    manager.getUpdatedPackageFiles.mockReturnValueOnce({
    getUpdated.getUpdatedPackageFiles.mockReturnValueOnce({
      updatedPackageFiles: [{}],
    });
    lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
    npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
      lockFileError: false,
      updatedLockFiles: [{}],
    });
@@ -0,0 +1,29 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`manager/npm/post-update getAdditionalFiles returns no error and empty lockfiles if lock file maintenance exists 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`manager/npm/post-update getAdditionalFiles returns no error and empty lockfiles if updateLockFiles false 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`manager/npm/post-update getAdditionalFiles returns no error and empty lockfiles if lock file maintenance exists 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;

exports[`manager/npm/post-update getAdditionalFiles returns no error and empty lockfiles if updateLockFiles false 1`] = `
Object {
  "lockFileErrors": Array [],
  "updatedLockFiles": Array [],
}
`;
@@ -1,219 +1,30 @@
const fs = require('fs-extra');
const lockFiles = require('../../../lib/workers/branch/lock-files');
const defaultConfig = require('../../../lib/config/defaults').getConfig();
const upath = require('upath');
const lockFiles = require('../../../../lib/manager/npm/post-update');
const defaultConfig = require('../../../../lib/config/defaults').getConfig();
// const upath = require('upath');

const npm = require('../../../lib/workers/branch/npm');
const yarn = require('../../../lib/workers/branch/yarn');
const pnpm = require('../../../lib/workers/branch/pnpm');
const lerna = require('../../../lib/workers/branch/lerna');
const npm = require('../../../../lib/manager/npm/post-update/npm');
const yarn = require('../../../../lib/manager/npm/post-update/yarn');
const pnpm = require('../../../../lib/manager/npm/post-update/pnpm');
const lerna = require('../../../../lib/manager/npm/post-update/lerna');

const {
  hasPackageLock,
  hasNpmShrinkwrap,
  hasYarnLock,
  hasShrinkwrapYaml,
  determineLockFileDirs,
  writeExistingFiles,
  // determineLockFileDirs,
  // writeExistingFiles,
  writeUpdatedPackageFiles,
  getUpdatedLockFiles,
  getAdditionalFiles,
} = lockFiles;

describe('workers/branch/lock-files', () => {
  describe('hasPackageLock', () => {
    let config;
    beforeEach(() => {
      config = {
        ...defaultConfig,
      };
    });
    it('returns true if found and true', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          packageLock: 'some package lock',
        },
      ];
      expect(hasPackageLock(config, 'package.json')).toBe(true);
    });
    it('returns false if found and false', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          packageLock: 'some package lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      expect(hasPackageLock(config, 'backend/package.json')).toBe(false);
    });
    it('throws error if not found', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          packageLock: 'some package lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      let e;
      try {
        hasPackageLock(config, 'frontend/package.json');
      } catch (err) {
        e = err;
      }
      expect(e).toBeDefined();
    });
  });
  describe('hasNpmShrinkWrap', () => {
    let config;
    beforeEach(() => {
      config = {
        ...defaultConfig,
      };
    });
    it('returns true if found and true', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          npmShrinkwrap: 'some package lock',
        },
      ];
      expect(hasNpmShrinkwrap(config, 'package.json')).toBe(true);
    });
    it('returns false if found and false', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          npmShrinkwrap: 'some package lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      expect(hasNpmShrinkwrap(config, 'backend/package.json')).toBe(false);
    });
    it('throws error if not found', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          npmShrinkwrap: 'some package lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      let e;
      try {
        hasNpmShrinkwrap(config, 'frontend/package.json');
      } catch (err) {
        e = err;
      }
      expect(e).toBeDefined();
    });
  });
  describe('hasYarnLock', () => {
    let config;
    beforeEach(() => {
      config = {
        ...defaultConfig,
      };
    });
    it('returns true if found and true', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          yarnLock: '# some yarn lock',
        },
      ];
      expect(hasYarnLock(config, 'package.json')).toBe(true);
    });
    it('returns false if found and false', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          yarnLock: '# some yarn lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      expect(hasYarnLock(config, 'backend/package.json')).toBe(false);
    });
    it('throws error if not found', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          yarnLock: '# some yarn lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      let e;
      try {
        hasYarnLock(config, 'frontend/package.json');
      } catch (err) {
        e = err;
      }
      expect(e).toBeDefined();
    });
  });
  describe('hasShrinkWrapYaml', () => {
    let config;
    beforeEach(() => {
      config = {
        ...defaultConfig,
      };
    });
    it('returns true if found and true', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          shrinkwrapYaml: 'some shrinkwrap',
        },
      ];
      expect(hasShrinkwrapYaml(config, 'package.json')).toBe(true);
    });
    it('returns false if found and false', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          shrinkwrapYaml: 'some shrinkwrap',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      expect(hasShrinkwrapYaml(config, 'backend/package.json')).toBe(false);
    });
    it('throws error if not found', () => {
      config.packageFiles = [
        {
          packageFile: 'package.json',
          shrinkwrapYaml: 'some package lock',
        },
        {
          packageFile: 'backend/package.json',
        },
      ];
      let e;
      try {
        hasShrinkwrapYaml(config, 'frontend/package.json');
      } catch (err) {
        e = err;
      }
      expect(e).toBeDefined();
    });
  });
describe('manager/npm/post-update', () => {
  /*
  describe('determineLockFileDirs', () => {
    let config;
    let packageFiles;
    beforeEach(() => {
      config = {
        ...defaultConfig,
        packageFiles: [
      };
      packageFiles = [
        {
          packageFile: 'package.json',
          yarnLock: '# some yarn lock',

@@ -224,18 +35,17 @@ describe('workers/branch/lock-files', () => {
        },
        {
          packageFile: 'frontend/package.json',
          shrinkwrapYaml: 'some package lock',
          pnpmShrinkwrap: 'some package lock',
        },
        {
          packageFile: 'leftend/package.json',
          npmShrinkwrap: 'some package lock',
        },
      ],
    };
      ];
    });
    it('returns all directories if lock file maintenance', () => {
      config.upgrades = [{ type: 'lockFileMaintenance' }];
      const res = determineLockFileDirs(config);
      const res = determineLockFileDirs(config, packageFiles);
      expect(res).toMatchSnapshot();
    });
    it('returns directories from updated package files', () => {

@@ -258,7 +68,7 @@ describe('workers/branch/lock-files', () => {
          contents: 'some contents',
        },
      ];
      const res = determineLockFileDirs(config);
      const res = determineLockFileDirs(config, packageFiles);
      expect(res).toMatchSnapshot();
    });
    it('returns root directory if using yarn workspaces', () => {

@@ -282,9 +92,9 @@ describe('workers/branch/lock-files', () => {
      ];
      const res = determineLockFileDirs(config);
      expect(res).toMatchSnapshot();
      expect(res.packageLockFileDirs).toHaveLength(0);
      expect(res.yarnLockFileDirs).toHaveLength(1);
      expect(res.yarnLockFileDirs[0]).toEqual('.');
      expect(res.npmLockDirs).toHaveLength(0);
      expect(res.yarnLockDirs).toHaveLength(1);
      expect(res.yarnLockDirs[0]).toEqual('.');
    });
    it('returns root directory if using lerna package lock', () => {
      config.lernaLockFile = 'yarn';

@@ -307,8 +117,8 @@ describe('workers/branch/lock-files', () => {
      ];
      const res = determineLockFileDirs(config);
      expect(res).toMatchSnapshot();
      expect(res.packageLockFileDirs).toHaveLength(0);
      expect(res.yarnLockFileDirs).toHaveLength(0);
      expect(res.npmLockDirs).toHaveLength(0);
      expect(res.yarnLockDirs).toHaveLength(0);
      expect(res.lernaDirs).toHaveLength(1);
      expect(res.lernaDirs[0]).toEqual('.');
    });

@@ -326,13 +136,13 @@ describe('workers/branch/lock-files', () => {
    it('returns if no packageFiles', async () => {
      config.npmrc = 'some-npmrc';
      config.yarnrc = 'some-yarnrc';
      delete config.packageFiles;
      await writeExistingFiles(config);
      await writeExistingFiles(config, {});
      expect(fs.outputFile.mock.calls).toHaveLength(2);
    });
    it('writes files and removes files', async () => {
      config.npmrc = 'some-npmrc';
      config.packageFiles = [
      const packageFiles = {
        npm: [
        {
          packageFile: 'package.json',
          content: { name: 'package 1' },

@@ -349,8 +159,9 @@ describe('workers/branch/lock-files', () => {
          hasNpmShrinkwrap: true,
          content: { name: 'package-3' },
        },
      ];
      await writeExistingFiles(config);
        ],
      };
      await writeExistingFiles(config, packageFiles);
      expect(fs.outputFile.mock.calls).toHaveLength(7);
      expect(fs.remove.mock.calls).toHaveLength(9);
    });

@@ -358,7 +169,8 @@ describe('workers/branch/lock-files', () => {
      const renoPath = upath.join(__dirname, '../../../');
      config.copyLocalLibs = true;
      config.tmpDir = { path: renoPath };
      config.packageFiles = [
      const packageFiles = {
        npm: [
        {
          packageFile: 'client/package.json',
          content: {

@@ -371,9 +183,10 @@ describe('workers/branch/lock-files', () => {
          yarnLock: 'some yarn lock',
          packageLock: 'some package lock',
        },
      ];
        ],
      };
      platform.getFile.mockReturnValue('some lock file contents');
      await writeExistingFiles(config);
      await writeExistingFiles(config, packageFiles);
      expect(fs.outputFile.mock.calls).toHaveLength(5);
      expect(fs.remove.mock.calls).toHaveLength(1);
    });

@@ -381,7 +194,8 @@ describe('workers/branch/lock-files', () => {
      const renoPath = upath.join(__dirname, '../../../');
      config.copyLocalLibs = true;
      config.tmpDir = { path: renoPath };
      config.packageFiles = [
      const packageFiles = {
        npm: [
        {
          packageFile: 'client/package.json',
          content: {

@@ -394,9 +208,10 @@ describe('workers/branch/lock-files', () => {
          yarnLock: 'some yarn lock',
          packageLock: 'some package lock',
        },
      ];
        ],
      };
      platform.getFile.mockReturnValue(null);
      await writeExistingFiles(config);
      await writeExistingFiles(config, packageFiles);
      expect(fs.outputFile.mock.calls).toHaveLength(3);
      expect(fs.remove.mock.calls).toHaveLength(1);
    });

@@ -404,7 +219,8 @@ describe('workers/branch/lock-files', () => {
      const renoPath = upath.join(__dirname, '../../../');
      config.copyLocalLibs = true;
      config.tmpDir = { path: renoPath };
      config.packageFiles = [
      const packageFiles = {
        npm: [
        {
          packageFile: 'client/package.json',
          content: {

@@ -417,13 +233,15 @@ describe('workers/branch/lock-files', () => {
          yarnLock: 'some yarn lock',
          packageLock: 'some package lock',
        },
      ];
        ],
      };
      platform.getFile.mockReturnValue(null);
      await writeExistingFiles(config);
      await writeExistingFiles(config, packageFiles);
      expect(fs.outputFile.mock.calls).toHaveLength(3);
      expect(fs.remove.mock.calls).toHaveLength(1);
    });
  });
  */
  describe('writeUpdatedPackageFiles', () => {
    let config;
    beforeEach(() => {

@@ -465,7 +283,7 @@ describe('workers/branch/lock-files', () => {
      expect(fs.outputFile.mock.calls[1][1].includes('"engines"')).toBe(false);
    });
  });
  describe('getUpdatedLockFiles', () => {
  describe('getAdditionalFiles', () => {
    let config;
    beforeEach(() => {
      config = {

@@ -493,7 +311,7 @@ describe('workers/branch/lock-files', () => {
    });
    it('returns no error and empty lockfiles if updateLockFiles false', async () => {
      config.updateLockFiles = false;
      const res = await getUpdatedLockFiles(config);
      const res = await getAdditionalFiles(config);
      expect(res).toMatchSnapshot();
      expect(res.lockFileErrors).toHaveLength(0);
      expect(res.updatedLockFiles).toHaveLength(0);

@@ -502,33 +320,34 @@ describe('workers/branch/lock-files', () => {
      config.type = 'lockFileMaintenance';
      config.parentBranch = 'renovate/lock-file-maintenance';
      platform.branchExists.mockReturnValueOnce(true);
      const res = await getUpdatedLockFiles(config);
      const res = await getAdditionalFiles(config);
      expect(res).toMatchSnapshot();
      expect(res.lockFileErrors).toHaveLength(0);
      expect(res.updatedLockFiles).toHaveLength(0);
    });
    /*
    it('returns no error and empty lockfiles if none updated', async () => {
      lockFiles.determineLockFileDirs.mockReturnValueOnce({
        packageLockFileDirs: [],
        npmLockDirs: [],
        npmShrinkwrapDirs: [],
        yarnLockFileDirs: [],
        shrinkwrapYamlDirs: [],
        yarnLockDirs: [],
        pnpmShrinkwrapDirs: [],
        lernaDirs: [],
      });
      const res = await getUpdatedLockFiles(config);
      const res = await getAdditionalFiles(config);
      expect(res).toMatchSnapshot();
      expect(res.lockFileErrors).toHaveLength(0);
      expect(res.updatedLockFiles).toHaveLength(0);
    });
    it('tries multiple lock files', async () => {
      lockFiles.determineLockFileDirs.mockReturnValueOnce({
        packageLockFileDirs: ['a', 'b'],
npmLockDirs: ['a', 'b'],
|
||||
npmShrinkwrapDirs: ['f'],
|
||||
yarnLockFileDirs: ['c', 'd'],
|
||||
shrinkwrapYamlDirs: ['e'],
|
||||
yarnLockDirs: ['c', 'd'],
|
||||
pnpmShrinkwrapDirs: ['e'],
|
||||
lernaDirs: [],
|
||||
});
|
||||
const res = await getUpdatedLockFiles(config);
|
||||
const res = await getAdditionalFiles(config);
|
||||
expect(res).toMatchSnapshot();
|
||||
expect(res.lockFileErrors).toHaveLength(0);
|
||||
expect(res.updatedLockFiles).toHaveLength(0);
|
||||
|
@ -538,43 +357,43 @@ describe('workers/branch/lock-files', () => {
|
|||
});
|
||||
it('tries lerna npm', async () => {
|
||||
lockFiles.determineLockFileDirs.mockReturnValueOnce({
|
||||
packageLockFileDirs: ['a', 'b'],
|
||||
npmLockDirs: ['a', 'b'],
|
||||
npmShrinkwrapDirs: [],
|
||||
yarnLockFileDirs: [],
|
||||
shrinkwrapYamlDirs: [],
|
||||
yarnLockDirs: [],
|
||||
pnpmShrinkwrapDirs: [],
|
||||
lernaDirs: ['.'],
|
||||
});
|
||||
config.packageFiles = [];
|
||||
config.lernaLockFile = 'npm';
|
||||
lerna.generateLockFiles.mockReturnValueOnce({ error: false });
|
||||
const res = await getUpdatedLockFiles(config);
|
||||
const res = await getAdditionalFiles(config);
|
||||
expect(res).toMatchSnapshot();
|
||||
});
|
||||
it('tries lerna yarn', async () => {
|
||||
lockFiles.determineLockFileDirs.mockReturnValueOnce({
|
||||
packageLockFileDirs: [],
|
||||
npmLockDirs: [],
|
||||
npmShrinkwrapDirs: [],
|
||||
yarnLockFileDirs: ['c', 'd'],
|
||||
shrinkwrapYamlDirs: [],
|
||||
yarnLockDirs: ['c', 'd'],
|
||||
pnpmShrinkwrapDirs: [],
|
||||
lernaDirs: ['.'],
|
||||
});
|
||||
config.lernaLockFile = 'yarn';
|
||||
lerna.generateLockFiles.mockReturnValueOnce({ error: true });
|
||||
const res = await getUpdatedLockFiles(config);
|
||||
const res = await getAdditionalFiles(config);
|
||||
expect(res).toMatchSnapshot();
|
||||
});
|
||||
it('sets error if receiving null', async () => {
|
||||
lockFiles.determineLockFileDirs.mockReturnValueOnce({
|
||||
packageLockFileDirs: ['a', 'b'],
|
||||
npmLockDirs: ['a', 'b'],
|
||||
npmShrinkwrapDirs: ['f'],
|
||||
yarnLockFileDirs: ['c', 'd'],
|
||||
shrinkwrapYamlDirs: ['e'],
|
||||
yarnLockDirs: ['c', 'd'],
|
||||
pnpmShrinkwrapDirs: ['e'],
|
||||
lernaDirs: [],
|
||||
});
|
||||
npm.generateLockFile.mockReturnValueOnce({ error: true });
|
||||
yarn.generateLockFile.mockReturnValueOnce({ error: true });
|
||||
pnpm.generateLockFile.mockReturnValueOnce({ error: true });
|
||||
const res = await getUpdatedLockFiles(config);
|
||||
const res = await getAdditionalFiles(config);
|
||||
expect(res.lockFileErrors).toHaveLength(3);
|
||||
expect(res.updatedLockFiles).toHaveLength(0);
|
||||
expect(npm.generateLockFile.mock.calls).toHaveLength(3);
|
||||
|
@ -583,21 +402,22 @@ describe('workers/branch/lock-files', () => {
|
|||
});
|
||||
it('adds multiple lock files', async () => {
|
||||
lockFiles.determineLockFileDirs.mockReturnValueOnce({
|
||||
packageLockFileDirs: ['a', 'b'],
|
||||
npmLockDirs: ['a', 'b'],
|
||||
npmShrinkwrapDirs: ['f'],
|
||||
yarnLockFileDirs: ['c', 'd'],
|
||||
shrinkwrapYamlDirs: ['e'],
|
||||
yarnLockDirs: ['c', 'd'],
|
||||
pnpmShrinkwrapDirs: ['e'],
|
||||
lernaDirs: [],
|
||||
});
|
||||
npm.generateLockFile.mockReturnValueOnce('some new lock file contents');
|
||||
yarn.generateLockFile.mockReturnValueOnce('some new lock file contents');
|
||||
pnpm.generateLockFile.mockReturnValueOnce('some new lock file contents');
|
||||
const res = await getUpdatedLockFiles(config);
|
||||
const res = await getAdditionalFiles(config);
|
||||
expect(res.lockFileErrors).toHaveLength(0);
|
||||
expect(res.updatedLockFiles).toHaveLength(3);
|
||||
expect(npm.generateLockFile.mock.calls).toHaveLength(3);
|
||||
expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
|
||||
expect(platform.getFile.mock.calls).toHaveLength(7);
|
||||
});
|
||||
*/
|
||||
});
|
||||
});
|
|
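The renames visible above (packageLockFileDirs to npmLockDirs, yarnLockFileDirs to yarnLockDirs, shrinkwrapYamlDirs to pnpmShrinkwrapDirs) and the getUpdatedLockFiles to getAdditionalFiles rename outline the new post-update flow. As a reading aid only, here is a minimal sketch of how these pieces plausibly fit together, inferred from the mocks in the tests above; the second parameter, the per-directory call signatures, and the lernaDir option are assumptions, not the shipped implementation:

// Sketch only: inferred from the mocked npm/yarn/pnpm generateLockFile and
// lerna.generateLockFiles calls above; not the actual getAdditionalFiles.
async function getAdditionalFiles(config, packageFiles) {
const lockFileErrors = [];
const updatedLockFiles = [];
if (config.updateLockFiles === false) {
return { lockFileErrors, updatedLockFiles };
}
// determineLockFileDirs now takes the extracted packageFiles as a second argument
const dirs = determineLockFileDirs(config, packageFiles);
for (const dir of dirs.npmLockDirs) {
const res = await npm.generateLockFile(dir); // call shape is an assumption
if (res && res.error) lockFileErrors.push(res);
}
for (const dir of dirs.yarnLockDirs) {
const res = await yarn.generateLockFile(dir);
if (res && res.error) lockFileErrors.push(res);
}
for (const dir of dirs.pnpmShrinkwrapDirs) {
const res = await pnpm.generateLockFile(dir);
if (res && res.error) lockFileErrors.push(res);
}
for (const dir of dirs.lernaDirs) {
const res = await lerna.generateLockFiles({ ...config, lernaDir: dir });
if (res && res.error) lockFileErrors.push(res);
}
// The real function additionally reads each regenerated lock file and pushes
// changed ones onto updatedLockFiles before returning.
return { lockFileErrors, updatedLockFiles };
}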
@ -1,4 +1,4 @@
const lernaHelper = require('../../../lib/workers/branch/lerna');
const lernaHelper = require('../../../../lib/manager/npm/post-update/lerna');

jest.mock('child-process-promise');

@ -1,4 +1,4 @@
const npmHelper = require('../../../lib/workers/branch/npm');
const npmHelper = require('../../../../lib/manager/npm/post-update/npm');

const { getInstalledPath } = require('get-installed-path');

@ -1,4 +1,4 @@
const pnpmHelper = require('../../../lib/workers/branch/pnpm');
const pnpmHelper = require('../../../../lib/manager/npm/post-update/pnpm');

const { getInstalledPath } = require('get-installed-path');

@ -1,4 +1,4 @@
const yarnHelper = require('../../../lib/workers/branch/yarn');
const yarnHelper = require('../../../../lib/manager/npm/post-update/yarn');

const { getInstalledPath } = require('get-installed-path');
@ -1,324 +0,0 @@
const path = require('path');
const fs = require('fs');
const npmExtract = require('../../../lib/manager/npm/extract');
const pkgWorker = require('../../../lib/workers/package-file/package');
const depTypeWorker = require('../../../lib/workers/package-file/dep-type');

jest.mock('../../../lib/manager/npm/extract');
jest.mock('../../../lib/workers/package-file/package');

pkgWorker.renovatePackage = jest.fn(() => ['a']);

describe('lib/workers/package-file/dep-type', () => {
describe('renovateDepType(packageContent, config)', () => {
let config;
beforeEach(() => {
config = {
packageFile: 'package.json',
manager: 'npm',
ignoreDeps: ['a', 'b'],
monorepoPackages: ['e'],
workspaceDir: '.',
};
});
it('returns empty if config is disabled', async () => {
config.enabled = false;
const res = await depTypeWorker.renovateDepType({}, config);
expect(res).toMatchObject([]);
});
it('returns empty if no deps found', async () => {
npmExtract.extractDependencies.mockReturnValueOnce(null);
const res = await depTypeWorker.renovateDepType({}, config);
expect(res).toMatchObject([]);
});
it('returns empty if all deps are filtered', async () => {
npmExtract.extractDependencies.mockReturnValueOnce({
deps: [{ depName: 'a' }, { depName: 'b' }, { depName: 'e' }],
});
const res = await depTypeWorker.renovateDepType({}, config);
expect(res).toMatchObject([]);
});
it('returns combined upgrades if all deps are filtered', async () => {
npmExtract.extractDependencies.mockReturnValueOnce({
deps: [{ depName: 'a' }, { depName: 'c' }, { depName: 'd' }],
});
const res = await depTypeWorker.renovateDepType({}, config);
expect(res).toHaveLength(2);
});
it('returns upgrades for meteor', async () => {
config.manager = 'meteor';
const content = fs.readFileSync(
path.resolve('test/_fixtures/meteor/package-1.js'),
'utf8'
);
const res = await depTypeWorker.renovateDepType(content, config);
expect(res).toHaveLength(6);
});
it('returns upgrades for bazel', async () => {
config.manager = 'bazel';
const content = fs.readFileSync(
path.resolve('test/_fixtures/bazel/WORKSPACE1'),
'utf8'
);
const res = await depTypeWorker.renovateDepType(content, config);
expect(res).toHaveLength(4);
});
it('returns upgrades for travis', async () => {
config.manager = 'travis';
const content = fs.readFileSync(
path.resolve('test/_fixtures/node/travis.yml'),
'utf8'
);
const res = await depTypeWorker.renovateDepType(content, config);
expect(res).toHaveLength(1);
});
it('handles malformed meteor', async () => {
config.manager = 'meteor';
const content = 'blah';
const res = await depTypeWorker.renovateDepType(content, config);
expect(res).toHaveLength(0);
});
it('returns upgrades for docker', async () => {
config.manager = 'docker';
config.currentFrom = 'node';
const res = await depTypeWorker.renovateDepType(
'# a comment\nFROM something\n',
config
);
expect(res).toHaveLength(1);
});
it('ignores Dockerfiles with no FROM', async () => {
config.manager = 'docker';
config.currentFrom = 'node';
const res = await depTypeWorker.renovateDepType(
'# a comment\nRUN something\n',
config
);
expect(res).toHaveLength(0);
});
});
describe('getDepConfig(depTypeConfig, dep)', () => {
const depTypeConfig = {
foo: 'bar',

packageRules: [
{
packageNames: ['a', 'b'],
x: 2,
},
{
packagePatterns: ['a', 'b'],
excludePackageNames: ['aa'],
excludePackagePatterns: ['d'],
y: 2,
},
],
};
it('matches anything if missing inclusive rules', () => {
const allConfig = {
packageRules: [
{
excludePackageNames: ['foo'],
x: 1,
},
],
};
const res1 = depTypeWorker.getDepConfig(allConfig, {
depName: 'foo',
});
expect(res1.x).toBeUndefined();
const res2 = depTypeWorker.getDepConfig(allConfig, {
depName: 'bar',
});
expect(res2.x).toBeDefined();
});
it('supports inclusive or', () => {
const nConfig = {
packageRules: [
{
packageNames: ['neutrino'],
packagePatterns: ['^@neutrino\\/'],
x: 1,
},
],
};
const res1 = depTypeWorker.getDepConfig(nConfig, { depName: 'neutrino' });
expect(res1.x).toBeDefined();
const res2 = depTypeWorker.getDepConfig(nConfig, {
depName: '@neutrino/something',
});
expect(res2.x).toBeDefined();
});
it('applies both rules for a', () => {
const dep = {
depName: 'a',
};
const res = depTypeWorker.getDepConfig(depTypeConfig, dep);
expect(res.x).toBe(2);
expect(res.y).toBe(2);
});
it('applies both rules for b', () => {
const dep = {
depName: 'b',
};
const res = depTypeWorker.getDepConfig(depTypeConfig, dep);
expect(res.x).toBe(2);
expect(res.y).toBe(2);
});
it('applies the second rule', () => {
const dep = {
depName: 'abc',
};
const res = depTypeWorker.getDepConfig(depTypeConfig, dep);
expect(res.x).toBeUndefined();
expect(res.y).toBe(2);
});
it('applies the second second rule', () => {
const dep = {
depName: 'bc',
};
const res = depTypeWorker.getDepConfig(depTypeConfig, dep);
expect(res.x).toBeUndefined();
expect(res.y).toBe(2);
});
it('excludes package name', () => {
const dep = {
depName: 'aa',
};
const res = depTypeWorker.getDepConfig(depTypeConfig, dep);
expect(res.x).toBeUndefined();
expect(res.y).toBeUndefined();
});
it('excludes package pattern', () => {
const dep = {
depName: 'bcd',
};
const res = depTypeWorker.getDepConfig(depTypeConfig, dep);
expect(res.x).toBeUndefined();
expect(res.y).toBeUndefined();
});
it('filters depType', () => {
const config = {
packageRules: [
{
depTypeList: ['dependencies', 'peerDependencies'],
packageNames: ['a'],
x: 1,
},
],
};
const dep = {
depType: 'dependencies',
depName: 'a',
};
const res = depTypeWorker.getDepConfig(config, dep);
expect(res.x).toBe(1);
});
it('filters naked depType', () => {
const config = {
packageRules: [
{
depTypeList: ['dependencies', 'peerDependencies'],
x: 1,
},
],
};
const dep = {
depType: 'dependencies',
depName: 'a',
};
const res = depTypeWorker.getDepConfig(config, dep);
expect(res.x).toBe(1);
});
it('filters depType', () => {
const config = {
packageRules: [
{
depTypeList: ['dependencies', 'peerDependencies'],
packageNames: ['a'],
x: 1,
},
],
};
const dep = {
depType: 'devDependencies',
depName: 'a',
};
const res = depTypeWorker.getDepConfig(config, dep);
expect(res.x).toBeUndefined();
});
it('checks if matchCurrentVersion selector is valid and satisfies the condition on range overlap', () => {
const config = {
packageRules: [
{
packageNames: ['test'],
matchCurrentVersion: '<= 2.0.0',
x: 1,
},
],
};
const res1 = depTypeWorker.getDepConfig(config, {
depName: 'test',
currentVersion: '^1.0.0',
});
expect(res1.x).toBeDefined();
});
it('checks if matchCurrentVersion selector is valid and satisfies the condition on pinned to range overlap', () => {
const config = {
packageRules: [
{
packageNames: ['test'],
matchCurrentVersion: '>= 2.0.0',
x: 1,
},
],
};
const res1 = depTypeWorker.getDepConfig(config, {
depName: 'test',
currentVersion: '2.4.6',
});
expect(res1.x).toBeDefined();
});
it('checks if matchCurrentVersion selector works with static values', () => {
const config = {
packageRules: [
{
packageNames: ['test'],
matchCurrentVersion: '4.6.0',
x: 1,
},
],
};
const res1 = depTypeWorker.getDepConfig(config, {
depName: 'test',
currentVersion: '4.6.0',
});
expect(res1.x).toBeDefined();
});
it('matches paths', () => {
const config = {
packageFile: 'examples/foo/package.json',
packageRules: [
{
paths: ['examples/**', 'lib/'],
x: 1,
},
],
};
const res1 = depTypeWorker.getDepConfig(config, {
depName: 'test',
});
expect(res1.x).toBeDefined();
config.packageFile = 'package.json';
const res2 = depTypeWorker.getDepConfig(config, {
depName: 'test',
});
expect(res2.x).toBeUndefined();
config.packageFile = 'lib/a/package.json';
const res3 = depTypeWorker.getDepConfig(config, {
depName: 'test',
});
expect(res3.x).toBeDefined();
});
});
});
@ -1,187 +0,0 @@
const packageFileWorker = require('../../../lib/workers/package-file');
const depTypeWorker = require('../../../lib/workers/package-file/dep-type');
const defaultConfig = require('../../../lib/config/defaults').getConfig();
const yarnLock = require('@yarnpkg/lockfile');

jest.mock('@yarnpkg/lockfile');

jest.mock('../../../lib/workers/package-file/dep-type');
jest.mock('../../../lib/workers/branch/schedule');

describe('packageFileWorker', () => {
describe('renovatePackageFile(config)', () => {
let config;
beforeEach(() => {
config = {
...defaultConfig,
packageFile: 'package.json',
manager: 'npm',
content: {},
repoIsOnboarded: true,
npmrc: '# nothing',
};
depTypeWorker.renovateDepType.mockReturnValue([]);
});
it('returns empty if disabled', async () => {
config.enabled = false;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toEqual([]);
});
it('returns upgrades', async () => {
depTypeWorker.renovateDepType.mockReturnValueOnce([{}]);
depTypeWorker.renovateDepType.mockReturnValueOnce([{}, {}]);
depTypeWorker.renovateDepType.mockReturnValueOnce([]);
depTypeWorker.renovateDepType.mockReturnValueOnce([]);
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(3);
});
it('autodetects dependency pinning true if private', async () => {
config.pinVersions = null;
config.content.private = true;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(0);
});
it('autodetects dependency pinning true if no main', async () => {
config.pinVersions = null;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(0);
});
it('autodetects dependency pinning true', async () => {
config.pinVersions = null;
config.content.main = 'something';
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(0);
});
it('maintains lock files', async () => {
config.lockFileMaintenance.enabled = true;
config.yarnLock = '# some yarn lock';
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(1);
});
it('uses workspaces yarn.lock', async () => {
config.workspaceDir = '.';
platform.getFile.mockReturnValueOnce('# yarn lock');
await packageFileWorker.renovatePackageFile(config);
});
it('skips unparseable yarn.lock', async () => {
config.yarnLock = 'yarn.lock';
await packageFileWorker.renovatePackageFile(config);
});
it('skips unparseable yarn.lock', async () => {
config.yarnLock = 'yarn.lock';
yarnLock.parse.mockReturnValueOnce({ type: 'failure' });
await packageFileWorker.renovatePackageFile(config);
});
it('uses workspace yarn.lock', async () => {
config.workspaceDir = '.';
yarnLock.parse.mockReturnValueOnce({ type: 'success' });
await packageFileWorker.renovatePackageFile(config);
});
it('skips unparseable package-lock.json', async () => {
config.packageLock = 'package-lock.lock';
await packageFileWorker.renovatePackageFile(config);
});
it('parses package-lock.json', async () => {
config.packageLock = 'package-lock.json';
platform.getFile.mockReturnValueOnce('{}');
await packageFileWorker.renovatePackageFile(config);
});
it('skips unparseable npm-shrinkwrap.json', async () => {
config.npmShrinkwrap = 'npm-shrinkwrap.json';
await packageFileWorker.renovatePackageFile(config);
});
it('parses npm-shrinkwrap.json', async () => {
config.npmShrinkwrap = 'npm-shrinkwrap.json';
platform.getFile.mockReturnValueOnce('{}');
await packageFileWorker.renovatePackageFile(config);
});
});
describe('renovateMeteorPackageFile(config)', () => {
let config;
beforeEach(() => {
config = {
...defaultConfig,
packageFile: 'package.js',
manager: 'meteor',
repoIsOnboarded: true,
};
depTypeWorker.renovateDepType.mockReturnValue([]);
});
it('returns empty if disabled', async () => {
config.enabled = false;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toEqual([]);
});
it('returns upgrades', async () => {
depTypeWorker.renovateDepType.mockReturnValueOnce([{}, {}]);
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(2);
});
});
describe('renovateBazelFile(config)', () => {
let config;
beforeEach(() => {
config = {
...defaultConfig,
packageFile: 'WORKSPACE',
manager: 'bazel',
repoIsOnboarded: true,
};
depTypeWorker.renovateDepType.mockReturnValue([]);
});
it('returns empty if disabled', async () => {
config.enabled = false;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toEqual([]);
});
it('returns upgrades', async () => {
depTypeWorker.renovateDepType.mockReturnValueOnce([{}, {}]);
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(2);
});
});
describe('renovateNodeFile(config)', () => {
let config;
beforeEach(() => {
config = {
...defaultConfig,
packageFile: '.travis.yml',
manager: 'travis',
repoIsOnboarded: true,
};
depTypeWorker.renovateDepType.mockReturnValue([]);
});
it('returns empty if disabled', async () => {
config.enabled = false;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toEqual([]);
});
it('returns upgrades', async () => {
depTypeWorker.renovateDepType.mockReturnValueOnce([{}]);
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(1);
});
});
describe('renovateDockerfile', () => {
let config;
beforeEach(() => {
config = {
...defaultConfig,
packageFile: 'Dockerfile',
manager: 'docker',
repoIsOnboarded: true,
};
depTypeWorker.renovateDepType.mockReturnValue([]);
});
it('returns empty if disabled', async () => {
config.enabled = false;
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toEqual([]);
});
it('returns upgrades', async () => {
depTypeWorker.renovateDepType.mockReturnValueOnce([{}, {}]);
const res = await packageFileWorker.renovatePackageFile(config);
expect(res).toHaveLength(2);
});
});
});
@ -1,76 +0,0 @@
const pkgWorker = require('../../../lib/workers/package-file/package');
const defaultConfig = require('../../../lib/config/defaults').getConfig();
const configParser = require('../../../lib/config');

const docker = require('../../../lib/manager/docker/package');
const npm = require('../../../lib/manager/npm/package');
const node = require('../../../lib/manager/travis/package');
const bazel = require('../../../lib/manager/bazel/package');

jest.mock('../../../lib/manager/docker/package');
jest.mock('../../../lib/manager/npm/package');
jest.mock('../../../lib/manager/travis/package');
jest.mock('../../../lib/manager/bazel/package');

describe('lib/workers/package-file/package', () => {
describe('renovatePackage(config)', () => {
let config;
beforeEach(() => {
config = configParser.filterConfig(defaultConfig, 'package');
config.depName = 'foo';
config.currentVersion = '1.0.0';
});
it('returns empty if package is disabled', async () => {
config.enabled = false;
const res = await pkgWorker.renovatePackage(config);
expect(res).toMatchObject([]);
});
it('calls docker', async () => {
docker.getPackageUpdates.mockReturnValueOnce([]);
config.manager = 'docker';
const res = await pkgWorker.renovatePackage(config);
expect(res).toMatchObject([]);
});
it('calls meteor', async () => {
npm.getPackageUpdates.mockReturnValueOnce([]);
config.manager = 'meteor';
const res = await pkgWorker.renovatePackage(config);
expect(res).toMatchObject([]);
});
it('calls node', async () => {
node.getPackageUpdates.mockReturnValueOnce([]);
config.manager = 'travis';
const res = await pkgWorker.renovatePackage(config);
expect(res).toMatchObject([]);
});
it('calls bazel', async () => {
bazel.getPackageUpdates.mockReturnValueOnce([]);
config.manager = 'bazel';
const res = await pkgWorker.renovatePackage(config);
expect(res).toMatchObject([]);
});
it('maps and filters type', async () => {
config.manager = 'npm';
config.major.enabled = false;
npm.getPackageUpdates.mockReturnValueOnce([
{ type: 'pin' },
{ type: 'major' },
{ type: 'minor', enabled: false },
]);
const res = await pkgWorker.renovatePackage(config);
expect(res).toHaveLength(1);
expect(res[0].groupName).toEqual('Pin Dependencies');
});
it('throws', async () => {
npm.getPackageUpdates.mockReturnValueOnce([]);
config.packageFile = 'something-else';
let e;
try {
await pkgWorker.renovatePackage(config);
} catch (err) {
e = err;
}
expect(e).toBeDefined();
});
});
});
@ -1,8 +1,3 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/repository renovateRepository() writes 1`] = `
Object {
"res": undefined,
"status": "onboarding",
}
`;
exports[`workers/repository renovateRepository() runs 1`] = `undefined`;
@ -0,0 +1,32 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/repository/extract/file-match filterIgnoredFiles() ignores partial matches 1`] = `
Array [
"package.json",
]
`;

exports[`workers/repository/extract/file-match filterIgnoredFiles() returns minimatch matches 1`] = `
Array [
"package.json",
]
`;

exports[`workers/repository/extract/file-match getIncludedFiles() returns exact matches 1`] = `
Array [
"frontend/package.json",
]
`;

exports[`workers/repository/extract/file-match getIncludedFiles() returns minimatch matches 1`] = `
Array [
"frontend/package.json",
]
`;

exports[`workers/repository/extract/file-match getMatchingFiles() returns npm files 1`] = `
Array [
"package.json",
"frontend/package.json",
]
`;
@ -0,0 +1,36 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/repository/extract/index extractAllDependencies() runs 1`] = `
Object {
"bazel": Array [
Object {},
],
"buildkite": Array [
Object {},
],
"circleci": Array [
Object {},
],
"docker": Array [
Object {},
],
"docker-compose": Array [
Object {},
],
"meteor": Array [
Object {},
],
"npm": Array [
Object {},
],
"nvm": Array [
Object {},
],
"pip_requirements": Array [
Object {},
],
"travis": Array [
Object {},
],
}
`;
@ -0,0 +1,11 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/repository/extract/manager-files getManagerPackageFiles() returns files 1`] = `
Array [
Object {
"manager": "npm",
"packageFile": "package.json",
"some": "result",
},
]
`;
51 test/workers/repository/extract/file-match.spec.js Normal file
@ -0,0 +1,51 @@
const fileMatch = require('../../../../lib/workers/repository/extract/file-match');

describe('workers/repository/extract/file-match', () => {
const fileList = ['package.json', 'frontend/package.json'];
describe('getIncludedFiles()', () => {
it('returns fileList if no includePaths', () => {
const res = fileMatch.getIncludedFiles(fileList, []);
expect(res).toEqual(fileList);
});
it('returns exact matches', () => {
const includePaths = ['frontend/package.json'];
const res = fileMatch.getIncludedFiles(fileList, includePaths);
expect(res).toMatchSnapshot();
expect(res).toHaveLength(1);
});
it('returns minimatch matches', () => {
const includePaths = ['frontend/**'];
const res = fileMatch.getIncludedFiles(fileList, includePaths);
expect(res).toMatchSnapshot();
expect(res).toHaveLength(1);
});
});
describe('filterIgnoredFiles()', () => {
it('returns fileList if no ignoredPaths', () => {
const res = fileMatch.filterIgnoredFiles(fileList, []);
expect(res).toEqual(fileList);
});
it('ignores partial matches', () => {
const ignoredPaths = ['frontend'];
const res = fileMatch.filterIgnoredFiles(fileList, ignoredPaths);
expect(res).toMatchSnapshot();
expect(res).toHaveLength(1);
});
it('returns minimatch matches', () => {
const ignoredPaths = ['frontend/**'];
const res = fileMatch.filterIgnoredFiles(fileList, ignoredPaths);
expect(res).toMatchSnapshot();
expect(res).toHaveLength(1);
});
});
describe('getMatchingFiles()', () => {
it('returns npm files', () => {
fileList.push('Dockerfile');
const res = fileMatch.getMatchingFiles(fileList, 'npm', [
'(^|/)package.json$',
]);
expect(res).toMatchSnapshot();
expect(res).toHaveLength(2);
});
});
});
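These tests pin down the file-match contract: includePaths accept exact paths or minimatch globs, ignorePaths match by substring or glob (so a bare 'frontend' ignores everything under it), and each manager supplies regex-style fileMatch patterns. A minimal sketch of functions with that behaviour, reusing the minimatch-based include/ignore logic this commit adds elsewhere; it is an illustration of the contract, not the shipped module:

const minimatch = require('minimatch');

// Keep only files named by an includePath, exactly or via glob.
function getIncludedFiles(fileList, includePaths) {
if (!(includePaths && includePaths.length)) return fileList;
return fileList.filter(file =>
includePaths.some(
includePath => file === includePath || minimatch(file, includePath)
)
);
}

// Drop files that any ignorePath matches, by substring or glob.
function filterIgnoredFiles(fileList, ignorePaths) {
if (!(ignorePaths && ignorePaths.length)) return fileList;
return fileList.filter(
file =>
!ignorePaths.some(
ignorePath => file.includes(ignorePath) || minimatch(file, ignorePath)
)
);
}

// Collect files matching any of a manager's regex patterns.
function getMatchingFiles(fileList, manager, fileMatchPatterns) {
let matchedFiles = [];
for (const pattern of fileMatchPatterns) {
matchedFiles = matchedFiles.concat(
fileList.filter(file => file.match(new RegExp(pattern)))
);
}
return matchedFiles;
}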
21 test/workers/repository/extract/index.spec.js Normal file
@ -0,0 +1,21 @@
const managerFiles = require('../../../../lib/workers/repository/extract/manager-files');
const {
extractAllDependencies,
} = require('../../../../lib/workers/repository/extract');

jest.mock('../../../../lib/workers/repository/extract/manager-files');

describe('workers/repository/extract/index', () => {
describe('extractAllDependencies()', () => {
let config;
beforeEach(() => {
jest.resetAllMocks();
config = { ...require('../../../_fixtures/config') };
});
it('runs', async () => {
managerFiles.getManagerPackageFiles.mockReturnValue([{}]);
const res = await extractAllDependencies(config);
expect(res).toMatchSnapshot();
});
});
});
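Judging by the snapshot earlier in this commit (an object keyed by every manager name) and the getManagerConfig helper added to lib/config, extractAllDependencies plausibly loops over getManagerList() and collects each manager's package files. A rough sketch under those assumptions; the helper names are the real exports, but the body is an approximation:

// Sketch only: result shape inferred from the snapshot above.
async function extractAllDependencies(config) {
const extractions = {};
for (const manager of getManagerList()) {
// Flatten language- and manager-level config into one object.
const managerConfig = getManagerConfig(config, manager);
const packageFiles = await getManagerPackageFiles(config, managerConfig);
if (packageFiles && packageFiles.length) {
extractions[manager] = packageFiles;
}
}
return extractions;
}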
35 test/workers/repository/extract/manager-files.spec.js Normal file
@ -0,0 +1,35 @@
const {
getManagerPackageFiles,
} = require('../../../../lib/workers/repository/extract/manager-files');
const fileMatch = require('../../../../lib/workers/repository/extract/file-match');
const npm = require('../../../../lib/manager/npm');

jest.mock('../../../../lib/workers/repository/extract/file-match');

describe('workers/repository/extract/manager-files', () => {
describe('getManagerPackageFiles()', () => {
let config;
beforeEach(() => {
jest.resetAllMocks();
config = { ...require('../../../_fixtures/config') };
});
it('returns empty if manager is disabled', async () => {
const managerConfig = { manager: 'travis', enabled: false };
const res = await getManagerPackageFiles(config, managerConfig);
expect(res).toHaveLength(0);
});
it('returns empty if manager is not enabled', async () => {
config.enabledManagers = ['npm'];
const managerConfig = { manager: 'docker', enabled: true };
const res = await getManagerPackageFiles(config, managerConfig);
expect(res).toHaveLength(0);
});
it('returns files', async () => {
const managerConfig = { manager: 'npm', enabled: true };
fileMatch.getMatchingFiles.mockReturnValue(['package.json']);
npm.extractDependencies = jest.fn(() => ({ some: 'result' }));
const res = await getManagerPackageFiles(config, managerConfig);
expect(res).toMatchSnapshot();
});
});
});
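The three cases above (manager disabled, manager missing from an enabledManagers whitelist, normal extraction) imply roughly this flow. A sketch only: the fileMatch lookup and the extractDependencies argument list are assumptions, though the returned shape matches the manager-files snapshot:

// Sketch of the tested behaviour, not the shipped implementation.
async function getManagerPackageFiles(config, managerConfig) {
const { manager, enabled } = managerConfig;
if (!enabled) {
return []; // manager explicitly disabled
}
if (
config.enabledManagers &&
config.enabledManagers.length &&
!config.enabledManagers.includes(manager)
) {
return []; // a whitelist exists and this manager is not on it
}
const fileList = await platform.getFileList();
const matchedFiles = fileMatch.getMatchingFiles(
fileList,
manager,
managerConfig.fileMatch
);
const packageFiles = [];
for (const packageFile of matchedFiles) {
const content = await platform.getFile(packageFile);
const res = get(manager, 'extractDependencies')(content, packageFile, managerConfig);
if (res) {
packageFiles.push({ ...res, packageFile, manager });
}
}
return packageFiles;
}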
@ -1,35 +1,20 @@
const { initRepo } = require('../../../lib/workers/repository/init');
const { determineUpdates } = require('../../../lib/workers/repository/updates');
const {
writeUpdates,
} = require('../../../lib/workers/repository/process/write');
const { renovateRepository } = require('../../../lib/workers/repository/index');
const process = require('../../../lib/workers/repository/process');

jest.mock('../../../lib/workers/repository/init');
jest.mock('../../../lib/workers/repository/init/apis');
jest.mock('../../../lib/workers/repository/updates');
jest.mock('../../../lib/workers/repository/onboarding/pr');
jest.mock('../../../lib/workers/repository/process/write');
jest.mock('../../../lib/workers/repository/finalise');
jest.mock('../../../lib/manager');
jest.mock('delay');

let config;
beforeEach(() => {
jest.resetAllMocks();
config = require('../../_fixtures/config');
});
jest.mock('../../../lib/workers/repository/process');
jest.mock('../../../lib/workers/repository/result');
jest.mock('../../../lib/workers/repository/error');

describe('workers/repository', () => {
describe('renovateRepository()', () => {
it('writes', async () => {
initRepo.mockReturnValue({});
determineUpdates.mockReturnValue({
repoIsOnboarded: true,
branches: [{ type: 'minor' }, { type: 'pin' }],
let config;
beforeEach(() => {
config = require('../../_fixtures/config');
});
writeUpdates.mockReturnValueOnce('done');
const res = await renovateRepository(config, 'some-token');
it('runs', async () => {
process.processRepo = jest.fn(() => ({}));
const res = await renovateRepository(config);
expect(res).toMatchSnapshot();
});
});
|
@ -78,13 +78,6 @@ describe('workers/repository/onboarding/branch', () => {
|
|||
}
|
||||
expect(e).toBeDefined();
|
||||
});
|
||||
it('creates onboarding branch', async () => {
|
||||
platform.getFileList.mockReturnValue(['package.json']);
|
||||
const res = await checkOnboardingBranch(config);
|
||||
expect(res.repoIsOnboarded).toBe(false);
|
||||
expect(res.branchList).toEqual(['renovate/configure']);
|
||||
expect(platform.setBaseBranch.mock.calls).toHaveLength(1);
|
||||
});
|
||||
it('creates onboarding branch with greenkeeper migration', async () => {
|
||||
platform.getFileList.mockReturnValue(['package.json']);
|
||||
const pJsonContent = JSON.stringify({
|
||||
|
|
|
@ -17,11 +17,15 @@ describe('workers/repository/onboarding/pr', () => {
warnings: [],
description: [],
};
packageFiles = [{ packageFile: 'package.json' }];
packageFiles = { npm: [{ packageFile: 'package.json' }] };
branches = [];
platform.createPr.mockReturnValue({});
});
let createPrBody;
it('returns if onboarded', async () => {
config.repoIsOnboarded = true;
await ensureOnboardingPr(config, packageFiles, branches);
});
it('creates PR', async () => {
await ensureOnboardingPr(config, packageFiles, branches);
expect(platform.createPr.mock.calls).toHaveLength(1);
@ -0,0 +1,61 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`workers/repository/process/fetch fetchUpdates() fetches updates 1`] = `
Object {
"npm": Array [
Object {
"deps": Array [
Object {
"depName": "aaa",
"updates": Array [
"a",
"b",
],
},
],
"packageFile": "package.json",
},
],
}
`;

exports[`workers/repository/process/fetch fetchUpdates() handles empty deps 1`] = `
Object {
"npm": Array [
Object {
"deps": Array [],
"packageFile": "package.json",
},
],
}
`;

exports[`workers/repository/process/fetch fetchUpdates() handles ignores and disabled 1`] = `
Object {
"npm": Array [
Object {
"deps": Array [
Object {
"depName": "abcd",
"skipReason": "ignored",
"updates": Array [],
},
Object {
"depName": "zzzz",
"skipReason": "monorepo",
"updates": Array [],
},
Object {
"depName": "foo",
"skipReason": "disabled",
"updates": Array [],
},
],
"monorepoPackages": Array [
"zzzz",
],
"packageFile": "package.json",
},
],
}
`;
@ -1,18 +1,23 @@
const {
extractAndUpdate,
} = require('../../../../lib/workers/repository/process/extract-update');
const updates = require('../../../../lib/workers/repository/updates');
const branchify = require('../../../../lib/workers/repository/updates/branchify');

jest.mock('../../../../lib/manager');
jest.mock('../../../../lib/workers/repository/updates');
jest.mock('../../../../lib/workers/repository/process/sort');
jest.mock('../../../../lib/workers/repository/process/write');
jest.mock('../../../../lib/workers/repository/process/sort');
jest.mock('../../../../lib/workers/repository/process/fetch');
jest.mock('../../../../lib/workers/repository/updates/branchify');
jest.mock('../../../../lib/workers/repository/extract');

branchify.branchifyUpgrades.mockReturnValueOnce({});

describe('workers/repository/process/extract-update', () => {
describe('extractAndUpdate()', () => {
it('runs', async () => {
updates.determineUpdates.mockReturnValue({ repoIsOnboarded: true });
await extractAndUpdate();
const config = {
repoIsOnboarded: true,
};
await extractAndUpdate(config);
});
});
});
67 test/workers/repository/process/fetch.spec.js Normal file
@ -0,0 +1,67 @@
const {
fetchUpdates,
} = require('../../../../lib/workers/repository/process/fetch');

const npm = require('../../../../lib/manager/npm');

describe('workers/repository/process/fetch', () => {
describe('fetchUpdates()', () => {
let config;
beforeEach(() => {
jest.resetAllMocks();
config = require('../../../_fixtures/config');
});
it('handles empty deps', async () => {
const packageFiles = {
npm: [{ packageFile: 'package.json', deps: [] }],
};
await fetchUpdates(config, packageFiles);
expect(packageFiles).toMatchSnapshot();
});
it('handles ignores and disabled', async () => {
config.ignoreDeps = ['abcd'];
config.packageRules = [
{
packageNames: ['foo'],
enabled: false,
},
];
const packageFiles = {
npm: [
{
packageFile: 'package.json',
deps: [
{ depName: 'abcd' },
{ depName: 'zzzz' },
{ depName: 'foo' },
],
monorepoPackages: ['zzzz'],
},
],
};
await fetchUpdates(config, packageFiles);
expect(packageFiles).toMatchSnapshot();
expect(packageFiles.npm[0].deps[0].skipReason).toEqual('ignored');
expect(packageFiles.npm[0].deps[0].updates).toHaveLength(0);
expect(packageFiles.npm[0].deps[1].skipReason).toEqual('monorepo');
expect(packageFiles.npm[0].deps[1].updates).toHaveLength(0);
expect(packageFiles.npm[0].deps[2].skipReason).toEqual('disabled');
expect(packageFiles.npm[0].deps[2].updates).toHaveLength(0);
});
it('fetches updates', async () => {
const packageFiles = {
npm: [
{
packageFile: 'package.json',
deps: [{ depName: 'aaa' }],
},
],
};
npm.getPackageUpdates = jest.fn(() => ['a', 'b']);
await fetchUpdates(config, packageFiles);
expect(packageFiles).toMatchSnapshot();
expect(packageFiles.npm[0].deps[0].skipReason).toBeUndefined();
expect(packageFiles.npm[0].deps[0].updates).toHaveLength(2);
});
});
});
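The fixtures and snapshot pin down the per-dependency decision order: ignoreDeps wins, then monorepoPackages, then a packageRules entry with enabled: false; only deps with no skipReason get a real lookup via the manager's getPackageUpdates. A condensed sketch under those assumptions; applyPackageRules is a hypothetical stand-in for the real packageRules matching:

// Sketch: decision order inferred from the expectations above.
async function fetchDepUpdates(packageFileConfig, dep) {
dep.updates = [];
if ((packageFileConfig.ignoreDeps || []).includes(dep.depName)) {
dep.skipReason = 'ignored';
} else if ((packageFileConfig.monorepoPackages || []).includes(dep.depName)) {
dep.skipReason = 'monorepo';
} else {
const depConfig = applyPackageRules(packageFileConfig, dep); // hypothetical helper
if (depConfig.enabled === false) {
dep.skipReason = 'disabled';
} else {
dep.updates = await get(depConfig.manager, 'getPackageUpdates')(depConfig);
}
}
}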
54 test/workers/repository/process/limits.spec.js Normal file
@ -0,0 +1,54 @@
const moment = require('moment');
const limits = require('../../../../lib/workers/repository/process/limits');

let config;
beforeEach(() => {
jest.resetAllMocks();
config = { ...require('../../../_fixtures/config') };
});

describe('workers/repository/process/limits', () => {
describe('getPrHourlyRemaining()', () => {
it('calculates hourly limit remaining', async () => {
config.prHourlyLimit = 2;
platform.getPrList.mockReturnValueOnce([
{ created_at: moment().format() },
]);
const res = await limits.getPrHourlyRemaining(config);
expect(res).toEqual(1);
});
it('returns 99 if errored', async () => {
config.prHourlyLimit = 2;
platform.getPrList.mockReturnValueOnce([null]);
const res = await limits.getPrHourlyRemaining(config);
expect(res).toEqual(99);
});
});
describe('getConcurrentPrsRemaining()', () => {
it('calculates concurrent limit remaining', async () => {
config.prConcurrentLimit = 20;
platform.branchExists.mockReturnValueOnce(true);
const branches = [{}, {}];
const res = await limits.getConcurrentPrsRemaining(config, branches);
expect(res).toEqual(19);
});
it('returns 99 if no concurrent limit', async () => {
const res = await limits.getConcurrentPrsRemaining(config, []);
expect(res).toEqual(99);
});
});
describe('getPrsRemaining()', () => {
it('returns hourly limit', async () => {
limits.getPrHourlyRemaining = jest.fn(() => 5);
limits.getConcurrentPrsRemaining = jest.fn(() => 10);
const res = await limits.getPrsRemaining();
expect(res).toEqual(5);
});
it('returns concurrent limit', async () => {
limits.getPrHourlyRemaining = jest.fn(() => 10);
limits.getConcurrentPrsRemaining = jest.fn(() => 5);
const res = await limits.getPrsRemaining();
expect(res).toEqual(5);
});
});
});
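Both getPrsRemaining tests return whichever of the two mocked values is smaller, so the combinator is presumably a minimum. A one-function sketch; the module.exports indirection matches how the tests stub the two helpers, while everything else is assumption:

// Sketch: each helper is assumed to return 99 when its limit is unset or
// cannot be computed, so 99 acts as "effectively unlimited".
async function getPrsRemaining(config, branches) {
const hourlyRemaining = await module.exports.getPrHourlyRemaining(config);
const concurrentRemaining = await module.exports.getConcurrentPrsRemaining(
config,
branches
);
return Math.min(hourlyRemaining, concurrentRemaining);
}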
@ -2,9 +2,10 @@ const {
writeUpdates,
} = require('../../../../lib/workers/repository/process/write');
const branchWorker = require('../../../../lib/workers/branch');
const moment = require('moment');
const limits = require('../../../../lib/workers/repository/process/limits');

branchWorker.processBranch = jest.fn();
limits.getPrsRemaining = jest.fn(() => 99);

let config;
beforeEach(() => {

@ -14,44 +15,19 @@ beforeEach(() => {

describe('workers/repository/write', () => {
describe('writeUpdates()', () => {
it('calculates hourly limit remaining', async () => {
config.branches = [];
config.prHourlyLimit = 1;
platform.getPrList.mockReturnValueOnce([
{ created_at: moment().format() },
]);
const res = await writeUpdates(config);
expect(res).toEqual('done');
});
it('calculates concurrent limit remaining', async () => {
config.branches = ['renovate/chalk-2.x'];
config.prConcurrentLimit = 1;
platform.getPrList.mockReturnValueOnce([
{ created_at: moment().format() },
]);
platform.branchExists.mockReturnValueOnce(true);
const res = await writeUpdates(config);
expect(res).toEqual('done');
});
it('handles error in calculation', async () => {
config.branches = [];
config.prHourlyLimit = 1;
platform.getPrList.mockReturnValueOnce([{}, null]);
const res = await writeUpdates(config);
expect(res).toEqual('done');
});
const packageFiles = {};
it('runs pins first', async () => {
config.branches = [{ isPin: true }, {}, {}];
const res = await writeUpdates(config);
const branches = [{ isPin: true }, {}, {}];
const res = await writeUpdates(config, packageFiles, branches);
expect(res).toEqual('done');
expect(branchWorker.processBranch.mock.calls).toHaveLength(1);
});
it('stops after automerge', async () => {
config.branches = [{}, {}, {}, {}];
const branches = [{}, {}, {}, {}];
branchWorker.processBranch.mockReturnValueOnce('created');
branchWorker.processBranch.mockReturnValueOnce('delete');
branchWorker.processBranch.mockReturnValueOnce('automerged');
const res = await writeUpdates(config);
const res = await writeUpdates(config, packageFiles, branches);
expect(res).toEqual('automerged');
expect(branchWorker.processBranch.mock.calls).toHaveLength(3);
});
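The two expectations here are telling: 'runs pins first' processes only one of three branches, and 'stops after automerge' processes three of four. That suggests a loop like the following; the early-return details are assumptions inferred from the call counts, not the shipped code:

// Sketch: pin branches are handled first, and an automerge aborts the loop
// so the repository can be re-processed from the changed base branch.
async function writeUpdates(config, packageFiles, allBranches) {
const branches = [...allBranches].sort(
(a, b) => (b.isPin === true) - (a.isPin === true)
);
for (const branch of branches) {
const res = await branchWorker.processBranch({ ...config, ...branch });
if (res === 'automerged') {
return 'automerged'; // base branch just changed under us; stop here
}
if (branch.isPin) {
// Assumption: wait for pin PRs to be merged before raising upgrade PRs.
return 'done';
}
}
return 'done';
}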
15 test/workers/repository/result.spec.js Normal file
@ -0,0 +1,15 @@
const { processResult } = require('../../../lib/workers/repository/result');

let config;
beforeEach(() => {
jest.resetAllMocks();
config = require('../../_fixtures/config');
});

describe('workers/repository/result', () => {
describe('processResult()', () => {
it('runs', () => {
processResult(config, 'done');
});
});
});
@ -0,0 +1,220 @@
|
|||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`workers/repository/updates/flatten flattenUpdates() flattens 1`] = `
|
||||
Array [
|
||||
Object {
|
||||
"assignees": Array [],
|
||||
"automerge": false,
|
||||
"automergeComment": "automergeComment",
|
||||
"automergeType": "pr",
|
||||
"branchName": "{{{branchPrefix}}}{{{managerBranchPrefix}}}{{{branchTopic}}}",
|
||||
"branchPrefix": "renovate/",
|
||||
"branchTopic": "{{{depNameSanitized}}}-{{{newVersionMajor}}}.x",
|
||||
"bumpVersion": null,
|
||||
"commitBody": null,
|
||||
"commitMessage": "{{{commitMessagePrefix}}} {{{commitMessageAction}}} {{{commitMessageTopic}}} {{{commitMessageExtra}}} {{{commitMessageSuffix}}}",
|
||||
"commitMessageAction": "Update",
|
||||
"commitMessageExtra": "to {{#if isMajor}}v{{{newVersionMajor}}}{{else}}{{#unless isRange}}v{{/unless}}{{{newVersion}}}{{/if}}",
|
||||
"commitMessagePrefix": null,
|
||||
"commitMessageTopic": "dependency {{depName}}",
|
||||
"copyLocalLibs": false,
|
||||
"depName": "@org/a",
|
||||
"depNameSanitized": "org-a",
|
||||
"errors": Array [],
|
||||
"gitAuthor": null,
|
||||
"gitPrivateKey": null,
|
||||
"group": Object {
|
||||
"branchTopic": "{{{groupSlug}}}",
|
||||
"commitMessageTopic": "{{{groupName}}}",
|
||||
"prBody": "This Pull Request renovates the package group \\"{{{groupName}}}\\".\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#each upgrades as |upgrade|}}\\n- {{#if repositoryUrl}}[{{{upgrade.depName}}}]({{upgrade.repositoryUrl}}){{else}}\`{{{depName}}}\`{{/if}}{{#if depType}} (\`{{{depType}}}\`){{/if}}: from \`{{{upgrade.currentVersion}}}\` to \`{{{upgrade.newVersion}}}\`\\n{{/each}}\\n\\n{{#if hasReleaseNotes}}\\n# Release Notes\\n{{#each upgrades as |upgrade|}}\\n{{#if upgrade.hasReleaseNotes}}\\n<details>\\n<summary>{{upgrade.githubName}}</summary>\\n\\n{{#each upgrade.releases as |release|}}\\n{{#if release.releaseNotes}}\\n### [\`v{{{release.version}}}\`]({{{release.releaseNotes.url}}})\\n{{#if release.compare.url}}\\n[Compare Source]({{release.compare.url}})\\n{{/if}}\\n{{{release.releaseNotes.body}}}\\n\\n---\\n\\n{{/if}}\\n{{/each}}\\n\\n</details>\\n{{/if}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if isPin}}\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
|
||||
},
|
||||
"groupName": null,
|
||||
"groupSlug": null,
|
||||
"labels": Array [],
|
||||
"lazyGrouping": true,
|
||||
"lockFileMaintenance": Object {
|
||||
"branchTopic": "lock-file-maintenance",
|
||||
"commitMessageAction": "Lock file maintenance",
|
||||
"commitMessageExtra": null,
|
||||
"commitMessageTopic": null,
|
||||
"enabled": true,
|
||||
"groupName": null,
|
||||
"prBody": "This Pull Request updates \`package.json\` lock files to use the latest dependency versions.\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
|
||||
"rebaseStalePrs": true,
|
||||
"recreateClosed": true,
|
||||
"schedule": Array [
|
||||
"before 5am on monday",
|
||||
],
|
||||
},
|
||||
"manager": "npm",
|
||||
"managerBranchPrefix": "",
|
||||
"newVersion": "1.0.0",
|
||||
"npmToken": null,
|
||||
"npmrc": null,
|
||||
"packageFile": "package.json ",
|
||||
"prBody": "This Pull Request {{#if isRollback}}rolls back{{else}}updates{{/if}} dependency {{#if repositoryUrl}}[{{{depName}}}]({{{repositoryUrl}}}){{else}}\`{{{depName}}}\`{{/if}} from \`{{#unless isRange}}{{#unless isPin}}v{{/unless}}{{/unless}}{{{currentVersion}}}\` to \`{{#unless isRange}}v{{/unless}}{{{newVersion}}}\`{{#if isRollback}}. This is necessary and important because \`v{{{currentVersion}}}\` cannot be found in the npm registry - probably because of it being unpublished.{{/if}}\\n{{#if hasTypes}}\\n\\nThis PR also includes an upgrade to the corresponding [@types/{{{depName}}}](https://npmjs.com/package/@types/{{{depName}}}) package.\\n{{/if}}\\n{{#if releases.length}}\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if isPin}}\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n{{/if}}\\n{{#if hasReleaseNotes}}\\n\\n<details>\\n<summary>Release Notes</summary>\\n\\n{{#each releases as |release|}}\\n{{#if release.releaseNotes}}\\n### [\`v{{{release.version}}}\`]({{{release.releaseNotes.url}}})\\n{{#if release.compare.url}}\\n[Compare Source]({{release.compare.url}})\\n{{/if}}\\n{{{release.releaseNotes.body}}}\\n\\n---\\n\\n{{/if}}\\n{{/each}}\\n</details>\\n{{/if}}\\n\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
"prConcurrentLimit": 0,
"prCreation": "immediate",
"prHourlyLimit": 0,
"prNotPendingHours": 25,
"prTitle": null,
"rebaseStalePrs": null,
"recreateClosed": false,
"requiredStatusChecks": Array [],
"reviewers": Array [],
"schedule": Array [],
"semanticCommitScope": "deps",
"semanticCommitType": "chore",
"semanticCommits": null,
"statusCheckVerify": false,
"timezone": null,
"unpublishSafe": false,
"updateLockFiles": true,
"updateNotScheduled": true,
"warnings": Array [],
"yarnrc": null,
},
Object {
"assignees": Array [],
"automerge": false,
"automergeComment": "automergeComment",
"automergeType": "pr",
"branchName": "{{{branchPrefix}}}{{{managerBranchPrefix}}}{{{branchTopic}}}",
"branchPrefix": "renovate/",
"branchTopic": "{{{depNameSanitized}}}-{{{newVersionMajor}}}.x",
"bumpVersion": null,
"commitBody": null,
"commitMessage": "{{{commitMessagePrefix}}} {{{commitMessageAction}}} {{{commitMessageTopic}}} {{{commitMessageExtra}}} {{{commitMessageSuffix}}}",
"commitMessageAction": "Update",
"commitMessageExtra": "to {{#if isMajor}}v{{{newVersionMajor}}}{{else}}{{#unless isRange}}v{{/unless}}{{{newVersion}}}{{/if}}",
"commitMessagePrefix": null,
"commitMessageTopic": "dependency {{depName}}",
"copyLocalLibs": false,
"depNameSanitized": undefined,
"errors": Array [],
"gitAuthor": null,
"gitPrivateKey": null,
"group": Object {
"branchTopic": "{{{groupSlug}}}",
"commitMessageTopic": "{{{groupName}}}",
"prBody": "This Pull Request renovates the package group \\"{{{groupName}}}\\".\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#each upgrades as |upgrade|}}\\n- {{#if repositoryUrl}}[{{{upgrade.depName}}}]({{upgrade.repositoryUrl}}){{else}}\`{{{depName}}}\`{{/if}}{{#if depType}} (\`{{{depType}}}\`){{/if}}: from \`{{{upgrade.currentVersion}}}\` to \`{{{upgrade.newVersion}}}\`\\n{{/each}}\\n\\n{{#if hasReleaseNotes}}\\n# Release Notes\\n{{#each upgrades as |upgrade|}}\\n{{#if upgrade.hasReleaseNotes}}\\n<details>\\n<summary>{{upgrade.githubName}}</summary>\\n\\n{{#each upgrade.releases as |release|}}\\n{{#if release.releaseNotes}}\\n### [\`v{{{release.version}}}\`]({{{release.releaseNotes.url}}})\\n{{#if release.compare.url}}\\n[Compare Source]({{release.compare.url}})\\n{{/if}}\\n{{{release.releaseNotes.body}}}\\n\\n---\\n\\n{{/if}}\\n{{/each}}\\n\\n</details>\\n{{/if}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if isPin}}\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"groupName": null,
"groupSlug": null,
"labels": Array [],
"lazyGrouping": true,
"lockFileMaintenance": Object {
"branchTopic": "lock-file-maintenance",
"commitMessageAction": "Lock file maintenance",
"commitMessageExtra": null,
"commitMessageTopic": null,
"enabled": true,
"groupName": null,
"prBody": "This Pull Request updates \`package.json\` lock files to use the latest dependency versions.\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
"rebaseStalePrs": true,
"recreateClosed": true,
"schedule": Array [
"before 5am on monday",
],
},
"manager": "npm",
"managerBranchPrefix": "",
"newVersion": "2.0.0",
"npmToken": null,
"npmrc": null,
"packageFile": "package.json ",
"prBody": "This Pull Request {{#if isRollback}}rolls back{{else}}updates{{/if}} dependency {{#if repositoryUrl}}[{{{depName}}}]({{{repositoryUrl}}}){{else}}\`{{{depName}}}\`{{/if}} from \`{{#unless isRange}}{{#unless isPin}}v{{/unless}}{{/unless}}{{{currentVersion}}}\` to \`{{#unless isRange}}v{{/unless}}{{{newVersion}}}\`{{#if isRollback}}. This is necessary and important because \`v{{{currentVersion}}}\` cannot be found in the npm registry - probably because of it being unpublished.{{/if}}\\n{{#if hasTypes}}\\n\\nThis PR also includes an upgrade to the corresponding [@types/{{{depName}}}](https://npmjs.com/package/@types/{{{depName}}}) package.\\n{{/if}}\\n{{#if releases.length}}\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if isPin}}\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n{{/if}}\\n{{#if hasReleaseNotes}}\\n\\n<details>\\n<summary>Release Notes</summary>\\n\\n{{#each releases as |release|}}\\n{{#if release.releaseNotes}}\\n### [\`v{{{release.version}}}\`]({{{release.releaseNotes.url}}})\\n{{#if release.compare.url}}\\n[Compare Source]({{release.compare.url}})\\n{{/if}}\\n{{{release.releaseNotes.body}}}\\n\\n---\\n\\n{{/if}}\\n{{/each}}\\n</details>\\n{{/if}}\\n\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
"prConcurrentLimit": 0,
"prCreation": "immediate",
"prHourlyLimit": 0,
"prNotPendingHours": 25,
"prTitle": null,
"rebaseStalePrs": null,
"recreateClosed": false,
"requiredStatusChecks": Array [],
"reviewers": Array [],
"schedule": Array [],
"semanticCommitScope": "deps",
"semanticCommitType": "chore",
"semanticCommits": null,
"statusCheckVerify": false,
"timezone": null,
"unpublishSafe": false,
"updateLockFiles": true,
"updateNotScheduled": true,
"warnings": Array [],
"yarnrc": null,
},
Object {
"assignees": Array [],
"automerge": false,
"automergeComment": "automergeComment",
"automergeType": "pr",
"branchName": "{{{branchPrefix}}}{{{managerBranchPrefix}}}{{{branchTopic}}}",
"branchPrefix": "renovate/",
"branchTopic": "lock-file-maintenance",
"bumpVersion": null,
"commitBody": null,
"commitMessage": "{{{commitMessagePrefix}}} {{{commitMessageAction}}} {{{commitMessageTopic}}} {{{commitMessageExtra}}} {{{commitMessageSuffix}}}",
"commitMessageAction": "Lock file maintenance",
"commitMessageExtra": null,
"commitMessagePrefix": null,
"commitMessageTopic": null,
"copyLocalLibs": false,
"errors": Array [],
"gitAuthor": null,
"gitPrivateKey": null,
"group": Object {
"branchTopic": "{{{groupSlug}}}",
"commitMessageTopic": "{{{groupName}}}",
"prBody": "This Pull Request renovates the package group \\"{{{groupName}}}\\".\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#each upgrades as |upgrade|}}\\n- {{#if repositoryUrl}}[{{{upgrade.depName}}}]({{upgrade.repositoryUrl}}){{else}}\`{{{depName}}}\`{{/if}}{{#if depType}} (\`{{{depType}}}\`){{/if}}: from \`{{{upgrade.currentVersion}}}\` to \`{{{upgrade.newVersion}}}\`\\n{{/each}}\\n\\n{{#if hasReleaseNotes}}\\n# Release Notes\\n{{#each upgrades as |upgrade|}}\\n{{#if upgrade.hasReleaseNotes}}\\n<details>\\n<summary>{{upgrade.githubName}}</summary>\\n\\n{{#each upgrade.releases as |release|}}\\n{{#if release.releaseNotes}}\\n### [\`v{{{release.version}}}\`]({{{release.releaseNotes.url}}})\\n{{#if release.compare.url}}\\n[Compare Source]({{release.compare.url}})\\n{{/if}}\\n{{{release.releaseNotes.body}}}\\n\\n---\\n\\n{{/if}}\\n{{/each}}\\n\\n</details>\\n{{/if}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if isPin}}\\n**Important**: Renovate will wait until you have merged this Pin request before creating PRs for any *upgrades*. If you do not wish to pin anything, please update your config accordingly instead of leaving this PR open.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
},
"groupName": null,
"groupSlug": null,
"labels": Array [],
"lazyGrouping": true,
"lockFileMaintenance": Object {
"branchTopic": "lock-file-maintenance",
"commitMessageAction": "Lock file maintenance",
"commitMessageExtra": null,
"commitMessageTopic": null,
"enabled": true,
"groupName": null,
"prBody": "This Pull Request updates \`package.json\` lock files to use the latest dependency versions.\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
"rebaseStalePrs": true,
"recreateClosed": true,
"schedule": Array [
"before 5am on monday",
],
},
"manager": "npm",
"managerBranchPrefix": "",
"npmToken": null,
"npmrc": null,
"prBody": "This Pull Request updates \`package.json\` lock files to use the latest dependency versions.\\n\\n{{#if schedule}}\\n**Note**: This PR was created on a configured schedule (\\"{{{schedule}}}\\"{{#if timezone}} in timezone \`{{{timezone}}}\`{{/if}}) and will not receive updates outside those times.\\n{{/if}}\\n\\n{{#if hasErrors}}\\n\\n---\\n\\n# Errors\\n\\nRenovate encountered some errors when processing your repository, so you are being notified here even if they do not directly apply to this PR.\\n\\n{{#each errors as |error|}}\\n- \`{{error.depName}}\`: {{error.message}}\\n{{/each}}\\n{{/if}}\\n\\n{{#if hasWarnings}}\\n\\n---\\n\\n# Warnings\\n\\nPlease make sure the following warnings are safe to ignore:\\n\\n{{#each warnings as |warning|}}\\n- \`{{warning.depName}}\`: {{warning.message}}\\n{{/each}}\\n{{/if}}",
"prConcurrentLimit": 0,
"prCreation": "immediate",
"prHourlyLimit": 0,
"prNotPendingHours": 25,
"prTitle": null,
"rebaseStalePrs": true,
"recreateClosed": true,
"requiredStatusChecks": Array [],
"reviewers": Array [],
"schedule": Array [
"before 5am on monday",
],
"semanticCommitScope": "deps",
"semanticCommitType": "chore",
"semanticCommits": null,
"statusCheckVerify": false,
"timezone": null,
"type": "lockFileMaintenance",
"unpublishSafe": false,
"updateLockFiles": true,
"updateNotScheduled": true,
"warnings": Array [],
"yarnrc": null,
},
]
`;
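For context: the snapshot array above is the expected output of flattenUpdates() for the fixture in test/workers/repository/updates/flatten.spec.js further below — one flattened object per dependency update (newVersion 1.0.0 and 2.0.0), each carrying the merged npm manager config, plus a final entry with type "lockFileMaintenance" because the fixture enables lock file maintenance. The following is an illustrative sketch of that flattening shape only, not the shipped implementation; the real flatten.js also merges language-, manager- and depType-level config into every update.

// Illustrative sketch only: flatten { manager -> packageFiles -> deps -> updates }
// into a single array of update objects, as the snapshot above suggests.
function flattenUpdatesSketch(config, packageFiles) {
  const updates = [];
  for (const [manager, files] of Object.entries(packageFiles)) {
    for (const file of files) {
      for (const dep of file.deps || []) {
        for (const update of dep.updates || []) {
          // One flat object per update: dep fields + update fields + context
          updates.push({
            ...dep,
            ...update,
            manager,
            packageFile: file.packageFile,
          });
        }
      }
    }
  }
  if (config.lockFileMaintenance && config.lockFileMaintenance.enabled) {
    // The snapshot above ends with exactly one entry of this shape
    updates.push({ ...config.lockFileMaintenance, type: 'lockFileMaintenance' });
  }
  return updates;
}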
@@ -9,29 +9,34 @@ beforeEach(() => {
const {
branchifyUpgrades,
} = require('../../../../lib/workers/repository/updates/branchify');
const {
flattenUpdates,
} = require('../../../../lib/workers/repository/updates/flatten');

jest.mock('../../../../lib/workers/repository/updates/flatten');

describe('workers/repository/updates/branchify', () => {
describe('branchifyUpgrades()', () => {
it('returns empty', async () => {
config.upgrades = [];
flattenUpdates.mockReturnValueOnce([]);
const res = await branchifyUpgrades(config);
expect(res.branches).toEqual([]);
});
it('returns one branch if one input', async () => {
config.upgrades = [
flattenUpdates.mockReturnValueOnce([
{
depName: 'foo',
branchName: 'foo-{{version}}',
version: '1.1.0',
prTitle: 'some-title',
},
];
]);
config.repoIsOnboarded = true;
const res = await branchifyUpgrades(config);
expect(Object.keys(res.branches).length).toBe(1);
});
it('does not group if different compiled branch names', async () => {
config.upgrades = [
flattenUpdates.mockReturnValueOnce([
{
depName: 'foo',
branchName: 'foo-{{version}}',
@@ -50,12 +55,12 @@ describe('workers/repository/updates/branchify', () => {
version: '1.1.0',
prTitle: 'some-title',
},
];
]);
const res = await branchifyUpgrades(config);
expect(Object.keys(res.branches).length).toBe(3);
});
it('groups if same compiled branch names', async () => {
config.upgrades = [
flattenUpdates.mockReturnValueOnce([
{
depName: 'foo',
branchName: 'foo',
@@ -74,12 +79,12 @@ describe('workers/repository/updates/branchify', () => {
version: '1.1.0',
prTitle: 'some-title',
},
];
]);
const res = await branchifyUpgrades(config);
expect(Object.keys(res.branches).length).toBe(2);
});
it('groups if same compiled group name', async () => {
config.upgrades = [
flattenUpdates.mockReturnValueOnce([
{
depName: 'foo',
branchName: 'foo',
@@ -102,12 +107,12 @@ describe('workers/repository/updates/branchify', () => {
groupName: 'My Group',
group: { branchName: 'renovate/my-group' },
},
];
]);
const res = await branchifyUpgrades(config);
expect(Object.keys(res.branches).length).toBe(2);
});
it('mixes errors and warnings', async () => {
config.upgrades = [
flattenUpdates.mockReturnValueOnce([
{
type: 'error',
},
@@ -127,7 +132,7 @@ describe('workers/repository/updates/branchify', () => {
prTitle: 'some-title',
version: '1.1.0',
},
];
]);
const res = await branchifyUpgrades(config);
expect(Object.keys(res.branches).length).toBe(2);
expect(res.errors).toHaveLength(1);
@@ -193,7 +198,9 @@ describe('workers/repository/updates/branchify', () => {
expectedBranchName: 'renovate/bad-branch-name9',
},
];
config.upgrades = fixtures.map(({ upgrade }) => upgrade);
flattenUpdates.mockReturnValueOnce(
fixtures.map(({ upgrade }) => upgrade)
);

(await branchifyUpgrades(config)).branches.forEach(
({ branchName }, index) => {
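For context: the tests above pin down the grouping contract of branchifyUpgrades() — each upgrade's branchName is a handlebars template compiled against the upgrade itself, upgrades whose compiled names collide share a branch, and a groupName switches the upgrade over to its group branch template. Below is a minimal sketch of that keying step; illustrative only, with hypothetical names (the shipped branchify.js also sanitizes invalid git branch names, per the 'renovate/bad-branch-name9' fixture).

const handlebars = require('handlebars');

// Illustrative only: group flattened upgrades by their compiled branch name.
function groupByBranchName(upgrades) {
  const branches = {};
  for (const upgrade of upgrades) {
    // Grouped upgrades use the group's branch template instead of their own
    const template =
      upgrade.groupName && upgrade.group
        ? upgrade.group.branchName
        : upgrade.branchName;
    const branchName = handlebars.compile(template)(upgrade);
    (branches[branchName] = branches[branchName] || []).push(upgrade);
  }
  // e.g. two upgrades both compiling to 'foo' share one branch, while
  // 'foo-1.1.0' and 'foo-2.0.0' stay separate
  return branches;
}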
@@ -1,67 +0,0 @@
jest.mock('../../../../lib/workers/package-file');

const packageFileWorker = require('../../../../lib/workers/package-file');

let config;
beforeEach(() => {
jest.resetAllMocks();
config = require('../../../_fixtures/config');
});

const {
determineRepoUpgrades,
} = require('../../../../lib/workers/repository/updates/determine');

describe('workers/repository/updates/determine', () => {
describe('determineRepoUpgrades(config)', () => {
it('returns empty array if none found', async () => {
config.packageFiles = [
{
packageFile: 'package.json',
},
{
packageFile: 'backend/package.json',
},
];
packageFileWorker.renovatePackageFile.mockReturnValue([]);
const res = await determineRepoUpgrades(config);
expect(res.upgrades).toHaveLength(0);
});
it('returns array if upgrades found', async () => {
config.packageFiles = [
{
packageFile: 'Dockerfile',
manager: 'docker',
},
{
packageFile: 'backend/package.json',
manager: 'npm',
},
{
packageFile: 'frontend/package.js',
manager: 'meteor',
},
{
packageFile: '.travis.yml',
manager: 'travis',
},
{
packageFile: 'WORKSPACE',
manager: 'bazel',
},
];
packageFileWorker.renovatePackageFile.mockReturnValueOnce([
{ depName: 'a' },
]);
packageFileWorker.renovatePackageFile.mockReturnValueOnce([
{ depName: 'b' },
{ depName: 'c' },
]);
packageFileWorker.renovatePackageFile.mockReturnValueOnce([{ foo: 'd' }]);
packageFileWorker.renovatePackageFile.mockReturnValueOnce([{ foo: 'e' }]);
packageFileWorker.renovatePackageFile.mockReturnValueOnce([{ bar: 'f' }]);
const res = await determineRepoUpgrades(config);
expect(res.upgrades).toHaveLength(6);
});
});
});
32
test/workers/repository/updates/flatten.spec.js
Normal file
@@ -0,0 +1,32 @@
const {
flattenUpdates,
} = require('../../../../lib/workers/repository/updates/flatten');

let config;
beforeEach(() => {
jest.resetAllMocks();
config = { ...require('../../../_fixtures/config') };
config.errors = [];
config.warnings = [];
});

describe('workers/repository/updates/flatten', () => {
describe('flattenUpdates()', () => {
it('flattens', async () => {
config.lockFileMaintenance.enabled = true;
const packageFiles = {
npm: [
{
packageFile: 'package.json ',
deps: [
{ depName: '@org/a', updates: [{ newVersion: '1.0.0' }] },
{ updates: [{ newVersion: '2.0.0' }] },
],
},
],
};
const res = await flattenUpdates(config, packageFiles);
expect(res).toMatchSnapshot();
});
});
});
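For context: this new spec replaces the deleted determine.spec.js above — instead of mocking a per-package-file worker, it hands flattenUpdates() a pre-extracted packageFiles map keyed by manager and snapshots the flat result (the large array earlier in this diff). The same fixture shape would presumably work for any manager key; the following extra case is hypothetical and illustrative only, not part of the commit.

// Hypothetical additional case, same fixture shape with a docker manager key
it('flattens docker updates', async () => {
  const packageFiles = {
    docker: [
      {
        packageFile: 'Dockerfile',
        deps: [{ depName: 'node', updates: [{ newVersion: '8.9.4' }] }],
      },
    ],
  };
  const res = await flattenUpdates(config, packageFiles);
  // At least the one docker update should survive flattening
  expect(res.length).toBeGreaterThanOrEqual(1);
});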
@@ -1,20 +0,0 @@
jest.mock('../../../../lib/workers/repository/updates/determine');
jest.mock('../../../../lib/workers/repository/updates/branchify');

let config;
beforeEach(() => {
jest.resetAllMocks();
config = require('../../../_fixtures/config');
});

const {
determineUpdates,
} = require('../../../../lib/workers/repository/updates');

describe('workers/repository/updates', () => {
describe('determineUpdates()', () => {
it('runs', async () => {
await determineUpdates(config, 'some-token');
});
});
});