refactor: use global logger (#1116)

Rhys Arkins 2017-11-08 06:44:03 +01:00 committed by GitHub
parent 3ec34d1391
commit 8c479071fd
110 changed files with 281 additions and 749 deletions

View file

@ -1,5 +1,6 @@
module.exports = { module.exports = {
globals: { globals: {
logger: true,
platform: true, platform: true,
}, },
}; };

View file

@ -4,7 +4,7 @@ module.exports = {
decryptConfig, decryptConfig,
}; };
function decryptConfig(config, logger, privateKey) { function decryptConfig(config, privateKey) {
logger.trace({ config }, 'decryptConfig'); logger.trace({ config }, 'decryptConfig');
const decryptedConfig = { ...config }; const decryptedConfig = { ...config };
for (const key of Object.keys(config)) { for (const key of Object.keys(config)) {
@ -37,13 +37,13 @@ function decryptConfig(config, logger, privateKey) {
logger.error('Found encrypted data but no privateKey'); logger.error('Found encrypted data but no privateKey');
} }
delete decryptedConfig.encrypted; delete decryptedConfig.encrypted;
} else if (isObject(val) && key !== 'content' && key !== 'logger') { } else if (isObject(val) && key !== 'content') {
decryptedConfig[key] = decryptConfig(val, logger, privateKey); decryptedConfig[key] = decryptConfig(val, privateKey);
} else if (Array.isArray(val)) { } else if (Array.isArray(val)) {
decryptedConfig[key] = []; decryptedConfig[key] = [];
val.forEach(item => { val.forEach(item => {
if (isObject(item)) { if (isObject(item)) {
decryptedConfig[key].push(decryptConfig(item, logger, privateKey)); decryptedConfig[key].push(decryptConfig(item, privateKey));
} else { } else {
decryptedConfig[key].push(item); decryptedConfig[key].push(item);
} }

View file

@ -1,4 +1,3 @@
const logger = require('../logger');
const path = require('path'); const path = require('path');
module.exports = { module.exports = {

View file

@ -1,4 +1,3 @@
const logger = require('../logger');
const githubApi = require('../platform/github'); const githubApi = require('../platform/github');
const gitlabApi = require('../platform/gitlab'); const gitlabApi = require('../platform/gitlab');

View file

@ -7,7 +7,6 @@ module.exports = {
}; };
function migrateAndValidate(config, input) { function migrateAndValidate(config, input) {
const { logger } = config;
const { isMigrated, migratedConfig } = configMigration.migrateConfig(input); const { isMigrated, migratedConfig } = configMigration.migrateConfig(input);
if (isMigrated) { if (isMigrated) {
logger.info( logger.info(

View file

@ -10,11 +10,7 @@ module.exports = {
getPreset, getPreset,
}; };
async function resolveConfigPresets( async function resolveConfigPresets(inputConfig, existingPresets = []) {
inputConfig,
logger = inputConfig.logger,
existingPresets = []
) {
logger.trace( logger.trace(
{ config: inputConfig, existingPresets }, { config: inputConfig, existingPresets },
'resolveConfigPresets' 'resolveConfigPresets'
@ -32,8 +28,7 @@ async function resolveConfigPresets(
} else { } else {
logger.trace(`Resolving preset "${preset}"`); logger.trace(`Resolving preset "${preset}"`);
const presetConfig = await resolveConfigPresets( const presetConfig = await resolveConfigPresets(
await getPreset(preset, logger), await getPreset(preset),
logger,
existingPresets.concat([preset]) existingPresets.concat([preset])
); );
config = configParser.mergeChildConfig(config, presetConfig); config = configParser.mergeChildConfig(config, presetConfig);
@ -47,18 +42,18 @@ async function resolveConfigPresets(
logger.trace({ config }, `Post-merge resolve config`); logger.trace({ config }, `Post-merge resolve config`);
for (const key of Object.keys(config)) { for (const key of Object.keys(config)) {
const val = config[key]; const val = config[key];
const ignoredKeys = ['content', 'logger']; const ignoredKeys = ['content'];
if (isObject(val) && ignoredKeys.indexOf(key) === -1) { if (isObject(val) && ignoredKeys.indexOf(key) === -1) {
// Resolve nested objects // Resolve nested objects
logger.trace(`Resolving object "${key}"`); logger.trace(`Resolving object "${key}"`);
config[key] = await resolveConfigPresets(val, logger, existingPresets); config[key] = await resolveConfigPresets(val, existingPresets);
} else if (Array.isArray(val)) { } else if (Array.isArray(val)) {
// Resolve nested objects inside arrays // Resolve nested objects inside arrays
config[key] = []; config[key] = [];
for (const element of val) { for (const element of val) {
if (isObject(element)) { if (isObject(element)) {
config[key].push( config[key].push(
await resolveConfigPresets(element, logger, existingPresets) await resolveConfigPresets(element, existingPresets)
); );
} else { } else {
config[key].push(element); config[key].push(element);
@ -139,12 +134,12 @@ function parsePreset(input) {
return { packageName, presetName, params }; return { packageName, presetName, params };
} }
async function getPreset(preset, logger) { async function getPreset(preset) {
logger.trace(`getPreset(${preset})`); logger.trace(`getPreset(${preset})`);
const { packageName, presetName, params } = parsePreset(preset); const { packageName, presetName, params } = parsePreset(preset);
let presetConfig; let presetConfig;
try { try {
const dep = await npm.getDependency(packageName, logger); const dep = await npm.getDependency(packageName);
if (!dep) { if (!dep) {
logger.warn(`Failed to look up preset packageName ${packageName}`); logger.warn(`Failed to look up preset packageName ${packageName}`);
return {}; return {};

View file

@ -11,16 +11,12 @@ function configSerializer(config) {
'yarnrc', 'yarnrc',
'privateKey', 'privateKey',
]; ];
const functionFields = ['logger'];
const templateFields = ['commitMessage', 'prTitle', 'prBody']; const templateFields = ['commitMessage', 'prTitle', 'prBody'];
// eslint-disable-next-line array-callback-return // eslint-disable-next-line array-callback-return
return traverse(config).map(function scrub(val) { return traverse(config).map(function scrub(val) {
if (val && redactedFields.indexOf(this.key) !== -1) { if (val && redactedFields.indexOf(this.key) !== -1) {
this.update('***********'); this.update('***********');
} }
if (val && functionFields.indexOf(this.key) !== -1) {
this.update('[Function]');
}
if (val && templateFields.indexOf(this.key) !== -1) { if (val && templateFields.indexOf(this.key) !== -1) {
this.update('[Template]'); this.update('[Template]');
} }

View file

@ -2,10 +2,13 @@ const bunyan = require('bunyan');
const PrettyStdout = require('./pretty-stdout').RenovateStream; const PrettyStdout = require('./pretty-stdout').RenovateStream;
const configSerializer = require('./config-serializer'); const configSerializer = require('./config-serializer');
const prettyStdOut = new PrettyStdout(); let meta = {};
prettyStdOut.pipe(process.stdout);
const logger = bunyan.createLogger({ function initLogger() {
const prettyStdOut = new PrettyStdout();
prettyStdOut.pipe(process.stdout);
const bunyanLogger = bunyan.createLogger({
name: 'renovate', name: 'renovate',
serializers: { serializers: {
config: configSerializer, config: configSerializer,
@ -18,6 +21,45 @@ const logger = bunyan.createLogger({
stream: prettyStdOut, stream: prettyStdOut,
}, },
], ],
}); });
module.exports = logger; global.logger = {};
const logFunctions = [
'trace',
'debug',
'info',
'warn',
'error',
'fatal',
'child',
];
logFunctions.forEach(x => {
global.logger[x] = (p1, p2) => {
if (p2) {
// meta and msg provided
return bunyanLogger[x]({ ...meta, ...p1 }, p2);
}
if (typeof p1 === 'string') {
// only message provided
return bunyanLogger[x](meta, p1);
}
// only meta provided
return bunyanLogger[x]({ ...meta, ...p1 });
};
});
global.logger.addStream = bunyanLogger.addStream;
global.logger.levels = (stream, level) => {
bunyanLogger.levels(stream, level);
};
// setMeta overrides existing meta
global.logger.setMeta = function setMeta(obj) {
meta = { ...obj };
};
}
module.exports = {
initLogger,
};
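For orientation (not part of the diff): after this change, initLogger() builds the bunyan logger once and exposes a thin wrapper as global.logger, so callers no longer thread a logger through config or create child loggers. A minimal usage sketch based on the API defined above; the require path is assumed relative to the repo root:

```js
// Sketch only: how the global logger introduced above is meant to be used.
const { initLogger } = require('./lib/logger');

initLogger(); // must run once at startup; afterwards `logger` is a global

// setMeta() replaces bunyan child loggers: its object is merged into every call
logger.setMeta({ repository: 'some-org/some-repo' });

logger.info('Renovating repository'); // message only
logger.debug({ packageFile: 'package.json' }, 'Resolving packageFile'); // meta + message

logger.setMeta({}); // reset once the repository is finished
```

Because context now travels via setMeta() rather than a logger object stored in config, the later hunks can drop `logger` from config scrubbing (config-serializer), from preset resolution's ignored keys, and from every function signature that previously passed it along.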

View file

@ -3,7 +3,6 @@ module.exports = {
}; };
async function detectPackageFiles(config, fileList) { async function detectPackageFiles(config, fileList) {
const { logger } = config;
logger.debug('docker.detectPackageFiles()'); logger.debug('docker.detectPackageFiles()');
const packageFiles = []; const packageFiles = [];
if (config.docker.enabled) { if (config.docker.enabled) {

View file

@ -2,8 +2,7 @@ module.exports = {
extractDependencies, extractDependencies,
}; };
function extractDependencies(content, config) { function extractDependencies(content) {
const { logger } = config;
const fromMatch = content.match(/(\n|^)([Ff][Rr][Oo][Mm] .*)\n/); const fromMatch = content.match(/(\n|^)([Ff][Rr][Oo][Mm] .*)\n/);
if (!fromMatch) { if (!fromMatch) {
logger.warn({ content }, 'No FROM found'); logger.warn({ content }, 'No FROM found');

View file

@ -15,7 +15,6 @@ async function getPackageUpdates(config) {
currentDepTag, currentDepTag,
currentTag, currentTag,
currentDigest, currentDigest,
logger,
} = config; } = config;
if (dockerRegistry) { if (dockerRegistry) {
logger.info({ currentFrom }, 'Skipping Dockerfile image with custom host'); logger.info({ currentFrom }, 'Skipping Dockerfile image with custom host');
@ -24,7 +23,7 @@ async function getPackageUpdates(config) {
const upgrades = []; const upgrades = [];
if (currentDigest || config.pinDigests) { if (currentDigest || config.pinDigests) {
logger.debug('Checking docker pinDigests'); logger.debug('Checking docker pinDigests');
const newDigest = await dockerApi.getDigest(depName, currentTag, logger); const newDigest = await dockerApi.getDigest(depName, currentTag);
if (newDigest && config.currentDigest !== newDigest) { if (newDigest && config.currentDigest !== newDigest) {
const upgrade = {}; const upgrade = {};
upgrade.newTag = currentTag || 'latest'; upgrade.newTag = currentTag || 'latest';
@ -52,7 +51,7 @@ async function getPackageUpdates(config) {
return upgrades; return upgrades;
} }
let versionList = []; let versionList = [];
const allTags = await dockerApi.getTags(config.depName, config.logger); const allTags = await dockerApi.getTags(config.depName);
if (allTags) { if (allTags) {
versionList = allTags versionList = allTags
.filter(tag => getSuffix(tag) === tagSuffix) .filter(tag => getSuffix(tag) === tagSuffix)
@ -92,8 +91,7 @@ async function getPackageUpdates(config) {
if (config.currentDigest || config.pinDigests) { if (config.currentDigest || config.pinDigests) {
upgrade.newDigest = await dockerApi.getDigest( upgrade.newDigest = await dockerApi.getDigest(
config.depName, config.depName,
upgrade.newTag, upgrade.newTag
config.logger
); );
newFrom = `${newFrom}@${upgrade.newDigest}`; newFrom = `${newFrom}@${upgrade.newDigest}`;
} }

View file

@ -5,7 +5,7 @@ module.exports = {
getTags, getTags,
}; };
async function getDigest(name, tag = 'latest', logger) { async function getDigest(name, tag = 'latest') {
const repository = name.includes('/') ? name : `library/${name}`; const repository = name.includes('/') ? name : `library/${name}`;
try { try {
const authUrl = `https://auth.docker.io/token?service=registry.docker.io&scope=repository:${ const authUrl = `https://auth.docker.io/token?service=registry.docker.io&scope=repository:${
@ -34,7 +34,7 @@ async function getDigest(name, tag = 'latest', logger) {
} }
} }
async function getTags(name, logger) { async function getTags(name) {
const repository = name.includes('/') ? name : `library/${name}`; const repository = name.includes('/') ? name : `library/${name}`;
try { try {
const authUrl = `https://auth.docker.io/token?service=registry.docker.io&scope=repository:${ const authUrl = `https://auth.docker.io/token?service=registry.docker.io&scope=repository:${

View file

@ -5,7 +5,6 @@ module.exports = {
}; };
async function resolvePackageFile(config, inputFile) { async function resolvePackageFile(config, inputFile) {
const { logger } = config;
const packageFile = configParser.mergeChildConfig(config.docker, inputFile); const packageFile = configParser.mergeChildConfig(config.docker, inputFile);
logger.debug( logger.debug(
`Resolving packageFile ${JSON.stringify(packageFile.packageFile)}` `Resolving packageFile ${JSON.stringify(packageFile.packageFile)}`

View file

@ -2,7 +2,7 @@ module.exports = {
setNewValue, setNewValue,
}; };
function setNewValue(currentFileContent, upgrade, logger) { function setNewValue(currentFileContent, upgrade) {
try { try {
logger.debug(`setNewValue: ${upgrade.newFrom}`); logger.debug(`setNewValue: ${upgrade.newFrom}`);
const oldLine = new RegExp( const oldLine = new RegExp(

View file

@ -18,7 +18,6 @@ module.exports = {
}; };
async function detectPackageFiles(config) { async function detectPackageFiles(config) {
const { logger } = config;
logger.debug('detectPackageFiles()'); logger.debug('detectPackageFiles()');
logger.trace({ config }); logger.trace({ config });
let packageFiles = []; let packageFiles = [];
@ -54,12 +53,11 @@ function getPackageUpdates(config) {
} else if (config.packageFile.endsWith('package.js')) { } else if (config.packageFile.endsWith('package.js')) {
return npm.getPackageUpdates(config); return npm.getPackageUpdates(config);
} }
config.logger.info(`Cannot find manager for ${config.packageFile}`); logger.info(`Cannot find manager for ${config.packageFile}`);
throw new Error('Unsupported package manager'); throw new Error('Unsupported package manager');
} }
async function getUpdatedPackageFiles(config) { async function getUpdatedPackageFiles(config) {
const { logger } = config;
const updatedPackageFiles = {}; const updatedPackageFiles = {};
for (const upgrade of config.upgrades) { for (const upgrade of config.upgrades) {
@ -76,23 +74,17 @@ async function getUpdatedPackageFiles(config) {
existingContent, existingContent,
upgrade.depType, upgrade.depType,
upgrade.depName, upgrade.depName,
upgrade.newVersion, upgrade.newVersion
config.logger
); );
} else if (upgrade.packageFile.endsWith('package.js')) { } else if (upgrade.packageFile.endsWith('package.js')) {
newContent = meteorUpdater.setNewValue( newContent = meteorUpdater.setNewValue(
existingContent, existingContent,
upgrade.depName, upgrade.depName,
upgrade.currentVersion, upgrade.currentVersion,
upgrade.newVersion, upgrade.newVersion
config.logger
); );
} else if (upgrade.packageFile.endsWith('Dockerfile')) { } else if (upgrade.packageFile.endsWith('Dockerfile')) {
newContent = dockerfileHelper.setNewValue( newContent = dockerfileHelper.setNewValue(existingContent, upgrade);
existingContent,
upgrade,
config.logger
);
} }
if (!newContent) { if (!newContent) {
if (config.parentBranch && config.canRebase) { if (config.parentBranch && config.canRebase) {

View file

@ -3,7 +3,6 @@ module.exports = {
}; };
async function detectPackageFiles(config, fileList) { async function detectPackageFiles(config, fileList) {
const { logger } = config;
logger.debug('meteor.detectPackageFiles()'); logger.debug('meteor.detectPackageFiles()');
const packageFiles = []; const packageFiles = [];
if (config.meteor.enabled) { if (config.meteor.enabled) {

View file

@ -2,8 +2,7 @@ module.exports = {
extractDependencies, extractDependencies,
}; };
function extractDependencies(packageContent, config) { function extractDependencies(packageContent) {
const { logger } = config;
let deps = []; let deps = [];
try { try {
deps = packageContent deps = packageContent

View file

@ -2,13 +2,7 @@ module.exports = {
setNewValue, setNewValue,
}; };
function setNewValue( function setNewValue(currentFileContent, depName, currentVersion, newVersion) {
currentFileContent,
depName,
currentVersion,
newVersion,
logger
) {
logger.debug(`setNewValue: ${depName} = ${newVersion}`); logger.debug(`setNewValue: ${depName} = ${newVersion}`);
const regexReplace = new RegExp( const regexReplace = new RegExp(
`('|")(${depName})('|"):(\\s+)('|")${currentVersion}('|")` `('|")(${depName})('|"):(\\s+)('|")${currentVersion}('|")`

View file

@ -3,7 +3,6 @@ module.exports = {
}; };
function detectPackageFiles(config, fileList) { function detectPackageFiles(config, fileList) {
const { logger } = config;
logger.debug('npm.detectPackageFiles()'); logger.debug('npm.detectPackageFiles()');
const packageFiles = []; const packageFiles = [];
if (config.npm.enabled) { if (config.npm.enabled) {

View file

@ -6,7 +6,6 @@ module.exports = {
}; };
async function checkMonorepos(config) { async function checkMonorepos(config) {
const { logger } = config;
const monorepoPackages = []; const monorepoPackages = [];
let workspaceDir; let workspaceDir;
// yarn workspaces // yarn workspaces

View file

@ -6,7 +6,6 @@ module.exports = {
}; };
async function getPackageUpdates(config) { async function getPackageUpdates(config) {
const { logger } = config;
let results = []; let results = [];
if (!versions.isValidVersion(config.currentVersion)) { if (!versions.isValidVersion(config.currentVersion)) {
results.push({ results.push({
@ -19,7 +18,7 @@ async function getPackageUpdates(config) {
logger.debug(results[0].message); logger.debug(results[0].message);
return results; return results;
} }
const npmDep = await npmApi.getDependency(config.depName, logger); const npmDep = await npmApi.getDependency(config.depName);
if (npmDep) { if (npmDep) {
results = await versions.determineUpgrades(npmDep, config); results = await versions.determineUpgrades(npmDep, config);
if (results.length > 0) { if (results.length > 0) {

View file

@ -26,7 +26,7 @@ function setNpmrc(input) {
npmrc = ini.parse(input); npmrc = ini.parse(input);
} }
async function getDependency(name, logger) { async function getDependency(name) {
logger.trace(`getDependency(${name})`); logger.trace(`getDependency(${name})`);
const scope = name.split('/')[0]; const scope = name.split('/')[0];
let regUrl; let regUrl;

View file

@ -4,7 +4,7 @@ module.exports = {
setNewValue, setNewValue,
}; };
function setNewValue(currentFileContent, depType, depName, newVersion, logger) { function setNewValue(currentFileContent, depType, depName, newVersion) {
logger.debug(`setNewValue: ${depType}.${depName} = ${newVersion}`); logger.debug(`setNewValue: ${depType}.${depName} = ${newVersion}`);
try { try {
const parsedContents = JSON.parse(currentFileContent); const parsedContents = JSON.parse(currentFileContent);
@ -34,8 +34,7 @@ function setNewValue(currentFileContent, depType, depName, newVersion, logger) {
currentFileContent, currentFileContent,
searchIndex, searchIndex,
searchString, searchString,
newString, newString
logger
); );
// Compare the parsed JSON structure of old and new // Compare the parsed JSON structure of old and new
if (_.isEqual(parsedContents, JSON.parse(testContent))) { if (_.isEqual(parsedContents, JSON.parse(testContent))) {
@ -65,7 +64,7 @@ function matchAt(content, index, match) {
} }
// Replace oldString with newString at location index of content // Replace oldString with newString at location index of content
function replaceAt(content, index, oldString, newString, logger) { function replaceAt(content, index, oldString, newString) {
logger.debug(`Replacing ${oldString} with ${newString} at index ${index}`); logger.debug(`Replacing ${oldString} with ${newString} at index ${index}`);
return ( return (
content.substr(0, index) + content.substr(0, index) +

View file

@ -13,7 +13,6 @@ module.exports = {
}; };
async function resolvePackageFiles(config) { async function resolvePackageFiles(config) {
const { logger } = config;
logger.trace({ config }, 'resolvePackageFiles()'); logger.trace({ config }, 'resolvePackageFiles()');
const allPackageFiles = config.packageFiles.length const allPackageFiles = config.packageFiles.length
? config.packageFiles ? config.packageFiles
@ -82,8 +81,7 @@ async function resolvePackageFiles(config) {
'package.json migrated config' 'package.json migrated config'
); );
const resolvedConfig = await presets.resolveConfigPresets( const resolvedConfig = await presets.resolveConfigPresets(
migratedConfig, migratedConfig
config.logger
); );
logger.debug( logger.debug(
{ config: resolvedConfig }, { config: resolvedConfig },

View file

@ -1,4 +1,3 @@
const logger = require('../../logger');
const ghGot = require('gh-got'); const ghGot = require('gh-got');
const parseLinkHeader = require('parse-link-header'); const parseLinkHeader = require('parse-link-header');

View file

@ -1,4 +1,3 @@
let logger = require('../../logger');
const get = require('./gh-got-wrapper'); const get = require('./gh-got-wrapper');
const config = {}; const config = {};
@ -64,12 +63,8 @@ async function getRepos(token, endpoint) {
} }
// Initialize GitHub by getting base branch and SHA // Initialize GitHub by getting base branch and SHA
async function initRepo(repoName, token, endpoint, repoLogger) { async function initRepo(repoName, token, endpoint) {
logger = repoLogger || logger;
logger.debug(`initRepo("${repoName}")`); logger.debug(`initRepo("${repoName}")`);
if (repoLogger) {
logger = repoLogger;
}
if (token) { if (token) {
process.env.GITHUB_TOKEN = token; process.env.GITHUB_TOKEN = token;
} else if (!process.env.GITHUB_TOKEN) { } else if (!process.env.GITHUB_TOKEN) {

View file

@ -1,4 +1,3 @@
let logger = require('../../logger');
const get = require('./gl-got-wrapper'); const get = require('./gl-got-wrapper');
const { createFile, updateFile } = require('./helpers'); const { createFile, updateFile } = require('./helpers');
@ -67,10 +66,7 @@ async function getRepos(token, endpoint) {
} }
// Initialize GitLab by getting base branch // Initialize GitLab by getting base branch
async function initRepo(repoName, token, endpoint, repoLogger) { async function initRepo(repoName, token, endpoint) {
if (repoLogger) {
logger = repoLogger;
}
logger.debug(`initRepo(${repoName})`); logger.debug(`initRepo(${repoName})`);
if (token) { if (token) {
process.env.GITLAB_TOKEN = token; process.env.GITLAB_TOKEN = token;

View file

@ -3,7 +3,6 @@ module.exports = {
}; };
async function tryBranchAutomerge(config) { async function tryBranchAutomerge(config) {
const { logger } = config;
logger.debug('Checking if we can automerge branch'); logger.debug('Checking if we can automerge branch');
if (!config.automerge || config.automergeType === 'pr') { if (!config.automerge || config.automergeType === 'pr') {
return 'no automerge'; return 'no automerge';

View file

@ -5,7 +5,6 @@ module.exports = {
}; };
async function prAlreadyExisted(config) { async function prAlreadyExisted(config) {
const { logger } = config;
logger.trace({ config }, 'prAlreadyExisted'); logger.trace({ config }, 'prAlreadyExisted');
if (config.recreateClosed) { if (config.recreateClosed) {
logger.debug('recreateClosed is true'); logger.debug('recreateClosed is true');

View file

@ -5,7 +5,6 @@ module.exports = {
}; };
async function commitFilesToBranch(config) { async function commitFilesToBranch(config) {
const { logger } = config;
const updatedFiles = config.updatedPackageFiles.concat( const updatedFiles = config.updatedPackageFiles.concat(
config.updatedLockFiles config.updatedLockFiles
); );

View file

@ -23,12 +23,11 @@ async function processBranch(branchConfig) {
dependencies = dependencies.filter( dependencies = dependencies.filter(
(item, index) => dependencies.indexOf(item) === index (item, index) => dependencies.indexOf(item) === index
); );
const logger = config.logger.child({ logger.setMeta({
repository: config.repository, repository: config.repository,
branch: config.branchName, branch: config.branchName,
dependencies, dependencies,
}); });
config.logger = logger;
logger.trace({ config }, 'processBranch'); logger.trace({ config }, 'processBranch');
try { try {
// Check schedule // Check schedule

View file

@ -13,7 +13,6 @@ module.exports = {
}; };
function hasPackageLock(config, packageFile) { function hasPackageLock(config, packageFile) {
const { logger } = config;
logger.trace( logger.trace(
{ packageFiles: config.packageFiles, packageFile }, { packageFiles: config.packageFiles, packageFile },
'hasPackageLock' 'hasPackageLock'
@ -30,7 +29,6 @@ function hasPackageLock(config, packageFile) {
} }
function hasYarnLock(config, packageFile) { function hasYarnLock(config, packageFile) {
const { logger } = config;
logger.trace( logger.trace(
{ packageFiles: config.packageFiles, packageFile }, { packageFiles: config.packageFiles, packageFile },
'hasYarnLock' 'hasYarnLock'
@ -92,7 +90,6 @@ function determineLockFileDirs(config) {
} }
async function writeExistingFiles(config) { async function writeExistingFiles(config) {
const { logger } = config;
if (config.npmrc) { if (config.npmrc) {
logger.debug('Writing repo .npmrc'); logger.debug('Writing repo .npmrc');
await fs.outputFile(path.join(config.tmpDir.path, '.npmrc'), config.npmrc); await fs.outputFile(path.join(config.tmpDir.path, '.npmrc'), config.npmrc);
@ -165,7 +162,6 @@ async function writeExistingFiles(config) {
} }
async function writeUpdatedPackageFiles(config) { async function writeUpdatedPackageFiles(config) {
const { logger } = config;
logger.trace({ config }, 'writeUpdatedPackageFiles'); logger.trace({ config }, 'writeUpdatedPackageFiles');
logger.debug('Writing any updated package files'); logger.debug('Writing any updated package files');
if (!config.updatedPackageFiles) { if (!config.updatedPackageFiles) {
@ -191,7 +187,6 @@ async function writeUpdatedPackageFiles(config) {
} }
async function getUpdatedLockFiles(config) { async function getUpdatedLockFiles(config) {
const { logger } = config;
logger.trace({ config }, 'getUpdatedLockFiles'); logger.trace({ config }, 'getUpdatedLockFiles');
logger.debug('Getting updated lock files'); logger.debug('Getting updated lock files');
const lockFileErrors = []; const lockFileErrors = [];
@ -211,8 +206,7 @@ async function getUpdatedLockFiles(config) {
logger.debug(`Generating package-lock.json for ${lockFileDir}`); logger.debug(`Generating package-lock.json for ${lockFileDir}`);
const lockFileName = path.join(lockFileDir, 'package-lock.json'); const lockFileName = path.join(lockFileDir, 'package-lock.json');
const res = await npm.generateLockFile( const res = await npm.generateLockFile(
path.join(config.tmpDir.path, lockFileDir), path.join(config.tmpDir.path, lockFileDir)
logger
); );
if (res.error) { if (res.error) {
lockFileErrors.push({ lockFileErrors.push({
@ -240,8 +234,7 @@ async function getUpdatedLockFiles(config) {
logger.debug(`Generating yarn.lock for ${lockFileDir}`); logger.debug(`Generating yarn.lock for ${lockFileDir}`);
const lockFileName = path.join(lockFileDir, 'yarn.lock'); const lockFileName = path.join(lockFileDir, 'yarn.lock');
const res = await yarn.generateLockFile( const res = await yarn.generateLockFile(
path.join(config.tmpDir.path, lockFileDir), path.join(config.tmpDir.path, lockFileDir)
logger
); );
if (res.error) { if (res.error) {
lockFileErrors.push({ lockFileErrors.push({

View file

@ -7,7 +7,7 @@ module.exports = {
generateLockFile, generateLockFile,
}; };
async function generateLockFile(tmpDir, logger) { async function generateLockFile(tmpDir) {
logger.debug(`Spawning npm install to create ${tmpDir}/package-lock.json`); logger.debug(`Spawning npm install to create ${tmpDir}/package-lock.json`);
let lockFile = null; let lockFile = null;
let stdout; let stdout;

View file

@ -12,7 +12,7 @@ function checkStale(config) {
} }
async function getParentBranch(config) { async function getParentBranch(config) {
const { branchName, logger } = config; const { branchName } = config;
// Check if branch exists // Check if branch exists
const branchExists = await platform.branchExists(branchName); const branchExists = await platform.branchExists(branchName);
if (!branchExists) { if (!branchExists) {

View file

@ -44,7 +44,6 @@ function hasValidSchedule(schedule) {
} }
function isScheduledNow(config) { function isScheduledNow(config) {
const { logger } = config;
let configSchedule = config.schedule; let configSchedule = config.schedule;
logger.debug({ schedule: configSchedule }, `Checking schedule`); logger.debug({ schedule: configSchedule }, `Checking schedule`);
if ( if (

View file

@ -3,7 +3,6 @@ module.exports = {
}; };
async function setUnpublishable(config) { async function setUnpublishable(config) {
const { logger } = config;
let unpublishable; let unpublishable;
for (const upgrade of config.upgrades) { for (const upgrade of config.upgrades) {
if (typeof upgrade.unpublishable !== 'undefined') { if (typeof upgrade.unpublishable !== 'undefined') {

View file

@ -7,7 +7,7 @@ module.exports = {
generateLockFile, generateLockFile,
}; };
async function generateLockFile(tmpDir, logger) { async function generateLockFile(tmpDir) {
logger.debug(`Spawning yarn install to create ${tmpDir}/yarn.lock`); logger.debug(`Spawning yarn install to create ${tmpDir}/yarn.lock`);
let lockFile = null; let lockFile = null;
let stdout; let stdout;

View file

@ -1,7 +1,6 @@
const configParser = require('../../config'); const configParser = require('../../config');
const pkgWorker = require('../package'); const pkgWorker = require('../package');
const packageJson = require('./package-json'); const packageJson = require('./package-json');
let logger = require('../../logger');
const dockerExtract = require('../../manager/docker/extract'); const dockerExtract = require('../../manager/docker/extract');
const meteorExtract = require('../../manager/meteor/extract'); const meteorExtract = require('../../manager/meteor/extract');
@ -11,7 +10,11 @@ module.exports = {
}; };
async function renovateDepType(packageContent, config) { async function renovateDepType(packageContent, config) {
logger = config.logger || logger; logger.setMeta({
repository: config.repository,
packageFile: config.packageFile,
depType: config.depType,
});
logger.trace({ config }, `renovateDepType(packageContent, config)`); logger.trace({ config }, `renovateDepType(packageContent, config)`);
if (config.enabled === false) { if (config.enabled === false) {
logger.debug('depType is disabled'); logger.debug('depType is disabled');
@ -30,9 +33,9 @@ async function renovateDepType(packageContent, config) {
logger.debug(`deps length is ${deps.length}`); logger.debug(`deps length is ${deps.length}`);
logger.debug({ deps }, `deps`); logger.debug({ deps }, `deps`);
} else if (config.packageFile.endsWith('package.js')) { } else if (config.packageFile.endsWith('package.js')) {
deps = meteorExtract.extractDependencies(packageContent, config); deps = meteorExtract.extractDependencies(packageContent);
} else if (config.packageFile.endsWith('Dockerfile')) { } else if (config.packageFile.endsWith('Dockerfile')) {
deps = dockerExtract.extractDependencies(packageContent, config); deps = dockerExtract.extractDependencies(packageContent);
} }
deps = deps.filter( deps = deps.filter(
dependency => config.ignoreDeps.indexOf(dependency.depName) === -1 dependency => config.ignoreDeps.indexOf(dependency.depName) === -1
@ -97,7 +100,7 @@ function getDepConfig(depTypeConfig, dep) {
} }
}); });
} }
depConfig.logger = logger.child({ logger.setMeta({
repository: depConfig.repository, repository: depConfig.repository,
packageFile: depConfig.packageFile, packageFile: depConfig.packageFile,
depType: depConfig.depType, depType: depConfig.depType,

View file

@ -1,4 +1,4 @@
const logger = require('../../logger'); const { initLogger } = require('../../logger');
const configParser = require('../../config'); const configParser = require('../../config');
const repositoryWorker = require('../repository'); const repositoryWorker = require('../repository');
const configValidation = require('../../config/validation'); const configValidation = require('../../config/validation');
@ -9,6 +9,7 @@ module.exports = {
}; };
async function start() { async function start() {
initLogger();
try { try {
const config = await configParser.parseConfigs(process.env, process.argv); const config = await configParser.parseConfigs(process.env, process.argv);
const { warnings, errors } = configValidation.validateConfig(config); const { warnings, errors } = configValidation.validateConfig(config);
@ -19,19 +20,17 @@ async function start() {
if (errors.length) { if (errors.length) {
logger.error({ errors }, 'Found config errors'); logger.error({ errors }, 'Found config errors');
} }
config.logger = logger;
// Iterate through repositories sequentially
for (let index = 0; index < config.repositories.length; index += 1) {
const repoConfig = module.exports.getRepositoryConfig(config, index);
repoConfig.logger.info('Renovating repository');
await repositoryWorker.renovateRepository(repoConfig, repoConfig.token);
repoConfig.logger.info('Finished repository');
}
if (config.repositories.length === 0) { if (config.repositories.length === 0) {
logger.warn( logger.warn(
'No repositories found - did you want to run with flag --autodiscover?' 'No repositories found - did you want to run with flag --autodiscover?'
); );
} }
// Iterate through repositories sequentially
for (let index = 0; index < config.repositories.length; index += 1) {
const repoConfig = module.exports.getRepositoryConfig(config, index);
await repositoryWorker.renovateRepository(repoConfig, repoConfig.token);
}
logger.setMeta({});
logger.info('Renovate finished'); logger.info('Renovate finished');
} catch (err) { } catch (err) {
logger.fatal(`Renovate fatal error: ${err.message}`); logger.fatal(`Renovate fatal error: ${err.message}`);
@ -45,9 +44,6 @@ function getRepositoryConfig(globalConfig, index) {
repository = { repository }; repository = { repository };
} }
const repoConfig = configParser.mergeChildConfig(globalConfig, repository); const repoConfig = configParser.mergeChildConfig(globalConfig, repository);
repoConfig.logger = logger.child({
repository: repoConfig.repository,
});
repoConfig.isGitHub = repoConfig.platform === 'github'; repoConfig.isGitHub = repoConfig.platform === 'github';
repoConfig.isGitLab = repoConfig.platform === 'gitlab'; repoConfig.isGitLab = repoConfig.platform === 'gitlab';
return configParser.filterConfig(repoConfig, 'repository'); return configParser.filterConfig(repoConfig, 'repository');

View file

@ -2,8 +2,6 @@ const configParser = require('../../config');
const depTypeWorker = require('../dep-type'); const depTypeWorker = require('../dep-type');
const npmApi = require('../../manager/npm/registry'); const npmApi = require('../../manager/npm/registry');
let logger = require('../../logger');
module.exports = { module.exports = {
mightBeABrowserLibrary, mightBeABrowserLibrary,
renovatePackageFile, renovatePackageFile,
@ -28,11 +26,15 @@ function mightBeABrowserLibrary(packageJson) {
async function renovatePackageFile(packageFileConfig) { async function renovatePackageFile(packageFileConfig) {
const config = { ...packageFileConfig }; const config = { ...packageFileConfig };
logger.setMeta({
repository: config.repository,
packageFile: config.packageFile,
});
logger.debug('renovatePakageFile()');
if (config.npmrc) { if (config.npmrc) {
npmApi.setNpmrc(config.npmrc); npmApi.setNpmrc(config.npmrc);
} }
let upgrades = []; let upgrades = [];
({ logger } = config);
logger.info(`Processing package file`); logger.info(`Processing package file`);
// Check if config is disabled // Check if config is disabled
@ -52,11 +54,6 @@ async function renovatePackageFile(packageFileConfig) {
...config[depType], ...config[depType],
}); });
depTypeConfig.depType = depType; depTypeConfig.depType = depType;
depTypeConfig.logger = logger.child({
repository: depTypeConfig.repository,
packageFile: depTypeConfig.packageFile,
depType: depTypeConfig.depType,
});
// Always pin devDependencies // Always pin devDependencies
// Pin dependencies if we're pretty sure it's not a browser library // Pin dependencies if we're pretty sure it's not a browser library
if ( if (
@ -64,10 +61,10 @@ async function renovatePackageFile(packageFileConfig) {
(depType === 'devDependencies' || (depType === 'devDependencies' ||
(depType === 'dependencies' && !mightBeABrowserLibrary(config.content))) (depType === 'dependencies' && !mightBeABrowserLibrary(config.content)))
) { ) {
depTypeConfig.logger.debug('Autodetecting pinVersions = true'); logger.debug('Autodetecting pinVersions = true');
depTypeConfig.pinVersions = true; depTypeConfig.pinVersions = true;
} }
depTypeConfig.logger.trace({ config: depTypeConfig }, 'depTypeConfig'); logger.trace({ config: depTypeConfig }, 'depTypeConfig');
return configParser.filterConfig(depTypeConfig, 'depType'); return configParser.filterConfig(depTypeConfig, 'depType');
}); });
logger.trace({ config: depTypeConfigs }, `depTypeConfigs`); logger.trace({ config: depTypeConfigs }, `depTypeConfigs`);
@ -76,6 +73,11 @@ async function renovatePackageFile(packageFileConfig) {
await depTypeWorker.renovateDepType(config.content, depTypeConfig) await depTypeWorker.renovateDepType(config.content, depTypeConfig)
); );
} }
// Reset logger again
logger.setMeta({
repository: config.repository,
packageFile: config.packageFile,
});
if ( if (
config.lockFileMaintenance.enabled && config.lockFileMaintenance.enabled &&
(config.yarnLock || config.packageLock) (config.yarnLock || config.packageLock)
@ -101,7 +103,6 @@ async function renovatePackageFile(packageFileConfig) {
async function renovateMeteorPackageFile(packageFileConfig) { async function renovateMeteorPackageFile(packageFileConfig) {
const config = { ...packageFileConfig }; const config = { ...packageFileConfig };
let upgrades = []; let upgrades = [];
({ logger } = config);
logger.info(`Processing meteor package file`); logger.info(`Processing meteor package file`);
// Check if config is disabled // Check if config is disabled
@ -119,7 +120,6 @@ async function renovateMeteorPackageFile(packageFileConfig) {
async function renovateDockerfile(packageFileConfig) { async function renovateDockerfile(packageFileConfig) {
let upgrades = []; let upgrades = [];
({ logger } = packageFileConfig);
logger.info(`Processing Dockerfile`); logger.info(`Processing Dockerfile`);
// Check if config is disabled // Check if config is disabled

View file

@ -7,14 +7,20 @@ module.exports = {
// Returns all results for a given dependency config // Returns all results for a given dependency config
async function renovatePackage(config) { async function renovatePackage(config) {
const { logger } = config; // These are done in parallel so we don't setMeta to avoid conflicts
logger.trace({ config }, `renovatePackage(${config.depName})`); logger.trace(
{ dependency: config.depName, config },
`renovatePackage(${config.depName})`
);
if (config.enabled === false) { if (config.enabled === false) {
logger.debug('package is disabled'); logger.debug('package is disabled');
return []; return [];
} }
const results = await getPackageUpdates(config); const results = await getPackageUpdates(config);
logger.debug({ results }, `${config.depName} lookup results`); logger.debug(
{ dependency: config.depName, results },
`${config.depName} lookup results`
);
// Flatten the result on top of config, add repositoryUrl // Flatten the result on top of config, add repositoryUrl
return ( return (
results results

View file

@ -1,4 +1,3 @@
let logger = require('../../logger');
const semver = require('semver'); const semver = require('semver');
const stable = require('semver-stable'); const stable = require('semver-stable');
const _ = require('lodash'); const _ = require('lodash');
@ -13,7 +12,6 @@ module.exports = {
}; };
function determineUpgrades(npmDep, config) { function determineUpgrades(npmDep, config) {
logger = config.logger || logger;
const result = { const result = {
type: 'warning', type: 'warning',
}; };

View file

@ -6,7 +6,7 @@ module.exports = {
getChangeLog, getChangeLog,
}; };
async function getChangeLogJSON(depName, fromVersion, newVersion, logger) { async function getChangeLogJSON(depName, fromVersion, newVersion) {
logger.debug(`getChangeLogJSON(${depName}, ${fromVersion}, ${newVersion})`); logger.debug(`getChangeLogJSON(${depName}, ${fromVersion}, ${newVersion})`);
if (!fromVersion || fromVersion === newVersion) { if (!fromVersion || fromVersion === newVersion) {
return null; return null;
@ -34,12 +34,7 @@ function getMarkdown(changelogJSON) {
} }
// Get Changelog // Get Changelog
async function getChangeLog(depName, fromVersion, newVersion, logger) { async function getChangeLog(depName, fromVersion, newVersion) {
const logJSON = await getChangeLogJSON( const logJSON = await getChangeLogJSON(depName, fromVersion, newVersion);
depName,
fromVersion,
newVersion,
logger
);
return getMarkdown(logJSON); return getMarkdown(logJSON);
} }

View file

@ -13,7 +13,7 @@ module.exports = {
// Ensures that PR exists with matching title/body // Ensures that PR exists with matching title/body
async function ensurePr(prConfig) { async function ensurePr(prConfig) {
const config = { ...prConfig }; const config = { ...prConfig };
const { logger } = config;
logger.trace({ config }, 'ensurePr'); logger.trace({ config }, 'ensurePr');
// If there is a group, it will use the config of the first upgrade in the array // If there is a group, it will use the config of the first upgrade in the array
const { branchName, upgrades } = config; const { branchName, upgrades } = config;
@ -85,8 +85,7 @@ async function ensurePr(prConfig) {
const logJSON = await changelogHelper.getChangeLogJSON( const logJSON = await changelogHelper.getChangeLogJSON(
upgrade.depName, upgrade.depName,
upgrade.changeLogFromVersion, upgrade.changeLogFromVersion,
upgrade.changeLogToVersion, upgrade.changeLogToVersion
logger
); );
// Store changelog markdown for backwards compatibility // Store changelog markdown for backwards compatibility
if (logJSON) { if (logJSON) {
@ -195,7 +194,6 @@ async function ensurePr(prConfig) {
} }
async function addAssigneesReviewers(config, pr) { async function addAssigneesReviewers(config, pr) {
const { logger } = config;
if (config.assignees.length > 0) { if (config.assignees.length > 0) {
try { try {
const assignees = config.assignees.map( const assignees = config.assignees.map(
@ -229,7 +227,6 @@ async function addAssigneesReviewers(config, pr) {
} }
async function checkAutoMerge(pr, config) { async function checkAutoMerge(pr, config) {
const { logger } = config;
logger.trace({ config }, 'checkAutoMerge'); logger.trace({ config }, 'checkAutoMerge');
logger.debug(`Checking #${pr.number} for automerge`); logger.debug(`Checking #${pr.number} for automerge`);
if (config.automerge === true && config.automergeType === 'pr') { if (config.automerge === true && config.automergeType === 'pr') {

View file

@ -4,7 +4,7 @@ module.exports = {
async function pruneStaleBranches(config) { async function pruneStaleBranches(config) {
// TODO: try/catch // TODO: try/catch
const { branchList, logger } = config; const { branchList } = config;
logger.debug('Removing any stale branches'); logger.debug('Removing any stale branches');
logger.trace({ config }, `pruneStaleBranches`); logger.trace({ config }, `pruneStaleBranches`);
if (!config.branchList) { if (!config.branchList) {

View file

@ -3,7 +3,6 @@ module.exports = {
}; };
function handleError(config, err) { function handleError(config, err) {
const { logger } = config;
if (err.message === 'uninitiated') { if (err.message === 'uninitiated') {
logger.info('Repository is uninitiated - skipping'); logger.info('Repository is uninitiated - skipping');
delete config.branchList; // eslint-disable-line no-param-reassign delete config.branchList; // eslint-disable-line no-param-reassign

View file

@ -14,7 +14,8 @@ module.exports = {
async function renovateRepository(repoConfig, token, loop = 1) { async function renovateRepository(repoConfig, token, loop = 1) {
let config = { ...repoConfig, branchList: [] }; let config = { ...repoConfig, branchList: [] };
const { logger } = config; logger.setMeta({ repository: config.repository });
logger.info('Renovating repository');
logger.trace({ config, loop }, 'renovateRepository()'); logger.trace({ config, loop }, 'renovateRepository()');
try { try {
if (loop > 5) { if (loop > 5) {
@ -35,6 +36,8 @@ async function renovateRepository(repoConfig, token, loop = 1) {
} catch (err) { } catch (err) {
return handleError(config, err); return handleError(config, err);
} finally { } finally {
logger.setMeta({ repository: config.repository });
await pruneStaleBranches(config); await pruneStaleBranches(config);
logger.info('Finished repository');
} }
} }

View file

@ -2,7 +2,6 @@ const { initPlatform } = require('../../../platform');
const { detectSemanticCommits } = require('./semantic'); const { detectSemanticCommits } = require('./semantic');
function assignPlatform(config) { function assignPlatform(config) {
const { logger } = config;
logger.debug('assignPlatform'); logger.debug('assignPlatform');
initPlatform(config.platform); initPlatform(config.platform);
return config; return config;
@ -12,8 +11,7 @@ async function getPlatformConfig(config) {
const platformConfig = await platform.initRepo( const platformConfig = await platform.initRepo(
config.repository, config.repository,
config.token, config.token,
config.endpoint, config.endpoint
config.logger
); );
return { return {
...config, ...config,

View file

@ -1,5 +1,4 @@
async function checkBaseBranch(config) { async function checkBaseBranch(config) {
const { logger } = config;
let error = []; let error = [];
if (config.baseBranch) { if (config.baseBranch) {
// Renovate should read content and target PRs here // Renovate should read content and target PRs here

View file

@ -7,7 +7,6 @@ const presets = require('../../../config/presets');
// Check for config in `renovate.json` // Check for config in `renovate.json`
async function mergeRenovateJson(config) { async function mergeRenovateJson(config) {
const { logger } = config;
let returnConfig = { ...config }; let returnConfig = { ...config };
const renovateJsonContent = await platform.getFileContent('renovate.json'); const renovateJsonContent = await platform.getFileContent('renovate.json');
if (!renovateJsonContent) { if (!renovateJsonContent) {
@ -48,15 +47,8 @@ async function mergeRenovateJson(config) {
logger.debug({ config: renovateJson }, 'renovate.json config'); logger.debug({ config: renovateJson }, 'renovate.json config');
const migratedConfig = migrateAndValidate(config, renovateJson); const migratedConfig = migrateAndValidate(config, renovateJson);
logger.debug({ config: migratedConfig }, 'renovate.json migrated config'); logger.debug({ config: migratedConfig }, 'renovate.json migrated config');
const decryptedConfig = decryptConfig( const decryptedConfig = decryptConfig(migratedConfig, config.privateKey);
migratedConfig, const resolvedConfig = await presets.resolveConfigPresets(decryptedConfig);
config.logger,
config.privateKey
);
const resolvedConfig = await presets.resolveConfigPresets(
decryptedConfig,
logger
);
logger.trace({ config: resolvedConfig }, 'renovate.json resolved config'); logger.trace({ config: resolvedConfig }, 'renovate.json resolved config');
returnConfig = mergeChildConfig(returnConfig, resolvedConfig); returnConfig = mergeChildConfig(returnConfig, resolvedConfig);
returnConfig.renovateJsonPresent = true; returnConfig.renovateJsonPresent = true;

View file

@ -1,7 +1,6 @@
const conventionalCommitsDetector = require('conventional-commits-detector'); const conventionalCommitsDetector = require('conventional-commits-detector');
async function detectSemanticCommits(config) { async function detectSemanticCommits(config) {
const { logger } = config;
if (config.semanticCommits !== null) { if (config.semanticCommits !== null) {
return config; return config;
} }

View file

@ -1,5 +1,4 @@
const findFile = async (config, fileName) => { const findFile = async (config, fileName) => {
const { logger } = config;
logger.debug('findFile()'); logger.debug('findFile()');
logger.trace({ config }); logger.trace({ config });
const fileList = await platform.getFileList(); const fileList = await platform.getFileList();
@ -16,7 +15,6 @@ const closedPrExists = config =>
); );
const isOnboarded = async config => { const isOnboarded = async config => {
const { logger } = config;
logger.debug('isOnboarded()'); logger.debug('isOnboarded()');
if (await renovateJsonExists(config)) { if (await renovateJsonExists(config)) {
logger.debug('renovate.json exists'); logger.debug('renovate.json exists');

View file

@ -1,5 +1,4 @@
async function createOnboardingBranch(config) { async function createOnboardingBranch(config) {
const { logger } = config;
logger.debug('Creating onboarding branch'); logger.debug('Creating onboarding branch');
const renovateJson = { const renovateJson = {
extends: ['config:base'], extends: ['config:base'],

View file

@ -3,7 +3,6 @@ const { createOnboardingBranch } = require('./create');
const { isOnboarded, onboardingPrExists } = require('./check'); const { isOnboarded, onboardingPrExists } = require('./check');
async function checkOnboardingBranch(config) { async function checkOnboardingBranch(config) {
const { logger } = config;
logger.debug('checkOnboarding()'); logger.debug('checkOnboarding()');
logger.trace({ config }); logger.trace({ config });
const repoIsOnboarded = await isOnboarded(config); const repoIsOnboarded = await isOnboarded(config);

View file

@ -1,5 +1,4 @@
function getAssigneesDesc(config) { function getAssigneesDesc(config) {
const { logger } = config;
logger.debug('getAssigneesDesc()'); logger.debug('getAssigneesDesc()');
logger.trace({ config }); logger.trace({ config });
if (!(config.assignees && config.assignees.length)) { if (!(config.assignees && config.assignees.length)) {
@ -15,7 +14,6 @@ function getAssigneesDesc(config) {
} }
function getLabelsDesc(config) { function getLabelsDesc(config) {
const { logger } = config;
logger.debug('getLabelsDesc()'); logger.debug('getLabelsDesc()');
logger.trace({ config }); logger.trace({ config });
if (!(config.labels && config.labels.length)) { if (!(config.labels && config.labels.length)) {
@ -31,7 +29,6 @@ function getLabelsDesc(config) {
} }
function getScheduleDesc(config) { function getScheduleDesc(config) {
const { logger } = config;
logger.debug('getScheduleDesc()'); logger.debug('getScheduleDesc()');
logger.trace({ config }); logger.trace({ config });
if (!(config.schedule && config.schedule.length)) { if (!(config.schedule && config.schedule.length)) {
@ -43,7 +40,6 @@ function getScheduleDesc(config) {
} }
function getDescriptionArray(config) { function getDescriptionArray(config) {
const { logger } = config;
logger.debug('getDescriptionArray()'); logger.debug('getDescriptionArray()');
logger.trace({ config }); logger.trace({ config });
return (config.description || []) return (config.description || [])
@ -53,7 +49,6 @@ function getDescriptionArray(config) {
} }
function getConfigDesc(config) { function getConfigDesc(config) {
const { logger } = config;
logger.debug('getConfigDesc()'); logger.debug('getConfigDesc()');
logger.trace({ config }); logger.trace({ config });
const descriptionArr = getDescriptionArray(config); const descriptionArr = getDescriptionArray(config);

View file

@ -33,7 +33,6 @@ If later on you ever wish to reconfigure Renovate then you can use this same tri
`; `;
async function ensureOnboardingPr(config) { async function ensureOnboardingPr(config) {
const { logger } = config;
logger.debug('ensureOnboardingPr()'); logger.debug('ensureOnboardingPr()');
logger.trace({ config }); logger.trace({ config });
const onboardingBranch = `${config.branchPrefix}configure`; const onboardingBranch = `${config.branchPrefix}configure`;

View file

@ -1,5 +1,5 @@
function getPrList(config) { function getPrList(config) {
const { branches, logger } = config; const { branches } = config;
logger.debug('getPrList()'); logger.debug('getPrList()');
logger.trace({ config }); logger.trace({ config });
let prDesc = `\n### What to Expect\n\n`; let prDesc = `\n### What to Expect\n\n`;

View file

@ -3,7 +3,6 @@ const handlebars = require('handlebars');
const { generateBranchConfig } = require('./generate'); const { generateBranchConfig } = require('./generate');
function branchifyUpgrades(config) { function branchifyUpgrades(config) {
const { logger } = config;
logger.debug('branchifyUpgrades'); logger.debug('branchifyUpgrades');
logger.trace({ config }); logger.trace({ config });
const errors = []; const errors = [];
@ -42,11 +41,12 @@ function branchifyUpgrades(config) {
} }
logger.debug(`Returning ${Object.keys(branchUpgrades).length} branch(es)`); logger.debug(`Returning ${Object.keys(branchUpgrades).length} branch(es)`);
for (const branchName of Object.keys(branchUpgrades)) { for (const branchName of Object.keys(branchUpgrades)) {
const branch = generateBranchConfig(branchUpgrades[branchName], logger); logger.debug('loop');
const branch = generateBranchConfig(branchUpgrades[branchName]);
branch.branchName = branchName; branch.branchName = branchName;
branch.logger = logger;
branches.push(branch); branches.push(branch);
} }
logger.debug('3');
const branchList = config.repoIsOnboarded const branchList = config.repoIsOnboarded
? branches.map(upgrade => upgrade.branchName) ? branches.map(upgrade => upgrade.branchName)
: config.branchList; : config.branchList;

View file

@ -2,7 +2,6 @@ const packageFileWorker = require('../../package-file');
const { mergeChildConfig, filterConfig } = require('../../../config'); const { mergeChildConfig, filterConfig } = require('../../../config');
async function determineRepoUpgrades(config) { async function determineRepoUpgrades(config) {
const { logger } = config;
logger.debug('determineRepoUpgrades()'); logger.debug('determineRepoUpgrades()');
logger.trace({ config }); logger.trace({ config });
let upgrades = []; let upgrades = [];
@ -12,10 +11,6 @@ async function determineRepoUpgrades(config) {
logger.debug({ packageFile }, 'Getting packageFile config'); logger.debug({ packageFile }, 'Getting packageFile config');
let packageFileConfig = mergeChildConfig(config, packageFile); let packageFileConfig = mergeChildConfig(config, packageFile);
packageFileConfig = filterConfig(packageFileConfig, 'packageFile'); packageFileConfig = filterConfig(packageFileConfig, 'packageFile');
packageFileConfig.logger = packageFileConfig.logger.child({
repository: packageFileConfig.repository,
packageFile: packageFileConfig.packageFile,
});
if (packageFileConfig.packageFile.endsWith('package.json')) { if (packageFileConfig.packageFile.endsWith('package.json')) {
logger.info( logger.info(
{ packageFile: packageFileConfig.packageFile }, { packageFile: packageFileConfig.packageFile },
@ -46,6 +41,7 @@ async function determineRepoUpgrades(config) {
.toLowerCase() .toLowerCase()
: undefined, : undefined,
})); }));
logger.debug('returning upgrades');
return { ...config, upgrades }; return { ...config, upgrades };
} }

View file

@ -1,6 +1,6 @@
const handlebars = require('handlebars'); const handlebars = require('handlebars');
function generateBranchConfig(branchUpgrades, logger) { function generateBranchConfig(branchUpgrades) {
const config = { const config = {
upgrades: [], upgrades: [],
}; };

View file

@ -7,10 +7,10 @@ module.exports = {
async function determineUpdates(input) { async function determineUpdates(input) {
let config = { ...input }; let config = { ...input };
const { logger } = config;
logger.debug('determineUpdates()'); logger.debug('determineUpdates()');
logger.trace({ config }); logger.trace({ config });
config = await determineRepoUpgrades(config); config = await determineRepoUpgrades(config);
config = branchifyUpgrades(config); config = branchifyUpgrades(config);
logger.debug('Finished determining upgrades');
return config; return config;
} }

View file

@ -7,7 +7,6 @@ module.exports = {
}; };
async function writeUpdates(config) { async function writeUpdates(config) {
const { logger } = config;
let { branches } = config; let { branches } = config;
logger.info(`Processing ${branches.length} branch(es)`); logger.info(`Processing ${branches.length} branch(es)`);
if (branches.some(upg => upg.isPin)) { if (branches.some(upg => upg.isPin)) {

View file

@ -111,7 +111,7 @@
"text-summary" "text-summary"
], ],
"setupFiles": [ "setupFiles": [
"./test/platform.js" "./test/globals.js"
], ],
"setupTestFrameworkScriptFile": "./test/chai.js" "setupTestFrameworkScriptFile": "./test/chai.js"
}, },

View file

@ -1,9 +1,7 @@
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../logger');
const api = jest.genMockFromModule('../../../lib/platform/github'); const api = jest.genMockFromModule('../../../lib/platform/github');
module.exports = { module.exports = {
...defaultConfig, ...defaultConfig,
api, api,
logger,
}; };

View file

@ -5,12 +5,7 @@ module.exports = {
info: jest.fn(), info: jest.fn(),
debug: jest.fn(), debug: jest.fn(),
trace: jest.fn(), trace: jest.fn(),
child: jest.fn(() => ({ setMeta: jest.fn(),
fatal: jest.fn(), levels: jest.fn(),
error: jest.fn(), addStream: jest.fn(),
warn: jest.fn(),
info: jest.fn(),
debug: jest.fn(),
trace: jest.fn(),
})),
}; };

View file

@ -220,15 +220,6 @@ Object {
exports[`config/presets resolvePreset combines two package alls 1`] = ` exports[`config/presets resolvePreset combines two package alls 1`] = `
Object { Object {
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"packagePatterns": Array [ "packagePatterns": Array [
"^eslint", "^eslint",
"^stylelint", "^stylelint",
@ -271,15 +262,6 @@ Object {
"before 8am on Monday", "before 8am on Monday",
], ],
}, },
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object { "major": Object {
"automerge": false, "automerge": false,
}, },
@ -348,15 +330,6 @@ Object {
"lockFileMaintenance": Object { "lockFileMaintenance": Object {
"enabled": false, "enabled": false,
}, },
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"optionalDependencies": Object { "optionalDependencies": Object {
"description": Array [ "description": Array [
"Use <code>chore(deps):</code> as semantic prefix for commit messages and PR titles", "Use <code>chore(deps):</code> as semantic prefix for commit messages and PR titles",
@ -380,15 +353,6 @@ Object {
exports[`config/presets resolvePreset resolves eslint 1`] = ` exports[`config/presets resolvePreset resolves eslint 1`] = `
Object { Object {
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"packagePatterns": Array [ "packagePatterns": Array [
"^eslint", "^eslint",
], ],
@ -397,15 +361,6 @@ Object {
exports[`config/presets resolvePreset resolves group monorepos 1`] = ` exports[`config/presets resolvePreset resolves group monorepos 1`] = `
Object { Object {
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"packageRules": Array [ "packageRules": Array [
Object { Object {
"description": Array [ "description": Array [
@ -679,15 +634,6 @@ Object {
"description": Array [ "description": Array [
"All lint-related packages", "All lint-related packages",
], ],
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"packageNames": Array [ "packageNames": Array [
"remark-lint", "remark-lint",
], ],
@ -703,15 +649,6 @@ Object {
"description": Array [ "description": Array [
"Update lint packages automatically if tests pass", "Update lint packages automatically if tests pass",
], ],
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"packageRules": Array [ "packageRules": Array [
Object { Object {
"automerge": true, "automerge": true,
@ -732,15 +669,6 @@ Object {
exports[`config/presets resolvePreset resolves packageRule 1`] = ` exports[`config/presets resolvePreset resolves packageRule 1`] = `
Object { Object {
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"packageRules": Array [ "packageRules": Array [
Object { Object {
"groupName": "eslint", "groupName": "eslint",
@ -755,30 +683,12 @@ Object {
exports[`config/presets resolvePreset returns same if invalid preset 1`] = ` exports[`config/presets resolvePreset returns same if invalid preset 1`] = `
Object { Object {
"foo": 1, "foo": 1,
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
} }
`; `;
exports[`config/presets resolvePreset returns same if no presets 1`] = ` exports[`config/presets resolvePreset returns same if no presets 1`] = `
Object { Object {
"foo": 1, "foo": 1,
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
} }
`; `;
@ -788,15 +698,6 @@ Object {
"Use version pinning (maintain a single version only and not semver ranges)", "Use version pinning (maintain a single version only and not semver ranges)",
], ],
"foo": 1, "foo": 1,
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"pinVersions": true, "pinVersions": true,
} }
`; `;
@ -807,15 +708,6 @@ Object {
"Use version pinning (maintain a single version only and not semver ranges)", "Use version pinning (maintain a single version only and not semver ranges)",
], ],
"foo": 1, "foo": 1,
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"pinVersions": true, "pinVersions": true,
} }
`; `;

View file

@ -1,5 +1,4 @@
const { decryptConfig } = require('../../lib/config/decrypt.js'); const { decryptConfig } = require('../../lib/config/decrypt.js');
const logger = require('../_fixtures/logger');
const fs = require('fs'); const fs = require('fs');
const privateKey = fs.readFileSync('test/_fixtures/keys/private.pem'); const privateKey = fs.readFileSync('test/_fixtures/keys/private.pem');
@ -12,25 +11,25 @@ describe('config/decrypt', () => {
}); });
it('returns empty with no privateKey', () => { it('returns empty with no privateKey', () => {
delete config.encrypted; delete config.encrypted;
const res = decryptConfig(config, logger); const res = decryptConfig(config);
expect(res).toMatchObject(config); expect(res).toMatchObject(config);
}); });
it('warns if no privateKey found', () => { it('warns if no privateKey found', () => {
config.encrypted = { a: '1' }; config.encrypted = { a: '1' };
const res = decryptConfig(config, logger); const res = decryptConfig(config);
expect(res.encrypted).not.toBeDefined(); expect(res.encrypted).not.toBeDefined();
expect(res.a).not.toBeDefined(); expect(res.a).not.toBeDefined();
}); });
it('handles invalid encrypted type', () => { it('handles invalid encrypted type', () => {
config.encrypted = 1; config.encrypted = 1;
config.privateKey = privateKey; config.privateKey = privateKey;
const res = decryptConfig(config, logger, privateKey); const res = decryptConfig(config, privateKey);
expect(res.encrypted).not.toBeDefined(); expect(res.encrypted).not.toBeDefined();
}); });
it('handles invalid encrypted value', () => { it('handles invalid encrypted value', () => {
config.encrypted = { a: 1 }; config.encrypted = { a: 1 };
config.privateKey = privateKey; config.privateKey = privateKey;
const res = decryptConfig(config, logger, privateKey); const res = decryptConfig(config, privateKey);
expect(res.encrypted).not.toBeDefined(); expect(res.encrypted).not.toBeDefined();
expect(res.a).not.toBeDefined(); expect(res.a).not.toBeDefined();
}); });
@ -50,7 +49,7 @@ describe('config/decrypt', () => {
}, },
'backend/package.json', 'backend/package.json',
]; ];
const res = decryptConfig(config, logger, privateKey); const res = decryptConfig(config, privateKey);
expect(res.encrypted).not.toBeDefined(); expect(res.encrypted).not.toBeDefined();
expect(res.packageFiles[0].devDependencies.encrypted).not.toBeDefined(); expect(res.packageFiles[0].devDependencies.encrypted).not.toBeDefined();
expect(res.packageFiles[0].devDependencies.branchPrefix).toEqual( expect(res.packageFiles[0].devDependencies.branchPrefix).toEqual(

View file

@ -1,6 +1,5 @@
const npm = require('../../lib/manager/npm/registry'); const npm = require('../../lib/manager/npm/registry');
const presets = require('../../lib/config/presets'); const presets = require('../../lib/config/presets');
const logger = require('../_fixtures/logger');
const presetDefaults = require('../_fixtures/npm/renovate-config-default'); const presetDefaults = require('../_fixtures/npm/renovate-config-default');
const presetPackages = require('../_fixtures/npm/renovate-config-packages'); const presetPackages = require('../_fixtures/npm/renovate-config-packages');
const presetGroup = require('../_fixtures/npm/renovate-config-group'); const presetGroup = require('../_fixtures/npm/renovate-config-group');
@ -66,9 +65,7 @@ describe('config/presets', () => {
describe('resolvePreset', () => { describe('resolvePreset', () => {
let config; let config;
beforeEach(() => { beforeEach(() => {
config = { config = {};
logger,
};
}); });
it('returns same if no presets', async () => { it('returns same if no presets', async () => {
config.foo = 1; config.foo = 1;
@ -266,40 +263,37 @@ describe('config/presets', () => {
}); });
describe('getPreset', () => { describe('getPreset', () => {
it('gets linters', async () => { it('gets linters', async () => {
const res = await presets.getPreset('packages:linters', logger); const res = await presets.getPreset('packages:linters');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
expect(res.packageNames).toHaveLength(1); expect(res.packageNames).toHaveLength(1);
expect(res.extends).toHaveLength(2); expect(res.extends).toHaveLength(2);
}); });
it('gets parameterised configs', async () => { it('gets parameterised configs', async () => {
const res = await presets.getPreset( const res = await presets.getPreset(':group(packages:eslint, eslint)');
':group(packages:eslint, eslint)',
logger
);
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('handles missing params', async () => { it('handles missing params', async () => {
const res = await presets.getPreset(':group()', logger); const res = await presets.getPreset(':group()');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('ignores irrelevant params', async () => { it('ignores irrelevant params', async () => {
const res = await presets.getPreset(':pinVersions(foo, bar)', logger); const res = await presets.getPreset(':pinVersions(foo, bar)');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('handles 404 packages', async () => { it('handles 404 packages', async () => {
const res = await presets.getPreset('notfound:foo', logger); const res = await presets.getPreset('notfound:foo');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('handles no config', async () => { it('handles no config', async () => {
const res = await presets.getPreset('noconfig:foo', logger); const res = await presets.getPreset('noconfig:foo');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('handles throw errors', async () => { it('handles throw errors', async () => {
const res = await presets.getPreset('throw:foo', logger); const res = await presets.getPreset('throw:foo');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('handles preset not found', async () => { it('handles preset not found', async () => {
const res = await presets.getPreset('wrongpreset:foo', logger); const res = await presets.getPreset('wrongpreset:foo');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
}); });

View file

@ -1,5 +1,4 @@
const configValidation = require('../../lib/config/validation.js'); const configValidation = require('../../lib/config/validation.js');
const logger = require('../_fixtures/logger');
describe('config/validation', () => { describe('config/validation', () => {
describe('validateConfig(config)', () => { describe('validateConfig(config)', () => {
@ -30,10 +29,7 @@ describe('config/validation', () => {
}, },
], ],
}; };
const { warnings, errors } = configValidation.validateConfig( const { warnings, errors } = configValidation.validateConfig(config);
config,
logger
);
expect(warnings).toHaveLength(0); expect(warnings).toHaveLength(0);
expect(errors).toHaveLength(6); expect(errors).toHaveLength(6);
expect(errors).toMatchSnapshot(); expect(errors).toMatchSnapshot();

View file

@ -2,3 +2,4 @@ jest.mock('gh-got');
jest.mock('gl-got'); jest.mock('gl-got');
global.platform = jest.genMockFromModule('../lib/platform/github'); global.platform = jest.genMockFromModule('../lib/platform/github');
global.logger = require('./_fixtures/logger');
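Because platform and logger are now Jest globals, individual test files no longer require a logger fixture or thread one through config; they can assert directly on the global mock. A hedged sketch of the resulting test style (functionUnderTest is a placeholder, not a real helper in this repo):

    it('logs a debug message', () => {
      functionUnderTest();                               // any code path that calls logger.debug(...)
      expect(global.logger.debug).toHaveBeenCalled();    // fixture methods are jest.fn() mocks
    });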

View file

@ -8,13 +8,6 @@ Object {
} }
`; `;
exports[`logger/config-serializer replaces functions 1`] = `
Object {
"logger": "[Function]",
"nottoken": "b",
}
`;
exports[`logger/config-serializer squashes templates 1`] = ` exports[`logger/config-serializer squashes templates 1`] = `
Object { Object {
"nottoken": "b", "nottoken": "b",

View file

@ -9,13 +9,6 @@ describe('logger/config-serializer', () => {
}; };
expect(configSerializer(config)).toMatchSnapshot(); expect(configSerializer(config)).toMatchSnapshot();
}); });
it('replaces functions', () => {
const config = {
nottoken: 'b',
logger: {},
};
expect(configSerializer(config)).toMatchSnapshot();
});
it('squashes templates', () => { it('squashes templates', () => {
const config = { const config = {
nottoken: 'b', nottoken: 'b',

test/logger/index.spec.js  Normal file (25 lines added)
View file

@ -0,0 +1,25 @@
const { initLogger } = require('../../lib/logger');
describe('logger', () => {
it('inits', () => {
delete global.logger;
delete process.env.LOG_LEVEL;
initLogger();
expect(global.logger).toBeDefined();
});
it('supports logging with metadata', () => {
global.logger.debug({ some: 'meta' }, 'some meta');
});
it('supports logging with only metadata', () => {
global.logger.debug({ some: 'meta' });
});
it('supports logging without metadata', () => {
global.logger.debug('some meta');
});
it('sets levels', () => {
global.logger.levels('stdout', 'DEBUG');
});
it('sets meta', () => {
global.logger.setMeta({ some: 'meta', and: 'more' });
});
});
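These tests pin down the surface the rest of the code now depends on: initLogger() installs global.logger, which accepts the usual (meta, message) call shapes and also exposes levels() and setMeta(). How the production entry point wires this up is not shown in this excerpt, so the following is only an assumed sketch:

    // Illustrative only: an entry point that installs the global logger
    const { initLogger } = require('./lib/logger');

    initLogger();                                  // defines global.logger (as the test above asserts)
    logger.setMeta({ repository: 'some/repo' });   // assumed purpose: attach metadata to later log lines
    logger.info('Renovate run starting');          // callable from any module without a require()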

View file

@ -404,15 +404,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"logFile": null, "logFile": null,
"logFileLevel": "debug", "logFileLevel": "debug",
"logLevel": "info", "logLevel": "info",
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object {}, "major": Object {},
"meteor": Object { "meteor": Object {
"enabled": true, "enabled": true,
@ -938,15 +929,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"logFile": null, "logFile": null,
"logFileLevel": "debug", "logFileLevel": "debug",
"logLevel": "info", "logLevel": "info",
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object {}, "major": Object {},
"meteor": Object { "meteor": Object {
"enabled": true, "enabled": true,
@ -1692,15 +1674,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"logFile": null, "logFile": null,
"logFileLevel": "debug", "logFileLevel": "debug",
"logLevel": "info", "logLevel": "info",
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object {}, "major": Object {},
"meteor": Object { "meteor": Object {
"enabled": true, "enabled": true,
@ -2231,15 +2204,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"logFile": null, "logFile": null,
"logFileLevel": "debug", "logFileLevel": "debug",
"logLevel": "info", "logLevel": "info",
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object {}, "major": Object {},
"meteor": Object { "meteor": Object {
"enabled": true, "enabled": true,
@ -2771,15 +2735,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"logFile": null, "logFile": null,
"logFileLevel": "debug", "logFileLevel": "debug",
"logLevel": "info", "logLevel": "info",
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object {}, "major": Object {},
"meteor": Object { "meteor": Object {
"enabled": true, "enabled": true,
@ -3309,15 +3264,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"logFile": null, "logFile": null,
"logFileLevel": "debug", "logFileLevel": "debug",
"logLevel": "info", "logLevel": "info",
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"major": Object {}, "major": Object {},
"meteor": Object { "meteor": Object {
"enabled": true, "enabled": true,

View file

@ -1,13 +1,10 @@
const { extractDependencies } = require('../../../lib/manager/docker/extract'); const { extractDependencies } = require('../../../lib/manager/docker/extract');
const logger = require('../../_fixtures/logger');
describe('lib/manager/docker/extract', () => { describe('lib/manager/docker/extract', () => {
describe('extractDependencies()', () => { describe('extractDependencies()', () => {
let config; let config;
beforeEach(() => { beforeEach(() => {
config = { config = {};
logger,
};
}); });
it('handles naked dep', () => { it('handles naked dep', () => {
const res = extractDependencies('FROM node\n', config); const res = extractDependencies('FROM node\n', config);

View file

@ -1,7 +1,6 @@
const dockerApi = require('../../../lib/manager/docker/registry'); const dockerApi = require('../../../lib/manager/docker/registry');
const docker = require('../../../lib/manager/docker/package'); const docker = require('../../../lib/manager/docker/package');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
// jest.mock('../../../lib/manager/docker/registry'); // jest.mock('../../../lib/manager/docker/registry');
dockerApi.getDigest = jest.fn(); dockerApi.getDigest = jest.fn();
@ -13,7 +12,6 @@ describe('lib/workers/package/docker', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
depName: 'some-dep', depName: 'some-dep',
currentFrom: 'some-dep:1.0.0@sha256:abcdefghijklmnop', currentFrom: 'some-dep:1.0.0@sha256:abcdefghijklmnop',
currentDepTag: 'some-dep:1.0.0', currentDepTag: 'some-dep:1.0.0',

View file

@ -1,6 +1,5 @@
const docker = require('../../../lib/manager/docker/registry'); const docker = require('../../../lib/manager/docker/registry');
const got = require('got'); const got = require('got');
const logger = require('../../_fixtures/logger');
jest.mock('got'); jest.mock('got');
@ -11,12 +10,12 @@ describe('api/docker', () => {
}); });
it('returns null if no token', async () => { it('returns null if no token', async () => {
got.mockReturnValueOnce({ body: {} }); got.mockReturnValueOnce({ body: {} });
const res = await docker.getDigest('some-name', undefined, logger); const res = await docker.getDigest('some-name', undefined);
expect(res).toBe(null); expect(res).toBe(null);
}); });
it('returns null if errored', async () => { it('returns null if errored', async () => {
got.mockReturnValueOnce({ body: { token: 'some-token' } }); got.mockReturnValueOnce({ body: { token: 'some-token' } });
const res = await docker.getDigest('some-name', undefined, logger); const res = await docker.getDigest('some-name', undefined);
expect(res).toBe(null); expect(res).toBe(null);
}); });
it('returns digest', async () => { it('returns digest', async () => {
@ -24,7 +23,7 @@ describe('api/docker', () => {
got.mockReturnValueOnce({ got.mockReturnValueOnce({
headers: { 'docker-content-digest': 'some-digest' }, headers: { 'docker-content-digest': 'some-digest' },
}); });
const res = await docker.getDigest('some-name', undefined, logger); const res = await docker.getDigest('some-name', undefined);
expect(res).toBe('some-digest'); expect(res).toBe('some-digest');
}); });
it('supports scoped names', async () => { it('supports scoped names', async () => {
@ -32,26 +31,26 @@ describe('api/docker', () => {
got.mockReturnValueOnce({ got.mockReturnValueOnce({
headers: { 'docker-content-digest': 'some-digest' }, headers: { 'docker-content-digest': 'some-digest' },
}); });
const res = await docker.getDigest('some/name', undefined, logger); const res = await docker.getDigest('some/name', undefined);
expect(res).toBe('some-digest'); expect(res).toBe('some-digest');
}); });
}); });
describe('getTags', () => { describe('getTags', () => {
it('returns null if no token', async () => { it('returns null if no token', async () => {
got.mockReturnValueOnce({ body: {} }); got.mockReturnValueOnce({ body: {} });
const res = await docker.getTags('node', logger); const res = await docker.getTags('node');
expect(res).toBe(null); expect(res).toBe(null);
}); });
it('returns tags', async () => { it('returns tags', async () => {
const tags = ['a', 'b']; const tags = ['a', 'b'];
got.mockReturnValueOnce({ body: { token: 'some-token ' } }); got.mockReturnValueOnce({ body: { token: 'some-token ' } });
got.mockReturnValueOnce({ body: { tags } }); got.mockReturnValueOnce({ body: { tags } });
const res = await docker.getTags('my/node', logger); const res = await docker.getTags('my/node');
expect(res).toBe(tags); expect(res).toBe(tags);
}); });
it('returns null on error', async () => { it('returns null on error', async () => {
got.mockReturnValueOnce({}); got.mockReturnValueOnce({});
const res = await docker.getTags('node', logger); const res = await docker.getTags('node');
expect(res).toBe(null); expect(res).toBe(null);
}); });
}); });

View file

@ -1,5 +1,4 @@
const dockerfile = require('../../../lib/manager/docker/update'); const dockerfile = require('../../../lib/manager/docker/update');
const logger = require('../../_fixtures/logger');
describe('workers/branch/dockerfile', () => { describe('workers/branch/dockerfile', () => {
describe('setNewValue', () => { describe('setNewValue', () => {
@ -13,7 +12,7 @@ describe('workers/branch/dockerfile', () => {
fromSuffix: '', fromSuffix: '',
newFrom: 'node:8@sha256:abcdefghijklmnop', newFrom: 'node:8@sha256:abcdefghijklmnop',
}; };
const res = dockerfile.setNewValue(currentFileContent, upgrade, logger); const res = dockerfile.setNewValue(currentFileContent, upgrade);
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('replaces existing value with suffix', () => { it('replaces existing value with suffix', () => {
@ -26,7 +25,7 @@ describe('workers/branch/dockerfile', () => {
fromSuffix: 'as base', fromSuffix: 'as base',
newFrom: 'node:8@sha256:abcdefghijklmnop', newFrom: 'node:8@sha256:abcdefghijklmnop',
}; };
const res = dockerfile.setNewValue(currentFileContent, upgrade, logger); const res = dockerfile.setNewValue(currentFileContent, upgrade);
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
}); });
it('returns null on error', () => { it('returns null on error', () => {
@ -38,7 +37,7 @@ describe('workers/branch/dockerfile', () => {
fromSuffix: '', fromSuffix: '',
newFrom: 'node:8@sha256:abcdefghijklmnop', newFrom: 'node:8@sha256:abcdefghijklmnop',
}; };
const res = dockerfile.setNewValue(currentFileContent, upgrade, logger); const res = dockerfile.setNewValue(currentFileContent, upgrade);
expect(res).toBe(null); expect(res).toBe(null);
}); });
}); });

View file

@ -1,7 +1,5 @@
const logger = require('../_fixtures/logger');
const defaultConfig = require('../../lib/config/defaults').getConfig(); const defaultConfig = require('../../lib/config/defaults').getConfig();
const manager = require('../../lib/manager'); const manager = require('../../lib/manager');
const npmUpdater = require('../../lib/manager/npm/update'); const npmUpdater = require('../../lib/manager/npm/update');
const meteorUpdater = require('../../lib/manager/meteor/update'); const meteorUpdater = require('../../lib/manager/meteor/update');
const dockerUpdater = require('../../lib/manager/docker/update'); const dockerUpdater = require('../../lib/manager/docker/update');
@ -14,7 +12,6 @@ describe('manager', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
warnings: [], warnings: [],
}; };
}); });
@ -85,7 +82,6 @@ describe('manager', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
parentBranch: 'some-branch', parentBranch: 'some-branch',
}; };
npmUpdater.setNewValue = jest.fn(); npmUpdater.setNewValue = jest.fn();

View file

@ -1,6 +1,6 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP // Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`workers/branch/package-js .setNewValue(currentFileContent, depName, currentVersion, newVersion, logger) handles alternative quotes and white space 1`] = ` exports[`workers/branch/package-js .setNewValue(currentFileContent, depName, currentVersion, newVersion) handles alternative quotes and white space 1`] = `
"Package.describe({ "Package.describe({
\\"name\\": \\"steffo:meteor-accounts-saml\\", \\"name\\": \\"steffo:meteor-accounts-saml\\",
\\"summary\\": \\"SAML Login (SP) for Meteor. Works with OpenAM, OpenIDP and provides Single Logout.\\", \\"summary\\": \\"SAML Login (SP) for Meteor. Works with OpenAM, OpenIDP and provides Single Logout.\\",
@ -30,7 +30,7 @@ Npm.depends({
" "
`; `;
exports[`workers/branch/package-js .setNewValue(currentFileContent, depName, currentVersion, newVersion, logger) replaces a dependency value 1`] = ` exports[`workers/branch/package-js .setNewValue(currentFileContent, depName, currentVersion, newVersion) replaces a dependency value 1`] = `
"Package.describe({ "Package.describe({
'name': 'steffo:meteor-accounts-saml', 'name': 'steffo:meteor-accounts-saml',
'summary': 'SAML Login (SP) for Meteor. Works with OpenAM, OpenIDP and provides Single Logout.', 'summary': 'SAML Login (SP) for Meteor. Works with OpenAM, OpenIDP and provides Single Logout.',

View file

@ -1,7 +1,6 @@
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const meteorUpdater = require('../../../lib/manager/meteor/update'); const meteorUpdater = require('../../../lib/manager/meteor/update');
const logger = require('../../_fixtures/logger');
function readFixture(fixture) { function readFixture(fixture) {
return fs.readFileSync( return fs.readFileSync(
@ -14,14 +13,13 @@ const input01Content = readFixture('package-1.js');
const input02Content = readFixture('package-2.js'); const input02Content = readFixture('package-2.js');
describe('workers/branch/package-js', () => { describe('workers/branch/package-js', () => {
describe('.setNewValue(currentFileContent, depName, currentVersion, newVersion, logger)', () => { describe('.setNewValue(currentFileContent, depName, currentVersion, newVersion)', () => {
it('replaces a dependency value', () => { it('replaces a dependency value', () => {
const testContent = meteorUpdater.setNewValue( const testContent = meteorUpdater.setNewValue(
input01Content, input01Content,
'xmldom', 'xmldom',
'0.1.19', '0.1.19',
'0.22.1', '0.22.1'
logger
); );
expect(testContent).toMatchSnapshot(); expect(testContent).toMatchSnapshot();
}); });
@ -30,8 +28,7 @@ describe('workers/branch/package-js', () => {
input02Content, input02Content,
'xmldom', 'xmldom',
'0.1.19', '0.1.19',
'0.22.1', '0.22.1'
logger
); );
expect(testContent).toMatchSnapshot(); expect(testContent).toMatchSnapshot();
}); });
@ -40,8 +37,7 @@ describe('workers/branch/package-js', () => {
input01Content, input01Content,
'query-string', 'query-string',
'0.2.0', '0.2.0',
'0.2.0', '0.2.0'
logger
); );
testContent.should.equal(input01Content); testContent.should.equal(input01Content);
}); });

View file

@ -2,7 +2,6 @@ const npmApi = require('../../../lib/manager/npm/registry');
const versions = require('../../../lib/workers/package/versions'); const versions = require('../../../lib/workers/package/versions');
const npm = require('../../../lib/manager/npm/package'); const npm = require('../../../lib/manager/npm/package');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
jest.mock('../../../lib/manager/npm/registry'); jest.mock('../../../lib/manager/npm/registry');
npmApi.getDependency = jest.fn(); npmApi.getDependency = jest.fn();
@ -14,7 +13,6 @@ describe('lib/workers/package/npm', () => {
jest.resetAllMocks(); jest.resetAllMocks();
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
depName: 'some-dep', depName: 'some-dep',
currentVersion: '1.0.0', currentVersion: '1.0.0',
}; };

View file

@ -1,7 +1,6 @@
const npm = require('../../../lib/manager/npm/registry'); const npm = require('../../../lib/manager/npm/registry');
const got = require('got'); const got = require('got');
const registryAuthToken = require('registry-auth-token'); const registryAuthToken = require('registry-auth-token');
const logger = require('../../_fixtures/logger');
jest.mock('registry-auth-token'); jest.mock('registry-auth-token');
jest.mock('got'); jest.mock('got');
@ -34,7 +33,7 @@ describe('api/npm', () => {
}); });
it('should fetch package info from npm', async () => { it('should fetch package info from npm', async () => {
got.mockImplementation(() => Promise.resolve(npmResponse)); got.mockImplementation(() => Promise.resolve(npmResponse));
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
expect(call).toMatchSnapshot(); expect(call).toMatchSnapshot();
@ -44,15 +43,15 @@ describe('api/npm', () => {
npmResponseHomepage.body.repository.url = ''; npmResponseHomepage.body.repository.url = '';
npmResponseHomepage.body.homepage = 'https://google.com'; npmResponseHomepage.body.homepage = 'https://google.com';
got.mockImplementationOnce(() => Promise.resolve(npmResponseHomepage)); got.mockImplementationOnce(() => Promise.resolve(npmResponseHomepage));
const res = await npm.getDependency('foobarhome', logger); const res = await npm.getDependency('foobarhome');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
expect(call).toMatchSnapshot(); expect(call).toMatchSnapshot();
}); });
it('should cache package info from npm', async () => { it('should cache package info from npm', async () => {
got.mockImplementation(() => Promise.resolve(npmResponse)); got.mockImplementation(() => Promise.resolve(npmResponse));
const res1 = await npm.getDependency('foobar', logger); const res1 = await npm.getDependency('foobar');
const res2 = await npm.getDependency('foobar', logger); const res2 = await npm.getDependency('foobar');
expect(res1).toEqual(res2); expect(res1).toEqual(res2);
expect(got.mock.calls.length).toEqual(1); expect(got.mock.calls.length).toEqual(1);
}); });
@ -60,7 +59,7 @@ describe('api/npm', () => {
got.mockImplementation(() => { got.mockImplementation(() => {
throw new Error('not found'); throw new Error('not found');
}); });
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
expect(res).toBeNull(); expect(res).toBeNull();
}); });
it('should send an authorization header if provided', async () => { it('should send an authorization header if provided', async () => {
@ -69,7 +68,7 @@ describe('api/npm', () => {
token: '1234', token: '1234',
})); }));
got.mockImplementation(() => Promise.resolve(npmResponse)); got.mockImplementation(() => Promise.resolve(npmResponse));
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
expect(call).toMatchSnapshot(); expect(call).toMatchSnapshot();
@ -78,7 +77,7 @@ describe('api/npm', () => {
got.mockImplementation(() => Promise.resolve(npmResponse)); got.mockImplementation(() => Promise.resolve(npmResponse));
const oldToken = process.env.NPM_TOKEN; const oldToken = process.env.NPM_TOKEN;
process.env.NPM_TOKEN = 'some-token'; process.env.NPM_TOKEN = 'some-token';
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
process.env.NPM_TOKEN = oldToken; process.env.NPM_TOKEN = oldToken;
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
@ -87,7 +86,7 @@ describe('api/npm', () => {
it('should fetch package info from custom registry', async () => { it('should fetch package info from custom registry', async () => {
got.mockImplementation(() => Promise.resolve(npmResponse)); got.mockImplementation(() => Promise.resolve(npmResponse));
npm.setNpmrc('registry=https://npm.mycustomregistry.com/'); npm.setNpmrc('registry=https://npm.mycustomregistry.com/');
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
expect(call).toMatchSnapshot(); expect(call).toMatchSnapshot();
@ -95,7 +94,7 @@ describe('api/npm', () => {
it('should use default registry if missing from npmrc', async () => { it('should use default registry if missing from npmrc', async () => {
got.mockImplementation(() => Promise.resolve(npmResponse)); got.mockImplementation(() => Promise.resolve(npmResponse));
npm.setNpmrc('foo=bar'); npm.setNpmrc('foo=bar');
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
expect(call).toMatchSnapshot(); expect(call).toMatchSnapshot();
@ -104,7 +103,7 @@ describe('api/npm', () => {
const noTimeResponse = { ...npmResponse }; const noTimeResponse = { ...npmResponse };
delete noTimeResponse.body.time; delete noTimeResponse.body.time;
got.mockImplementation(() => Promise.resolve(noTimeResponse)); got.mockImplementation(() => Promise.resolve(noTimeResponse));
const res = await npm.getDependency('foobar', logger); const res = await npm.getDependency('foobar');
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
const call = got.mock.calls[0]; const call = got.mock.calls[0];
expect(call).toMatchSnapshot(); expect(call).toMatchSnapshot();

View file

@ -1,7 +1,6 @@
const fs = require('fs'); const fs = require('fs');
const path = require('path'); const path = require('path');
const npmUpdater = require('../../../lib/manager/npm/update'); const npmUpdater = require('../../../lib/manager/npm/update');
const logger = require('../../_fixtures/logger');
function readFixture(fixture) { function readFixture(fixture) {
return fs.readFileSync( return fs.readFileSync(
@ -13,15 +12,14 @@ function readFixture(fixture) {
const input01Content = readFixture('inputs/01.json'); const input01Content = readFixture('inputs/01.json');
describe('workers/branch/package-json', () => { describe('workers/branch/package-json', () => {
describe('.setNewValue(currentFileContent, depType, depName, newVersion, logger)', () => { describe('.setNewValue(currentFileContent, depType, depName, newVersion)', () => {
it('replaces a dependency value', () => { it('replaces a dependency value', () => {
const outputContent = readFixture('outputs/011.json'); const outputContent = readFixture('outputs/011.json');
const testContent = npmUpdater.setNewValue( const testContent = npmUpdater.setNewValue(
input01Content, input01Content,
'dependencies', 'dependencies',
'cheerio', 'cheerio',
'0.22.1', '0.22.1'
logger
); );
testContent.should.equal(outputContent); testContent.should.equal(outputContent);
}); });
@ -31,8 +29,7 @@ describe('workers/branch/package-json', () => {
input01Content, input01Content,
'devDependencies', 'devDependencies',
'angular-touch', 'angular-touch',
'1.6.1', '1.6.1'
logger
); );
testContent.should.equal(outputContent); testContent.should.equal(outputContent);
}); });
@ -42,8 +39,7 @@ describe('workers/branch/package-json', () => {
input01Content, input01Content,
'devDependencies', 'devDependencies',
'angular-sanitize', 'angular-sanitize',
'1.6.1', '1.6.1'
logger
); );
testContent.should.equal(outputContent); testContent.should.equal(outputContent);
}); });
@ -52,8 +48,7 @@ describe('workers/branch/package-json', () => {
input01Content, input01Content,
'devDependencies', 'devDependencies',
'angular-touch', 'angular-touch',
'1.5.8', '1.5.8'
logger
); );
testContent.should.equal(input01Content); testContent.should.equal(input01Content);
}); });
@ -62,8 +57,7 @@ describe('workers/branch/package-json', () => {
input01Content, input01Content,
'blah', 'blah',
'angular-touch-not', 'angular-touch-not',
'1.5.8', '1.5.8'
logger
); );
expect(testContent).toBe(null); expect(testContent).toBe(null);
}); });

View file

@ -1,5 +1,3 @@
const logger = require('../../_fixtures/logger');
describe('platform/github', () => { describe('platform/github', () => {
let github; let github;
let get; let get;
@ -104,9 +102,6 @@ describe('platform/github', () => {
expect(process.env.GITHUB_ENDPOINT).toBe(endpoint); expect(process.env.GITHUB_ENDPOINT).toBe(endpoint);
}); });
}); });
it('uses provided logger', async () => {
await initRepo('some/repo', 'some_token', 'an_endpoint', logger);
});
it('should throw an error if no token is provided', async () => { it('should throw an error if no token is provided', async () => {
let err; let err;
try { try {

View file

@ -266,8 +266,6 @@ Array [
exports[`platform/gitlab initRepo should initialise the config for the repo - 2 2`] = `Object {}`; exports[`platform/gitlab initRepo should initialise the config for the repo - 2 2`] = `Object {}`;
exports[`platform/gitlab initRepo uses provided logger 1`] = `Object {}`;
exports[`platform/gitlab setBaseBranch(branchName) sets the base branch 1`] = ` exports[`platform/gitlab setBaseBranch(branchName) sets the base branch 1`] = `
Array [ Array [
Array [ Array [

View file

@ -1,5 +1,3 @@
const logger = require('../../_fixtures/logger');
describe('platform/gitlab', () => { describe('platform/gitlab', () => {
let gitlab; let gitlab;
let get; let get;
@ -102,15 +100,6 @@ describe('platform/gitlab', () => {
expect(process.env.GITLAB_ENDPOINT).toBe(endpoint); expect(process.env.GITLAB_ENDPOINT).toBe(endpoint);
}); });
}); });
it('uses provided logger', async () => {
const config = await initRepo(
'some/repo',
'some_token',
'an_endpoint',
logger
);
expect(config).toMatchSnapshot();
});
it('should throw an error if no token is provided', async () => { it('should throw an error if no token is provided', async () => {
let err; let err;
try { try {

View file

@ -1,6 +1,5 @@
const { tryBranchAutomerge } = require('../../../lib/workers/branch/automerge'); const { tryBranchAutomerge } = require('../../../lib/workers/branch/automerge');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
describe('workers/branch/automerge', () => { describe('workers/branch/automerge', () => {
describe('tryBranchAutomerge', () => { describe('tryBranchAutomerge', () => {
@ -8,7 +7,6 @@ describe('workers/branch/automerge', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
}; };
}); });
it('returns false if not configured for automerge', async () => { it('returns false if not configured for automerge', async () => {

View file

@ -2,7 +2,6 @@ const {
prAlreadyExisted, prAlreadyExisted,
} = require('../../../lib/workers/branch/check-existing'); } = require('../../../lib/workers/branch/check-existing');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
describe('workers/branch/check-existing', () => { describe('workers/branch/check-existing', () => {
describe('prAlreadyExisted', () => { describe('prAlreadyExisted', () => {
@ -10,7 +9,6 @@ describe('workers/branch/check-existing', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
branchName: 'some-branch', branchName: 'some-branch',
prTitle: 'some-title', prTitle: 'some-title',
}; };

View file

@ -1,6 +1,5 @@
const { commitFilesToBranch } = require('../../../lib/workers/branch/commit'); const { commitFilesToBranch } = require('../../../lib/workers/branch/commit');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
describe('workers/branch/automerge', () => { describe('workers/branch/automerge', () => {
describe('commitFilesToBranch', () => { describe('commitFilesToBranch', () => {
@ -8,7 +7,6 @@ describe('workers/branch/automerge', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
branchName: 'renovate/some-branch', branchName: 'renovate/some-branch',
commitMessage: 'some commit message', commitMessage: 'some commit message',
semanticCommits: false, semanticCommits: false,

View file

@ -21,8 +21,6 @@ jest.mock('../../../lib/workers/branch/status-checks');
jest.mock('../../../lib/workers/branch/automerge'); jest.mock('../../../lib/workers/branch/automerge');
jest.mock('../../../lib/workers/pr'); jest.mock('../../../lib/workers/pr');
const logger = require('../../_fixtures/logger');
describe('workers/branch', () => { describe('workers/branch', () => {
describe('processBranch', () => { describe('processBranch', () => {
let config; let config;
@ -33,7 +31,6 @@ describe('workers/branch', () => {
...defaultConfig, ...defaultConfig,
errors: [], errors: [],
warnings: [], warnings: [],
logger,
upgrades: [{ depName: 'some-dep-name' }], upgrades: [{ depName: 'some-dep-name' }],
}; };
schedule.isScheduledNow.mockReturnValue(true); schedule.isScheduledNow.mockReturnValue(true);
@ -61,7 +58,6 @@ describe('workers/branch', () => {
checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 }); checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 });
await branchWorker.processBranch(config); await branchWorker.processBranch(config);
expect(parent.getParentBranch.mock.calls.length).toBe(0); expect(parent.getParentBranch.mock.calls.length).toBe(0);
expect(config.logger.error.mock.calls).toHaveLength(0);
}); });
it('skips branch if closed digest PR found', async () => { it('skips branch if closed digest PR found', async () => {
schedule.isScheduledNow.mockReturnValueOnce(false); schedule.isScheduledNow.mockReturnValueOnce(false);
@ -70,7 +66,6 @@ describe('workers/branch', () => {
checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 }); checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 });
await branchWorker.processBranch(config); await branchWorker.processBranch(config);
expect(parent.getParentBranch.mock.calls.length).toBe(0); expect(parent.getParentBranch.mock.calls.length).toBe(0);
expect(config.logger.error.mock.calls).toHaveLength(0);
}); });
it('skips branch if closed minor PR found', async () => { it('skips branch if closed minor PR found', async () => {
schedule.isScheduledNow.mockReturnValueOnce(false); schedule.isScheduledNow.mockReturnValueOnce(false);
@ -78,7 +73,6 @@ describe('workers/branch', () => {
checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 }); checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 });
await branchWorker.processBranch(config); await branchWorker.processBranch(config);
expect(parent.getParentBranch.mock.calls.length).toBe(0); expect(parent.getParentBranch.mock.calls.length).toBe(0);
expect(config.logger.error.mock.calls).toHaveLength(0);
}); });
it('returns if no branch exists', async () => { it('returns if no branch exists', async () => {
manager.getUpdatedPackageFiles.mockReturnValueOnce({ manager.getUpdatedPackageFiles.mockReturnValueOnce({

View file

@ -1,7 +1,7 @@
const fs = require('fs-extra'); const fs = require('fs-extra');
const lockFiles = require('../../../lib/workers/branch/lock-files'); const lockFiles = require('../../../lib/workers/branch/lock-files');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
const npm = require('../../../lib/workers/branch/npm'); const npm = require('../../../lib/workers/branch/npm');
const yarn = require('../../../lib/workers/branch/yarn'); const yarn = require('../../../lib/workers/branch/yarn');
@ -20,7 +20,6 @@ describe('workers/branch/lock-files', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
}; };
}); });
it('returns true if found and true', () => { it('returns true if found and true', () => {
@ -68,7 +67,6 @@ describe('workers/branch/lock-files', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
}; };
}); });
it('returns true if found and true', () => { it('returns true if found and true', () => {
@ -116,7 +114,6 @@ describe('workers/branch/lock-files', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
packageFiles: [ packageFiles: [
{ {
packageFile: 'package.json', packageFile: 'package.json',
@ -180,7 +177,6 @@ describe('workers/branch/lock-files', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
tmpDir: { path: 'some-tmp-dir' }, tmpDir: { path: 'some-tmp-dir' },
}; };
fs.outputFile = jest.fn(); fs.outputFile = jest.fn();
@ -233,7 +229,6 @@ describe('workers/branch/lock-files', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
tmpDir: { path: 'some-tmp-dir' }, tmpDir: { path: 'some-tmp-dir' },
}; };
fs.outputFile = jest.fn(); fs.outputFile = jest.fn();
@ -276,7 +271,6 @@ describe('workers/branch/lock-files', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
tmpDir: { path: 'some-tmp-dir' }, tmpDir: { path: 'some-tmp-dir' },
}; };
platform.getFileContent.mockReturnValue('some lock file contents'); platform.getFileContent.mockReturnValue('some lock file contents');

View file

@ -1,5 +1,5 @@
const npmHelper = require('../../../lib/workers/branch/npm'); const npmHelper = require('../../../lib/workers/branch/npm');
const logger = require('../../_fixtures/logger');
const { getInstalledPath } = require('get-installed-path'); const { getInstalledPath } = require('get-installed-path');
jest.mock('fs-extra'); jest.mock('fs-extra');
@ -19,7 +19,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await npmHelper.generateLockFile('some-dir', logger); const res = await npmHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.error).not.toBeDefined(); expect(res.error).not.toBeDefined();
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
@ -33,7 +33,7 @@ describe('generateLockFile', () => {
fs.readFile = jest.fn(() => { fs.readFile = jest.fn(() => {
throw new Error('not found'); throw new Error('not found');
}); });
const res = await npmHelper.generateLockFile('some-dir', logger); const res = await npmHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.error).toBe(true); expect(res.error).toBe(true);
expect(res.lockFile).not.toBeDefined(); expect(res.lockFile).not.toBeDefined();
@ -51,7 +51,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await npmHelper.generateLockFile('some-dir', logger); const res = await npmHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });
@ -69,7 +69,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await npmHelper.generateLockFile('some-dir', logger); const res = await npmHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });
@ -89,7 +89,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await npmHelper.generateLockFile('some-dir', logger); const res = await npmHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });

View file

@ -2,7 +2,6 @@ const {
checkStale, checkStale,
getParentBranch, getParentBranch,
} = require('../../../lib/workers/branch/parent'); } = require('../../../lib/workers/branch/parent');
const logger = require('../../_fixtures/logger');
describe('workers/branch/parent', () => { describe('workers/branch/parent', () => {
describe('checkStale', () => { describe('checkStale', () => {
@ -24,7 +23,6 @@ describe('workers/branch/parent', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
branchName: 'renovate/some-branch', branchName: 'renovate/some-branch',
logger,
}; };
}); });
afterEach(() => { afterEach(() => {

View file

@ -1,6 +1,5 @@
const mockDate = require('mockdate'); const mockDate = require('mockdate');
const schedule = require('../../../lib/workers/branch/schedule'); const schedule = require('../../../lib/workers/branch/schedule');
const logger = require('../../_fixtures/logger');
describe('workers/branch/schedule', () => { describe('workers/branch/schedule', () => {
describe('hasValidSchedule(schedule)', () => { describe('hasValidSchedule(schedule)', () => {
@ -52,23 +51,18 @@ describe('workers/branch/schedule', () => {
}); });
it('returns true if schedule has a start and end time', () => { it('returns true if schedule has a start and end time', () => {
expect( expect(
schedule.hasValidSchedule( schedule.hasValidSchedule(['after 11:00pm and before 6:00am'])[0]
['after 11:00pm and before 6:00am'],
logger
)[0]
).toBe(true); ).toBe(true);
}); });
it('returns true if schedule has days and a start and end time', () => { it('returns true if schedule has days and a start and end time', () => {
expect( expect(
schedule.hasValidSchedule( schedule.hasValidSchedule([
['after 11:00pm and before 6:00am every weekday'], 'after 11:00pm and before 6:00am every weekday',
logger ])[0]
)[0]
).toBe(true); ).toBe(true);
}); });
it('supports hours shorthand', () => { it('supports hours shorthand', () => {
const [res] = schedule.hasValidSchedule( const [res] = schedule.hasValidSchedule([
[
'after 11pm and before 6am every weekend', 'after 11pm and before 6am every weekend',
'after 11pm', 'after 11pm',
'after 10pm and before 5:00am', 'after 10pm and before 5:00am',
@ -77,9 +71,7 @@ describe('workers/branch/schedule', () => {
'after 9pm on friday and saturday', 'after 9pm on friday and saturday',
'before 5am every weekday', 'before 5am every weekday',
'every weekend', 'every weekend',
], ]);
logger
);
expect(res).toBe(true); expect(res).toBe(true);
}); });
}); });
@ -88,9 +80,7 @@ describe('workers/branch/schedule', () => {
beforeEach(() => { beforeEach(() => {
mockDate.set(1498812608678); // 2017-06-30 10:50am mockDate.set(1498812608678); // 2017-06-30 10:50am
jest.resetAllMocks(); jest.resetAllMocks();
config = { config = {};
logger,
};
}); });
it('returns true if no schedule', () => { it('returns true if no schedule', () => {
const res = schedule.isScheduledNow(config); const res = schedule.isScheduledNow(config);

View file

@ -2,7 +2,6 @@ const {
setUnpublishable, setUnpublishable,
} = require('../../../lib/workers/branch/status-checks'); } = require('../../../lib/workers/branch/status-checks');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
describe('workers/branch/status-checks', () => { describe('workers/branch/status-checks', () => {
describe('setUnpublishable', () => { describe('setUnpublishable', () => {
@ -10,7 +9,6 @@ describe('workers/branch/status-checks', () => {
beforeEach(() => { beforeEach(() => {
config = { config = {
...defaultConfig, ...defaultConfig,
logger,
upgrades: [], upgrades: [],
}; };
}); });

View file

@ -1,5 +1,5 @@
const yarnHelper = require('../../../lib/workers/branch/yarn'); const yarnHelper = require('../../../lib/workers/branch/yarn');
const logger = require('../../_fixtures/logger');
const { getInstalledPath } = require('get-installed-path'); const { getInstalledPath } = require('get-installed-path');
jest.mock('fs-extra'); jest.mock('fs-extra');
@ -19,7 +19,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await yarnHelper.generateLockFile('some-dir', logger); const res = await yarnHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });
@ -32,7 +32,7 @@ describe('generateLockFile', () => {
fs.readFile = jest.fn(() => { fs.readFile = jest.fn(() => {
throw new Error('not found'); throw new Error('not found');
}); });
const res = await yarnHelper.generateLockFile('some-dir', logger); const res = await yarnHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.error).toBe(true); expect(res.error).toBe(true);
expect(res.lockFile).not.toBeDefined(); expect(res.lockFile).not.toBeDefined();
@ -50,7 +50,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await yarnHelper.generateLockFile('some-dir', logger); const res = await yarnHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });
@ -68,7 +68,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await yarnHelper.generateLockFile('some-dir', logger); const res = await yarnHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });
@ -88,7 +88,7 @@ describe('generateLockFile', () => {
stderror: '', stderror: '',
}); });
fs.readFile = jest.fn(() => 'package-lock-contents'); fs.readFile = jest.fn(() => 'package-lock-contents');
const res = await yarnHelper.generateLockFile('some-dir', logger); const res = await yarnHelper.generateLockFile('some-dir');
expect(fs.readFile.mock.calls.length).toEqual(1); expect(fs.readFile.mock.calls.length).toEqual(1);
expect(res.lockFile).toEqual('package-lock-contents'); expect(res.lockFile).toEqual('package-lock-contents');
}); });
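For context on the call-site change above (`generateLockFile('some-dir')` instead of `generateLockFile('some-dir', logger)`), here is a rough sketch of what a lockfile helper looks like once the `logger` parameter is dropped and logging goes through the global logger. This is an illustration of the pattern only, not the actual `lib/workers/branch/yarn.js` implementation; the shell-out step in particular is a placeholder.

```js
const cp = require('child_process');
const fs = require('fs-extra');

// Sketch only: no logger argument — the global `logger` is used instead.
async function generateLockFile(tmpDir) {
  logger.debug(`Generating yarn.lock in ${tmpDir}`);
  try {
    // Placeholder for the real install step the worker performs.
    cp.spawnSync('yarn', ['install'], { cwd: tmpDir });
    const lockFile = await fs.readFile(`${tmpDir}/yarn.lock`, 'utf8');
    return { lockFile };
  } catch (err) {
    // Matches the spec expectation: a read failure yields { error: true }.
    logger.warn({ err }, 'Error generating yarn.lock');
    return { error: true };
  }
}

module.exports = { generateLockFile };
```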

View file

@ -4,8 +4,6 @@ const packageJson = require('../../../lib/workers/dep-type/package-json');
const pkgWorker = require('../../../lib/workers/package/index'); const pkgWorker = require('../../../lib/workers/package/index');
const depTypeWorker = require('../../../lib/workers/dep-type/index'); const depTypeWorker = require('../../../lib/workers/dep-type/index');
const logger = require('../../_fixtures/logger');
jest.mock('../../../lib/workers/dep-type/package-json'); jest.mock('../../../lib/workers/dep-type/package-json');
jest.mock('../../../lib/workers/package/index'); jest.mock('../../../lib/workers/package/index');
@ -19,7 +17,6 @@ describe('lib/workers/dep-type/index', () => {
packageFile: 'package.json', packageFile: 'package.json',
ignoreDeps: ['a', 'b'], ignoreDeps: ['a', 'b'],
monorepoPackages: ['e'], monorepoPackages: ['e'],
logger,
}; };
}); });
it('returns empty if config is disabled', async () => { it('returns empty if config is disabled', async () => {
@ -87,7 +84,7 @@ describe('lib/workers/dep-type/index', () => {
describe('getDepConfig(depTypeConfig, dep)', () => { describe('getDepConfig(depTypeConfig, dep)', () => {
const depTypeConfig = { const depTypeConfig = {
foo: 'bar', foo: 'bar',
logger,
packageRules: [ packageRules: [
{ {
packageNames: ['a', 'b'], packageNames: ['a', 'b'],

View file

@ -2,8 +2,6 @@ const packageFileWorker = require('../../../lib/workers/package-file');
const depTypeWorker = require('../../../lib/workers/dep-type'); const depTypeWorker = require('../../../lib/workers/dep-type');
const defaultConfig = require('../../../lib/config/defaults').getConfig(); const defaultConfig = require('../../../lib/config/defaults').getConfig();
const logger = require('../../_fixtures/logger');
jest.mock('../../../lib/workers/dep-type'); jest.mock('../../../lib/workers/dep-type');
jest.mock('../../../lib/workers/branch/schedule'); jest.mock('../../../lib/workers/branch/schedule');
@ -17,7 +15,6 @@ describe('packageFileWorker', () => {
content: {}, content: {},
repoIsOnboarded: true, repoIsOnboarded: true,
npmrc: '# nothing', npmrc: '# nothing',
logger,
}; };
depTypeWorker.renovateDepType.mockReturnValue([]); depTypeWorker.renovateDepType.mockReturnValue([]);
}); });
@ -65,7 +62,6 @@ describe('packageFileWorker', () => {
...defaultConfig, ...defaultConfig,
packageFile: 'package.js', packageFile: 'package.js',
repoIsOnboarded: true, repoIsOnboarded: true,
logger,
}; };
depTypeWorker.renovateDepType.mockReturnValue([]); depTypeWorker.renovateDepType.mockReturnValue([]);
}); });
@ -87,7 +83,6 @@ describe('packageFileWorker', () => {
...defaultConfig, ...defaultConfig,
packageFile: 'Dockerfile', packageFile: 'Dockerfile',
repoIsOnboarded: true, repoIsOnboarded: true,
logger,
}; };
depTypeWorker.renovateDepType.mockReturnValue([]); depTypeWorker.renovateDepType.mockReturnValue([]);
}); });
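With the mock registered globally (see the setup sketch earlier), specs that previously received the logger through config can still assert on log output by targeting the global mock directly. A small self-contained example of that pattern, with the real worker call replaced by a stand-in:

```js
// Sketch: log assertions now target the global mock logger instead of a
// logger instance passed in via config.
beforeEach(() => {
  jest.clearAllMocks();
});

it('records debug output on the global mock', () => {
  logger.debug('something to assert on'); // stand-in for the worker call
  expect(logger.debug).toHaveBeenCalledWith('something to assert on');
});
```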

View file

@ -82,15 +82,6 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
"groupSlug": null, "groupSlug": null,
"labels": Array [], "labels": Array [],
"lazyGrouping": true, "lazyGrouping": true,
"logger": Object {
"child": [Function],
"debug": [Function],
"error": [Function],
"fatal": [Function],
"info": [Function],
"trace": [Function],
"warn": [Function],
},
"npmrc": null, "npmrc": null,
"packageFile": "package.json", "packageFile": "package.json",
"packageFiles": Array [], "packageFiles": Array [],

Some files were not shown because too many files have changed in this diff.