const handlebars = require('handlebars');
const packageJsonHelper = require('./package-json');
const npm = require('./npm');
const yarn = require('./yarn');
const schedule = require('./schedule');
const prWorker = require('../pr');
let logger = require('../../logger');

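// Note: getParentBranch and ensureBranch are called below via module.exports
// rather than directly, presumably so they can be stubbed in unit tests.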
module.exports = {
  getParentBranch,
  ensureBranch,
  processBranchUpgrades,
};
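
// Determine which existing branch (if any) to base this upgrade branch on.
// Returns the branch name when the existing branch can be reused as-is, or
// undefined when the branch should be (re)created from the default branch
// (branch missing, stale, or unmergeable but rebaseable).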
async function getParentBranch(branchName, config) {
  // Check if branch exists
  if ((await config.api.branchExists(branchName)) === false) {
    logger.info(`Branch needs creating`);
    return undefined;
  }
  logger.info(`Branch already exists`);
  // Check if needs rebasing
  if (
    config.rebaseStalePrs ||
    (config.automergeEnabled && config.automergeType === 'branch-push')
  ) {
    const isBranchStale = await config.api.isBranchStale(branchName);
    if (isBranchStale) {
      logger.info(`Branch is stale and needs rebasing`);
      return undefined;
    }
  }

  // Check for existing PR
  const pr = await config.api.getBranchPr(branchName);
  // Decide if we need to rebase
  if (!pr) {
    logger.debug(`No PR found`);
    // We can't tell if this branch can be rebased so better not
    return branchName;
  }
  if (pr.isUnmergeable) {
    logger.debug('PR is unmergeable');
    if (pr.canRebase) {
      logger.info(`Branch is not mergeable and needs rebasing`);
      if (config.isGitLab) {
        logger.info(`Deleting unmergeable branch in order to recreate/rebase`);
        await config.api.deleteBranch(branchName);
      }
      // Setting parentBranch back to undefined means that we'll use the default branch
      return undefined;
    }
    // Don't do anything different, but warn
    logger.warn(`Branch is not mergeable but can't be rebased`);
  }
  logger.debug(`Branch does not need rebasing`);
  return branchName;
}

// Ensure branch exists with appropriate content
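// Returns true if the branch exists once processing completes (and may
// automerge it), or false if no branch exists afterwards (nothing was
// committed and none existed before).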
async function ensureBranch(config) {
  logger.trace({ config }, 'ensureBranch');
  // Use the first upgrade for all the templates
  const branchName = handlebars.compile(config.branchName)(config);
  // parentBranch is the branch we will base off
  // If undefined, this will mean the defaultBranch
  const parentBranch = await module.exports.getParentBranch(branchName, config);

  const commitMessage = handlebars.compile(config.commitMessage)(config);
  const api = config.api;
  const versions = config.versions;
  const cacheFolder = config.yarnCacheFolder;
  const packageFiles = {};
  const commitFiles = [];
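  // Each upgrade is either lock file maintenance (refresh yarn.lock only) or a
  // package.json change; package.json edits are accumulated per file so that
  // multiple upgrades to the same file end up in a single commit.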
  for (const upgrade of config.upgrades) {
    if (upgrade.type === 'lockFileMaintenance') {
      logger.debug('branch lockFileMaintenance');
      try {
        const newYarnLock = await yarn.maintainLockFile(upgrade);
        if (newYarnLock) {
          commitFiles.push(newYarnLock);
        }
      } catch (err) {
        logger.debug(err);
        throw new Error('Could not maintain yarn.lock file');
      }
    } else {
      // See if this is the first time editing this file
      if (!packageFiles[upgrade.packageFile]) {
        // If we are rebasing then existing content will be from master
        packageFiles[upgrade.packageFile] = await api.getFileContent(
          upgrade.packageFile,
          parentBranch
        );
      }
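      // Apply the upgrade to the raw package.json content; setNewValue returns
      // the updated file string (see ./package-json)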
      const newContent = packageJsonHelper.setNewValue(
        packageFiles[upgrade.packageFile],
        upgrade.depType,
        upgrade.depName,
        upgrade.newVersion,
        logger
      );
      if (packageFiles[upgrade.packageFile] === newContent) {
        logger.debug('packageFile content unchanged');
        delete packageFiles[upgrade.packageFile];
      } else {
        logger.debug('Updating packageFile content');
        packageFiles[upgrade.packageFile] = newContent;
      }
    }
  }
  if (Object.keys(packageFiles).length > 0) {
    logger.info(
      `${Object.keys(packageFiles).length} package file(s) need updating.`
    );
    for (const packageFile of Object.keys(packageFiles)) {
      logger.debug(`Adding ${packageFile}`);
      commitFiles.push({
        name: packageFile,
        contents: packageFiles[packageFile],
      });
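      // Regenerate lock files so they stay in sync with the updated
      // package.json. The detected yarn/npm versions are passed through so the
      // helpers can decide whether a lock file applies (see ./yarn and ./npm).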
      try {
        const yarnLockFile = await yarn.getLockFile(
          packageFile,
          packageFiles[packageFile],
          api,
          cacheFolder,
          versions.yarn
        );
        if (yarnLockFile) {
          // Add new yarn.lock file too
          logger.info(`Adding ${yarnLockFile.name}`);
          commitFiles.push(yarnLockFile);
        }
        const packageLockFile = await npm.getLockFile(
          packageFile,
          packageFiles[packageFile],
          api,
          versions.npm
        );
        if (packageLockFile) {
          // Add new package-lock.json file too
          logger.info(`Adding ${packageLockFile.name}`);
          commitFiles.push(packageLockFile);
        }
      } catch (err) {
        logger.info('Could not generate necessary lock file');
        throw err;
      }
    }
  }

  if (commitFiles.length) {
    logger.debug(`${commitFiles.length} file(s) to commit`);
    // API will know whether to create new branch or not
    await api.commitFilesToBranch(
      branchName,
      commitFiles,
      commitMessage,
      parentBranch
    );
  } else {
    logger.debug(`No files to commit`);
  }
  if (!(await api.branchExists(branchName))) {
    // Return now if no branch exists
    return false;
  }
  if (config.automergeEnabled === false || config.automergeType === 'pr') {
    // No branch automerge
    return true;
  }
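  // Branch automerge: merge the branch itself (no PR merge involved) once its
  // required status checks pass. config.automergeType is expected to be a
  // branch-level strategy here, e.g. 'branch-push' as checked in getParentBranch.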
  logger.debug('Checking if we can automerge branch');
  const branchStatus = await api.getBranchStatus(
    branchName,
    config.requiredStatusChecks
  );
  if (branchStatus === 'success') {
    logger.info(`Automerging branch`);
    try {
      await api.mergeBranch(branchName, config.automergeType);
    } catch (err) {
      logger.error(`Failed to automerge branch`);
      logger.debug(JSON.stringify(err));
      throw err;
    }
  } else {
    logger.debug(`Branch status is "${branchStatus}" - skipping automerge`);
  }
  // Return true as branch exists
  return true;
}

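// Process a group of upgrades that share a branch: check the schedule, skip if
// a matching PR was previously closed, ensure the branch, then ensure its PR.
// branchUpgrades is the flattened branch config (branchName, prTitle, upgrades,
// api, logger, schedule, recreateClosed, ...).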
async function processBranchUpgrades(branchUpgrades, errors, warnings) {
  logger = branchUpgrades.logger || logger;
  logger.trace({ config: branchUpgrades }, 'processBranchUpgrades');
  const config = Object.assign({}, branchUpgrades);
  // Check schedule
  if (
    config.schedule &&
    config.schedule.length &&
    schedule.isScheduledNow(config) === false
  ) {
    logger.info('Skipping branch as it is not scheduled');
    return;
  }
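
  // Derive a child logger so subsequent messages carry repository/branch context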
  logger = logger.child({
    repository: config.repository,
    branch: config.branchName,
  });
  config.logger = logger;

  const packageNames = config.upgrades.map(upgrade => upgrade.depName);
  logger.info(`Branch has ${packageNames.length} upgrade(s): ${packageNames}`);

  try {
    if (
      // Groups and lock file maintenance should set this to true
      config.recreateClosed === false &&
      (await config.api.checkForClosedPr(config.branchName, config.prTitle))
    ) {
      logger.info(`Skipping branch as matching closed PR already existed`);
      return;
    }
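    // Ensure the branch, then (if it exists) ensure a PR for it and check
    // whether that PR can be automerged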
    const branchCreated = await module.exports.ensureBranch(config);
    if (branchCreated) {
      const pr = await prWorker.ensurePr(
        config.upgrades,
        logger,
        errors,
        warnings
      );
      if (pr) {
        await prWorker.checkAutoMerge(pr, config, logger);
      }
    }
  } catch (err) {
    logger.error(`Error updating branch: ${err.message}`);
    logger.debug(JSON.stringify(err));
    // Don't throw here - we don't want to stop the other renovations
  }
}