const get = require('./gh-got-wrapper');
const addrs = require('email-addresses');
const moment = require('moment');
const openpgp = require('openpgp');
const path = require('path');

let config = {};

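// Note: `logger` is not required here; it is assumed to be provided as a
// global by the host application. `get` is the project's gh-got wrapper,
// which (judging from its usage below) resolves to `{ body }`, exposes
// `.post()`, `.put()`, `.patch()`, `.delete()` and `.reset()`, and supports
// a `{ paginate: true }` option.
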
module.exports = {
  // Initialization
  getRepos,
  initRepo,
  getRepoForceRebase,
  setBaseBranch,
  // Search
  getFileList,
  // Branch
  branchExists,
  getAllRenovateBranches,
  isBranchStale,
  getBranchPr,
  getBranchStatus,
  getBranchStatusCheck,
  setBranchStatus,
  deleteBranch,
  mergeBranch,
  getBranchLastCommitTime,
  // Issue
  addAssignees,
  addReviewers,
  // Comments
  ensureComment,
  ensureCommentRemoval,
  // PR
  findPr,
  createPr,
  getPr,
  getPrFiles,
  updatePr,
  mergePr,
  // File
  commitFilesToBranch,
  getFile,
  // Commits
  getCommitMessages,
};

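// Illustrative call sequence for this module (a sketch only; the token
// variable and the branch/file names below are hypothetical):
//
//   await initRepo('some-org/some-repo', token);
//   const fileList = await getFileList();
//   if (!(await branchExists('renovate/some-branch'))) {
//     await commitFilesToBranch(
//       'renovate/some-branch',
//       [{ name: 'package.json', contents: '{}' }],
//       'Update dependencies'
//     );
//   }
//   const pr = await createPr('renovate/some-branch', 'Some title', 'Some body', []);
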
// Get all repositories that the user has access to
async function getRepos(token, endpoint) {
  logger.debug('getRepos(token, endpoint)');
  if (token) {
    process.env.GITHUB_TOKEN = token;
  } else if (!process.env.GITHUB_TOKEN) {
    throw new Error('No token found for getRepos');
  }
  if (endpoint) {
    process.env.GITHUB_ENDPOINT = endpoint;
  }
  try {
    const res = await get('user/repos', { paginate: true });
    return res.body.map(repo => repo.full_name);
  } catch (err) /* istanbul ignore next */ {
    logger.error({ err }, `GitHub getRepos error`);
    throw err;
  }
}

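// Example (illustrative): `await getRepos(token)` resolves to an array of
// "owner/name" strings such as ['some-org/repo-a', 'some-org/repo-b'],
// since only `full_name` is kept from each repository object.
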
// Initialize GitHub by getting base branch and SHA
async function initRepo(repoName, token, endpoint) {
  logger.debug(`initRepo("${repoName}")`);
  if (token) {
    process.env.GITHUB_TOKEN = token;
  } else if (!process.env.GITHUB_TOKEN) {
    throw new Error(`No token found for GitHub repository ${repoName}`);
  }
  if (endpoint) {
    process.env.GITHUB_ENDPOINT = endpoint;
  }
  config = {};
  get.reset();
  config.repoName = repoName;
  const platformConfig = {};
  let res;
  try {
    res = await get(`repos/${repoName}`);
    logger.trace({ repositoryDetails: res.body }, 'Repository details');
    platformConfig.privateRepo = res.body.private === true;
    platformConfig.isFork = res.body.fork === true;
    config.owner = res.body.owner.login;
    logger.debug(`${repoName} owner = ${config.owner}`);
    // Use default branch as PR target unless later overridden
    config.defaultBranch = res.body.default_branch;
    config.baseBranch = config.defaultBranch;
    logger.debug(`${repoName} default branch = ${config.baseBranch}`);
    if (res.body.allow_rebase_merge) {
      config.mergeMethod = 'rebase';
    } else if (res.body.allow_squash_merge) {
      config.mergeMethod = 'squash';
    } else if (res.body.allow_merge_commit) {
      config.mergeMethod = 'merge';
    } else {
      logger.info('Could not find allowed merge methods for repo');
    }
  } catch (err) /* istanbul ignore next */ {
    logger.info({ err, res }, 'Unknown GitHub initRepo error');
    throw err;
  }
  delete config.prList;
  delete config.fileList;
  await Promise.all([getPrList(), getFileList()]);
  return platformConfig;
}

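// Example (illustrative): for a typical repository, initRepo resolves to an
// object of the shape { privateRepo: false, isFork: false }, while the
// remaining repository details (owner, default/base branch, merge method)
// are cached in the module-level `config`.
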
async function getRepoForceRebase() {
  if (config.repoForceRebase === undefined) {
    try {
      config.repoForceRebase = false;
      const branchProtection = await getBranchProtection(config.baseBranch);
      logger.info('Found branch protection');
      if (branchProtection.required_pull_request_reviews) {
        logger.info(
          'Branch protection: PR Reviews are required before merging'
        );
        config.prReviewsRequired = true;
      }
      if (branchProtection.required_status_checks) {
        if (branchProtection.required_status_checks.strict) {
          logger.info(
            'Branch protection: PRs must be up-to-date before merging'
          );
          config.repoForceRebase = true;
        }
      }
      if (branchProtection.restrictions) {
        logger.info(
          {
            users: branchProtection.restrictions.users,
            teams: branchProtection.restrictions.teams,
          },
          'Branch protection: Pushing to branch is restricted'
        );
        config.pushProtection = true;
      }
    } catch (err) {
      if (err.statusCode === 404) {
        logger.info(`No branch protection found`);
      } else if (err.statusCode === 403) {
        logger.warn(
          'Branch protection: Do not have permissions to detect branch protection'
        );
      } else {
        throw err;
      }
    }
  }
  return config.repoForceRebase;
}

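// Summary of the branch protection mapping above:
//   required_pull_request_reviews  -> config.prReviewsRequired = true
//   required_status_checks.strict  -> config.repoForceRebase = true
//   restrictions (users/teams)     -> config.pushProtection = true
// A 404 simply means the base branch has no protection configured.
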
async function getBaseCommitSHA() {
  if (!config.baseCommitSHA) {
    config.baseCommitSHA = await getBranchCommit(config.baseBranch);
  }
  return config.baseCommitSHA;
}

async function getBranchProtection(branchName) {
  const res = await get(
    `repos/${config.repoName}/branches/${branchName}/protection`
  );
  return res.body;
}

async function setBaseBranch(branchName) {
  if (branchName) {
    logger.debug(`Setting baseBranch to ${branchName}`);
    config.baseBranch = branchName;
    delete config.baseCommitSHA;
    delete config.fileList;
    await getFileList(branchName);
  }
}

// Search

// Get full file list
async function getFileList(branchName = config.baseBranch) {
  if (config.fileList) {
    return config.fileList;
  }
  try {
    const res = await get(
      `repos/${config.repoName}/git/trees/${branchName}?recursive=true`
    );
    if (res.body.truncated) {
      logger.warn(
        { repository: config.repoName },
        'repository tree is truncated'
      );
    }
    config.fileList = res.body.tree
      .filter(item => item.type === 'blob' && item.mode !== '120000')
      .map(item => item.path)
      .sort();
  } catch (err) /* istanbul ignore next */ {
    if (err.statusCode === 409) {
      logger.debug('Repository is not initiated');
      throw new Error('uninitiated');
    }
    logger.info(
      { repository: config.repoName },
      'Error retrieving git tree - no files detected'
    );
    config.fileList = [];
  }
  return config.fileList;
}

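// Example (illustrative): the cached file list is a sorted array of paths
// such as ['lib/index.js', 'package.json'] - regular blobs only, since
// directories and symlinks (mode '120000') are filtered out above.
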
// Branch

// Returns true if branch exists, otherwise false
async function branchExists(branchName) {
  logger.debug(`branchExists(${branchName})`);
  const branchList = (await get(
    `repos/${config.repoName}/branches?per_page=100`,
    { paginate: true }
  )).body.map(branch => branch.name);
  return branchList.includes(branchName);
}

async function getAllRenovateBranches(branchPrefix) {
  logger.trace('getAllRenovateBranches');
  const allBranches = (await get(`repos/${config.repoName}/git/refs/heads`))
    .body;
  return allBranches.reduce((arr, branch) => {
    if (branch.ref.indexOf(`refs/heads/${branchPrefix}`) === 0) {
      arr.push(branch.ref.substring('refs/heads/'.length));
    }
    return arr;
  }, []);
}

async function isBranchStale(branchName) {
  // Check if the branch's first parent SHA matches the base branch SHA
  logger.debug(`isBranchStale(${branchName})`);
  const branchCommit = await getBranchCommit(branchName);
  logger.debug(`branchCommit=${branchCommit}`);
  const commitDetails = await getCommitDetails(branchCommit);
  logger.trace({ commitDetails }, `commitDetails`);
  const parentSha = commitDetails.parents[0].sha;
  logger.debug(`parentSha=${parentSha}`);
  const baseCommitSHA = await getBaseCommitSHA();
  logger.debug(`baseCommitSHA=${baseCommitSHA}`);
  // Return true if the SHAs don't match
  return parentSha !== baseCommitSHA;
}

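// Example (illustrative SHAs): if the branch was created from base commit
// 'abc123' but the base branch has since moved on to 'def456', then
// parentSha ('abc123') !== baseCommitSHA ('def456') and the branch is stale.
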
// Returns the open Pull Request for a branch, or null if there is none
async function getBranchPr(branchName) {
  logger.debug(`getBranchPr(${branchName})`);
  const existingPr = await findPr(branchName, null, 'open');
  return existingPr ? getPr(existingPr.number) : null;
}

// Returns the combined status for a branch.
async function getBranchStatus(branchName, requiredStatusChecks) {
  logger.debug(`getBranchStatus(${branchName})`);
  if (!requiredStatusChecks) {
    // null means disable status checks, so it always succeeds
    logger.debug('Status checks disabled = returning "success"');
    return 'success';
  }
  if (requiredStatusChecks.length) {
    // This is not supported
    logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`);
    return 'failed';
  }
  const gotString = `repos/${config.repoName}/commits/${branchName}/status`;
  const res = await get(gotString);
  logger.debug(
    { state: res.body.state, statuses: res.body.statuses },
    'branch status check result'
  );
  return res.body.state;
}

async function getBranchStatusCheck(branchName, context) {
  const branchCommit = await getBranchCommit(branchName);
  const url = `repos/${config.repoName}/commits/${branchCommit}/statuses`;
  const res = await get(url);
  for (const check of res.body) {
    if (check.context === context) {
      return check.state;
    }
  }
  return null;
}

async function setBranchStatus(
  branchName,
  context,
  description,
  state,
  targetUrl
) {
  const branchCommit = await getBranchCommit(branchName);
  const url = `repos/${config.repoName}/statuses/${branchCommit}`;
  const options = {
    state,
    description,
    context,
  };
  if (targetUrl) {
    options.target_url = targetUrl;
  }
  await get.post(url, { body: options });
}

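// Example usage (illustrative values only):
//   await setBranchStatus(
//     'renovate/some-branch',
//     'renovate/verify',
//     'Renovate verification',
//     'success',
//     'https://example.com/details'
//   );
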
async function deleteBranch(branchName) {
  await get.delete(`repos/${config.repoName}/git/refs/heads/${branchName}`);
}

async function mergeBranch(branchName, mergeType) {
  logger.debug(`mergeBranch(${branchName}, ${mergeType})`);
  // istanbul ignore if
  if (config.pushProtection) {
    logger.info(
      { branchName, mergeType },
      'Branch protection: Attempting to merge branch when push protection is enabled'
    );
  }
  if (mergeType === 'branch-push') {
    const url = `repos/${config.repoName}/git/refs/heads/${config.baseBranch}`;
    const options = {
      body: {
        sha: await getBranchCommit(branchName),
      },
    };
    try {
      await get.patch(url, options);
    } catch (err) {
      logger.warn({ err }, `Error pushing branch merge for ${branchName}`);
      throw new Error('branch-push failed');
    }
  } else if (mergeType === 'branch-merge-commit') {
    const url = `repos/${config.repoName}/merges`;
    const options = {
      body: {
        base: config.baseBranch,
        head: branchName,
      },
    };
    try {
      await get.post(url, options);
    } catch (err) {
      logger.warn({ err }, `Error creating merge commit for ${branchName}`);
      throw new Error('branch-merge-commit failed');
    }
  } else {
    throw new Error(`Unsupported branch merge type: ${mergeType}`);
  }
  // Update base commit
  delete config.baseCommitSHA;
  // Delete branch
  await deleteBranch(branchName);
}

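// The two merge types above map to different GitHub API calls:
//   'branch-push'         - PATCH the base branch ref to the branch's commit
//                           SHA (effectively pushing the branch onto base)
//   'branch-merge-commit' - POST to /merges to create a merge commit of the
//                           branch into the base branch
// In both cases the cached base commit SHA is invalidated and the source
// branch is deleted afterwards.
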
async function getBranchLastCommitTime(branchName) {
  try {
    const res = await get(`repos/${config.repoName}/commits?sha=${branchName}`);
    return new Date(res.body[0].commit.committer.date);
  } catch (err) {
    logger.error({ err }, `getBranchLastCommitTime error`);
    return new Date();
  }
}

// Issue

async function addAssignees(issueNo, assignees) {
  logger.debug(`Adding assignees ${assignees} to #${issueNo}`);
  await get.post(`repos/${config.repoName}/issues/${issueNo}/assignees`, {
    body: {
      assignees,
    },
  });
}

async function addReviewers(issueNo, reviewers) {
  logger.debug(`Adding reviewers ${reviewers} to #${issueNo}`);
  const res = await get.post(
    `repos/${config.repoName}/pulls/${issueNo}/requested_reviewers`,
    {
      headers: {
        accept: 'application/vnd.github.thor-preview+json',
      },
      body: {
        reviewers,
        team_reviewers: [],
      },
    }
  );
  logger.debug({ body: res.body }, 'Added reviewers');
}

async function addLabels(issueNo, labels) {
  logger.debug(`Adding labels ${labels} to #${issueNo}`);
  if (Array.isArray(labels) && labels.length) {
    await get.post(`repos/${config.repoName}/issues/${issueNo}/labels`, {
      body: labels,
    });
  }
}

async function getComments(issueNo) {
  // GET /repos/:owner/:repo/issues/:number/comments
  logger.debug(`Getting comments for #${issueNo}`);
  const url = `repos/${
    config.repoName
  }/issues/${issueNo}/comments?per_page=100`;
  const comments = (await get(url, { paginate: true })).body;
  logger.debug(`Found ${comments.length} comments`);
  return comments;
}

async function addComment(issueNo, body) {
  // POST /repos/:owner/:repo/issues/:number/comments
  await get.post(`repos/${config.repoName}/issues/${issueNo}/comments`, {
    body: { body },
  });
}

async function editComment(commentId, body) {
  // PATCH /repos/:owner/:repo/issues/comments/:id
  await get.patch(`repos/${config.repoName}/issues/comments/${commentId}`, {
    body: { body },
  });
}

async function deleteComment(commentId) {
  // DELETE /repos/:owner/:repo/issues/comments/:id
  await get.delete(`repos/${config.repoName}/issues/comments/${commentId}`);
}

async function ensureComment(issueNo, topic, content) {
  logger.debug(`Ensuring comment "${topic}" in #${issueNo}`);
  const body = `### ${topic}\n\n${content}`;
  const comments = await getComments(issueNo);
  let commentId;
  let commentNeedsUpdating;
  comments.forEach(comment => {
    if (comment.body.startsWith(`### ${topic}\n\n`)) {
      commentId = comment.id;
      commentNeedsUpdating = comment.body !== body;
    }
  });
  if (!commentId) {
    await addComment(issueNo, body);
    logger.info({ repository: config.repoName, issueNo }, 'Added comment');
  } else if (commentNeedsUpdating) {
    await editComment(commentId, body);
    logger.info({ repository: config.repoName, issueNo }, 'Updated comment');
  } else {
    logger.debug('Comment is already up-to-date');
  }
}

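// Example usage (illustrative issue number and topic):
//   await ensureComment(42, 'Lock file problem', 'Details...');
// creates or updates the single comment on #42 whose body starts with
// "### Lock file problem", so repeated calls never produce duplicates.
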
async function ensureCommentRemoval(issueNo, topic) {
  logger.debug(`Ensuring comment "${topic}" in #${issueNo} is removed`);
  const comments = await getComments(issueNo);
  let commentId;
  comments.forEach(comment => {
    if (comment.body.startsWith(`### ${topic}\n\n`)) {
      commentId = comment.id;
    }
  });
  if (commentId) {
    await deleteComment(commentId);
  }
}

// Pull Request

async function getPrList() {
  logger.debug('getPrList()');
  if (!config.prList) {
    logger.debug('Retrieving PR list');
    const res = await get(
      `repos/${config.repoName}/pulls?per_page=100&state=all`,
      { paginate: true }
    );
    config.prList = res.body.map(pr => ({
      number: pr.number,
      branchName: pr.head.ref,
      title: pr.title,
      state:
        pr.state === 'closed' && pr.merged_at && pr.merged_at.length
          ? 'merged'
          : pr.state,
      closed_at: pr.closed_at,
    }));
    logger.info({ length: config.prList.length }, 'Retrieved Pull Requests');
    logger.debug({ prList: config.prList });
  }
  return config.prList;
}

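// Example (illustrative values): each cached PR entry has the shape
//   { number: 1, branchName: 'renovate/some-branch', title: 'Update foo',
//     state: 'merged', closed_at: '2017-01-01T00:00:00Z' }
// where 'merged' is a synthetic state derived from closed PRs that have a
// merged_at timestamp.
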
function matchesState(state, desiredState) {
  if (desiredState === 'all') {
    return true;
  }
  if (desiredState[0] === '!') {
    return state !== desiredState.substring(1);
  }
  return state === desiredState;
}

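// Worked examples:
//   matchesState('open', 'all')      // true
//   matchesState('open', '!closed')  // true  (negated match)
//   matchesState('merged', 'open')   // false
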
async function findPr(branchName, prTitle, state = 'all') {
  logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`);
  const prList = await getPrList();
  const pr = prList.find(
    p =>
      p.branchName === branchName &&
      (!prTitle || p.title === prTitle) &&
      matchesState(p.state, state)
  );
  if (pr) {
    logger.debug(`Found PR #${pr.number}`);
  }
  return pr;
}

// Creates a PR and returns the PR object
async function createPr(branchName, title, body, labels, useDefaultBranch) {
  const base = useDefaultBranch ? config.defaultBranch : config.baseBranch;
  const pr = (await get.post(`repos/${config.repoName}/pulls`, {
    body: {
      title,
      head: branchName,
      base,
      body,
    },
  })).body;
  pr.displayNumber = `Pull Request #${pr.number}`;
  await addLabels(pr.number, labels);
  return pr;
}

// Gets details for a PR
async function getPr(prNo) {
  if (!prNo) {
    return null;
  }
  const pr = (await get(`repos/${config.repoName}/pulls/${prNo}`)).body;
  if (!pr) {
    return null;
  }
  // Harmonise PR values
  pr.displayNumber = `Pull Request #${pr.number}`;
  if (pr.state === 'open') {
    if (pr.mergeable_state === 'dirty') {
      logger.debug(`PR mergeable state is dirty`);
      pr.isUnmergeable = true;
    }
    if (pr.commits === 1) {
      // Only one commit was made - must have been renovate
      logger.debug('Only 1 commit in PR so rebase is possible');
      pr.canRebase = true;
    } else {
      // Check if only one author of all commits
      logger.debug('Checking all commits');
      const prCommits = (await get(
        `repos/${config.repoName}/pulls/${prNo}/commits`
      )).body;
      const authors = prCommits.reduce((arr, commit) => {
        logger.trace({ commit }, `Checking commit`);
        let author = 'unknown';
        if (commit.committer && commit.committer.login) {
          author = commit.committer.login;
        } else if (commit.author) {
          author = commit.author.login;
        } else if (commit.commit && commit.commit.author) {
          author = commit.commit.author.email;
        } else {
          logger.debug('Could not determine commit author');
        }
        logger.debug(`Commit author is: ${author}`);
        const message = commit.commit ? commit.commit.message : '';
        const parents = commit.parents || [];
        let ignoreWebFlow = false;
        if (
          author === 'web-flow' &&
          message.startsWith("Merge branch '") &&
          parents.length === 2
        ) {
          ignoreWebFlow = true;
        }
        // Ignore GitHub "web-flow"
        if (!ignoreWebFlow && arr.indexOf(author) === -1) {
          arr.push(author);
        }
        return arr;
      }, []);
      logger.debug(`Author list: ${authors}`);
      if (authors.length === 1) {
        pr.canRebase = true;
      }
    }
    const baseCommitSHA = await getBaseCommitSHA();
    if (!pr.base || pr.base.sha !== baseCommitSHA) {
      pr.isStale = true;
    }
  }
  return pr;
}

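// Note: in addition to GitHub's own fields, getPr() decorates open PRs with
// displayNumber, isUnmergeable (mergeable_state === 'dirty'), canRebase
// (single commit, or a single non-"web-flow" author) and isStale (the base
// branch SHA has moved on since the PR was created).
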
// Return a list of all modified files in a PR
async function getPrFiles(prNo) {
  logger.debug({ prNo }, 'getPrFiles');
  if (!prNo) {
    return [];
  }
  const files = (await get(`repos/${config.repoName}/pulls/${prNo}/files`))
    .body;
  return files.map(f => f.filename);
}

async function updatePr(prNo, title, body) {
  logger.debug(`updatePr(${prNo}, ${title}, body)`);
  const patchBody = { title };
  if (body) {
    patchBody.body = body;
  }
  await get.patch(`repos/${config.repoName}/pulls/${prNo}`, {
    body: patchBody,
  });
}

async function mergePr(prNo, branchName) {
  logger.debug(`mergePr(${prNo}, ${branchName})`);
  // istanbul ignore if
  if (config.pushProtection) {
    logger.info(
      { branchName, prNo },
      'Branch protection: Attempting to merge PR when push protection is enabled'
    );
  }
  // istanbul ignore if
  if (config.prReviewsRequired) {
    logger.info(
      { branchName, prNo },
      'Branch protection: Attempting to merge PR when PR reviews are enabled'
    );
  }
  const url = `repos/${config.repoName}/pulls/${prNo}/merge`;
  const options = {
    body: {},
  };
  if (config.mergeMethod) {
    // This path is taken if we have auto-detected the allowed merge types from the repo
    options.body.merge_method = config.mergeMethod;
    try {
      logger.debug({ options, url }, `mergePr`);
      await get.put(url, options);
    } catch (err) {
      if (err.statusCode === 405) {
        // istanbul ignore next
        logger.info('GitHub blocking PR merge');
      } else {
        logger.warn({ err }, `Failed to ${options.body.merge_method} PR`);
      }
      return false;
    }
  } else {
    // We need to guess the merge method and try rebase -> squash -> merge
    options.body.merge_method = 'rebase';
    try {
      logger.debug({ options, url }, `mergePr`);
      await get.put(url, options);
    } catch (err1) {
      logger.debug({ err: err1 }, `Failed to ${options.body.merge_method} PR`);
      try {
        options.body.merge_method = 'squash';
        logger.debug({ options, url }, `mergePr`);
        await get.put(url, options);
      } catch (err2) {
        logger.debug(
          { err: err2 },
          `Failed to ${options.body.merge_method} PR`
        );
        try {
          options.body.merge_method = 'merge';
          logger.debug({ options, url }, `mergePr`);
          await get.put(url, options);
        } catch (err3) {
          logger.debug(
            { err: err3 },
            `Failed to ${options.body.merge_method} PR`
          );
          logger.info({ pr: prNo }, 'All merge attempts failed');
          return false;
        }
      }
    }
  }
  logger.info('Automerging succeeded');
  // Update base branch SHA
  delete config.baseCommitSHA;
  // Delete branch
  await deleteBranch(branchName);
  return true;
}

// Generic File operations

async function getFile(filePath, branchName) {
  logger.debug(`getFile(filePath=${filePath}, branchName=${branchName})`);
  if (!branchName || branchName === config.baseBranch) {
    if (!config.fileList.includes(filePath)) {
      return null;
    }
  }
  let res;
  try {
    res = await get(
      `repos/${config.repoName}/contents/${filePath}?ref=${branchName ||
        config.baseBranch}`
    );
  } catch (error) {
    if (error.statusCode === 404) {
      // If file not found, then return null JSON
      logger.warn({ filePath, branchName }, 'getFile 404');
      return null;
    } else if (
      error.statusCode === 403 &&
      error.message &&
      error.message.startsWith('This API returns blobs up to 1 MB in size')
    ) {
      logger.info('Large file');
      let treeUrl = `repos/${config.repoName}/git/trees/${config.baseBranch}`;
      const parentPath = path.dirname(filePath);
      if (parentPath !== '.') {
        treeUrl += `.${parentPath}`;
      }
      const baseName = path.basename(filePath);
      let fileSha;
      (await get(treeUrl)).body.tree.forEach(file => {
        if (file.path === baseName) {
          fileSha = file.sha;
        }
      });
      if (!fileSha) {
        logger.warn('Could not locate file blob');
        throw error;
      }
      res = await get(`repos/${config.repoName}/git/blobs/${fileSha}`);
    } else {
      // Propagate if it's any other error
      throw error;
    }
  }
  if (res.body.content) {
    return Buffer.from(res.body.content, 'base64').toString();
  }
  return null;
}

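// Note on the large-file fallback above: the contents API refuses blobs over
// 1 MB, so in that case the file's blob SHA is looked up from the parent
// tree and the content is fetched via the git blobs API instead; both paths
// return base64-encoded content, which is decoded before returning.
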
// Add a new commit, create branch if not existing
async function commitFilesToBranch(
  branchName,
  files,
  message,
  parentBranch = config.baseBranch,
  gitAuthor,
  gitPrivateKey
) {
  logger.debug(
    `commitFilesToBranch('${branchName}', files, message, '${parentBranch}')`
  );
  const parentCommit = await getBranchCommit(parentBranch);
  const parentTree = await getCommitTree(parentCommit);
  const fileBlobs = [];
  // Create blobs
  for (const file of files) {
    const blob = await createBlob(file.contents);
    fileBlobs.push({
      name: file.name,
      blob,
    });
  }
  // Create tree
  const tree = await createTree(parentTree, fileBlobs);
  const commit = await createCommit(
    parentCommit,
    tree,
    message,
    gitAuthor,
    gitPrivateKey
  );
  const isBranchExisting = await branchExists(branchName);
  if (isBranchExisting) {
    await updateBranch(branchName, commit);
  } else {
    await createBranch(branchName, commit);
  }
}

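// Example usage (illustrative branch/file names): each entry in `files`
// needs a `name` (path in the repo) and `contents` (string or Buffer):
//   await commitFilesToBranch(
//     'renovate/pin-dependencies',
//     [{ name: 'package.json', contents: '{}' }],
//     'Pin dependencies'
//   );
// The helper creates blobs, a tree and a commit via the low-level git API
// below, then creates or force-updates the branch ref to the new commit.
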
// Internal branch operations

// Creates a new branch with provided commit
async function createBranch(branchName, sha) {
  await get.post(`repos/${config.repoName}/git/refs`, {
    body: {
      ref: `refs/heads/${branchName}`,
      sha,
    },
  });
}

// Internal: Updates an existing branch to new commit sha
async function updateBranch(branchName, commit) {
  logger.debug(`Updating branch ${branchName} with commit ${commit}`);
  await get.patch(`repos/${config.repoName}/git/refs/heads/${branchName}`, {
    body: {
      sha: commit,
      force: true,
    },
  });
}

// Low-level commit operations

// Create a blob with fileContents and return sha
async function createBlob(fileContents) {
  logger.debug('Creating blob');
  return (await get.post(`repos/${config.repoName}/git/blobs`, {
    body: {
      encoding: 'base64',
      content: Buffer.from(fileContents).toString('base64'),
    },
  })).body.sha;
}

// Return the commit SHA for a branch
async function getBranchCommit(branchName) {
  const res = await get(
    `repos/${config.repoName}/git/refs/heads/${branchName}`
  );
  return res.body.object.sha;
}

async function getCommitDetails(commit) {
  logger.debug(`getCommitDetails(${commit})`);
  const results = await get(`repos/${config.repoName}/git/commits/${commit}`);
  return results.body;
}

// Return the tree SHA for a commit
async function getCommitTree(commit) {
  logger.debug(`getCommitTree(${commit})`);
  return (await get(`repos/${config.repoName}/git/commits/${commit}`)).body.tree
    .sha;
}

// Create a tree and return SHA
async function createTree(baseTree, files) {
  logger.debug(`createTree(${baseTree}, files)`);
  const body = {
    base_tree: baseTree,
    tree: [],
  };
  files.forEach(file => {
    body.tree.push({
      path: file.name,
      mode: '100644',
      type: 'blob',
      sha: file.blob,
    });
  });
  logger.trace({ body }, 'createTree body');
  return (await get.post(`repos/${config.repoName}/git/trees`, { body })).body
    .sha;
}

// Create a commit and return commit SHA
async function createCommit(parent, tree, message, gitAuthor, gitPrivateKey) {
  logger.debug(`createCommit(${parent}, ${tree}, ${message}, ${gitAuthor})`);
  const now = moment();
  let author;
  try {
    if (gitAuthor) {
      logger.debug({ gitAuthor }, 'Found gitAuthor');
      const { name, address: email } = addrs.parseOneAddress(gitAuthor);
      author = {
        name,
        email,
        date: now.format(),
      };
    }
  } catch (err) {
    logger.warn({ gitAuthor }, 'Error parsing gitAuthor');
  }
  const body = {
    message,
    parents: [parent],
    tree,
  };
  if (author) {
    body.author = author;
    // istanbul ignore if
    if (gitPrivateKey) {
      logger.debug('Found gitPrivateKey');
      const privKeyObj = openpgp.key.readArmored(gitPrivateKey).keys[0];
      const commit = `tree ${tree}\nparent ${parent}\nauthor ${author.name} <${
        author.email
      }> ${now.format('X ZZ')}\ncommitter ${author.name} <${
        author.email
      }> ${now.format('X ZZ')}\n\n${message}`;
      const { signature } = await openpgp.sign({
        data: openpgp.util.str2Uint8Array(commit),
        privateKeys: privKeyObj,
        detached: true,
        armor: true,
      });
      body.signature = signature;
    }
  }
  return (await get.post(`repos/${config.repoName}/git/commits`, { body })).body
    .sha;
}

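// Note on commit signing above: the detached PGP signature is computed over a
// reconstruction of the raw git commit object ("tree ...\nparent ...\nauthor
// ...\ncommitter ...\n\nmessage") and passed to GitHub as `signature` so the
// commit can be shown as verified. The openpgp calls assume the openpgp
// 2.x-style API (key.readArmored, util.str2Uint8Array) that this code is
// written against.
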
async function getCommitMessages() {
  logger.debug('getCommitMessages');
  try {
    const res = await get(`repos/${config.repoName}/commits`);
    return res.body.map(commit => commit.commit.message);
  } catch (err) {
    logger.error({ err }, `getCommitMessages error`);
    return [];
  }
}