// renovate/lib/platform/github/index.js — GitHub platform helpers
const get = require('./gh-got-wrapper');
const addrs = require('email-addresses');
const moment = require('moment');
const openpgp = require('openpgp');
let config = {};
// (scraped commit-history residue removed)
module.exports = {
// Initialization
getRepos,
initRepo,
getRepoForceRebase,
setBaseBranch,
// Search
getFileList,
// Branch
branchExists,
getAllRenovateBranches,
isBranchStale,
getBranchPr,
getBranchStatus,
getBranchStatusCheck,
setBranchStatus,
deleteBranch,
mergeBranch,
getBranchLastCommitTime,
2017-01-13 18:18:44 +00:00
// issue
2017-01-18 20:17:07 +00:00
addAssignees,
addReviewers,
// Comments
ensureComment,
ensureCommentRemoval,
// PR
findPr,
createPr,
getPr,
getPrFiles,
updatePr,
mergePr,
// file
2017-02-08 07:43:16 +00:00
commitFilesToBranch,
getFile,
// Commits
getCommitMessages,
};
// List the full names of all repositories the token grants access to.
// Sets GITHUB_TOKEN / GITHUB_ENDPOINT env vars when arguments are supplied.
async function getRepos(token, endpoint) {
  logger.debug('getRepos(token, endpoint)');
  if (token) {
    process.env.GITHUB_TOKEN = token;
  } else if (!process.env.GITHUB_TOKEN) {
    throw new Error('No token found for getRepos');
  }
  if (endpoint) {
    process.env.GITHUB_ENDPOINT = endpoint;
  }
  try {
    const { body } = await get('user/repos', { paginate: true });
    return body.map(repo => repo.full_name);
  } catch (err) /* istanbul ignore next */ {
    logger.error({ err }, `GitHub getRepos error`);
    throw err;
  }
}
// Initialize GitHub by getting base branch and SHA.
// Resets module-level state, fetches repo metadata, detects the allowed
// merge method, and warms the PR/file-list caches. Returns platform config.
async function initRepo(repoName, token, endpoint) {
  logger.debug(`initRepo("${repoName}")`);
  if (token) {
    process.env.GITHUB_TOKEN = token;
  } else if (!process.env.GITHUB_TOKEN) {
    throw new Error(`No token found for GitHub repository ${repoName}`);
  }
  if (endpoint) {
    process.env.GITHUB_ENDPOINT = endpoint;
  }
  // Clean module-level config and HTTP-layer state
  config = {};
  get.reset();
  config.repoName = repoName;
  const platformConfig = {};
  let res;
  try {
    res = await get(`repos/${repoName}`);
    logger.trace({ repositoryDetails: res.body }, 'Repository details');
    const repo = res.body;
    platformConfig.privateRepo = repo.private === true;
    platformConfig.isFork = repo.fork === true;
    config.owner = repo.owner.login;
    logger.debug(`${repoName} owner = ${config.owner}`);
    // Use default branch as PR target unless later overridden
    config.defaultBranch = repo.default_branch;
    config.baseBranch = config.defaultBranch;
    logger.debug(`${repoName} default branch = ${config.baseBranch}`);
    // Prefer rebase, then squash, then merge commits
    if (repo.allow_rebase_merge) {
      config.mergeMethod = 'rebase';
    } else if (repo.allow_squash_merge) {
      config.mergeMethod = 'squash';
    } else if (repo.allow_merge_commit) {
      config.mergeMethod = 'merge';
    } else {
      logger.info('Could not find allowed merge methods for repo');
    }
  } catch (err) /* istanbul ignore next */ {
    logger.info({ err, res }, 'Unknown GitHub initRepo error');
    throw err;
  }
  // Invalidate caches, then warm them in parallel
  delete config.prList;
  delete config.fileList;
  await Promise.all([getPrList(), getFileList()]);
  return platformConfig;
}
// Determine (once, then cached) whether branch protection forces PRs to be
// up-to-date before merging. Also records prReviewsRequired / pushProtection.
async function getRepoForceRebase() {
  if (config.repoForceRebase === undefined) {
    try {
      config.repoForceRebase = false;
      const protection = await getBranchProtection(config.baseBranch);
      logger.info('Base branch protection found');
      if (protection.required_pull_request_reviews) {
        logger.info('PR Reviews are required before merging');
        config.prReviewsRequired = true;
      }
      if (protection.required_status_checks) {
        logger.info('Status checks are required before merging');
        if (protection.required_status_checks.strict) {
          logger.info('PRs must be up-to-date before merging');
          config.repoForceRebase = true;
        }
      }
      if (protection.restrictions) {
        logger.info(
          {
            users: protection.restrictions.users,
            teams: protection.restrictions.teams,
          },
          'Pushing to branch is restricted'
        );
        config.pushProtection = true;
      }
    } catch (err) {
      // 404 = no protection configured; 403 = insufficient permissions
      if (err.statusCode === 404) {
        logger.info(
          `Repository has no branch protection for ${config.baseBranch}`
        );
      } else if (err.statusCode === 403) {
        logger.debug('Do not have permissions to detect branch protection');
      } else {
        throw err;
      }
    }
  }
  return config.repoForceRebase;
}
// Lazily fetch and cache the SHA of the base branch's head commit
async function getBaseCommitSHA() {
  if (config.baseCommitSHA) {
    return config.baseCommitSHA;
  }
  config.baseCommitSHA = await getBranchCommit(config.baseBranch);
  return config.baseCommitSHA;
}
// Fetch the branch-protection settings object for a branch
async function getBranchProtection(branchName) {
  const url = `repos/${config.repoName}/branches/${branchName}/protection`;
  return (await get(url)).body;
}
// Override the branch used as PR target and refresh dependent caches
async function setBaseBranch(branchName) {
  if (!branchName) {
    return;
  }
  logger.debug(`Setting baseBranch to ${branchName}`);
  config.baseBranch = branchName;
  delete config.baseCommitSHA;
  delete config.fileList;
  await getFileList(branchName);
}
// Search
// Get (and cache) the full list of file paths on a branch
async function getFileList(branchName = config.baseBranch) {
  if (config.fileList) {
    return config.fileList;
  }
  try {
    const treeUrl = `repos/${config.repoName}/git/trees/${branchName}?recursive=true`;
    const res = await get(treeUrl);
    // GitHub truncates very large trees; warn so missing files are explicable
    if (res.body.truncated) {
      logger.warn(
        { repository: config.repoName },
        'repository tree is truncated'
      );
    }
    // Regular files only: skip directories and symlinks (mode 120000)
    config.fileList = res.body.tree
      .filter(item => item.type === 'blob' && item.mode !== '120000')
      .map(item => item.path)
      .sort();
  } catch (err) /* istanbul ignore next */ {
    // 409 means the repository has no commits yet
    if (err.statusCode === 409) {
      logger.debug('Repository is not initiated');
      throw new Error('uninitiated');
    }
    logger.info(
      { repository: config.repoName },
      'Error retrieving git tree - no files detected'
    );
    config.fileList = [];
  }
  return config.fileList;
}
// Branch
// Returns true if branch exists, otherwise false
async function branchExists(branchName) {
  logger.debug(`branchExists(${branchName})`);
  const res = await get(`repos/${config.repoName}/branches?per_page=100`, {
    paginate: true,
  });
  const branchNames = res.body.map(branch => branch.name);
  return branchNames.includes(branchName);
}
// Return the names of all branches whose name starts with branchPrefix.
// Uses filter/startsWith instead of the previous reduce + indexOf === 0.
async function getAllRenovateBranches(branchPrefix) {
  logger.trace('getAllRenovateBranches');
  const allBranches = (await get(`repos/${config.repoName}/git/refs/heads`))
    .body;
  const refPrefix = `refs/heads/${branchPrefix}`;
  return allBranches
    .filter(branch => branch.ref.startsWith(refPrefix))
    .map(branch => branch.ref.substring('refs/heads/'.length));
}
// Returns true when the branch is no longer based on the current base branch
// head (i.e. its first parent commit differs from the base branch SHA).
async function isBranchStale(branchName) {
  // Check if branch's parent SHA = master SHA
  logger.debug(`isBranchStale(${branchName})`);
  const branchCommit = await getBranchCommit(branchName);
  logger.debug(`branchCommit=${branchCommit}`);
  const commitDetails = await getCommitDetails(branchCommit);
  logger.trace({ commitDetails }, `commitDetails`);
  // First parent of the branch head is what it was rebased/branched from
  const parentSha = commitDetails.parents[0].sha;
  logger.debug(`parentSha=${parentSha}`);
  const baseCommitSHA = await getBaseCommitSHA();
  logger.debug(`baseCommitSHA=${baseCommitSHA}`);
  // Return true if the SHAs don't match
  return parentSha !== baseCommitSHA;
}
// Returns the Pull Request for a branch. Null if not exists.
async function getBranchPr(branchName) {
  logger.debug(`getBranchPr(${branchName})`);
  const existingPr = await findPr(branchName, null, 'open');
  if (!existingPr) {
    return null;
  }
  return getPr(existingPr.number);
}
// Returns the combined status for a branch.
// requiredStatusChecks: null disables checks entirely; a non-empty array is
// unsupported and reported as 'failed'.
async function getBranchStatus(branchName, requiredStatusChecks) {
  logger.debug(`getBranchStatus(${branchName})`);
  if (!requiredStatusChecks) {
    // null means disable status checks, so it always succeeds
    logger.debug('Status checks disabled = returning "success"');
    return 'success';
  }
  if (requiredStatusChecks.length) {
    // This is Unsupported
    logger.warn({ requiredStatusChecks }, `Unsupported requiredStatusChecks`);
    return 'failed';
  }
  const gotString = `repos/${config.repoName}/commits/${branchName}/status`;
  const res = await get(gotString);
  logger.debug(
    // Fix: previously logged `res.body.stage`, a field that does not exist
    // in the combined-status response; the field is `state`.
    { state: res.body.state, statuses: res.body.statuses },
    'branch status check result'
  );
  return res.body.state;
}
// Return the state of the first status matching `context` on the branch
// head commit, or null when no such status exists
async function getBranchStatusCheck(branchName, context) {
  const branchCommit = await getBranchCommit(branchName);
  const url = `repos/${config.repoName}/commits/${branchCommit}/statuses`;
  const res = await get(url);
  const match = res.body.find(check => check.context === context);
  return match ? match.state : null;
}
// Create or update a commit status (context/state/description) on the
// branch's head commit; targetUrl is optional
async function setBranchStatus(
  branchName,
  context,
  description,
  state,
  targetUrl
) {
  const branchCommit = await getBranchCommit(branchName);
  const url = `repos/${config.repoName}/statuses/${branchCommit}`;
  const options = { state, description, context };
  if (targetUrl) {
    options.target_url = targetUrl;
  }
  await get.post(url, { body: options });
}
// Delete a branch by removing its git ref
async function deleteBranch(branchName) {
  const ref = `repos/${config.repoName}/git/refs/heads/${branchName}`;
  await get.delete(ref);
}
// Merge a branch into the base branch without a PR.
// mergeType 'branch-push' fast-forwards the base ref to the branch head;
// 'branch-merge-commit' creates a merge commit via the merges API.
// Throws '<mergeType> failed' on API error; deletes the branch on success.
async function mergeBranch(branchName, mergeType) {
  logger.debug(`mergeBranch(${branchName}, ${mergeType})`);
  // istanbul ignore if
  if (config.pushProtection) {
    logger.info(
      { branchName, mergeType },
      'Branch protection: Attempting to merge branch when push protection is enabled'
    );
  }
  if (mergeType === 'branch-push') {
    // Point the base branch ref directly at the branch's head commit
    const url = `repos/${config.repoName}/git/refs/heads/${config.baseBranch}`;
    const options = {
      body: {
        sha: await getBranchCommit(branchName),
      },
    };
    try {
      await get.patch(url, options);
    } catch (err) {
      logger.warn({ err }, `Error pushing branch merge for ${branchName}`);
      throw new Error('branch-push failed');
    }
  } else if (mergeType === 'branch-merge-commit') {
    // Ask GitHub to create a merge commit of branchName into the base branch
    const url = `repos/${config.repoName}/merges`;
    const options = {
      body: {
        base: config.baseBranch,
        head: branchName,
      },
    };
    try {
      await get.post(url, options);
    } catch (err) {
      logger.warn({ err }, `Error pushing branch merge for ${branchName}`);
      throw new Error('branch-merge-commit failed');
    }
  } else {
    throw new Error(`Unsupported branch merge type: ${mergeType}`);
  }
  // Update base commit
  delete config.baseCommitSHA;
  // Delete branch
  await deleteBranch(branchName);
}
// Committer timestamp of the most recent commit on a branch.
// Falls back to the current time when the lookup fails.
async function getBranchLastCommitTime(branchName) {
  try {
    const url = `repos/${config.repoName}/commits?sha=${branchName}`;
    const { body } = await get(url);
    return new Date(body[0].commit.committer.date);
  } catch (err) {
    logger.error({ err }, `getBranchLastCommitTime error`);
    return new Date();
  }
}
// Issue
// Assign the given usernames to an issue/PR
async function addAssignees(issueNo, assignees) {
  logger.debug(`Adding assignees ${assignees} to #${issueNo}`);
  const url = `repos/${config.repoName}/issues/${issueNo}/assignees`;
  await get.post(url, { body: { assignees } });
}
// Request reviews from the given usernames on a PR
async function addReviewers(issueNo, reviewers) {
  logger.debug(`Adding reviewers ${reviewers} to #${issueNo}`);
  const url = `repos/${config.repoName}/pulls/${issueNo}/requested_reviewers`;
  const res = await get.post(url, {
    headers: {
      // Preview media type required by the review-request API at the time
      accept: 'application/vnd.github.thor-preview+json',
    },
    body: {
      reviewers,
      team_reviewers: [],
    },
  });
  logger.debug({ body: res.body }, 'Added reviewers');
}
// Add labels to an issue/PR; no-op when labels is empty or not an array
async function addLabels(issueNo, labels) {
  logger.debug(`Adding labels ${labels} to #${issueNo}`);
  if (!Array.isArray(labels) || labels.length === 0) {
    return;
  }
  await get.post(`repos/${config.repoName}/issues/${issueNo}/labels`, {
    body: labels,
  });
}
// Fetch every comment on an issue/PR (paginated)
async function getComments(issueNo) {
  // GET /repos/:owner/:repo/issues/:number/comments
  logger.debug(`Getting comments for #${issueNo}`);
  const url = `repos/${
    config.repoName
  }/issues/${issueNo}/comments?per_page=100`;
  const res = await get(url, { paginate: true });
  logger.debug(`Found ${res.body.length} comments`);
  return res.body;
}
// Post a new comment on an issue/PR
async function addComment(issueNo, body) {
  // POST /repos/:owner/:repo/issues/:number/comments
  const url = `repos/${config.repoName}/issues/${issueNo}/comments`;
  await get.post(url, { body: { body } });
}
// Replace the body of an existing comment
async function editComment(commentId, body) {
  // PATCH /repos/:owner/:repo/issues/comments/:id
  const url = `repos/${config.repoName}/issues/comments/${commentId}`;
  await get.patch(url, { body: { body } });
}
// Remove a comment by id
async function deleteComment(commentId) {
  // DELETE /repos/:owner/:repo/issues/comments/:id
  const url = `repos/${config.repoName}/issues/comments/${commentId}`;
  await get.delete(url);
}
// Create or update the comment whose first line is `### <topic>`.
// Adds a new comment when none matches; edits when content changed;
// does nothing when the existing comment already matches.
async function ensureComment(issueNo, topic, content) {
  logger.debug(`Ensuring comment "${topic}" in #${issueNo}`);
  const body = `### ${topic}\n\n${content}`;
  const comments = await getComments(issueNo);
  let commentId;
  let commentNeedsUpdating;
  // Last matching comment wins, same as the original forEach scan
  comments.forEach(comment => {
    if (comment.body.startsWith(`### ${topic}\n\n`)) {
      commentId = comment.id;
      commentNeedsUpdating = comment.body !== body;
    }
  });
  if (!commentId) {
    await addComment(issueNo, body);
    logger.info({ repository: config.repoName, issueNo }, 'Added comment');
  } else if (commentNeedsUpdating) {
    await editComment(commentId, body);
    logger.info({ repository: config.repoName, issueNo }, 'Updated comment');
  } else {
    // Fix: message previously read "update-to-date"
    logger.debug('Comment is already up-to-date');
  }
}
// Delete the (last) comment whose first line is `### <topic>`, if any
async function ensureCommentRemoval(issueNo, topic) {
  logger.debug(`Ensuring comment "${topic}" in #${issueNo} is removed`);
  const comments = await getComments(issueNo);
  const prefix = `### ${topic}\n\n`;
  let commentId;
  for (const comment of comments) {
    if (comment.body.startsWith(prefix)) {
      commentId = comment.id;
    }
  }
  if (commentId) {
    await deleteComment(commentId);
  }
}
// Pull Request
// Fetch (and cache) all PRs in the repo in a normalised shape
async function getPrList() {
  logger.debug('getPrList()');
  if (!config.prList) {
    logger.debug('Retrieving PR list');
    const res = await get(
      `repos/${config.repoName}/pulls?per_page=100&state=all`,
      { paginate: true }
    );
    config.prList = res.body.map(pr => {
      // A closed PR with a merged_at timestamp was actually merged
      const state =
        pr.state === 'closed' && pr.merged_at && pr.merged_at.length
          ? 'merged'
          : pr.state;
      return {
        number: pr.number,
        branchName: pr.head.ref,
        title: pr.title,
        state,
        closed_at: pr.closed_at,
      };
    });
    logger.info({ length: config.prList.length }, 'Retrieved Pull Requests');
    logger.debug({ prList: config.prList });
  }
  return config.prList;
}
// Does `state` satisfy `desiredState`?
// 'all' matches anything; a leading '!' negates ("!open" = not open).
function matchesState(state, desiredState) {
  if (desiredState === 'all') {
    return true;
  }
  if (desiredState.startsWith('!')) {
    return state !== desiredState.slice(1);
  }
  return state === desiredState;
}
// Find the first cached PR matching branch, optional title, and state filter
async function findPr(branchName, prTitle, state = 'all') {
  logger.debug(`findPr(${branchName}, ${prTitle}, ${state})`);
  const prList = await getPrList();
  let pr;
  for (const p of prList) {
    const titleOk = !prTitle || p.title === prTitle;
    if (p.branchName === branchName && titleOk && matchesState(p.state, state)) {
      pr = p;
      break;
    }
  }
  if (pr) {
    logger.debug(`Found PR #${pr.number}`);
  }
  return pr;
}
// Creates PR and returns PR number
async function createPr(branchName, title, body, labels, useDefaultBranch) {
  // Target either the repo default branch or the configured base branch
  const base = useDefaultBranch ? config.defaultBranch : config.baseBranch;
  const res = await get.post(`repos/${config.repoName}/pulls`, {
    body: { title, head: branchName, base, body },
  });
  const pr = res.body;
  pr.displayNumber = `Pull Request #${pr.number}`;
  await addLabels(pr.number, labels);
  return pr;
}
// Gets details for a PR and annotates it with flags used elsewhere:
// displayNumber, isUnmergeable, canRebase (safe for us to force-push) and
// isStale (base branch has moved on since the PR was created).
async function getPr(prNo) {
  if (!prNo) {
    return null;
  }
  const pr = (await get(`repos/${config.repoName}/pulls/${prNo}`)).body;
  if (!pr) {
    return null;
  }
  // Harmonise PR values
  pr.displayNumber = `Pull Request #${pr.number}`;
  if (pr.state === 'open') {
    if (pr.mergeable_state === 'dirty') {
      logger.debug(`PR mergeable state is dirty`);
      pr.isUnmergeable = true;
    }
    if (pr.commits === 1) {
      // Only one commit was made - must have been renovate
      logger.debug('Only 1 commit in PR so rebase is possible');
      pr.canRebase = true;
    } else {
      // Check if only one author of all commits
      logger.debug('Checking all commits');
      const prCommits = (await get(
        `repos/${config.repoName}/pulls/${prNo}/commits`
      )).body;
      // Collect distinct authors; prefer committer login, then author login,
      // then the raw git author email
      const authors = prCommits.reduce((arr, commit) => {
        logger.trace({ commit }, `Checking commit`);
        let author = 'unknown';
        if (commit.committer && commit.committer.login) {
          author = commit.committer.login;
        } else if (commit.author) {
          author = commit.author.login;
        } else if (commit.commit && commit.commit.author) {
          author = commit.commit.author.email;
        } else {
          logger.debug('Could not determine commit author');
        }
        logger.debug(`Commit author is: ${author}`);
        const message = commit.commit ? commit.commit.message : '';
        const parents = commit.parents || [];
        let ignoreWebFlow = false;
        // "web-flow" merge commits (two parents, "Merge branch '...'" message)
        // are created by the GitHub UI and don't count as a distinct author
        if (
          author === 'web-flow' &&
          message.startsWith("Merge branch '") &&
          parents.length === 2
        ) {
          ignoreWebFlow = true;
        }
        // Ignore GitHub "web-flow"
        if (!ignoreWebFlow && arr.indexOf(author) === -1) {
          arr.push(author);
        }
        return arr;
      }, []);
      logger.debug(`Author list: ${authors}`);
      if (authors.length === 1) {
        pr.canRebase = true;
      }
    }
    const baseCommitSHA = await getBaseCommitSHA();
    if (!pr.base || pr.base.sha !== baseCommitSHA) {
      pr.isStale = true;
    }
  }
  return pr;
}
// Return a list of all modified files in a PR
async function getPrFiles(prNo) {
  logger.debug({ prNo }, 'getPrFiles');
  if (!prNo) {
    return [];
  }
  const res = await get(`repos/${config.repoName}/pulls/${prNo}/files`);
  return res.body.map(f => f.filename);
}
// Update a PR's title and, when provided, its body
async function updatePr(prNo, title, body) {
  logger.debug(`updatePr(${prNo}, ${title}, body)`);
  const patchBody = { title };
  if (body) {
    patchBody.body = body;
  }
  const url = `repos/${config.repoName}/pulls/${prNo}`;
  await get.patch(url, { body: patchBody });
}
// Merge a PR and delete its branch. Returns true on success, false when the
// merge is rejected. Uses the auto-detected merge method when known,
// otherwise probes rebase -> squash -> merge in that order.
// (The previous comment claimed "squash -> rebase -> merge", contradicting
// the code; the actual order has always been rebase first.)
async function mergePr(prNo, branchName) {
  logger.debug(`mergePr(${prNo}, ${branchName})`);
  // istanbul ignore if
  if (config.pushProtection) {
    logger.info(
      { branchName, prNo },
      'Branch protection: Attempting to merge PR when push protection is enabled'
    );
  }
  // istanbul ignore if
  if (config.prReviewsRequired) {
    logger.info(
      { branchName, prNo },
      'Branch protection: Attempting to merge PR when PR reviews are enabled'
    );
  }
  const url = `repos/${config.repoName}/pulls/${prNo}/merge`;
  const options = {
    body: {},
  };
  if (config.mergeMethod) {
    // This path is taken if we have auto-detected the allowed merge types from the repo
    options.body.merge_method = config.mergeMethod;
    try {
      logger.debug({ options, url }, `mergePr`);
      await get.put(url, options);
    } catch (err) {
      if (err.statusCode === 405) {
        // istanbul ignore next
        logger.info('GitHub blocking PR merge');
      } else {
        logger.warn({ err }, `Failed to ${options.body.merge_method} PR`);
      }
      return false;
    }
  } else {
    // We need to guess the merge method and try rebase -> squash -> merge
    let merged = false;
    for (const mergeMethod of ['rebase', 'squash', 'merge']) {
      options.body.merge_method = mergeMethod;
      try {
        logger.debug({ options, url }, `mergePr`);
        await get.put(url, options);
        merged = true;
        break;
      } catch (err) {
        logger.debug({ err }, `Failed to ${options.body.merge_method} PR`);
      }
    }
    if (!merged) {
      logger.info({ pr: prNo }, 'All merge attempts failed');
      return false;
    }
  }
  logger.info('Automerging succeeded');
  // Update base branch SHA
  delete config.baseCommitSHA;
  // Delete branch
  await deleteBranch(branchName);
  return true;
}
// Generic File operations
// Fetch a file's decoded text content from a branch; null when absent
async function getFile(filePath, branchName) {
  logger.debug(`getFile(filePath=${filePath}, branchName=${branchName})`);
  const effectiveBranch = branchName || config.baseBranch;
  // On the base branch we can answer "not present" from the cached file list
  if (effectiveBranch === config.baseBranch) {
    if (!config.fileList.includes(filePath)) {
      return null;
    }
  }
  try {
    const res = await get(
      `repos/${config.repoName}/contents/${filePath}?ref=${effectiveBranch}`
    );
    if (res.body.content) {
      return Buffer.from(res.body.content, 'base64').toString();
    }
    return null;
  } catch (error) {
    if (error.statusCode === 404) {
      // If file not found, then return null JSON
      logger.warn({ filePath, branchName }, 'getFile 404');
      return null;
    }
    // Propagate if it's any other error
    throw error;
  }
}
// Add a new commit, create branch if not existing.
// files: [{ name, contents }]; parentBranch defaults to the base branch.
// gitAuthor/gitPrivateKey are forwarded to createCommit for signed commits.
async function commitFilesToBranch(
  branchName,
  files,
  message,
  parentBranch = config.baseBranch,
  gitAuthor,
  gitPrivateKey
) {
  logger.debug(
    // Fix: closing quote/paren were transposed ("'${parentBranch})'")
    `commitFilesToBranch('${branchName}', files, message, '${parentBranch}')`
  );
  const parentCommit = await getBranchCommit(parentBranch);
  const parentTree = await getCommitTree(parentCommit);
  // Create a blob for each file's contents
  const fileBlobs = [];
  for (const file of files) {
    const blob = await createBlob(file.contents);
    fileBlobs.push({
      name: file.name,
      blob,
    });
  }
  // Create tree
  const tree = await createTree(parentTree, fileBlobs);
  const commit = await createCommit(
    parentCommit,
    tree,
    message,
    gitAuthor,
    gitPrivateKey
  );
  // Point the branch at the new commit, creating it if necessary
  if (await branchExists(branchName)) {
    await updateBranch(branchName, commit);
  } else {
    await createBranch(branchName, commit);
  }
}
// Internal branch operations
// Creates a new branch with provided commit
async function createBranch(branchName, sha) {
  const body = {
    ref: `refs/heads/${branchName}`,
    sha,
  };
  await get.post(`repos/${config.repoName}/git/refs`, { body });
}
// Internal: Updates an existing branch to new commit sha
async function updateBranch(branchName, commit) {
  logger.debug(`Updating branch ${branchName} with commit ${commit}`);
  const url = `repos/${config.repoName}/git/refs/heads/${branchName}`;
  // force:true because the branch may have diverged from the new commit
  await get.patch(url, {
    body: { sha: commit, force: true },
  });
}
// Low-level commit operations
// Create a blob with fileContents and return sha
async function createBlob(fileContents) {
  logger.debug('Creating blob');
  const body = {
    encoding: 'base64',
    content: Buffer.from(fileContents).toString('base64'),
  };
  const res = await get.post(`repos/${config.repoName}/git/blobs`, { body });
  return res.body.sha;
}
// Return the commit SHA for a branch
async function getBranchCommit(branchName) {
  const url = `repos/${config.repoName}/git/refs/heads/${branchName}`;
  return (await get(url)).body.object.sha;
}
// Fetch the full git commit object for a SHA
async function getCommitDetails(commit) {
  logger.debug(`getCommitDetails(${commit})`);
  const url = `repos/${config.repoName}/git/commits/${commit}`;
  return (await get(url)).body;
}
// Return the tree SHA for a commit
async function getCommitTree(commit) {
  logger.debug(`getCommitTree(${commit})`);
  const res = await get(`repos/${config.repoName}/git/commits/${commit}`);
  return res.body.tree.sha;
}
// Create a git tree on top of baseTree from [{ name, blob }] entries and
// return the new tree's SHA. Uses map() rather than forEach+push.
async function createTree(baseTree, files) {
  logger.debug(`createTree(${baseTree}, files)`);
  const body = {
    base_tree: baseTree,
    // Every file is committed as a regular blob (mode 100644)
    tree: files.map(file => ({
      path: file.name,
      mode: '100644',
      type: 'blob',
      sha: file.blob,
    })),
  };
  logger.trace({ body }, 'createTree body');
  return (await get.post(`repos/${config.repoName}/git/trees`, { body })).body
    .sha;
}
// Create a commit and return commit SHA.
// gitAuthor is an RFC 5322 address ("Name <email>"); when it parses, it is
// set as the commit author. When gitPrivateKey is also supplied, a detached
// PGP signature is attached so GitHub shows the commit as verified.
async function createCommit(parent, tree, message, gitAuthor, gitPrivateKey) {
  logger.debug(`createCommit(${parent}, ${tree}, ${message}, ${gitAuthor})`);
  const now = moment();
  let author;
  try {
    if (gitAuthor) {
      logger.debug({ gitAuthor }, 'Found gitAuthor');
      const { name, address: email } = addrs.parseOneAddress(gitAuthor);
      author = {
        name,
        email,
        date: now.format(),
      };
    }
  } catch (err) {
    // Unparseable gitAuthor: fall through and commit without an author
    logger.warn({ gitAuthor }, 'Error parsing gitAuthor');
  }
  const body = {
    message,
    parents: [parent],
    tree,
  };
  if (author) {
    body.author = author;
    // istanbul ignore if
    if (gitPrivateKey) {
      logger.debug('Found gitPrivateKey');
      const privKeyObj = openpgp.key.readArmored(gitPrivateKey).keys[0];
      // Reconstruct the raw commit object text to sign; presumably this must
      // match byte-for-byte what GitHub hashes for the signature to verify
      // (timestamps use unix-seconds + offset, 'X ZZ') — TODO confirm against
      // GitHub's commit-signing docs
      const commit = `tree ${tree}\nparent ${parent}\nauthor ${author.name} <${
        author.email
      }> ${now.format('X ZZ')}\ncommitter ${author.name} <${
        author.email
      }> ${now.format('X ZZ')}\n\n${message}`;
      const { signature } = await openpgp.sign({
        data: openpgp.util.str2Uint8Array(commit),
        privateKeys: privKeyObj,
        detached: true,
        armor: true,
      });
      body.signature = signature;
    }
  }
  return (await get.post(`repos/${config.repoName}/git/commits`, { body })).body
    .sha;
}
// Commit messages of the repo's most recent commits; [] on failure
async function getCommitMessages() {
  logger.debug('getCommitMessages');
  try {
    const { body } = await get(`repos/${config.repoName}/commits`);
    return body.map(item => item.commit.message);
  } catch (err) {
    logger.error({ err }, `getCommitMessages error`);
    return [];
  }
}