// renovate/lib/api/github.js

let logger = require('../logger');
const ghGot = require('gh-got');
const config = {};
module.exports = {
// GitHub App
getInstallations,
getInstallationToken,
getInstallationRepositories,
// Initialization
getRepos,
initRepo,
setBaseBranch,
// Search
findFilePaths,
// Branch
branchExists,
getAllRenovateBranches,
isBranchStale,
getBranchPr,
getBranchStatus,
deleteBranch,
mergeBranch,
// Issue
addAssignees,
addReviewers,
addLabels,
// PR
findPr,
checkForClosedPr,
createPr,
getPr,
getAllPrs,
updatePr,
mergePr,
// File
commitFilesToBranch,
getFile,
getFileContent,
getFileJson,
// Commits
getCommitMessages,
};
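// Usage sketch (comment-only and illustrative; the repo, branch and label
// names below are assumptions, not part of this module): a caller typically
// initialises the repo once and then uses the branch/PR helpers against the
// cached config, e.g.
//
//   const api = require('./github');
//   await api.initRepo('some-org/some-repo', process.env.GITHUB_TOKEN);
//   const pr = await api.createPr('renovate/some-dep-1.x', 'Update some-dep', 'PR body');
//   await api.addLabels(pr.number, ['renovate']);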
// Get all installations for a GitHub app
async function getInstallations(appToken) {
logger.debug('getInstallations(appToken)');
try {
const url = 'app/installations';
const options = {
headers: {
accept: 'application/vnd.github.machine-man-preview+json',
authorization: `Bearer ${appToken}`,
},
};
const res = await ghGot(url, options);
logger.debug(`Returning ${res.body.length} results`);
return res.body;
} catch (err) {
logger.error(`GitHub getInstallations error: ${JSON.stringify(err)}`);
throw err;
}
}
// Get the user's installation token
async function getInstallationToken(appToken, installationId) {
logger.debug(`getInstallationToken(appToken, ${installationId})`);
try {
const url = `installations/${installationId}/access_tokens`;
const options = {
headers: {
accept: 'application/vnd.github.machine-man-preview+json',
authorization: `Bearer ${appToken}`,
},
};
const res = await ghGot.post(url, options);
return res.body.token;
} catch (err) {
logger.error(`GitHub getInstallationToken error: ${JSON.stringify(err)}`);
throw err;
}
}
// Get all repositories for a user's installation
async function getInstallationRepositories(userToken) {
logger.debug('getInstallationRepositories(userToken)');
try {
const url = 'installation/repositories';
const options = {
headers: {
accept: 'application/vnd.github.machine-man-preview+json',
authorization: `token ${userToken}`,
},
};
const res = await ghGot(url, options);
logger.debug(
`Returning ${res.body.repositories.length} results from a total of ${res
.body.total_count}`
);
return res.body;
} catch (err) {
logger.error(
`GitHub getInstallationRepositories error: ${JSON.stringify(err)}`
);
throw err;
}
}
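// App-mode sketch (comment-only and illustrative; variable names are
// assumptions): the three GitHub App helpers above are designed to be
// chained, listing installations with the app JWT, exchanging each
// installation id for a short-lived token, then listing the repositories
// that token can access, e.g.
//
//   const installations = await getInstallations(appToken);
//   for (const installation of installations) {
//     const userToken = await getInstallationToken(appToken, installation.id);
//     const { repositories } = await getInstallationRepositories(userToken);
//   }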
// Get all repositories that the user has access to
async function getRepos(token, endpoint) {
logger.debug('getRepos(token, endpoint)');
if (token) {
process.env.GITHUB_TOKEN = token;
} else if (!process.env.GITHUB_TOKEN) {
throw new Error('No token found for getRepos');
}
if (endpoint) {
process.env.GITHUB_ENDPOINT = endpoint;
}
try {
const res = await ghGot('user/repos');
return res.body.map(repo => repo.full_name);
} catch (err) /* istanbul ignore next */ {
logger.error(`GitHub getRepos error: ${JSON.stringify(err)}`);
throw err;
}
}
// Initialize GitHub by getting base branch and SHA
async function initRepo(repoName, token, endpoint, repoLogger) {
logger = repoLogger || logger;
logger.debug(`initRepo(${JSON.stringify(repoName)})`);
if (token) {
process.env.GITHUB_TOKEN = token;
} else if (!process.env.GITHUB_TOKEN) {
throw new Error(`No token found for GitHub repository ${repoName}`);
}
if (endpoint) {
process.env.GITHUB_ENDPOINT = endpoint;
}
config.repoName = repoName;
try {
const res = await ghGot(`repos/${repoName}`);
config.owner = res.body.owner.login;
logger.debug(`${repoName} owner = ${config.owner}`);
// Use default branch as PR target unless later overridden
config.defaultBranch = res.body.default_branch;
config.baseBranch = config.defaultBranch;
logger.debug(`${repoName} default branch = ${config.baseBranch}`);
config.baseCommitSHA = await getBranchCommit(config.baseBranch);
if (res.body.allow_rebase_merge) {
config.mergeMethod = 'rebase';
} else if (res.body.allow_squash_merge) {
config.mergeMethod = 'squash';
} else if (res.body.allow_merge_commit) {
config.mergeMethod = 'merge';
} else {
logger.debug('Could not find allowed merge methods for repo');
}
} catch (err) /* istanbul ignore next */ {
logger.error(`GitHub init error: ${JSON.stringify(err)}`);
throw err;
}
return config;
}
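// Note on state (descriptive only): initRepo populates the module-level
// `config` object used by every other function here, setting repoName, owner,
// defaultBranch, baseBranch, baseCommitSHA and, when detectable, mergeMethod;
// setBaseBranch below can later retarget baseBranch and baseCommitSHA.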
async function setBaseBranch(branchName) {
if (branchName) {
logger.debug(`Setting baseBranch to ${branchName}`);
config.baseBranch = branchName;
config.baseCommitSHA = await getBranchCommit(config.baseBranch);
}
}
// Search
// Returns an array of file paths in current repo matching the fileName
async function findFilePaths(fileName) {
const res = await ghGot(
`search/code?q=repo:${config.repoName}+filename:${fileName}`
);
const exactMatches = res.body.items.filter(item => item.name === fileName);
// GitHub seems to return files in the root with a leading `/`
// which then breaks things later on down the line
return exactMatches.map(item => item.path.replace(/^\//, ''));
}
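// Example (comment-only and illustrative; the paths shown are assumptions):
// only exact filename matches are kept and any leading slash is stripped, e.g.
//
//   const paths = await findFilePaths('package.json');
//   // -> ['package.json', 'packages/foo/package.json']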
// Branch
// Returns true if branch exists, otherwise false
async function branchExists(branchName) {
logger.debug(`Checking if branch exists: ${branchName}`);
try {
const res = await ghGot(
`repos/${config.repoName}/git/refs/heads/${branchName}`
);
if (res.statusCode === 200) {
if (Array.isArray(res.body)) {
// This seems to happen if GitHub has partial matches, so we check ref
const matchedBranch = res.body.some(
branch => branch.ref === `refs/heads/${branchName}`
);
if (matchedBranch) {
logger.debug('Branch exists');
} else {
logger.debug('No matching branches');
}
return matchedBranch;
}
// This should happen if there's an exact match
return res.body.ref === `refs/heads/${branchName}`;
}
// This probably shouldn't happen
logger.debug("Branch doesn't exist");
return false;
} catch (error) {
if (error.statusCode === 404) {
// If branch not found, then return false
logger.debug("Branch doesn't exist");
return false;
}
// Propagate if it's any other error
throw error;
}
}
async function getAllRenovateBranches() {
logger.trace('getAllRenovateBranches');
const allBranches = (await ghGot(`repos/${config.repoName}/git/refs/heads`))
.body;
return allBranches.reduce((arr, branch) => {
if (branch.ref.indexOf('refs/heads/renovate/') === 0) {
arr.push(branch.ref.substring('refs/heads/'.length));
}
return arr;
}, []);
}
async function isBranchStale(branchName) {
// Check if branch's parent SHA = master SHA
logger.debug(`isBranchStale(${branchName})`);
const branchCommit = await getBranchCommit(branchName);
logger.debug(`branchCommit=${branchCommit}`);
const commitDetails = await getCommitDetails(branchCommit);
logger.debug(`commitDetails=${JSON.stringify(commitDetails)}`);
const parentSha = commitDetails.parents[0].sha;
logger.debug(`parentSha=${parentSha}`);
// Return true if the SHAs don't match
return parentSha !== config.baseCommitSHA;
}
// Returns the Pull Request for a branch. Null if not exists.
async function getBranchPr(branchName) {
logger.debug(`getBranchPr(${branchName})`);
const gotString =
`repos/${config.repoName}/pulls?` +
`state=open&base=${config.baseBranch}&head=${config.owner}:${branchName}`;
const res = await ghGot(gotString);
if (!res.body.length) {
return null;
}
const prNo = res.body[0].number;
return getPr(prNo);
}
// Returns the combined status for a branch.
async function getBranchStatus(branchName, requiredStatusChecks) {
logger.debug(`getBranchStatus(${branchName})`);
if (!requiredStatusChecks) {
// null means disable status checks, so it always succeeds
return 'success';
}
if (requiredStatusChecks.length) {
// This is unsupported
logger.warn(
`Unsupported requiredStatusChecks: ${JSON.stringify(
requiredStatusChecks
)}`
);
return 'failed';
}
const gotString = `repos/${config.repoName}/commits/${branchName}/status`;
logger.debug(gotString);
const res = await ghGot(gotString);
return res.body.state;
}
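// Note on requiredStatusChecks (derived from the logic above): a falsy value
// skips checking and reports 'success', a non-empty array is unsupported and
// reports 'failed', and an empty array falls through to GitHub's combined
// status for the branch, e.g.
//
//   await getBranchStatus('renovate/some-branch', null); // -> 'success'
//   await getBranchStatus('renovate/some-branch', []);   // -> combined state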
async function deleteBranch(branchName) {
await ghGot.delete(`repos/${config.repoName}/git/refs/heads/${branchName}`);
}
async function mergeBranch(branchName, mergeType) {
logger.debug(`mergeBranch(${branchName}, ${mergeType})`);
if (mergeType === 'branch-push') {
const url = `repos/${config.repoName}/git/refs/heads/${config.baseBranch}`;
const options = {
body: {
sha: await getBranchCommit(branchName),
},
};
try {
await ghGot.patch(url, options);
} catch (err) {
logger.error(`Error pushing branch merge for ${branchName}`);
logger.debug(JSON.stringify(err));
throw new Error('branch-push failed');
}
} else if (mergeType === 'branch-merge-commit') {
const url = `repos/${config.repoName}/merges`;
const options = {
body: {
base: config.baseBranch,
head: branchName,
},
};
try {
await ghGot.post(url, options);
} catch (err) {
logger.error(`Error creating merge commit for ${branchName}`);
logger.debug(JSON.stringify(err));
throw new Error('branch-merge-commit failed');
}
} else {
throw new Error(`Unsupported branch merge type: ${mergeType}`);
}
// Update base commit
config.baseCommitSHA = await getBranchCommit(config.baseBranch);
// Delete branch
await deleteBranch(branchName);
}
// Issue
async function addAssignees(issueNo, assignees) {
logger.debug(`Adding assignees ${assignees} to #${issueNo}`);
await ghGot.post(`repos/${config.repoName}/issues/${issueNo}/assignees`, {
body: {
assignees,
},
});
}
async function addReviewers(issueNo, reviewers) {
logger.debug(`Adding reviewers ${reviewers} to #${issueNo}`);
await ghGot.post(
`repos/${config.repoName}/pulls/${issueNo}/requested_reviewers`,
{
headers: {
accept: 'application/vnd.github.black-cat-preview+json',
},
body: {
reviewers,
},
}
);
}
async function addLabels(issueNo, labels) {
logger.debug(`Adding labels ${labels} to #${issueNo}`);
await ghGot.post(`repos/${config.repoName}/issues/${issueNo}/labels`, {
body: labels,
});
}
async function findPr(branchName, prTitle, state = 'all') {
logger.debug(`findPr(${branchName}, ${state})`);
const urlString = `repos/${config.repoName}/pulls?head=${config.owner}:${branchName}&state=${state}`;
logger.debug(`findPr urlString: ${urlString}`);
const res = await ghGot(urlString);
let pr = null;
res.body.forEach(result => {
if (!prTitle || result.title === prTitle) {
pr = result;
if (pr.state === 'closed') {
pr.isClosed = true;
}
pr.displayNumber = `Pull Request #${pr.number}`;
}
});
return pr;
}
// Pull Request
async function checkForClosedPr(branchName, prTitle) {
logger.debug(`checkForClosedPr(${branchName}, ${prTitle})`);
const url = `repos/${config.repoName}/pulls?state=closed&head=${config.owner}:${branchName}`;
const res = await ghGot(url);
// Return true if any of the titles match exactly
return res.body.some(
pr =>
pr.title === prTitle && pr.head.label === `${config.owner}:${branchName}`
);
}
// Creates PR and returns PR number
async function createPr(branchName, title, body, useDefaultBranch) {
const base = useDefaultBranch ? config.defaultBranch : config.baseBranch;
const pr = (await ghGot.post(`repos/${config.repoName}/pulls`, {
body: {
title,
head: branchName,
base,
body,
},
})).body;
pr.displayNumber = `Pull Request #${pr.number}`;
return pr;
}
// Gets details for a PR
async function getPr(prNo) {
if (!prNo) {
return null;
}
const pr = (await ghGot(`repos/${config.repoName}/pulls/${prNo}`)).body;
if (!pr) {
return null;
}
// Harmonise PR values
pr.displayNumber = `Pull Request #${pr.number}`;
if (pr.state === 'closed') {
pr.isClosed = true;
}
if (!pr.isClosed) {
if (pr.mergeable_state === 'dirty') {
logger.debug(`PR mergeable state is dirty`);
pr.isUnmergeable = true;
}
if (pr.commits === 1) {
// Only one commit was made - must have been renovate
logger.debug('Only 1 commit in PR so rebase is possible');
pr.canRebase = true;
} else {
// Check if only one author of all commits
logger.debug('Checking all commits');
const prCommits = (await ghGot(
`repos/${config.repoName}/pulls/${prNo}/commits`
)).body;
const authors = prCommits.reduce((arr, commit) => {
logger.trace(`Checking commit: ${JSON.stringify(commit)}`);
let author = 'unknown';
if (commit.author) {
author = commit.author.login;
} else if (commit.commit && commit.commit.author) {
author = commit.commit.author.email;
} else {
logger.debug('Could not determine commit author');
}
logger.debug(`Commit author is: ${author}`);
if (arr.indexOf(author) === -1) {
arr.push(author);
}
return arr;
}, []);
logger.debug(`Author list: ${authors}`);
if (authors.length === 1) {
pr.canRebase = true;
}
}
if (pr.base.sha !== config.baseCommitSHA) {
pr.isStale = true;
}
}
return pr;
}
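// Sketch of the harmonised PR shape consumed downstream (comment-only; the
// extra fields are exactly those assigned above, everything else is GitHub's
// raw pull request payload):
//
//   {
//     displayNumber: 'Pull Request #123',
//     isClosed: true,       // only set when state === 'closed'
//     isUnmergeable: true,  // only set when mergeable_state === 'dirty'
//     canRebase: true,      // single commit or single author
//     isStale: true,        // base SHA differs from config.baseCommitSHA
//     ...                   // remaining GitHub PR fields
//   }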
async function getAllPrs() {
const all = (await ghGot(`repos/${config.repoName}/pulls?state=open`)).body;
return all.map(pr => ({
number: pr.number,
branchName: pr.head.ref,
}));
}
async function updatePr(prNo, title, body) {
await ghGot.patch(`repos/${config.repoName}/pulls/${prNo}`, {
body: { title, body },
});
}
async function mergePr(pr) {
const url = `repos/${config.repoName}/pulls/${pr.number}/merge`;
const options = {
body: {},
};
if (config.mergeMethod) {
// This path is taken if we have auto-detected the allowed merge types from the repo
options.body.merge_method = config.mergeMethod;
try {
logger.debug(`mergePr: ${url}, ${JSON.stringify(options)}`);
await ghGot.put(url, options);
} catch (err) {
logger.error(
`Failed to ${options.body.merge_method} PR: ${JSON.stringify(err)}`
);
return;
}
} else {
// We need to guess the merge method and try rebase -> squash -> merge
options.body.merge_method = 'rebase';
try {
logger.debug(`mergePr: ${url}, ${JSON.stringify(options)}`);
await ghGot.put(url, options);
} catch (err1) {
logger.debug(
`Failed to ${options.body.merge_method} PR: ${JSON.stringify(err1)}`
);
try {
options.body.merge_method = 'squash';
logger.debug(`mergePr: ${url}, ${JSON.stringify(options)}`);
await ghGot.put(url, options);
} catch (err2) {
logger.debug(
`Failed to ${options.body.merge_method} PR: ${JSON.stringify(err2)}`
);
try {
options.body.merge_method = 'merge';
logger.debug(`mergePr: ${url}, ${JSON.stringify(options)}`);
await ghGot.put(url, options);
} catch (err3) {
logger.debug(
`Failed to ${options.body.merge_method} PR: ${JSON.stringify(err3)}`
);
logger.error('All merge attempts failed');
return;
}
}
}
}
// Update base branch SHA
config.baseCommitSHA = await getBranchCommit(config.baseBranch);
// Delete branch
await deleteBranch(pr.head.ref);
}
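// Usage sketch (comment-only and illustrative): mergePr expects the full PR
// object rather than just a number, so that the head branch can be deleted
// after a successful merge, e.g.
//
//   const pr = await getPr(123);
//   await mergePr(pr);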
// Generic File operations
async function getFile(filePath, branchName = config.baseBranch) {
const res = await ghGot(
`repos/${config.repoName}/contents/${filePath}?ref=${branchName}`
);
return res.body.content;
}
async function getFileContent(filePath, branchName = config.baseBranch) {
logger.trace(
`getFileContent(filePath=${filePath}, branchName=${branchName})`
);
try {
const file = await getFile(filePath, branchName);
return Buffer.from(file, 'base64').toString();
} catch (error) {
if (error.statusCode === 404) {
// If file not found, then return null
return null;
}
// Propagate if it's any other error
throw error;
}
}
async function getFileJson(filePath, branchName) {
logger.trace(`getFileJson(filePath=${filePath}, branchName=${branchName})`);
let fileJson = null;
try {
fileJson = JSON.parse(await getFileContent(filePath, branchName));
} catch (err) {
logger.error(`Failed to parse JSON for ${filePath}`);
}
return fileJson;
}
// Add a new commit, create branch if not existing
async function commitFilesToBranch(
branchName,
files,
message,
parentBranch = config.baseBranch
) {
logger.debug(
`commitFilesToBranch('${branchName}', files, message, '${parentBranch}')`
);
const parentCommit = await getBranchCommit(parentBranch);
const parentTree = await getCommitTree(parentCommit);
const fileBlobs = [];
// Create blobs
for (const file of files) {
const blob = await createBlob(file.contents);
fileBlobs.push({
name: file.name,
blob,
});
}
// Create tree
const tree = await createTree(parentTree, fileBlobs);
const commit = await createCommit(parentCommit, tree, message);
const isBranchExisting = await branchExists(branchName);
if (isBranchExisting) {
await updateBranch(branchName, commit);
} else {
await createBranch(branchName, commit);
}
}
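// Example files argument (comment-only and illustrative; the file name and
// variable are assumptions): each entry needs a repo-relative `name` and
// string `contents`, which the helpers below turn into blobs, a tree and a
// commit on the target branch, e.g.
//
//   await commitFilesToBranch(
//     'renovate/some-dep-1.x',
//     [{ name: 'package.json', contents: newPackageJsonString }],
//     'Update some-dep to 1.x'
//   );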
// Internal branch operations
// Creates a new branch with provided commit
async function createBranch(branchName, commit = config.baseCommitSHA) {
await ghGot.post(`repos/${config.repoName}/git/refs`, {
body: {
ref: `refs/heads/${branchName}`,
sha: commit,
},
});
}
// Internal: Updates an existing branch to new commit sha
async function updateBranch(branchName, commit) {
logger.debug(`Updating branch ${branchName} with commit ${commit}`);
await ghGot.patch(`repos/${config.repoName}/git/refs/heads/${branchName}`, {
body: {
sha: commit,
force: true,
},
});
}
// Low-level commit operations
// Create a blob with fileContents and return sha
async function createBlob(fileContents) {
logger.debug('Creating blob');
return (await ghGot.post(`repos/${config.repoName}/git/blobs`, {
body: {
encoding: 'base64',
content: Buffer.from(fileContents).toString('base64'),
},
})).body.sha;
}
// Return the commit SHA for a branch
async function getBranchCommit(branchName) {
return (await ghGot(`repos/${config.repoName}/git/refs/heads/${branchName}`))
.body.object.sha;
}
async function getCommitDetails(commit) {
logger.debug(`getCommitDetails(${commit})`);
const results = await ghGot(`repos/${config.repoName}/git/commits/${commit}`);
return results.body;
}
// Return the tree SHA for a commit
async function getCommitTree(commit) {
logger.debug(`getCommitTree(${commit})`);
return (await ghGot(`repos/${config.repoName}/git/commits/${commit}`)).body
.tree.sha;
}
// Create a tree and return SHA
async function createTree(baseTree, files) {
logger.debug(`createTree(${baseTree}, files)`);
const body = {
base_tree: baseTree,
tree: [],
};
files.forEach(file => {
body.tree.push({
path: file.name,
mode: '100644',
type: 'blob',
sha: file.blob,
});
});
logger.debug(body);
return (await ghGot.post(`repos/${config.repoName}/git/trees`, { body })).body
.sha;
}
// Create a commit and return commit SHA
async function createCommit(parent, tree, message) {
logger.debug(`createCommit(${parent}, ${tree}, ${message})`);
return (await ghGot.post(`repos/${config.repoName}/git/commits`, {
body: {
message,
parents: [parent],
tree,
},
})).body.sha;
}
async function getCommitMessages() {
logger.debug('getCommitMessages');
try {
const res = await ghGot(`repos/${config.repoName}/commits`);
return res.body.map(commit => commit.commit.message);
} catch (err) {
logger.error(`getCommitMessages error: ${JSON.stringify(err)}`);
return [];
}
}