feat(github): use git for all file operations (#3806)

With Renovate’s github platform code now using git for all file system operations, we need to tell Renovate which gitAuthor to use.

If you have already configured a gitAuthor in your bot config, you do not need to make any changes.

Otherwise, to keep the previous behavior, you should either:
(1) configure `gitAuthor` to match the bot’s account, or
(2) recreate your bot’s personal access token to include the “user:email” permission so that the bot can retrieve the email itself.
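For example, option (1) in a JavaScript-format bot config might look like the sketch below (illustrative only; the file name and author string are placeholders to be replaced with your bot account's details):

```js
// config.js - illustrative sketch; substitute your bot account's name and email
module.exports = {
  platform: 'github',
  // Must parse as a valid RFC5322 mailbox, i.e. "Some Name <some.email@domain.com>"
  gitAuthor: 'Renovate Bot <bot@renovateapp.com>',
};
```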

BREAKING CHANGE: GitHub bot admins should either configure gitAuthor in their config or generate a new token with “user:email” permissions.
Rhys Arkins 2019-05-24 16:34:52 +02:00 committed by GitHub
parent 5f213255d0
commit 2426a5239f
11 changed files with 194 additions and 1619 deletions

View file

@ -147,19 +147,9 @@ If you wish to override the base directory to be used (e.g. instead of `/tmp/ren
If you wish to override the cache location specifically then configure a value for `cacheDir` instead.
## gitFs
`gitFs` is the recommended way to perform file operations using Renovate. Using `gitFs` means Renovate does a shallow clone to read and subsequently write files for each repository, instead of using platform-specific APIs to read/write files. Platform APIs are still used for things like Issues and Pull Requests regardless.
`gitFs` is supported for all platforms, and is the only approach for Bitbucket Cloud, Bitbucket Server, and Azure DevOps. It's optional for GitHub and GitLab. In the case of GitLab, it is necessary to set `gitFs=ssh` because GitLab does not support write operations via git/https when using a Personal Access Token, so you need to make sure that Renovate has access to the SSH private key associated with its account.
If you wish for git data to be preserved between Renovate runs, then configure `preserveRepoData` to `true` in your bot config. Doing so means that Renovate needs to do only a `git fetch` each time rather than `git clone`. You can control where this data is stored using the `baseDir` config option mentioned above.
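As an illustration, the options above could be combined in a JavaScript-format bot config like the following (a sketch only; the `baseDir` path is a made-up example):

```js
// config.js - illustrative sketch; values are examples, not recommendations
module.exports = {
  gitFs: 'ssh', // 'ssh' is the required value for GitLab, per the note above
  preserveRepoData: true, // keep clone data between runs so only `git fetch` is needed
  baseDir: '/data/renovate', // hypothetical path; overrides the default base directory
};
```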
### Identification and Authorization
`gitFs` means Git is used, so commits need a username/email combination. If one is not set on the system where Renovate runs, you should configure one using the `gitAuthor` configuration option.
It's also possible to sign git commits, but for this you need to set up the GPG key and related git settings out of band. In short:
It's possible to sign git commits, but for this you need to set up the GPG key and related git settings out of band. In short:
- Make sure the private key is added via GPG
- Tell git about the private key (e.g. `git config --global user.signingkey AABBCCDDEEFF`)
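If Renovate itself should produce signed commits, the related bot config option is `gitPrivateKey`, which also appears in the platform code later in this commit; a hedged sketch, with a hypothetical environment variable supplying the armored key:

```js
// config.js - hypothetical fragment for signed commits
module.exports = {
  gitAuthor: 'Renovate Bot <bot@renovateapp.com>',
  // Armored GPG private key whose identity should match gitAuthor;
  // the key and git settings must still be set up out of band as described above.
  gitPrivateKey: process.env.GIT_PRIVATE_KEY, // hypothetical env var name
};
```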

View file

@ -94,6 +94,7 @@ async function extractSetupFile(content, packageFile, config) {
}
if (stderr) {
stderr = stderr.replace(/.*\n\s*import imp/, '').trim();
// istanbul ignore if
if (stderr.length) {
logger.warn({ stdout, stderr }, 'Error in read setup file');
}

View file

@ -4,7 +4,6 @@ const URL = require('url');
const get = require('./gh-got-wrapper');
const hostRules = require('../../util/host-rules');
const Storage = require('./storage');
const GitStorage = require('../git/storage');
const {
@ -84,17 +83,35 @@ async function initPlatform({ endpoint, token }) {
defaults.endpoint = endpoint.replace(/\/?$/, '/'); // always add a trailing slash
get.setEndpoint(defaults.endpoint);
} else {
logger.info('Using default github endpoint: ' + res.endpoint);
logger.info('Using default github endpoint: ' + defaults.endpoint);
}
res.endpoint = defaults.endpoint;
try {
res.renovateUsername = (await get(res.endpoint + 'user', {
const userData = (await get(res.endpoint + 'user', {
token,
})).body.login;
})).body;
res.renovateUsername = userData.login;
res.gitAuthor = userData.name;
} catch (err) {
logger.debug({ err }, 'Error authenticating with GitHub');
throw new Error('Init: Authentication failure');
}
try {
const userEmail = (await get(res.endpoint + 'user/emails', {
token,
})).body;
if (userEmail.length && userEmail[0].email) {
res.gitAuthor += ` <${userEmail[0].email}>`;
} else {
logger.debug('Cannot find an email address for Renovate user');
delete res.gitAuthor;
}
} catch (err) {
logger.debug(
'Cannot read user/emails endpoint on GitHub to retrieve gitAuthor'
);
delete res.gitAuthor;
}
logger.info('Authenticated as GitHub user: ' + res.renovateUsername);
return res;
}
@ -127,7 +144,6 @@ async function initRepo({
forkMode,
forkToken,
gitPrivateKey,
gitFs,
localDir,
includeForks,
renovateUsername,
@ -154,7 +170,7 @@ async function initRepo({
res = await get(`repos/${repository}`);
logger.trace({ repositoryDetails: res.body }, 'Repository details');
// istanbul ignore if
if (res.body.fork && gitFs && !includeForks) {
if (res.body.fork && !includeForks) {
try {
const renovateConfig = JSON.parse(
Buffer.from(
@ -229,8 +245,6 @@ async function initRepo({
config.prList = null;
config.openPrList = null;
config.closedPrList = null;
config.storage = new Storage();
await config.storage.initRepo(config);
if (forkMode) {
logger.info('Bot is in forkMode');
config.forkToken = forkToken;
@ -289,31 +303,25 @@ async function initRepo({
// Wait an arbitrary 30s to hopefully give GitHub enough time for forking to complete
await delay(30000);
}
await config.storage.initRepo(config);
}
// istanbul ignore if
if (gitFs) {
logger.debug('Enabling Git FS');
let { host } = URL.parse(defaults.endpoint);
if (host === 'api.github.com') {
host = null;
}
host = host || 'github.com';
const url = GitStorage.getUrl({
gitFs,
auth:
config.forkToken ||
(global.appMode ? `x-access-token:${opts.token}` : opts.token),
hostname: host,
repository: config.repository,
});
// gitFs
const parsedEndpoint = URL.parse(defaults.endpoint);
parsedEndpoint.auth =
config.forkToken || global.appMode
? /* istanbul ignore next */ `x-access-token:${opts.token}`
: opts.token;
parsedEndpoint.host = parsedEndpoint.host.replace(
'api.github.com',
'github.com'
);
parsedEndpoint.pathname = config.repository + '.git';
const url = URL.format(parsedEndpoint);
config.storage = new GitStorage();
await config.storage.initRepo({
...config,
url,
});
}
return platformConfig;
}
@ -363,11 +371,28 @@ async function getRepoForceRebase() {
return config.repoForceRebase;
}
// Return the commit SHA for a branch
async function getBranchCommit(branchName) {
try {
const res = await get(
`repos/${config.repository}/git/refs/heads/${branchName}`
);
return res.body.object.sha;
} catch (err) /* istanbul ignore next */ {
logger.debug({ err }, 'Error getting branch commit');
if (err.statusCode === 404) {
throw new Error('repository-changed');
}
if (err.statusCode === 409) {
throw new Error('empty');
}
throw err;
}
}
async function getBaseCommitSHA() {
if (!config.baseCommitSHA) {
config.baseCommitSHA = await config.storage.getBranchCommit(
config.baseBranch
);
config.baseCommitSHA = await getBranchCommit(config.baseBranch);
}
return config.baseCommitSHA;
}
@ -383,12 +408,12 @@ async function getBranchProtection(branchName) {
return res.body;
}
// istanbul ignore next
async function setBaseBranch(branchName = config.baseBranch) {
logger.debug(`Setting baseBranch to ${branchName}`);
config.baseBranch = branchName;
config.baseCommitSHA = null;
await config.storage.setBaseBranch(branchName);
await getFileList(branchName);
}
// istanbul ignore next
@ -398,34 +423,39 @@ function setBranchPrefix(branchPrefix) {
// Search
// Get full file list
// istanbul ignore next
function getFileList(branchName = config.baseBranch) {
return config.storage.getFileList(branchName);
}
// Branch
// Returns true if branch exists, otherwise false
// istanbul ignore next
function branchExists(branchName) {
return config.storage.branchExists(branchName);
}
// istanbul ignore next
function getAllRenovateBranches(branchPrefix) {
return config.storage.getAllRenovateBranches(branchPrefix);
}
// istanbul ignore next
function isBranchStale(branchName) {
return config.storage.isBranchStale(branchName);
}
// istanbul ignore next
function getFile(filePath, branchName) {
return config.storage.getFile(filePath, branchName);
}
// istanbul ignore next
function deleteBranch(branchName) {
return config.storage.deleteBranch(branchName);
}
// istanbul ignore next
function getBranchLastCommitTime(branchName) {
return config.storage.getBranchLastCommitTime(branchName);
}
@ -435,8 +465,8 @@ function getRepoStatus() {
return config.storage.getRepoStatus();
}
// istanbul ignore next
function mergeBranch(branchName) {
// istanbul ignore if
if (config.pushProtection) {
logger.info(
{ branch: branchName },
@ -446,6 +476,7 @@ function mergeBranch(branchName) {
return config.storage.mergeBranch(branchName);
}
// istanbul ignore next
function commitFilesToBranch(
branchName,
files,
@ -460,6 +491,7 @@ function commitFilesToBranch(
);
}
// istanbul ignore next
function getCommitMessages() {
return config.storage.getCommitMessages();
}
@ -912,7 +944,7 @@ async function getPrList() {
title: pr.title,
state:
pr.state === 'closed' && pr.merged_at && pr.merged_at.length
? 'merged'
? /* istanbul ignore next */ 'merged'
: pr.state,
createdAt: pr.created_at,
closed_at: pr.closed_at,

View file

@ -1,530 +0,0 @@
const moment = require('moment');
const openpgp = require('openpgp');
const path = require('path');
const get = require('./gh-got-wrapper');
class Storage {
constructor() {
// config
let config = {};
// cache
let branchFiles = {};
let branchList = null;
Object.assign(this, {
initRepo,
cleanRepo,
getRepoStatus: () => ({}),
branchExists,
commitFilesToBranch,
createBranch,
deleteBranch,
getAllRenovateBranches,
getBranchCommit,
getBranchLastCommitTime,
getCommitMessages,
getFile,
getFileList,
isBranchStale,
mergeBranch,
setBaseBranch,
setBranchPrefix,
});
function initRepo(args) {
cleanRepo();
config = { ...args };
}
function cleanRepo() {
branchFiles = {};
branchList = null;
}
async function getBranchList() {
if (!branchList) {
logger.debug('Retrieving branchList');
branchList = (await get(
`repos/${config.repository}/branches?per_page=100`,
{
paginate: true,
}
)).body.map(branch => branch.name);
}
return branchList;
}
// Returns true if branch exists, otherwise false
async function branchExists(branchName) {
const res = (await getBranchList()).includes(branchName);
logger.debug(`branchExists(${branchName})=${res}`);
return res;
}
function setBaseBranch(branchName) {
if (branchName) {
logger.debug(`Setting baseBranch to ${branchName}`);
config.baseBranch = branchName;
}
}
// istanbul ignore next
function setBranchPrefix() {
// Do nothing
}
// Get full file list
async function getFileList(branchName) {
const branch = branchName || config.baseBranch;
if (branchFiles[branch]) {
return branchFiles[branch];
}
try {
const res = await get(
`repos/${config.repository}/git/trees/${branch}?recursive=true`
);
if (res.body.truncated) {
logger.warn(
{ repository: config.repository },
'repository tree is truncated'
);
}
const fileList = res.body.tree
.filter(item => item.type === 'blob' && item.mode !== '120000')
.map(item => item.path)
.sort();
logger.debug(`Retrieved fileList with length ${fileList.length}`);
branchFiles[branch] = fileList;
return fileList;
} catch (err) /* istanbul ignore next */ {
if (err.statusCode === 409) {
logger.debug('Repository is not initiated');
throw new Error('uninitiated');
}
logger.info(
{ branchName, err, repository: config.repository },
'Error retrieving git tree - no files detected'
);
throw err;
}
}
async function getAllRenovateBranches(branchPrefix) {
logger.trace('getAllRenovateBranches');
const allBranches = await getBranchList();
if (branchPrefix.endsWith('/')) {
const branchPrefixPrefix = branchPrefix.slice(0, -1);
if (allBranches.includes(branchPrefixPrefix)) {
logger.warn(
`Pruning branch "${branchPrefixPrefix}" so that it does not block PRs`
);
await deleteBranch(branchPrefixPrefix);
}
}
return allBranches.filter(branchName =>
branchName.startsWith(branchPrefix)
);
}
async function isBranchStale(branchName) {
// Check if branch's parent SHA = master SHA
logger.debug(`isBranchStale(${branchName})`);
const branchCommit = await getBranchCommit(branchName);
logger.debug(`branchCommit=${branchCommit}`);
const commitDetails = await getCommitDetails(branchCommit);
logger.trace({ commitDetails }, `commitDetails`);
const parentSha = commitDetails.parents[0].sha;
logger.debug(`parentSha=${parentSha}`);
const baseCommitSHA = await getBranchCommit(config.baseBranch);
logger.debug(`baseCommitSHA=${baseCommitSHA}`);
// Return true if the SHAs don't match
return parentSha !== baseCommitSHA;
}
async function deleteBranch(branchName) {
delete branchFiles[branchName];
const options = config.forkToken
? /* istanbul ignore next */ { token: config.forkToken }
: undefined;
try {
await get.delete(
`repos/${config.repository}/git/refs/heads/${branchName}`,
options
);
} catch (err) /* istanbul ignore next */ {
if (err.message.startsWith('Reference does not exist')) {
logger.info(
{ branch: branchName },
'Branch to delete does not exist'
);
} else if (err.message.startsWith('Cannot delete protected branch')) {
logger.info({ branch: branchName }, 'Cannot delete protected branch');
} else {
logger.warn({ err, branch: branchName }, 'Error deleting branch');
}
}
}
async function mergeBranch(branchName) {
logger.debug(`mergeBranch(${branchName})`);
const url = `repos/${config.repository}/git/refs/heads/${
config.baseBranch
}`;
const options = {
body: {
sha: await getBranchCommit(branchName),
},
};
try {
await get.patch(url, options);
logger.debug({ branch: branchName }, 'Branch merged');
} catch (err) {
if (
err.message.startsWith('Required status check') ||
err.message.includes('required status checks are expected')
) {
logger.debug('Branch is not ready for merge: ' + err.message);
throw new Error('not ready');
}
logger.info({ err }, `Error pushing branch merge for ${branchName}`);
throw new Error('Branch automerge failed');
}
// Delete branch
await deleteBranch(branchName);
}
async function getBranchLastCommitTime(branchName) {
try {
const res = await get(
`repos/${config.repository}/commits?sha=${branchName}`
);
return new Date(res.body[0].commit.committer.date);
} catch (err) {
logger.error({ err }, `getBranchLastCommitTime error`);
return new Date();
}
}
// Generic File operations
async function getFile(filePath, branchName) {
logger.trace(`getFile(filePath=${filePath}, branchName=${branchName})`);
if (!(await getFileList(branchName)).includes(filePath)) {
return null;
}
let res;
try {
res = await get(
`repos/${config.repository}/contents/${encodeURI(
filePath
)}?ref=${branchName || config.baseBranch}`
);
} catch (error) {
if (error.statusCode === 404) {
// If file not found, then return null JSON
logger.info({ filePath, branch: branchName }, 'getFile 404');
return null;
}
if (
error.statusCode === 403 &&
error.message &&
error.message.startsWith('This API returns blobs up to 1 MB in size')
) {
logger.info('Large file');
// istanbul ignore if
if (branchName && branchName !== config.baseBranch) {
logger.info('Cannot retrieve large files from non-master branch');
return null;
}
// istanbul ignore if
if (path.dirname(filePath) !== '.') {
logger.info(
'Cannot retrieve large files from non-root directories'
);
return null;
}
const treeUrl = `repos/${config.repository}/git/trees/${
config.baseBranch
}`;
const baseName = path.basename(filePath);
let fileSha;
(await get(treeUrl)).body.tree.forEach(file => {
if (file.path === baseName) {
fileSha = file.sha;
}
});
if (!fileSha) {
logger.warn('Could not locate file blob');
throw error;
}
res = await get(`repos/${config.repository}/git/blobs/${fileSha}`);
} else {
// Propagate if it's any other error
throw error;
}
}
if (res && res.body) {
if (res.body.content) {
return Buffer.from(res.body.content, 'base64').toString();
}
// istanbul ignore next
return '';
}
return null;
}
// Add a new commit, create branch if not existing
async function commitFilesToBranch(
branchName,
files,
message,
parentBranch = config.baseBranch
) {
logger.debug(
`commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`
);
try {
delete branchFiles[branchName];
const parentCommit = await getBranchCommit(parentBranch);
const parentTree = await getCommitTree(parentCommit);
const fileBlobs = [];
// Create blobs
for (const file of files) {
const blob = await createBlob(file.contents);
fileBlobs.push({
name: file.name,
blob,
});
}
// Create tree
const tree = await createTree(parentTree, fileBlobs);
const commit = await createCommit(parentCommit, tree, message);
const isBranchExisting = await branchExists(branchName);
if (isBranchExisting) {
await updateBranch(branchName, commit);
logger.debug({ branch: branchName }, 'Branch updated');
return 'updated';
}
await createBranch(branchName, commit);
logger.debug({ branch: branchName }, 'Branch created');
// istanbul ignore if
if (branchList) {
branchList.push(branchName);
}
return 'created';
} catch (err) /* istanbul ignore next */ {
if (err.statusCode === 404) {
throw new Error('repository-changed');
}
throw err;
}
}
// Internal branch operations
// Creates a new branch with provided commit
async function createBranch(branchName, sha) {
logger.debug(`createBranch(${branchName})`);
const options = {
body: {
ref: `refs/heads/${branchName}`,
sha,
},
};
// istanbul ignore if
if (config.forkToken) {
options.token = config.forkToken;
}
try {
// istanbul ignore if
if (branchName.includes('/')) {
const [blockingBranch] = branchName.split('/');
if (await branchExists(blockingBranch)) {
logger.warn({ blockingBranch }, 'Deleting blocking branch');
await deleteBranch(blockingBranch);
}
}
logger.debug({ options, branch: branchName }, 'Creating branch');
await get.post(`repos/${config.repository}/git/refs`, options);
branchList.push(branchName);
logger.debug('Created branch');
} catch (err) /* istanbul ignore next */ {
const headers = err.response.req.getHeaders();
delete headers.token;
logger.warn(
{
err,
options,
},
'Error creating branch'
);
if (err.statusCode === 422) {
throw new Error('repository-changed');
}
throw err;
}
}
// Return the commit SHA for a branch
async function getBranchCommit(branchName) {
try {
const res = await get(
`repos/${config.repository}/git/refs/heads/${branchName}`
);
return res.body.object.sha;
} catch (err) /* istanbul ignore next */ {
logger.debug({ err }, 'Error getting branch commit');
if (err.statusCode === 404) {
throw new Error('repository-changed');
}
if (err.statusCode === 409) {
throw new Error('empty');
}
throw err;
}
}
async function getCommitMessages() {
logger.debug('getCommitMessages');
const res = await get(`repos/${config.repository}/commits`);
return res.body.map(commit => commit.commit.message);
}
// Internal: Updates an existing branch to new commit sha
async function updateBranch(branchName, commit) {
logger.debug(`Updating branch ${branchName} with commit ${commit}`);
const options = {
body: {
sha: commit,
force: true,
},
};
// istanbul ignore if
if (config.forkToken) {
options.token = config.forkToken;
}
try {
await get.patch(
`repos/${config.repository}/git/refs/heads/${branchName}`,
options
);
} catch (err) /* istanbul ignore next */ {
if (err.statusCode === 422) {
logger.info({ err }, 'Branch no longer exists - exiting');
throw new Error('repository-changed');
}
throw err;
}
}
// Low-level commit operations
// Create a blob with fileContents and return sha
async function createBlob(fileContents) {
logger.debug('Creating blob');
const options = {
body: {
encoding: 'base64',
content: Buffer.from(fileContents).toString('base64'),
},
};
// istanbul ignore if
if (config.forkToken) {
options.token = config.forkToken;
}
return (await get.post(`repos/${config.repository}/git/blobs`, options))
.body.sha;
}
// Return the tree SHA for a commit
async function getCommitTree(commit) {
logger.debug(`getCommitTree(${commit})`);
return (await get(`repos/${config.repository}/git/commits/${commit}`))
.body.tree.sha;
}
// Create a tree and return SHA
async function createTree(baseTree, files) {
logger.debug(`createTree(${baseTree}, files)`);
const body = {
base_tree: baseTree,
tree: [],
};
files.forEach(file => {
body.tree.push({
path: file.name,
mode: '100644',
type: 'blob',
sha: file.blob,
});
});
logger.trace({ body }, 'createTree body');
const options = { body };
// istanbul ignore if
if (config.forkToken) {
options.token = config.forkToken;
}
return (await get.post(`repos/${config.repository}/git/trees`, options))
.body.sha;
}
// Create a commit and return commit SHA
async function createCommit(parent, tree, message) {
logger.debug(`createCommit(${parent}, ${tree}, ${message})`);
const { gitPrivateKey } = config;
const now = moment();
let author;
if (global.gitAuthor) {
logger.trace('Setting gitAuthor');
author = {
name: global.gitAuthor.name,
email: global.gitAuthor.email,
date: now.format(),
};
}
const body = {
message,
parents: [parent],
tree,
};
if (author) {
body.author = author;
// istanbul ignore if
if (gitPrivateKey) {
logger.debug('Found gitPrivateKey');
const privKeyObj = openpgp.key.readArmored(gitPrivateKey).keys[0];
const commit = `tree ${tree}\nparent ${parent}\nauthor ${
author.name
} <${author.email}> ${now.format('X ZZ')}\ncommitter ${
author.name
} <${author.email}> ${now.format('X ZZ')}\n\n${message}`;
const { signature } = await openpgp.sign({
data: openpgp.util.str2Uint8Array(commit),
privateKeys: privKeyObj,
detached: true,
armor: true,
});
body.signature = signature;
}
}
const options = {
body,
};
// istanbul ignore if
if (config.forkToken) {
options.token = config.forkToken;
}
return (await get.post(`repos/${config.repository}/git/commits`, options))
.body.sha;
}
async function getCommitDetails(commit) {
logger.debug(`getCommitDetails(${commit})`);
const results = await get(
`repos/${config.repository}/git/commits/${commit}`
);
return results.body;
}
}
}
module.exports = Storage;

View file

@ -1,3 +1,4 @@
const addrs = require('email-addresses');
const hostRules = require('../util/host-rules');
/* eslint-disable global-require */
@ -26,6 +27,32 @@ async function initPlatform(config) {
}
const platformInfo = await global.platform.initPlatform(config);
const returnConfig = { ...config, ...platformInfo };
let gitAuthor;
if (config && config.gitAuthor) {
logger.info(`Using configured gitAuthor (${config.gitAuthor})`);
gitAuthor = config.gitAuthor;
} else if (!(platformInfo && platformInfo.gitAuthor)) {
logger.info('Using default gitAuthor: Renovate Bot <bot@renovateapp.com>');
gitAuthor = 'Renovate Bot <bot@renovateapp.com>';
} /* istanbul ignore next */ else {
logger.info('Using platform gitAuthor: ' + platformInfo.gitAuthor);
gitAuthor = platformInfo.gitAuthor;
}
let gitAuthorParsed;
try {
gitAuthorParsed = addrs.parseOneAddress(gitAuthor);
} catch (err) /* istanbul ignore next */ {
logger.debug({ gitAuthor, err }, 'Error parsing gitAuthor');
}
// istanbul ignore if
if (!gitAuthorParsed) {
throw new Error('Init: gitAuthor is not parsed as valid RFC5322 format');
}
global.gitAuthor = {
name: gitAuthorParsed.name,
email: gitAuthorParsed.address,
};
delete returnConfig.gitAuthor;
let token = config.token;
if (
config.platform.startsWith('bitbucket') &&

View file

@ -8,7 +8,6 @@ const repositoryWorker = require('../repository');
const cache = require('./cache');
const { appName } = require('../../config/app-strings');
const { autodiscoverRepositories } = require('./autodiscover');
const { setMeta } = require('./meta');
const { initPlatform } = require('../../platform');
const hostRules = require('../../util/host-rules');
@ -24,7 +23,6 @@ async function start() {
let config = await configParser.parseConfigs(process.env, process.argv);
config = await initPlatform(config);
config = await setDirectories(config);
setMeta(config);
config = await autodiscoverRepositories(config);
cache.init(config.cacheDir);
if (config.repositories.length === 0) {

View file

@ -1,34 +0,0 @@
const addrs = require('email-addresses');
module.exports = {
setMeta,
};
function setMeta(config) {
const { gitAuthor } = config;
if (gitAuthor) {
logger.debug('Using configured git author');
let gitAuthorParsed;
try {
gitAuthorParsed = addrs.parseOneAddress(gitAuthor);
} catch (err) /* istanbul ignore next */ {
logger.debug({ gitAuthor, err }, 'Error parsing gitAuthor');
}
// istanbul ignore if
if (!gitAuthorParsed) {
throw new Error(
'Configured gitAuthor is not parsed as valid RFC5322 format'
);
}
global.gitAuthor = {
name: gitAuthorParsed.name,
email: gitAuthorParsed.address,
};
} else {
logger.debug('Using default git author (Renovate Bot)');
global.gitAuthor = {
name: 'Renovate Bot',
email: 'bot@renovateapp.com',
};
}
}

View file

@ -32,46 +32,6 @@ Array [
]
`;
exports[`platform/github commitFilesToBranch(branchName, files, message, parentBranch) should add a commit to a new branch if the branch does not already exist 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/refs/heads/master",
],
Array [
"repos/some/repo/git/commits/1111",
],
Array [
"repos/some/repo/branches?per_page=100",
Object {
"paginate": true,
},
],
]
`;
exports[`platform/github commitFilesToBranch(branchName, files, message, parentBranch) should add a new commit to the branch 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/refs/heads/master",
],
Array [
"repos/some/repo/git/commits/1111",
],
Array [
"repos/some/repo/branches?per_page=100",
Object {
"paginate": true,
},
],
]
`;
exports[`platform/github createPr() should create and return a PR object 1`] = `
Object {
"branchName": "some-branch",
@ -103,7 +63,7 @@ Array [
},
],
Array [
"repos/some/repo/statuses/some-sha",
"repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e",
Object {
"body": Object {
"context": "renovate/verify",
@ -178,8 +138,6 @@ content",
]
`;
exports[`platform/github getBranchLastCommitTime should return a Date 1`] = `2011-04-14T16:00:49.000Z`;
exports[`platform/github getBranchPr(branchName) should return the PR object 1`] = `
Array [
Array [
@ -221,86 +179,6 @@ Object {
}
`;
exports[`platform/github getCommitMessages() returns commits messages 1`] = `
Array [
"foo",
"bar",
]
`;
exports[`platform/github getFile() should return large file via git API 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/trees/master?recursive=true",
],
Array [
"repos/some/repo/contents/package-lock.json?ref=master",
],
Array [
"repos/some/repo/git/trees/master",
],
Array [
"repos/some/repo/git/blobs/some-sha",
],
]
`;
exports[`platform/github getFile() should return large file via git API 2`] = `"{\\"hello\\":\\"workd\\"}"`;
exports[`platform/github getFile() should return null if GitHub returns a 404 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/trees/master?recursive=true",
],
Array [
"repos/some/repo/contents/package.json?ref=master",
],
]
`;
exports[`platform/github getFile() should return null if getFile returns nothing 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/trees/master?recursive=true",
],
Array [
"repos/some/repo/contents/package.json?ref=master",
],
]
`;
exports[`platform/github getFile() should return the encoded file content 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/trees/master?recursive=true",
],
Array [
"repos/some/repo/contents/package.json?ref=master",
],
]
`;
exports[`platform/github getFileList should return the files matching the fileName 1`] = `
Array [
"package.json",
"some-dir/package.json.some-thing-else",
"src/app/package.json",
"src/otherapp/package.json",
]
`;
exports[`platform/github getPr(prNo) should return PR from closed graphql result 1`] = `
Object {
"body": "dummy body",
@ -345,6 +223,7 @@ Object {
},
"displayNumber": "Pull Request #1",
"mergeable": true,
"merged_at": "sometime",
"number": 1,
"state": "closed",
}
@ -478,35 +357,6 @@ Array [
]
`;
exports[`platform/github getPrList() should return PRs 1`] = `
Array [
Object {
"branchName": "somebranch",
"closed_at": undefined,
"createdAt": undefined,
"number": 91,
"sha": undefined,
"sourceRepo": undefined,
"state": "merged",
"title": undefined,
},
]
`;
exports[`platform/github getPrList() should return PRs 2`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/pulls?per_page=100&state=all",
Object {
"paginate": true,
},
],
]
`;
exports[`platform/github getRepos should return an array of repos 1`] = `
Array [
Array [
@ -532,13 +382,28 @@ Object {
}
`;
exports[`platform/github initPlatform() should support default endpoint 1`] = `
exports[`platform/github initPlatform() should support default endpoint no email access 1`] = `
Object {
"endpoint": "https://api.github.com/",
"renovateUsername": "renovate-bot",
}
`;
exports[`platform/github initPlatform() should support default endpoint no email result 1`] = `
Object {
"endpoint": "https://api.github.com/",
"renovateUsername": "renovate-bot",
}
`;
exports[`platform/github initPlatform() should support default endpoint with email 1`] = `
Object {
"endpoint": "https://api.github.com/",
"gitAuthor": "undefined <user@domain.com>",
"renovateUsername": "renovate-bot",
}
`;
exports[`platform/github initRepo should forks when forkMode 1`] = `
Object {
"isFork": false,
@ -581,97 +446,6 @@ Object {
}
`;
exports[`platform/github mergeBranch(branchName) should perform a branch merge 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/refs/heads/thebranchname",
],
]
`;
exports[`platform/github mergeBranch(branchName) should perform a branch merge 2`] = `
Array [
Array [
"repos/some/repo/git/refs/heads/master",
Object {
"body": Object {
"sha": "1235",
},
},
],
]
`;
exports[`platform/github mergeBranch(branchName) should perform a branch merge 3`] = `Array []`;
exports[`platform/github mergeBranch(branchName) should perform a branch merge 4`] = `Array []`;
exports[`platform/github mergeBranch(branchName) should perform a branch merge 5`] = `
Array [
Array [
"repos/some/repo/git/refs/heads/thebranchname",
undefined,
],
]
`;
exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 1`] = `[Error: Branch automerge failed]`;
exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 2`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/refs/heads/thebranchname",
],
]
`;
exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 3`] = `
Array [
Array [
"repos/some/repo/git/refs/heads/master",
Object {
"body": Object {
"sha": "1235",
},
},
],
]
`;
exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 4`] = `Array []`;
exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 5`] = `Array []`;
exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 6`] = `Array []`;
exports[`platform/github setBaseBranch(branchName) sets the base branch 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/trees/some-branch?recursive=true",
],
]
`;
exports[`platform/github setBaseBranch(branchName) sets the default base branch 1`] = `
Array [
Array [
"repos/some/repo",
],
Array [
"repos/some/repo/git/trees/master?recursive=true",
],
]
`;
exports[`platform/github updatePr(prNo, title, body) should update the PR 1`] = `
Array [
Array [

View file

@ -4,6 +4,7 @@ describe('platform/github', () => {
let github;
let get;
let hostRules;
let GitStorage;
beforeEach(() => {
// reset module
jest.resetModules();
@ -13,6 +14,27 @@ describe('platform/github', () => {
get = require('../../../lib/platform/github/gh-got-wrapper');
github = require('../../../lib/platform/github');
hostRules = require('../../../lib/util/host-rules');
jest.mock('../../../lib/platform/git/storage');
GitStorage = require('../../../lib/platform/git/storage');
GitStorage.mockImplementation(() => ({
initRepo: jest.fn(),
cleanRepo: jest.fn(),
getFileList: jest.fn(),
branchExists: jest.fn(() => true),
isBranchStale: jest.fn(() => false),
setBaseBranch: jest.fn(),
getBranchLastCommitTime: jest.fn(),
getAllRenovateBranches: jest.fn(),
getCommitMessages: jest.fn(),
getFile: jest.fn(),
commitFilesToBranch: jest.fn(),
mergeBranch: jest.fn(),
deleteBranch: jest.fn(),
getRepoStatus: jest.fn(),
getBranchCommit: jest.fn(
() => '0d9c7726c3d628b7e28af234595cfd20febdbf8e'
),
}));
delete global.gitAuthor;
hostRules.find.mockReturnValue({
hostType: 'github',
@ -53,7 +75,7 @@ describe('platform/github', () => {
get.mockImplementationOnce(() => ({}));
await expect(github.initPlatform({ token: 'abc123' })).rejects.toThrow();
});
it('should support default endpoint', async () => {
it('should support default endpoint no email access', async () => {
get.mockImplementationOnce(() => ({
body: {
login: 'renovate-bot',
@ -61,6 +83,32 @@ describe('platform/github', () => {
}));
expect(await github.initPlatform({ token: 'abc123' })).toMatchSnapshot();
});
it('should support default endpoint no email result', async () => {
get.mockImplementationOnce(() => ({
body: {
login: 'renovate-bot',
},
}));
get.mockImplementationOnce(() => ({
body: [{}],
}));
expect(await github.initPlatform({ token: 'abc123' })).toMatchSnapshot();
});
it('should support default endpoint with email', async () => {
get.mockImplementationOnce(() => ({
body: {
login: 'renovate-bot',
},
}));
get.mockImplementationOnce(() => ({
body: [
{
email: 'user@domain.com',
},
],
}));
expect(await github.initPlatform({ token: 'abc123' })).toMatchSnapshot();
});
it('should support custom endpoint', async () => {
get.mockImplementationOnce(() => ({
body: {
@ -97,8 +145,15 @@ describe('platform/github', () => {
allow_merge_commit: true,
},
}));
if (args.length) {
return github.initRepo(...args);
}
return github.initRepo({
endpoint: 'https://github.com',
repository: 'some/repo',
token: 'token',
});
}
describe('initRepo', () => {
it('should rebase', async () => {
@ -361,236 +416,6 @@ describe('platform/github', () => {
});
});
});
describe('setBaseBranch(branchName)', () => {
it('sets the base branch', async () => {
await initRepo({
repository: 'some/repo',
});
get.mockImplementationOnce(() => ({
body: {
truncated: true,
tree: [],
},
}));
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1238',
},
},
}));
await github.setBaseBranch('some-branch');
expect(get.mock.calls).toMatchSnapshot();
});
it('sets the default base branch', async () => {
await initRepo({
repository: 'some/repo',
defaultBranch: 'some-branch',
});
get.mockImplementationOnce(() => ({
body: {
truncated: true,
tree: [],
},
}));
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1238',
},
},
}));
await github.setBaseBranch();
expect(get.mock.calls).toMatchSnapshot();
});
});
describe('getFileList', () => {
beforeEach(async () => {
await initRepo({
repository: 'some/repo',
});
});
it('throws if error', async () => {
get.mockImplementationOnce(() => {
throw new Error('some error');
});
await expect(github.getFileList('error-branch')).rejects.toThrow();
});
it('warns if truncated result', async () => {
get.mockImplementationOnce(() => ({
body: {
truncated: true,
tree: [],
},
}));
const files = await github.getFileList('truncated-branch');
expect(files).toHaveLength(0);
});
it('caches the result', async () => {
get.mockImplementationOnce(() => ({
body: {
truncated: true,
tree: [],
},
}));
let files = await github.getFileList('cached-branch');
expect(files).toHaveLength(0);
files = await github.getFileList('cached-branch');
expect(files).toHaveLength(0);
});
it('should return the files matching the fileName', async () => {
get.mockImplementationOnce(() => ({
body: {
tree: [
{ type: 'blob', path: 'symlinks/package.json', mode: '120000' },
{ type: 'blob', path: 'package.json' },
{
type: 'blob',
path: 'some-dir/package.json.some-thing-else',
},
{ type: 'blob', path: 'src/app/package.json' },
{ type: 'blob', path: 'src/otherapp/package.json' },
],
},
}));
const files = await github.getFileList('npm-branch');
expect(files).toMatchSnapshot();
});
it('uses default branch', async () => {
get.mockImplementationOnce(() => ({
body: {
truncated: true,
tree: [],
},
}));
expect(await github.getFileList()).toHaveLength(0);
});
});
describe('branchExists(branchName)', () => {
it('should return true if the branch exists (one result)', async () => {
await initRepo({
repository: 'some/repo',
});
get.mockImplementationOnce(() => ({
body: [
{
name: 'thebranchname',
},
],
}));
const exists = await github.branchExists('thebranchname');
expect(exists).toBe(true);
});
});
describe('getAllRenovateBranches()', () => {
it('should return all renovate branches', async () => {
await initRepo({
repository: 'some/repo',
});
get.mockImplementationOnce(() => ({
body: [
{
name: 'thebranchname',
},
{
name: 'renovate',
},
{
name: 'renovate/abc-1.x',
},
],
}));
const res = await github.getAllRenovateBranches('renovate/');
expect(res).toHaveLength(1);
});
});
describe('isBranchStale(branchName)', () => {
it('should return false if same SHA as master', async () => {
await initRepo({
repository: 'some/repo',
}); // getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
// getCommitDetails - same as master
get.mockImplementationOnce(() => ({
body: {
parents: [
{
sha: '1234',
},
],
},
}));
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1234',
},
},
}));
expect(await github.isBranchStale('thebranchname')).toBe(false);
});
it('should return true if SHA different from master', async () => {
await initRepo({
repository: 'some/repo',
}); // getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
// getCommitDetails - different
get.mockImplementationOnce(() => ({
body: {
parents: [
{
sha: '12345678',
},
],
},
}));
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1234',
},
},
}));
expect(await github.isBranchStale('thebranchname')).toBe(true);
});
});
describe('getPrList()', () => {
beforeEach(async () => {
await initRepo({
repository: 'some/repo',
});
});
it('should return PRs', async () => {
get.mockImplementationOnce(() => ({
body: [
{
number: 91,
head: { ref: 'somebranch', repo: {} },
state: 'closed',
merged_at: '12345',
},
],
}));
expect(await github.getPrList()).toMatchSnapshot();
expect(get.mock.calls).toMatchSnapshot();
});
});
describe('getBranchPr(branchName)', () => {
it('should return null if no PR exists', async () => {
await initRepo({
@ -767,14 +592,8 @@ describe('platform/github', () => {
it('returns state if found', async () => {
await initRepo({
repository: 'some/repo',
}); // getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
token: 'token',
});
get.mockImplementationOnce(() => ({
body: [
{
@ -791,21 +610,16 @@ describe('platform/github', () => {
},
],
}));
const res = await github.getBranchStatusCheck('somebranch', 'context-2');
const res = await github.getBranchStatusCheck(
'renovate/future_branch',
'context-2'
);
expect(res).toEqual('state-2');
});
it('returns null', async () => {
await initRepo({
repository: 'some/repo',
});
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.mockImplementationOnce(() => ({
body: [
{
@ -831,14 +645,6 @@ describe('platform/github', () => {
await initRepo({
repository: 'some/repo',
});
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.mockImplementationOnce(() => ({
body: [
{
@ -860,14 +666,6 @@ describe('platform/github', () => {
await initRepo({
repository: 'some/repo',
});
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.mockImplementationOnce(() => ({
body: [
{
@ -902,112 +700,6 @@ describe('platform/github', () => {
expect(get.post).toHaveBeenCalledTimes(1);
});
});
describe('mergeBranch(branchName)', () => {
it('should perform a branch merge', async () => {
await initRepo({
repository: 'some/repo',
}); // getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.patch.mockImplementationOnce();
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
// deleteBranch
get.delete.mockImplementationOnce();
await github.mergeBranch('thebranchname', 'branch');
expect(get.mock.calls).toMatchSnapshot();
expect(get.patch.mock.calls).toMatchSnapshot();
expect(get.post.mock.calls).toMatchSnapshot();
expect(get.put.mock.calls).toMatchSnapshot();
expect(get.delete.mock.calls).toMatchSnapshot();
});
it('should throw if branch merge throws', async () => {
await initRepo({
repository: 'some/repo',
}); // getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.patch.mockImplementationOnce(() => {
throw new Error('branch failed');
});
let e;
try {
await github.mergeBranch('thebranchname', 'branch');
} catch (err) {
e = err;
}
expect(e).toMatchSnapshot();
expect(get.mock.calls).toMatchSnapshot();
expect(get.patch.mock.calls).toMatchSnapshot();
expect(get.post.mock.calls).toMatchSnapshot();
expect(get.put.mock.calls).toMatchSnapshot();
expect(get.delete.mock.calls).toMatchSnapshot();
});
it('should throw not ready', async () => {
await initRepo({
repository: 'some/repo',
}); // getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.patch.mockImplementationOnce(() => {
throw new Error('3 of 3 required status checks are expected.');
});
await expect(
github.mergeBranch('thebranchname', 'branch')
).rejects.toThrow(Error('not ready'));
});
});
describe('getBranchLastCommitTime', () => {
it('should return a Date', async () => {
await initRepo({
repository: 'some/repo',
});
get.mockReturnValueOnce({
body: [
{
commit: {
committer: {
date: '2011-04-14T16:00:49Z',
},
},
},
],
});
const res = await github.getBranchLastCommitTime('some-branch');
expect(res).toMatchSnapshot();
});
it('handles error', async () => {
await initRepo({
repository: 'some/repo',
});
get.mockReturnValueOnce({
body: [],
});
const res = await github.getBranchLastCommitTime('some-branch');
expect(res).toBeDefined();
});
});
describe('findIssue()', () => {
it('returns null if no issue', async () => {
get.mockReturnValueOnce({
@ -1352,14 +1044,6 @@ describe('platform/github', () => {
number: 123,
},
}));
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1235',
},
},
}));
get.mockImplementationOnce(() => ({
body: [],
}));
@ -1449,7 +1133,13 @@ describe('platform/github', () => {
expect(pr).toBeNull();
});
[
{ number: 1, state: 'closed', base: { sha: '1234' }, mergeable: true },
{
number: 1,
state: 'closed',
base: { sha: '1234' },
mergeable: true,
merged_at: 'sometime',
},
{
number: 1,
state: 'open',
@ -1717,7 +1407,6 @@ describe('platform/github', () => {
};
expect(await github.mergePr(pr)).toBe(true);
expect(get.put).toHaveBeenCalledTimes(1);
expect(get.delete).toHaveBeenCalledTimes(1);
expect(get).toHaveBeenCalledTimes(1);
});
it('should handle merge error', async () => {
@ -1733,7 +1422,6 @@ describe('platform/github', () => {
});
expect(await github.mergePr(pr)).toBe(false);
expect(get.put).toHaveBeenCalledTimes(1);
expect(get.delete).toHaveBeenCalledTimes(0);
expect(get).toHaveBeenCalledTimes(1);
});
});
@ -1816,7 +1504,6 @@ describe('platform/github', () => {
};
expect(await github.mergePr(pr)).toBe(true);
expect(get.put).toHaveBeenCalledTimes(1);
expect(get.delete).toHaveBeenCalledTimes(1);
});
it('should try squash after rebase', async () => {
const pr = {
@ -1830,7 +1517,6 @@ describe('platform/github', () => {
});
await github.mergePr(pr);
expect(get.put).toHaveBeenCalledTimes(2);
expect(get.delete).toHaveBeenCalledTimes(1);
});
it('should try merge after squash', async () => {
const pr = {
@ -1847,7 +1533,6 @@ describe('platform/github', () => {
});
expect(await github.mergePr(pr)).toBe(true);
expect(get.put).toHaveBeenCalledTimes(3);
expect(get.delete).toHaveBeenCalledTimes(1);
});
it('should give up', async () => {
const pr = {
@ -1867,354 +1552,6 @@ describe('platform/github', () => {
});
expect(await github.mergePr(pr)).toBe(false);
expect(get.put).toHaveBeenCalledTimes(3);
expect(get.delete).toHaveBeenCalledTimes(0);
});
});
describe('getFile()', () => {
it('should return the encoded file content', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
get.mockImplementationOnce(() => ({
body: {
content: Buffer.from('hello world').toString('base64'),
},
}));
const content = await github.getFile('package.json');
expect(get.mock.calls).toMatchSnapshot();
expect(content).toBe('hello world');
});
it('should return null if not in file list', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
const content = await github.getFile('.npmrc');
expect(content).toBeNull();
});
it('should return null if GitHub returns a 404', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
get.mockImplementationOnce(() =>
Promise.reject({
statusCode: 404,
})
);
const content = await github.getFile('package.json');
expect(get.mock.calls).toMatchSnapshot();
expect(content).toBeNull();
});
it('should return large file via git API', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
get.mockImplementationOnce(() =>
Promise.reject({
statusCode: 403,
message: 'This API returns blobs up to 1 MB in size, OK?',
})
);
get.mockImplementationOnce(() => ({
body: {
tree: [
{
path: 'package-lock.json',
sha: 'some-sha',
},
],
},
}));
get.mockImplementationOnce(() => ({
body: {
content: Buffer.from('{"hello":"workd"}').toString('base64'),
},
}));
const content = await github.getFile('package-lock.json');
expect(get.mock.calls).toMatchSnapshot();
expect(content).toMatchSnapshot();
});
it('should throw if cannot find large file via git API', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
get.mockImplementationOnce(() =>
Promise.reject({
statusCode: 403,
message: 'This API returns blobs up to 1 MB in size, OK?',
})
);
get.mockImplementationOnce(() => ({
body: {
tree: [],
},
}));
await expect(github.getFile('package-lock.json')).rejects.toEqual({
statusCode: 403,
message: 'This API returns blobs up to 1 MB in size, OK?',
});
});
it('should return null if getFile returns nothing', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
get.mockImplementationOnce(() => ({}));
const content = await github.getFile('package.json');
expect(get.mock.calls).toMatchSnapshot();
expect(content).toBeNull();
});
it('should return propagate unknown errors', async () => {
await initRepo({ repository: 'some/repo', token: 'token' });
// getFileList
get.mockImplementationOnce(() => ({
body: {
tree: [
{
type: 'blob',
path: 'package.json',
},
{
type: 'blob',
path: 'package-lock.json',
},
],
},
}));
get.mockImplementationOnce(() => {
throw new Error('Something went wrong');
});
await expect(github.getFile('package.json')).rejects.toThrow(
Error('Something went wrong')
);
});
});
describe('commitFilesToBranch(branchName, files, message, parentBranch)', () => {
beforeEach(async () => {
global.gitAuthor = {
name: 'Renovate Bot',
email: 'bot@renovatebot.com',
};
await initRepo({
repository: 'some/repo',
});
// getBranchCommit
get.mockImplementationOnce(() => ({
body: {
object: {
sha: '1111',
},
},
}));
// getCommitTree
get.mockImplementationOnce(() => ({
body: {
tree: {
sha: '2222',
},
},
}));
// createBlob
get.post.mockImplementationOnce(() => ({
body: {
sha: '3333',
},
}));
// createTree
get.post.mockImplementationOnce(() => ({
body: {
sha: '4444',
},
}));
// createCommit
get.post.mockImplementationOnce(() => ({
body: {
sha: '5555',
},
}));
});
it('should add a new commit to the branch', async () => {
// branchExists
get.mockImplementationOnce(() => ({
body: [
{
name: 'master',
},
{
name: 'the-branch',
},
],
}));
const files = [
{
name: 'package.json',
contents: 'hello world',
},
];
await github.commitFilesToBranch(
'the-branch',
files,
'my commit message'
);
expect(get.mock.calls).toMatchSnapshot();
expect(get.post).toHaveBeenCalledTimes(3);
expect(get.patch).toHaveBeenCalledTimes(1);
});
it('should add a commit to a new branch if the branch does not already exist', async () => {
// branchExists
get.mockImplementationOnce(() => ({
body: [
{
name: 'master',
},
],
}));
const files = [
{
name: 'package.json',
contents: 'hello world',
},
];
await github.commitFilesToBranch(
'the-branch',
files,
'my other commit message'
);
expect(get.mock.calls).toMatchSnapshot();
expect(get.post).toHaveBeenCalledTimes(4);
expect(get.patch).toHaveBeenCalledTimes(0);
});
it('should parse valid gitAuthor', async () => {
// branchExists
get.mockImplementationOnce(() => ({
body: [
{
name: 'master',
},
],
}));
const files = [
{
name: 'package.json',
contents: 'hello world',
},
];
global.gitAuthor = {
name: 'Renovate Bot',
email: 'bot@renovatebot.com',
};
await github.commitFilesToBranch(
'the-branch',
files,
'my other commit message'
);
expect(get.post.mock.calls[2][1].body.author.name).toEqual(
'Renovate Bot'
);
expect(get.post.mock.calls[2][1].body.author.email).toEqual(
'bot@renovatebot.com'
);
});
});
describe('getCommitMessages()', () => {
it('returns commits messages', async () => {
await initRepo({
repository: 'some/repo',
gitAuthor: 'Renovate Bot <bot@renovatebot.com>',
});
get.mockReturnValueOnce({
body: [
{
commit: { message: 'foo' },
},
{
commit: { message: 'bar' },
},
],
});
const res = await github.getCommitMessages();
expect(res).toMatchSnapshot();
});
});
describe('getVulnerabilityAlerts()', () => {

View file

@ -1,25 +0,0 @@
describe('platform/github/storage', () => {
const GithubStorage = require('../../../lib/platform/github/storage');
const GitStorage = require('../../../lib/platform/git/storage');
function getAllPropertyNames(obj) {
let props = [];
let obj2 = obj;
while (obj2 != null) {
props = props.concat(Object.getOwnPropertyNames(obj2));
obj2 = Object.getPrototypeOf(obj2);
}
return props.filter(p => !p.startsWith('_'));
}
it('has same API for git storage', () => {
const githubMethods = getAllPropertyNames(new GithubStorage()).sort();
const gitMethods = getAllPropertyNames(new GitStorage()).sort();
expect(githubMethods).toMatchObject(gitMethods);
});
it('getRepoStatus exists', async () => {
expect((await new GithubStorage()).getRepoStatus()).toEqual({});
});
});

View file

@ -12,7 +12,12 @@ describe('platform', () => {
await expect(platform.initPlatform(config)).rejects.toThrow();
});
it('initializes', async () => {
const config = { platform: 'bitbucket', username: 'abc', password: '123' };
const config = {
platform: 'bitbucket',
gitAuthor: 'user@domain.com',
username: 'abc',
password: '123',
};
expect(await platform.initPlatform(config)).toMatchSnapshot();
});
it('has a list of supported methods for github', () => {