feat: rebase branch whenever versions need updating (#1018)

Rhys Arkins 2017-10-21 16:42:40 +02:00 committed by GitHub
parent 50295417d4
commit 2fa50b3771
8 changed files with 156 additions and 73 deletions

View file

@@ -69,9 +69,9 @@ async function processBranch(branchConfig) {
await config.api.ensureComment(pr.number, subject, content);
return 'already-existed';
}
config.parentBranch = await getParentBranch(config);
Object.assign(config, await getParentBranch(config));
logger.debug(`Using parentBranch: ${config.parentBranch}`);
config.updatedPackageFiles = await getUpdatedPackageFiles(config);
Object.assign(config, await getUpdatedPackageFiles(config));
if (config.updatedPackageFiles.length) {
logger.debug(
{ updatedPackageFiles: config.updatedPackageFiles },
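
The change above shows the pattern this commit applies throughout: helpers now return whole result objects that are merged into config with Object.assign, instead of a single value assigned to one key, so extra fields such as canRebase travel along automatically. A minimal sketch of that merge, using a hard-coded stand-in rather than a real getParentBranch() call:

// Sketch only: parentBranchResult is a hard-coded stand-in for the object
// getParentBranch() now returns, not actual worker output.
const parentBranchResult = { parentBranch: 'renovate/foo-1.x', canRebase: true };

const config = { branchName: 'renovate/foo-1.x' };
// Before: config.parentBranch = await getParentBranch(config);
// After: every key on the result object lands on config in one step.
Object.assign(config, parentBranchResult);

console.log(config.parentBranch); // 'renovate/foo-1.x' -> reuse the existing branch
console.log(config.canRebase); // true -> a later content conflict may still force a rebase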

View file

@@ -44,15 +44,35 @@ async function getUpdatedPackageFiles(config) {
config.logger
);
}
if (!newContent) {
if (config.parentBranch && config.canRebase) {
logger.info('Rebasing branch after error updating content');
return getUpdatedPackageFiles({
...config,
parentBranch: undefined,
});
}
throw new Error('Error updating branch content and cannot rebase');
}
if (newContent !== existingContent) {
if (config.parentBranch && config.canRebase) {
// This ensures it's always 1 commit from Renovate
logger.info('Need to update package file so will rebase first');
return getUpdatedPackageFiles({
...config,
parentBranch: undefined,
});
}
logger.debug('Updating packageFile content');
updatedPackageFiles[upgrade.packageFile] = newContent;
}
}
}
return Object.keys(updatedPackageFiles).map(packageFile => ({
return {
parentBranch: config.parentBranch, // Need to overwrite original config
updatedPackageFiles: Object.keys(updatedPackageFiles).map(packageFile => ({
name: packageFile,
contents: updatedPackageFiles[packageFile],
}));
})),
};
}
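
The recursion above is the heart of the rebase behaviour: whenever updating a package file on the existing branch fails (setNewValue returns null) or would require a further change on top of what is already committed, parentBranch is dropped and the whole update is redone against the base branch, so the Renovate branch always ends up as one fresh commit. A self-contained sketch of that flow, with a hypothetical fakeUpdate() standing in for the real package-file handling:

// Simplified sketch of the rebase-and-retry flow. fakeUpdate() is a stand-in
// that returns null to simulate a conflict on the existing branch.
async function fakeUpdate(parentBranch) {
  return parentBranch ? null : '{ "dependencies": { "foo": "1.1.0" } }';
}

async function getUpdatedFilesSketch(config) {
  const newContent = await fakeUpdate(config.parentBranch);
  if (newContent === null) {
    if (config.parentBranch && config.canRebase) {
      // Drop parentBranch and recurse: the update is redone from the base branch.
      return getUpdatedFilesSketch({ ...config, parentBranch: undefined });
    }
    throw new Error('Error updating branch content and cannot rebase');
  }
  return {
    parentBranch: config.parentBranch, // overwrites the caller's original value
    updatedPackageFiles: [{ name: 'package.json', contents: newContent }],
  };
}

getUpdatedFilesSketch({ parentBranch: 'renovate/foo-1.x', canRebase: true }).then(res =>
  console.log(res.parentBranch, res.updatedPackageFiles.length) // undefined 1
);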

View file

@@ -6,6 +6,7 @@ module.exports = {
function setNewValue(currentFileContent, depType, depName, newVersion, logger) {
logger.debug(`setNewValue: ${depType}.${depName} = ${newVersion}`);
try {
const parsedContents = JSON.parse(currentFileContent);
// Save the old version
const oldVersion = parsedContents[depType][depName];
@@ -20,7 +21,8 @@ function setNewValue(currentFileContent, depType, depName, newVersion, logger) {
const newString = `"${newVersion}"`;
let newFileContent = null;
// Skip ahead to depType section
let searchIndex = currentFileContent.indexOf(`"${depType}"`) + depType.length;
let searchIndex =
currentFileContent.indexOf(`"${depType}"`) + depType.length;
logger.debug(`Starting search at index ${searchIndex}`);
// Iterate through the rest of the file
for (; searchIndex < currentFileContent.length; searchIndex += 1) {
@@ -51,6 +53,10 @@ function setNewValue(currentFileContent, depType, depName, newVersion, logger) {
return currentFileContent;
}
return newFileContent;
} catch (err) {
logger.info({ err }, 'setNewValue error');
return null;
}
}
// Return true if the match string is found at index in content
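
With the try/catch above, setNewValue() now reports failure by returning null instead of throwing, and getUpdatedPackageFiles() treats that null as the cue to rebase and retry. A stand-in sketch of the contract only; unlike the real helper, which edits the raw text to preserve formatting, this simply re-serialises the JSON:

// setNewValueSketch() mimics the new error contract: null when the
// depType/depName lookup blows up, the updated content otherwise.
function setNewValueSketch(content, depType, depName, newVersion) {
  try {
    const parsed = JSON.parse(content);
    parsed[depType][depName] = newVersion; // throws if depType is missing
    return JSON.stringify(parsed, null, 2);
  } catch (err) {
    return null; // caller interprets null as "rebase and retry"
  }
}

const pkg = '{ "dependencies": { "angular-touch": "1.5.7" } }';
console.log(setNewValueSketch(pkg, 'dependencies', 'angular-touch', '1.5.8') !== null); // true
console.log(setNewValueSketch(pkg, 'blah', 'angular-touch-not', '1.5.8')); // null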

View file

@@ -17,7 +17,7 @@ async function getParentBranch(config) {
const branchExists = await api.branchExists(branchName);
if (!branchExists) {
logger.info(`Branch needs creating`);
return undefined;
return { parentBranch: undefined };
}
logger.info(`Branch already exists`);
@@ -34,11 +34,11 @@ async function getParentBranch(config) {
logger.info(`Branch is stale and needs rebasing`);
// We can rebase the branch only if no PR or PR can be rebased
if (!pr || pr.canRebase) {
return undefined;
return { parentBranch: undefined };
}
// TODO: Warn here so that it appears in PR body
logger.info('Cannot rebase branch');
return branchName;
return { parentBranch: branchName, canRebase: false };
}
}
@@ -53,12 +53,12 @@ async function getParentBranch(config) {
await config.api.deleteBranch(branchName);
}
// Setting parentBranch back to undefined means that we'll use the default branch
return undefined;
return { parentBranch: undefined };
}
// Don't do anything different, but warn
// TODO: Add warning to PR
logger.info(`Branch is not mergeable but can't be rebased`);
}
logger.debug(`Branch does not need rebasing`);
return branchName;
return { parentBranch: branchName, canRebase: true };
}
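
getParentBranch() now reports rebasability alongside the branch name: { parentBranch: undefined } when the branch is missing or can safely be rebased, { parentBranch, canRebase: false } when the branch is stale but its PR cannot be rebased, and { parentBranch, canRebase: true } otherwise. An illustrative sketch of those shapes and the merge the branch worker performs on them (branch name and values here are made up):

// Illustrative only: the three result shapes getParentBranch() can return.
const results = [
  { parentBranch: undefined }, // branch missing, or it can safely be rebased
  { parentBranch: 'renovate/foo-1.x', canRebase: false }, // stale but the PR cannot be rebased
  { parentBranch: 'renovate/foo-1.x', canRebase: true }, // reuse the existing branch as-is
];

for (const res of results) {
  const config = { branchName: 'renovate/foo-1.x' };
  Object.assign(config, res); // same merge as in the branch worker
  console.log(config.parentBranch, config.canRebase);
}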

View file

@@ -82,7 +82,9 @@ describe('workers/branch', () => {
expect(config.logger.error.mock.calls).toHaveLength(0);
});
it('returns if no branch exists', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: false,
updatedLockFiles: [],
@@ -92,7 +94,9 @@ describe('workers/branch', () => {
expect(commit.commitFilesToBranch.mock.calls).toHaveLength(1);
});
it('returns if branch automerged', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [{}],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: false,
updatedLockFiles: [{}],
@@ -105,7 +109,9 @@ describe('workers/branch', () => {
expect(prWorker.ensurePr.mock.calls).toHaveLength(0);
});
it('ensures PR and tries automerge', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [{}],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: false,
updatedLockFiles: [{}],
@@ -120,7 +126,9 @@ describe('workers/branch', () => {
expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(1);
});
it('ensures PR and adds lock file error comment', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [{}],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: false,
updatedLockFiles: [{}],
@@ -137,7 +145,9 @@ describe('workers/branch', () => {
expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
});
it('ensures PR and adds lock file error comment recreate closed', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [{}],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: false,
updatedLockFiles: [{}],
@@ -161,7 +171,9 @@ describe('workers/branch', () => {
await branchWorker.processBranch(config);
});
it('throws and swallows branch errors', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [{}],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: true,
updatedLockFiles: [{}],
@@ -169,7 +181,9 @@ describe('workers/branch', () => {
await branchWorker.processBranch(config);
});
it('swallows pr errors', async () => {
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
updatedPackageFiles: [{}],
});
lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
lockFileError: false,
updatedLockFiles: [{}],

View file

@@ -15,6 +15,7 @@ describe('workers/branch/package-files', () => {
...defaultConfig,
api: { getFileContent: jest.fn() },
logger,
parentBranch: 'some-branch',
};
packageJsonHelper.setNewValue = jest.fn();
dockerHelper.setNewValue = jest.fn();
@@ -23,22 +24,45 @@ describe('workers/branch/package-files', () => {
it('returns empty if lock file maintenance', async () => {
config.upgrades = [{ type: 'lockFileMaintenance' }];
const res = await getUpdatedPackageFiles(config);
expect(res).toHaveLength(0);
expect(res.updatedPackageFiles).toHaveLength(0);
});
it('recurses if setNewValue error', async () => {
config.parentBranch = 'some-branch';
config.canRebase = true;
config.upgrades = [{ packageFile: 'package.json' }];
packageJsonHelper.setNewValue.mockReturnValueOnce(null);
packageJsonHelper.setNewValue.mockReturnValueOnce('some content');
const res = await getUpdatedPackageFiles(config);
expect(res.updatedPackageFiles).toHaveLength(1);
});
it('errors if cannot rebase', async () => {
config.upgrades = [{ packageFile: 'package.json' }];
let e;
try {
await getUpdatedPackageFiles(config);
} catch (err) {
e = err;
}
expect(e).toBeDefined();
});
it('returns updated files', async () => {
config.parentBranch = 'some-branch';
config.canRebase = true;
config.upgrades = [
{ packageFile: 'package.json' },
{ packageFile: 'Dockerfile' },
{ packageFile: 'packages/foo/package.js' },
];
config.api.getFileContent.mockReturnValueOnce('old content 1');
config.api.getFileContent.mockReturnValueOnce('old content 1');
config.api.getFileContent.mockReturnValueOnce('old content 2');
config.api.getFileContent.mockReturnValueOnce('old content 3');
packageJsonHelper.setNewValue.mockReturnValueOnce('old content 1');
packageJsonHelper.setNewValue.mockReturnValueOnce('new content 1');
packageJsonHelper.setNewValue.mockReturnValueOnce('new content 1+');
dockerHelper.setNewValue.mockReturnValueOnce('new content 2');
packageJsHelper.setNewValue.mockReturnValueOnce('old content 3');
const res = await getUpdatedPackageFiles(config);
expect(res).toHaveLength(1);
expect(res.updatedPackageFiles).toHaveLength(2);
});
});
});

View file

@@ -57,5 +57,15 @@ describe('workers/branch/package-json', () => {
);
testContent.should.equal(input01Content);
});
it('returns null if throws error', () => {
const testContent = packageJson.setNewValue(
input01Content,
'blah',
'angular-touch-not',
'1.5.8',
logger
);
expect(testContent).toBe(null);
});
});
});

View file

@@ -36,31 +36,36 @@ describe('workers/branch/parent', () => {
});
it('returns undefined if branch does not exist', async () => {
config.api.branchExists.mockReturnValue(false);
expect(await getParentBranch(config)).toBe(undefined);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(undefined);
});
it('returns branchName if no PR', async () => {
config.api.getBranchPr.mockReturnValue(null);
expect(await getParentBranch(config)).toBe(config.branchName);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(config.branchName);
});
it('returns branchName if does not need rebasing', async () => {
config.api.getBranchPr.mockReturnValue({
isUnmergeable: false,
});
expect(await getParentBranch(config)).toBe(config.branchName);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(config.branchName);
});
it('returns branchName if unmergeable and cannot rebase', async () => {
config.api.getBranchPr.mockReturnValue({
isUnmergeable: true,
canRebase: false,
});
expect(await getParentBranch(config)).toBe(config.branchName);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(config.branchName);
});
it('returns undefined if unmergeable and can rebase', async () => {
config.api.getBranchPr.mockReturnValue({
isUnmergeable: true,
canRebase: true,
});
expect(await getParentBranch(config)).toBe(undefined);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(undefined);
});
it('returns undefined if unmergeable and can rebase (gitlab)', async () => {
config.isGitLab = true;
@@ -68,19 +73,22 @@ describe('workers/branch/parent', () => {
isUnmergeable: true,
canRebase: true,
});
expect(await getParentBranch(config)).toBe(undefined);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(undefined);
expect(config.api.deleteBranch.mock.calls.length).toBe(1);
});
it('returns branchName if automerge branch-push and not stale', async () => {
config.automerge = true;
config.automergeType = 'branch-push';
expect(await getParentBranch(config)).toBe(config.branchName);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(config.branchName);
});
it('returns undefined if automerge branch-push and stale', async () => {
config.automerge = true;
config.automergeType = 'branch-push';
config.api.isBranchStale.mockReturnValueOnce(true);
expect(await getParentBranch(config)).toBe(undefined);
const res = await getParentBranch(config);
expect(res.parentBranch).toBe(undefined);
});
it('returns branch if rebaseStalePrs enabled but cannot rebase', async () => {
config.rebaseStalePrs = true;
@@ -89,7 +97,8 @@ describe('workers/branch/parent', () => {
isUnmergeable: true,
canRebase: false,
});
expect(await getParentBranch(config)).not.toBe(undefined);
const res = await getParentBranch(config);
expect(res.parentBranch).not.toBe(undefined);
});
});
});