Mirror of https://github.com/renovatebot/renovate.git (synced 2025-01-11 14:36:25 +00:00)

feat: rebase branch whenever versions need updating (#1018)

Parent: 50295417d4
Commit: 2fa50b3771
8 changed files with 156 additions and 73 deletions
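In summary: `getParentBranch` and `getUpdatedPackageFiles` now return objects (`{ parentBranch, canRebase }` and `{ parentBranch, updatedPackageFiles }` respectively), and the branch worker rebases — by regenerating files from the base branch — whenever a package file actually needs updating and the PR permits it, so Renovate branches stay a single commit ahead of their base.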
@@ -69,9 +69,9 @@ async function processBranch(branchConfig) {
       await config.api.ensureComment(pr.number, subject, content);
       return 'already-existed';
     }
-    config.parentBranch = await getParentBranch(config);
+    Object.assign(config, await getParentBranch(config));
     logger.debug(`Using parentBranch: ${config.parentBranch}`);
-    config.updatedPackageFiles = await getUpdatedPackageFiles(config);
+    Object.assign(config, await getUpdatedPackageFiles(config));
     if (config.updatedPackageFiles.length) {
       logger.debug(
         { updatedPackageFiles: config.updatedPackageFiles },
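Note: the two changed lines above are the plumbing for the whole commit. `getParentBranch` and `getUpdatedPackageFiles` no longer resolve to a single value; they resolve to objects carrying several related fields (`parentBranch`, `canRebase`, `updatedPackageFiles`), so the worker merges them into `config` with `Object.assign`. A minimal sketch of the merge semantics (the config shape and branch name here are illustrative, not the full Renovate config):

// Illustrative sketch only: the helper resolves to an object whose
// keys are merged into config, instead of one bare value.
async function getParentBranch(config) {
  return { parentBranch: config.branchName, canRebase: true };
}

async function demo() {
  const config = { branchName: 'renovate/some-dep-1.x' };
  Object.assign(config, await getParentBranch(config));
  // Both fields are now available on config:
  console.log(config.parentBranch, config.canRebase);
}

demo();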
@@ -44,15 +44,35 @@ async function getUpdatedPackageFiles(config) {
           config.logger
         );
       }
+      if (!newContent) {
+        if (config.parentBranch && config.canRebase) {
+          logger.info('Rebasing branch after error updating content');
+          return getUpdatedPackageFiles({
+            ...config,
+            parentBranch: undefined,
+          });
+        }
+        throw new Error('Error updating branch content and cannot rebase');
+      }
       if (newContent !== existingContent) {
+        if (config.parentBranch && config.canRebase) {
+          // This ensure it's always 1 commit from Renovate
+          logger.info('Need to update package file so will rebase first');
+          return getUpdatedPackageFiles({
+            ...config,
+            parentBranch: undefined,
+          });
+        }
         logger.debug('Updating packageFile content');
         updatedPackageFiles[upgrade.packageFile] = newContent;
       }
     }
   }
-  return Object.keys(updatedPackageFiles).map(packageFile => ({
-    name: packageFile,
-    contents: updatedPackageFiles[packageFile],
-  }));
+  return {
+    parentBranch: config.parentBranch, // Need to overwrite original config
+    updatedPackageFiles: Object.keys(updatedPackageFiles).map(packageFile => ({
+      name: packageFile,
+      contents: updatedPackageFiles[packageFile],
+    })),
+  };
 }
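The two added guards above implement the behavior named in the commit title: if a package file needs changing (or updating its content fails) while an existing branch is being reused as `parentBranch`, and the PR allows it (`canRebase`), the function calls itself once more with `parentBranch: undefined`, which regenerates every file from the base branch — an effective rebase that keeps the branch at one Renovate commit. A minimal sketch of this rebase-and-retry pattern, with a hypothetical `tryUpdate` standing in for the real content-update logic:

// Sketch of the retry-without-parentBranch pattern; tryUpdate is a
// hypothetical stand-in for reading and updating a package file.
async function getUpdatedFiles(config, tryUpdate) {
  const newContent = await tryUpdate(config.parentBranch);
  if (!newContent) {
    if (config.parentBranch && config.canRebase) {
      // Start over against the base branch, i.e. rebase
      return getUpdatedFiles({ ...config, parentBranch: undefined }, tryUpdate);
    }
    throw new Error('Error updating branch content and cannot rebase');
  }
  return newContent;
}

// Fails against the stale branch, succeeds after the retry:
getUpdatedFiles(
  { parentBranch: 'renovate/some-dep-1.x', canRebase: true },
  async parentBranch => (parentBranch ? null : 'updated content')
).then(console.log); // prints 'updated content'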
@@ -6,51 +6,57 @@ module.exports = {
 
 function setNewValue(currentFileContent, depType, depName, newVersion, logger) {
   logger.debug(`setNewValue: ${depType}.${depName} = ${newVersion}`);
-  const parsedContents = JSON.parse(currentFileContent);
-  // Save the old version
-  const oldVersion = parsedContents[depType][depName];
-  if (oldVersion === newVersion) {
-    logger.debug('Version is already updated');
-    return currentFileContent;
-  }
-  // Update the file = this is what we want
-  parsedContents[depType][depName] = newVersion;
-  // Look for the old version number
-  const searchString = `"${oldVersion}"`;
-  const newString = `"${newVersion}"`;
-  let newFileContent = null;
-  // Skip ahead to depType section
-  let searchIndex = currentFileContent.indexOf(`"${depType}"`) + depType.length;
-  logger.debug(`Starting search at index ${searchIndex}`);
-  // Iterate through the rest of the file
-  for (; searchIndex < currentFileContent.length; searchIndex += 1) {
-    // First check if we have a hit for the old version
-    if (matchAt(currentFileContent, searchIndex, searchString)) {
-      logger.debug(`Found match at index ${searchIndex}`);
-      // Now test if the result matches
-      const testContent = replaceAt(
-        currentFileContent,
-        searchIndex,
-        searchString,
-        newString,
-        logger
-      );
-      // Compare the parsed JSON structure of old and new
-      if (_.isEqual(parsedContents, JSON.parse(testContent))) {
-        newFileContent = testContent;
-        break;
+  try {
+    const parsedContents = JSON.parse(currentFileContent);
+    // Save the old version
+    const oldVersion = parsedContents[depType][depName];
+    if (oldVersion === newVersion) {
+      logger.debug('Version is already updated');
+      return currentFileContent;
+    }
+    // Update the file = this is what we want
+    parsedContents[depType][depName] = newVersion;
+    // Look for the old version number
+    const searchString = `"${oldVersion}"`;
+    const newString = `"${newVersion}"`;
+    let newFileContent = null;
+    // Skip ahead to depType section
+    let searchIndex =
+      currentFileContent.indexOf(`"${depType}"`) + depType.length;
+    logger.debug(`Starting search at index ${searchIndex}`);
+    // Iterate through the rest of the file
+    for (; searchIndex < currentFileContent.length; searchIndex += 1) {
+      // First check if we have a hit for the old version
+      if (matchAt(currentFileContent, searchIndex, searchString)) {
+        logger.debug(`Found match at index ${searchIndex}`);
+        // Now test if the result matches
+        const testContent = replaceAt(
+          currentFileContent,
+          searchIndex,
+          searchString,
+          newString,
+          logger
+        );
+        // Compare the parsed JSON structure of old and new
+        if (_.isEqual(parsedContents, JSON.parse(testContent))) {
+          newFileContent = testContent;
+          break;
+        }
       }
     }
-  }
-  // istanbul ignore if
-  if (!newFileContent) {
-    logger.warn(
-      { currentFileContent, parsedContents, depType, depName, newVersion },
-      'setNewValue error'
-    );
-    return currentFileContent;
+    // istanbul ignore if
+    if (!newFileContent) {
+      logger.warn(
+        { currentFileContent, parsedContents, depType, depName, newVersion },
+        'setNewValue error'
+      );
+      return currentFileContent;
+    }
+    return newFileContent;
+  } catch (err) {
+    logger.info({ err }, 'setNewValue error');
+    return null;
   }
-  return newFileContent;
 }
 
 // Return true if the match string is found at index in content
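Wrapping the body of `setNewValue` in try/catch turns any failure (for example, a `depType` section that does not exist, so `parsedContents[depType][depName]` throws) into a `null` return instead of an exception; the branch worker above treats `null` content as the cue to rebase and retry. The `matchAt` and `replaceAt` helpers are defined later in the same file, outside this hunk; a plausible minimal sketch of them, with only the names and call signatures taken from the diff:

// Assumed implementations: only the names and signatures appear above.
function matchAt(content, index, match) {
  // True if `match` occurs in `content` starting at `index`
  return content.substring(index, index + match.length) === match;
}

function replaceAt(content, index, oldString, newString, logger) {
  logger.debug(`Replacing "${oldString}" with "${newString}" at ${index}`);
  return (
    content.substring(0, index) +
    newString +
    content.substring(index + oldString.length)
  );
}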
@@ -17,7 +17,7 @@ async function getParentBranch(config) {
   const branchExists = await api.branchExists(branchName);
   if (!branchExists) {
     logger.info(`Branch needs creating`);
-    return undefined;
+    return { parentBranch: undefined };
   }
   logger.info(`Branch already exists`);
 
@@ -34,11 +34,11 @@ async function getParentBranch(config) {
       logger.info(`Branch is stale and needs rebasing`);
       // We can rebase the branch only if no PR or PR can be rebased
       if (!pr || pr.canRebase) {
-        return undefined;
+        return { parentBranch: undefined };
       }
       // TODO: Warn here so that it appears in PR body
       logger.info('Cannot rebase branch');
-      return branchName;
+      return { parentBranch: branchName, canRebase: false };
     }
   }
 
@@ -53,12 +53,12 @@ async function getParentBranch(config) {
         await config.api.deleteBranch(branchName);
       }
       // Setting parentBranch back to undefined means that we'll use the default branch
-      return undefined;
+      return { parentBranch: undefined };
     }
     // Don't do anything different, but warn
     // TODO: Add warning to PR
     logger.info(`Branch is not mergeable but can't be rebased`);
   }
   logger.debug(`Branch does not need rebasing`);
-  return branchName;
+  return { parentBranch: branchName, canRebase: true };
 }
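Taken together, the three hunks above change every return path of `getParentBranch` from a bare value to an object, so callers get an explicit `canRebase` flag next to the branch name: `{ parentBranch: undefined }` when the branch must be (re)created from the base branch, `{ parentBranch: branchName, canRebase: false }` when the branch is reusable but must not be rebased, and `{ parentBranch: branchName, canRebase: true }` when it is healthy. A sketch of consuming that contract (the `baseBranch` fallback is illustrative):

// Illustrative consumer of the new return shape.
async function planBranch(config, getParentBranch) {
  const { parentBranch, canRebase } = await getParentBranch(config);
  if (parentBranch === undefined) {
    // No reusable branch: build files from the repo's base branch
    return { base: config.baseBranch, rebased: true };
  }
  // Reuse the existing branch; canRebase gates any later rebase
  return { base: parentBranch, rebased: false, canRebase: Boolean(canRebase) };
}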
@@ -82,7 +82,9 @@ describe('workers/branch', () => {
     expect(config.logger.error.mock.calls).toHaveLength(0);
   });
   it('returns if no branch exists', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: false,
       updatedLockFiles: [],
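The remaining hunks in this spec file repeat the same mechanical fix: every `getUpdatedPackageFiles` mock must now resolve to an object with an `updatedPackageFiles` key rather than a bare array, mirroring the new return shape. In isolation (Jest):

// Before this commit the mock resolved to a bare array:
//   packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([]);
// After it, extra keys such as parentBranch can ride along:
const packageFiles = { getUpdatedPackageFiles: jest.fn() };
packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
  updatedPackageFiles: [],
});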
@@ -92,7 +94,9 @@ describe('workers/branch', () => {
     expect(commit.commitFilesToBranch.mock.calls).toHaveLength(1);
   });
   it('returns if branch automerged', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [{}],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: false,
       updatedLockFiles: [{}],
@@ -105,7 +109,9 @@ describe('workers/branch', () => {
     expect(prWorker.ensurePr.mock.calls).toHaveLength(0);
   });
   it('ensures PR and tries automerge', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [{}],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: false,
       updatedLockFiles: [{}],
@@ -120,7 +126,9 @@ describe('workers/branch', () => {
     expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(1);
   });
   it('ensures PR and adds lock file error comment', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [{}],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: false,
       updatedLockFiles: [{}],
@@ -137,7 +145,9 @@ describe('workers/branch', () => {
     expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
   });
   it('ensures PR and adds lock file error comment recreate closed', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [{}],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: false,
       updatedLockFiles: [{}],
@@ -161,7 +171,9 @@ describe('workers/branch', () => {
     await branchWorker.processBranch(config);
   });
   it('throws and swallows branch errors', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [{}],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: true,
       updatedLockFiles: [{}],
@@ -169,7 +181,9 @@ describe('workers/branch', () => {
     await branchWorker.processBranch(config);
   });
   it('swallows pr errors', async () => {
-    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+    packageFiles.getUpdatedPackageFiles.mockReturnValueOnce({
+      updatedPackageFiles: [{}],
+    });
     lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
       lockFileError: false,
       updatedLockFiles: [{}],
@@ -15,6 +15,7 @@ describe('workers/branch/package-files', () => {
       ...defaultConfig,
       api: { getFileContent: jest.fn() },
       logger,
+      parentBranch: 'some-branch',
     };
     packageJsonHelper.setNewValue = jest.fn();
     dockerHelper.setNewValue = jest.fn();
@@ -23,22 +24,45 @@ describe('workers/branch/package-files', () => {
     it('returns empty if lock file maintenance', async () => {
       config.upgrades = [{ type: 'lockFileMaintenance' }];
       const res = await getUpdatedPackageFiles(config);
-      expect(res).toHaveLength(0);
+      expect(res.updatedPackageFiles).toHaveLength(0);
+    });
+    it('recurses if setNewValue error', async () => {
+      config.parentBranch = 'some-branch';
+      config.canRebase = true;
+      config.upgrades = [{ packageFile: 'package.json' }];
+      packageJsonHelper.setNewValue.mockReturnValueOnce(null);
+      packageJsonHelper.setNewValue.mockReturnValueOnce('some content');
+      const res = await getUpdatedPackageFiles(config);
+      expect(res.updatedPackageFiles).toHaveLength(1);
+    });
+    it('errors if cannot rebase', async () => {
+      config.upgrades = [{ packageFile: 'package.json' }];
+      let e;
+      try {
+        await getUpdatedPackageFiles(config);
+      } catch (err) {
+        e = err;
+      }
+      expect(e).toBeDefined();
     });
     it('returns updated files', async () => {
+      config.parentBranch = 'some-branch';
+      config.canRebase = true;
       config.upgrades = [
         { packageFile: 'package.json' },
         { packageFile: 'Dockerfile' },
         { packageFile: 'packages/foo/package.js' },
       ];
       config.api.getFileContent.mockReturnValueOnce('old content 1');
+      config.api.getFileContent.mockReturnValueOnce('old content 1');
       config.api.getFileContent.mockReturnValueOnce('old content 2');
       config.api.getFileContent.mockReturnValueOnce('old content 3');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('old content 1');
+      packageJsonHelper.setNewValue.mockReturnValueOnce('new content 1');
+      packageJsonHelper.setNewValue.mockReturnValueOnce('new content 1+');
       dockerHelper.setNewValue.mockReturnValueOnce('new content 2');
       packageJsHelper.setNewValue.mockReturnValueOnce('old content 3');
       const res = await getUpdatedPackageFiles(config);
-      expect(res).toHaveLength(1);
+      expect(res.updatedPackageFiles).toHaveLength(2);
     });
   });
 });
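Two of the new specs above map directly onto the new branches in getUpdatedPackageFiles: in 'recurses if setNewValue error', the first `setNewValue` call returns `null` (a failed content update on the existing branch) and the second succeeds, so the function recurses once with `parentBranch: undefined` and still reports one updated file; in 'errors if cannot rebase', `canRebase` is never set, so the failed update cannot fall back to a rebase and the 'cannot rebase' error propagates.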
@@ -57,5 +57,15 @@ describe('workers/branch/package-json', () => {
       );
       testContent.should.equal(input01Content);
     });
+    it('returns null if throws error', () => {
+      const testContent = packageJson.setNewValue(
+        input01Content,
+        'blah',
+        'angular-touch-not',
+        '1.5.8',
+        logger
+      );
+      expect(testContent).toBe(null);
+    });
   });
 });
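This added spec exercises the new catch path in `setNewValue`: 'blah' is not a real dependency section in the fixture, so `parsedContents[depType][depName]` throws inside the try block and the function now returns `null` instead of crashing.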
@@ -36,31 +36,36 @@ describe('workers/branch/parent', () => {
     });
     it('returns undefined if branch does not exist', async () => {
       config.api.branchExists.mockReturnValue(false);
-      expect(await getParentBranch(config)).toBe(undefined);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(undefined);
     });
     it('returns branchName if no PR', async () => {
       config.api.getBranchPr.mockReturnValue(null);
-      expect(await getParentBranch(config)).toBe(config.branchName);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns branchName if does not need rebaseing', async () => {
       config.api.getBranchPr.mockReturnValue({
         isUnmergeable: false,
       });
-      expect(await getParentBranch(config)).toBe(config.branchName);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns branchName if unmergeable and cannot rebase', async () => {
       config.api.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: false,
       });
-      expect(await getParentBranch(config)).toBe(config.branchName);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns undefined if unmergeable and can rebase', async () => {
       config.api.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: true,
       });
-      expect(await getParentBranch(config)).toBe(undefined);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(undefined);
     });
     it('returns undefined if unmergeable and can rebase (gitlab)', async () => {
       config.isGitLab = true;
@@ -68,19 +73,22 @@ describe('workers/branch/parent', () => {
         isUnmergeable: true,
         canRebase: true,
       });
-      expect(await getParentBranch(config)).toBe(undefined);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(undefined);
       expect(config.api.deleteBranch.mock.calls.length).toBe(1);
     });
     it('returns branchName if automerge branch-push and not stale', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
-      expect(await getParentBranch(config)).toBe(config.branchName);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns undefined if automerge branch-push and stale', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
       config.api.isBranchStale.mockReturnValueOnce(true);
-      expect(await getParentBranch(config)).toBe(undefined);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).toBe(undefined);
     });
     it('returns branch if rebaseStalePrs enabled but cannot rebase', async () => {
       config.rebaseStalePrs = true;
@@ -89,7 +97,8 @@ describe('workers/branch/parent', () => {
         isUnmergeable: true,
         canRebase: false,
       });
-      expect(await getParentBranch(config)).not.toBe(undefined);
+      const res = await getParentBranch(config);
+      expect(res.parentBranch).not.toBe(undefined);
     });
   });
 });