import { fromStream } from 'hasha';
import got from '../../util/got';
import { logger } from '../../logger';
import { Upgrade } from '../common';
import { regEx } from '../../util/regex';
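
/**
 * Substitutes the current version for the new one throughout a rule
 * definition. Leading "v" prefixes are stripped first so that "v1.2.3"
 * and "1.2.3" are treated as the same version.
 */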
function updateWithNewVersion(
  content: string,
  currentValue: string,
  newValue: string
): string {
  const currentVersion = currentValue.replace(/^v/, '');
  const newVersion = newValue.replace(/^v/, '');
  // Replace every occurrence in a single pass. Replacing in a loop until no
  // occurrence remains risks non-termination when the new version still
  // contains the old one as a substring (e.g. 1.2 -> 1.2.1).
  return content.split(currentVersion).join(newVersion);
}
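
/**
 * Fallback for definitions that declare a single `url="..."` attribute
 * rather than a `urls=[...]` list. Returns the URL wrapped in an array,
 * or null if none is found.
 */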
function extractUrl(flattened: string): string[] | null {
  const urlMatch = flattened.match(/url="(.*?)"/);
  if (!urlMatch) {
    logger.debug('Cannot locate urls in new definition');
    return null;
  }
  return [urlMatch[1]];
}
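
/**
 * Pulls the download URLs out of a (possibly multi-line) rule definition.
 * All whitespace is stripped first so the regexes can treat the definition
 * as a single line.
 */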
function extractUrls(content: string): string[] | null {
  const flattened = content.replace(/\n/g, '').replace(/\s/g, '');
  const urlsMatch = flattened.match(/urls?=\[.*?\]/);
  if (!urlsMatch) {
    return extractUrl(flattened);
  }
  const urls = urlsMatch[0]
    .replace(/urls?=\[/, '')
    .replace(/,?\]$/, '')
    .split(',')
    .map(url => url.replace(/"/g, ''));
  return urls;
}
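
/**
 * Streams the artifact at `url` and returns its SHA-256 checksum, or null
 * if the download fails. Results are kept in `renovateCache` (Renovate's
 * global cache object) for three days to avoid re-downloading archives.
 */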
async function getHashFromUrl(url: string): Promise<string | null> {
  const cacheNamespace = 'url-sha256';
  const cachedResult = await renovateCache.get<string | null>(
    cacheNamespace,
    url
  );
  /* istanbul ignore next line */
  if (cachedResult) return cachedResult;
  try {
    const hash = await fromStream(got.stream(url), {
      algorithm: 'sha256',
    });
    const cacheMinutes = 3 * 24 * 60; // 3 days
    await renovateCache.set(cacheNamespace, url, hash, cacheMinutes);
    return hash;
  } catch (err) /* istanbul ignore next */ {
    return null;
  }
}
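
/**
 * Hashes every mirror URL of a definition and returns the single checksum
 * they should all share. Mirrors of the same artifact must hash
 * identically, so multiple distinct hashes indicate a problem and are
 * logged as a warning.
 */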
async function getHashFromUrls(urls: string[]): Promise<string | null> {
  const hashes = (await Promise.all(
    urls.map(url => getHashFromUrl(url))
  )).filter(Boolean);
  const distinctHashes = [...new Set(hashes)];
  if (!distinctHashes.length) {
    logger.debug({ hashes, urls }, 'Could not calculate hash for URLs');
    return null;
  }
  // istanbul ignore if
  if (distinctHashes.length > 1) {
    logger.warn({ urls }, 'Found multiple hashes for single def');
  }
  return distinctHashes[0];
}
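
/**
 * Replaces the value of the first `sha256 = "..."` attribute in the
 * definition with the freshly calculated hash.
 */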
function setNewHash(content: string, hash: string): string {
  return content.replace(/(sha256\s*=\s*)"[^"]+"/, `$1"${hash}"`);
}
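
/**
 * Applies an upgrade to a bazel WORKSPACE file: rewrites the matching rule
 * definition (container_pull, git_repository, go_repository, or
 * http_archive) and splices it back into the file content. Returns the
 * updated file content, or null if the update could not be applied.
 */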
export async function updateDependency(
  fileContent: string,
  upgrade: Upgrade
): Promise<string | null> {
  try {
    logger.debug(
      `bazel.updateDependency(): ${upgrade.newValue || upgrade.newDigest}`
    );
    let newDef: string;
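    // container_pull pins an image by tag and/or digest; update whichever
    // of the two fields the definition declares.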
    if (upgrade.depType === 'container_pull') {
      newDef = upgrade.managerData.def
        .replace(/(tag\s*=\s*)"[^"]+"/, `$1"${upgrade.newValue}"`)
        .replace(/(digest\s*=\s*)"[^"]+"/, `$1"${upgrade.newDigest}"`);
    }
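    // git_repository / go_repository pin a source checkout by tag and/or
    // commit.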
    if (
      upgrade.depType === 'git_repository' ||
      upgrade.depType === 'go_repository'
    ) {
      newDef = upgrade.managerData.def
        .replace(/(tag\s*=\s*)"[^"]+"/, `$1"${upgrade.newValue}"`)
        .replace(/(commit\s*=\s*)"[^"]+"/, `$1"${upgrade.newDigest}"`);
      if (upgrade.currentDigest && upgrade.updateType !== 'digest') {
        // Keep a trailing comment on the commit line so the pinned digest
        // stays traceable to a human-readable version.
        newDef = newDef.replace(
          /(commit\s*=\s*)"[^"]+".*?\n/,
          `$1"${upgrade.newDigest}", # ${upgrade.newValue}\n`
        );
      }
    } else if (upgrade.depType === 'http_archive' && upgrade.newValue) {
      newDef = updateWithNewVersion(
        upgrade.managerData.def,
        upgrade.currentValue,
        upgrade.newValue
      );
      // Some projects name release artifacts in ways a plain version
      // substitution cannot reach; massage known patterns into shape.
      const massages = {
        'bazel-skylib.': 'bazel_skylib-',
        '/bazel-gazelle/releases/download/0':
          '/bazel-gazelle/releases/download/v0',
        '/bazel-gazelle-0': '/bazel-gazelle-v0',
        '/rules_go/releases/download/0': '/rules_go/releases/download/v0',
        '/rules_go-0': '/rules_go-v0',
      };
      for (const [from, to] of Object.entries(massages)) {
        newDef = newDef.replace(from, to);
      }
      const urls = extractUrls(newDef);
      if (!(urls && urls.length)) {
        logger.debug({ newDef }, 'urls is empty');
        return null;
      }
      const hash = await getHashFromUrls(urls);
      if (!hash) {
        return null;
      }
      logger.debug({ hash }, 'Calculated hash');
      newDef = setNewHash(newDef, hash);
    } else if (upgrade.depType === 'http_archive' && upgrade.newDigest) {
      const [, shortRepo] = upgrade.repo.split('/');
      const url = `https://github.com/${upgrade.repo}/archive/${upgrade.newDigest}.tar.gz`;
      const hash = await getHashFromUrl(url);
      newDef = setNewHash(upgrade.managerData.def, hash);
      newDef = newDef.replace(
        regEx(`(strip_prefix\\s*=\\s*)"[^"]*"`),
        `$1"${shortRepo}-${upgrade.newDigest}"`
      );
      // Swap any old commit hashes embedded in archive URLs for the new
      // digest.
      const match =
        upgrade.managerData.def.match(/(?<=archive\/).*(?=\.tar\.gz)/g) || [];
      match.forEach(matchedHash => {
        newDef = newDef.replace(matchedHash, upgrade.newDigest);
      });
    }
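    // Locate the original rule definition by depType and name, then splice
    // the updated definition into the file.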
    logger.debug({ oldDef: upgrade.managerData.def, newDef });
    let existingRegExStr = `${upgrade.depType}\\([^\\)]+name\\s*=\\s*"${upgrade.depName}"(.*\\n)+?\\s*\\)`;
    if (newDef.endsWith('\n')) {
      existingRegExStr += '\n';
    }
    const existingDef = regEx(existingRegExStr);
    // istanbul ignore if
    if (!fileContent.match(existingDef)) {
      logger.info('Cannot match existing string');
      return null;
    }
    return fileContent.replace(existingDef, newDef);
  } catch (err) /* istanbul ignore next */ {
    logger.info({ err }, 'Error setting new bazel WORKSPACE version');
    return null;
  }
}