renovate/lib/manager/bazel/update.ts

import { fromStream } from 'hasha';
import { logger } from '../../logger';
import * as globalCache from '../../util/cache/global';
import { Http } from '../../util/http';
import { regEx } from '../../util/regex';
import { UpdateDependencyConfig } from '../common';

const http = new Http('bazel');
function updateWithNewVersion(
  content: string,
  currentValue: string,
  newValue: string
): string {
  const currentVersion = currentValue.replace(/^v/, '');
  const newVersion = newValue.replace(/^v/, '');
  let newContent = content;
  do {
    newContent = newContent.replace(currentVersion, newVersion);
  } while (newContent.includes(currentVersion));
  return newContent;
}
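
// A minimal illustration of the replacement above (the values here are
// hypothetical, not taken from a real WORKSPACE): because an http_archive def
// usually repeats the version in both `urls` and `strip_prefix`, every
// occurrence of the stripped version string is rewritten:
//
//   updateWithNewVersion(
//     'urls = ["https://example.com/rules_x/rules_x-1.2.3.tar.gz"], strip_prefix = "rules_x-1.2.3"',
//     'v1.2.3',
//     'v1.3.0'
//   );
//   // => 'urls = ["https://example.com/rules_x/rules_x-1.3.0.tar.gz"], strip_prefix = "rules_x-1.3.0"'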
function extractUrl(flattened: string): string[] | null {
  const urlMatch = /url="(.*?)"/.exec(flattened);
  if (!urlMatch) {
    logger.debug('Cannot locate urls in new definition');
    return null;
  }
  return [urlMatch[1]];
}
function extractUrls(content: string): string[] | null {
  const flattened = content.replace(/\n/g, '').replace(/\s/g, '');
  const urlsMatch = /urls?=\[.*?\]/.exec(flattened);
  if (!urlsMatch) {
    return extractUrl(flattened);
  }
  const urls = urlsMatch[0]
    .replace(/urls?=\[/, '')
    .replace(/,?\]$/, '')
    .split(',')
    .map((url) => url.replace(/"/g, ''));
  return urls;
}
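
// Sketch of the two shapes this handles (hypothetical inputs): the definition
// is flattened to a single whitespace-free string, then either the
// `urls = [...]` list or the single `url = "..."` attribute is parsed out:
//
//   extractUrls('http_archive(name="x",urls=["https://a.example/x.zip","https://b.example/x.zip"])');
//   // => ['https://a.example/x.zip', 'https://b.example/x.zip']
//
//   extractUrls('http_archive(name="x",url="https://a.example/x.zip")');
//   // => ['https://a.example/x.zip']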
async function getHashFromUrl(url: string): Promise<string | null> {
  const cacheNamespace = 'url-sha256';
  const cachedResult = await globalCache.get<string | null>(
    cacheNamespace,
    url
  );
  /* istanbul ignore next line */
  if (cachedResult) {
    return cachedResult;
  }
  try {
    const hash = await fromStream(http.stream(url), {
      algorithm: 'sha256',
    });
    const cacheMinutes = 3 * 24 * 60; // 3 days
    await globalCache.set(cacheNamespace, url, hash, cacheMinutes);
    return hash;
  } catch (err) /* istanbul ignore next */ {
    return null;
  }
}
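
// Usage sketch (hypothetical URL): the artifact is streamed through hasha
// rather than buffered in memory, and the resulting digest is cached for
// three days so repeated runs do not re-download the same release tarball:
//
//   const sha = await getHashFromUrl('https://example.com/rules_x-v1.3.0.tar.gz');
//   // => hex-encoded sha256 string, or null if the download failed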
async function getHashFromUrls(urls: string[]): Promise<string | null> {
  const hashes = (
    await Promise.all(urls.map((url) => getHashFromUrl(url)))
  ).filter(Boolean);
  const distinctHashes = [...new Set(hashes)];
  if (!distinctHashes.length) {
    logger.debug({ hashes, urls }, 'Could not calculate hash for URLs');
    return null;
  }
  // istanbul ignore if
  if (distinctHashes.length > 1) {
    logger.warn({ urls }, 'Found multiple hashes for single def');
  }
  return distinctHashes[0];
}
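
// The url/urls of a Bazel http_archive normally point at mirrors of the same
// artifact, so every reachable URL should hash to the same value; the single
// distinct hash is returned, and a warning is logged if the mirrors disagree.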
function setNewHash(content: string, hash: string): string {
  return content.replace(/(sha256\s*=\s*)"[^"]+"/, `$1"${hash}"`);
}
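
// Example (hypothetical fragment): only the first sha256 assignment is
// touched, and the capture group keeps the original spacing around the `=`:
//
//   setNewHash('sha256 = "0123abcd",', 'feedbeef');
//   // => 'sha256 = "feedbeef",'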
export async function updateDependency({
  fileContent,
  upgrade,
}: UpdateDependencyConfig): Promise<string | null> {
  try {
    logger.debug(
      `bazel.updateDependency(): ${upgrade.newValue || upgrade.newDigest}`
    );
    let newDef: string;
    if (upgrade.depType === 'container_pull') {
      newDef = upgrade.managerData.def
        .replace(/(tag\s*=\s*)"[^"]+"/, `$1"${upgrade.newValue}"`)
        .replace(/(digest\s*=\s*)"[^"]+"/, `$1"${upgrade.newDigest}"`);
    }
    if (
      upgrade.depType === 'git_repository' ||
      upgrade.depType === 'go_repository'
    ) {
      newDef = upgrade.managerData.def
        .replace(/(tag\s*=\s*)"[^"]+"/, `$1"${upgrade.newValue}"`)
        .replace(/(commit\s*=\s*)"[^"]+"/, `$1"${upgrade.newDigest}"`);
      if (upgrade.currentDigest && upgrade.updateType !== 'digest') {
        newDef = newDef.replace(
          /(commit\s*=\s*)"[^"]+".*?\n/,
          `$1"${upgrade.newDigest}", # ${upgrade.newValue}\n`
        );
      }
    } else if (upgrade.depType === 'http_archive' && upgrade.newValue) {
      newDef = updateWithNewVersion(
        upgrade.managerData.def,
        upgrade.currentValue,
        upgrade.newValue
      );
      const massages = {
        'bazel-skylib.': 'bazel_skylib-',
        '/bazel-gazelle/releases/download/0':
          '/bazel-gazelle/releases/download/v0',
        '/bazel-gazelle-0': '/bazel-gazelle-v0',
        '/rules_go/releases/download/0': '/rules_go/releases/download/v0',
        '/rules_go-0': '/rules_go-v0',
      };
      for (const [from, to] of Object.entries(massages)) {
        newDef = newDef.replace(from, to);
      }
      const urls = extractUrls(newDef);
      if (!(urls && urls.length)) {
        logger.debug({ newDef }, 'urls is empty');
        return null;
      }
      const hash = await getHashFromUrls(urls);
      if (!hash) {
        return null;
      }
      logger.debug({ hash }, 'Calculated hash');
      newDef = setNewHash(newDef, hash);
    } else if (upgrade.depType === 'http_archive' && upgrade.newDigest) {
      const [, shortRepo] = upgrade.repo.split('/');
      const url = `https://github.com/${upgrade.repo}/archive/${upgrade.newDigest}.tar.gz`;
      const hash = await getHashFromUrl(url);
      newDef = setNewHash(upgrade.managerData.def, hash);
      newDef = newDef.replace(
        regEx(`(strip_prefix\\s*=\\s*)"[^"]*"`),
        `$1"${shortRepo}-${upgrade.newDigest}"`
      );
      const match =
        upgrade.managerData.def.match(/(?<=archive\/).*(?=\.tar\.gz)/g) || [];
      match.forEach((matchedHash) => {
        newDef = newDef.replace(matchedHash, upgrade.newDigest);
      });
    }
    logger.debug({ oldDef: upgrade.managerData.def, newDef });
    let existingRegExStr = `${upgrade.depType}\\([^\\)]+name\\s*=\\s*"${upgrade.depName}"(.*\\n)+?\\s*\\)`;
    if (newDef.endsWith('\n')) {
      existingRegExStr += '\n';
    }
    const existingDef = regEx(existingRegExStr);
    // istanbul ignore if
    if (!existingDef.test(fileContent)) {
      logger.debug('Cannot match existing string');
      return null;
    }
    return fileContent.replace(existingDef, newDef);
  } catch (err) /* istanbul ignore next */ {
    logger.debug({ err }, 'Error setting new bazel WORKSPACE version');
    return null;
  }
}
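
// Rough usage sketch. The field values below are assumptions for illustration
// only; in practice Renovate's lookup phase supplies the `upgrade` object and
// `managerData.def` holds the dependency's existing WORKSPACE definition:
//
//   const updated = await updateDependency({
//     fileContent: workspaceContent,
//     upgrade: {
//       depType: 'git_repository',
//       depName: 'build_bazel_rules_typescript',
//       managerData: { def: existingDef },
//       newValue: 'v1.6.0',
//     },
//   });
//   // => the rewritten WORKSPACE content, or null if the def could not be matched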