import is from '@sindresorhus/is';
import hasha from 'hasha';
import { logger } from '../../../logger';
import * as packageCache from '../../../util/cache/package';
import { Http } from '../../../util/http';
import { map as pMap } from '../../../util/promises';
import { regEx } from '../../../util/regex';
import type { UpdateDependencyConfig } from '../types';
import { findCodeFragment, patchCodeAtFragments, updateCode } from './common';
import type { BazelManagerData, RecordFragment, StringFragment } from './types';

const http = new Http('bazel');

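// Collect the string fragments from a rule's `url` and `urls` attributes.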
function getUrlFragments(rule: RecordFragment): StringFragment[] {
  const urls: StringFragment[] = [];

  const urlRecord = rule.children['url'];
  if (urlRecord?.type === 'string') {
    urls.push(urlRecord);
  }

  const urlsRecord = rule.children['urls'];
  if (urlsRecord?.type === 'array') {
    for (const urlRecord of urlsRecord.children) {
      if (urlRecord.type === 'string') {
        urls.push(urlRecord);
      }
    }
  }

  return urls;
}

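// URL rewrites applied before hashing; these projects publish their release
// artifacts under slightly different names/paths than the raw rule values.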
const urlMassages = {
  'bazel-skylib.': 'bazel_skylib-',
  '/bazel-gazelle/releases/download/0': '/bazel-gazelle/releases/download/v0',
  '/bazel-gazelle-0': '/bazel-gazelle-v0',
  '/rules_go/releases/download/0': '/rules_go/releases/download/v0',
  '/rules_go-0': '/rules_go-v0',
};

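// Apply the rewrites above to a single URL.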
function massageUrl(url: string): string {
  let result = url;
  for (const [from, to] of Object.entries(urlMassages)) {
    result = result.replace(from, to);
  }
  return result;
}

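// Replace every occurrence of `from` in `input` with `to`.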
function replaceAll(input: string, from: string, to: string): string {
  return input.split(from).join(to);
}

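// Replace `from` with `to` throughout `content`, ignoring a leading `v` on
// either value.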
function replaceValues(
  content: string,
  from: string | null | undefined,
  to: string | null | undefined
): string {
  // istanbul ignore if
  if (!from || !to || from === to) {
    return content;
  }
  const massagedFrom = from.replace(regEx(/^v/), '');
  const massagedTo = to.replace(regEx(/^v/), '');
  return replaceAll(content, massagedFrom, massagedTo);
}

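// Stream the artifact and compute its sha256, caching the result.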
async function getHashFromUrl(url: string): Promise<string | null> {
  const cacheNamespace = 'url-sha256';
  const cachedResult = await packageCache.get<string | null>(
    cacheNamespace,
    url
  );
  /* istanbul ignore next line */
  if (cachedResult) {
    return cachedResult;
  }
  try {
    const hash = await hasha.fromStream(http.stream(url), {
      algorithm: 'sha256',
    });
    const cacheMinutes = 3 * 24 * 60; // 3 days
    await packageCache.set(cacheNamespace, url, hash, cacheMinutes);
    return hash;
  } catch (err) /* istanbul ignore next */ {
    return null;
  }
}

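// Hash all candidate URLs (after massaging) and return the first hash found,
// warning if the URLs do not all resolve to the same hash.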
async function getHashFromUrls(urls: string[]): Promise<string | null> {
  const hashes = (
    await pMap(urls, (url) => getHashFromUrl(massageUrl(url)))
  ).filter(is.truthy);
  if (!hashes.length) {
    logger.debug({ urls }, 'Could not calculate hash for URLs');
    return null;
  }

  const distinctHashes = new Set(hashes);
  // istanbul ignore if
  if (distinctHashes.size > 1) {
    logger.warn({ urls }, 'Found multiple hashes for single def');
  }

  return hashes[0];
}

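// Apply an upgrade to the file content, dispatching on the rule's depType.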
export async function updateDependency({
  fileContent,
  upgrade,
}: UpdateDependencyConfig<BazelManagerData>): Promise<string | null> {
  try {
    const { newValue, newDigest } = upgrade;
    logger.debug({ newValue, newDigest }, `bazel.updateDependency()`);
    const idx = upgrade.managerData!.idx;

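    // container_pull: update the image tag and/or digest in place.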
    if (upgrade.depType === 'container_pull') {
      let result = fileContent;

      if (newValue) {
        result = updateCode(result, [idx, 'tag'], newValue);
      }

      if (newDigest) {
        result = updateCode(result, [idx, 'digest'], newDigest);
      }

      return result;
    }

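    // git_repository / go_repository: update the tag and/or commit.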
    if (
      upgrade.depType === 'git_repository' ||
      upgrade.depType === 'go_repository'
    ) {
      let result = fileContent;

      if (newValue) {
        result = updateCode(result, [idx, 'tag'], newValue);
      }

      if (newDigest) {
        result = updateCode(result, [idx, 'commit'], newDigest);
      }

      return result;
    }

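    // http_file / http_archive: rewrite the URLs and strip_prefix, then
    // recompute the sha256 of the updated artifacts.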
    if (upgrade.depType === 'http_file' || upgrade.depType === 'http_archive') {
      const rule = findCodeFragment(fileContent, [idx]);
      // istanbul ignore if
      if (rule?.type !== 'record') {
        return null;
      }

      const urlFragments = getUrlFragments(rule);
      if (!urlFragments?.length) {
        logger.debug({ def: rule.value }, 'urls is empty');
        return null;
      }

      const updateValues = (oldUrl: string): string => {
        let url = oldUrl;
        url = replaceValues(url, upgrade.currentValue, upgrade.newValue);
        url = replaceValues(url, upgrade.currentDigest, upgrade.newDigest);
        return url;
      };

      const urls = urlFragments.map(({ value }) => updateValues(value));
      const hash = await getHashFromUrls(urls);
      if (!hash) {
        return null;
      }

      let result = fileContent;
      result = patchCodeAtFragments(result, urlFragments, updateValues);
      result = updateCode(result, [idx, 'strip_prefix'], updateValues);
      result = updateCode(result, [idx, 'sha256'], hash);
      return result;
    }
  } catch (err) /* istanbul ignore next */ {
    logger.debug({ err }, 'Error setting new bazel WORKSPACE version');
  }

  // istanbul ignore next
  return null;
}