refactor: Object.assign -> object spread (#9076)

This commit is contained in:
Rhys Arkins 2021-03-11 08:03:37 +01:00 committed by GitHub
parent dbd9d5c791
commit 3357e6333d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 43 additions and 37 deletions

View file

@@ -57,7 +57,7 @@ export async function parseConfigs(
const cliConfig = await resolveConfigPresets(cliParser.getConfig(argv)); const cliConfig = await resolveConfigPresets(cliParser.getConfig(argv));
const envConfig = await resolveConfigPresets(envParser.getConfig(env)); const envConfig = await resolveConfigPresets(envParser.getConfig(env));
let config = mergeChildConfig(fileConfig, envConfig); let config: GlobalConfig = mergeChildConfig(fileConfig, envConfig);
config = mergeChildConfig(config, cliConfig); config = mergeChildConfig(config, cliConfig);
const combinedConfig = config; const combinedConfig = config;
@@ -73,7 +73,7 @@ export async function parseConfigs(
delete forcedCli.token; delete forcedCli.token;
delete forcedCli.hostRules; delete forcedCli.hostRules;
if (config.force) { if (config.force) {
config.force = Object.assign(config.force, forcedCli); config.force = { ...config.force, ...forcedCli };
} else { } else {
config.force = forcedCli; config.force = forcedCli;
} }

View file

@@ -57,10 +57,10 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
PackageRule PackageRule
][]) { ][]) {
if (updateTypes.includes(key)) { if (updateTypes.includes(key)) {
const newRule = clone(rule); let newRule = clone(rule);
newRule.matchUpdateTypes = rule.matchUpdateTypes || []; newRule.matchUpdateTypes = rule.matchUpdateTypes || [];
newRule.matchUpdateTypes.push(key); newRule.matchUpdateTypes.push(key);
Object.assign(newRule, val); newRule = { ...newRule, ...val };
newRules.push(newRule); newRules.push(newRule);
} }
} }

View file

@@ -151,7 +151,7 @@ export interface RenovateConfig
defaultBranch?: string; defaultBranch?: string;
branchList?: string[]; branchList?: string[];
description?: string | string[]; description?: string | string[];
force?: RenovateConfig;
errors?: ValidationMessage[]; errors?: ValidationMessage[];
gitAuthor?: string; gitAuthor?: string;

View file

@@ -22,10 +22,10 @@ export function mergeChildConfig<T, TChild>(
) { ) {
logger.trace(`mergeable option: ${option.name}`); logger.trace(`mergeable option: ${option.name}`);
if (option.name === 'constraints') { if (option.name === 'constraints') {
config[option.name] = Object.assign( config[option.name] = {
parentConfig[option.name], ...parentConfig[option.name],
childConfig[option.name] ...childConfig[option.name],
); };
} else if (option.type === 'array') { } else if (option.type === 'array') {
config[option.name] = (parentConfig[option.name] as unknown[]).concat( config[option.name] = (parentConfig[option.name] as unknown[]).concat(
config[option.name] config[option.name]
@@ -42,5 +42,5 @@ export function mergeChildConfig<T, TChild>(
); );
} }
} }
return Object.assign(config, config.force); return { ...config, ...config.force };
} }

View file

@@ -19,13 +19,13 @@ const http = new Http(id);
// We calculate auth at this datasource layer so that we can know whether it's safe to cache or not // We calculate auth at this datasource layer so that we can know whether it's safe to cache or not
function getHostOpts(url: string): HttpOptions { function getHostOpts(url: string): HttpOptions {
const opts: HttpOptions = {}; let opts: HttpOptions = {};
const { username, password } = hostRules.find({ const { username, password } = hostRules.find({
hostType: id, hostType: id,
url, url,
}); });
if (username && password) { if (username && password) {
Object.assign(opts, { username, password }); opts = { ...opts, username, password };
} }
return opts; return opts;
} }

View file

@@ -27,14 +27,14 @@ export interface ParsedLine {
} }
export function parseLine(line: string): ParsedLine { export function parseLine(line: string): ParsedLine {
const result: ParsedLine = {}; let result: ParsedLine = {};
if (!line) { if (!line) {
return result; return result;
} }
for (const regex of Object.values(regexMappings)) { for (const regex of Object.values(regexMappings)) {
const match = regex.exec(line.replace(/#.*$/, '')); const match = regex.exec(line.replace(/#.*$/, ''));
if (match?.groups) { if (match?.groups) {
Object.assign(result, match.groups); result = { ...result, ...match.groups };
} }
} }

View file

@@ -469,7 +469,7 @@ export function parseGradle(
initVars: PackageVariables = {}, initVars: PackageVariables = {},
packageFile?: string packageFile?: string
): ParseGradleResult { ): ParseGradleResult {
const vars: PackageVariables = { ...initVars }; let vars: PackageVariables = { ...initVars };
const deps: PackageDependency<ManagerData>[] = []; const deps: PackageDependency<ManagerData>[] = [];
const urls = []; const urls = [];
@@ -481,7 +481,7 @@ export function parseGradle(
deps.push(...matchResult.deps); deps.push(...matchResult.deps);
} }
if (matchResult?.vars) { if (matchResult?.vars) {
Object.assign(vars, matchResult.vars); vars = { ...vars, ...matchResult.vars };
} }
if (matchResult?.urls) { if (matchResult?.urls) {
urls.push(...matchResult.urls); urls.push(...matchResult.urls);

View file

@@ -307,14 +307,14 @@ export async function extractPackageFile(
packageJson[depType] as NpmPackageDependency packageJson[depType] as NpmPackageDependency
)) { )) {
const depName = parseDepName(depType, key); const depName = parseDepName(depType, key);
const dep: PackageDependency = { let dep: PackageDependency = {
depType, depType,
depName, depName,
}; };
if (depName !== key) { if (depName !== key) {
dep.managerData = { key }; dep.managerData = { key };
} }
Object.assign(dep, extractDependency(depType, depName, val)); dep = { ...dep, ...extractDependency(depType, depName, val) };
if (depName === 'node') { if (depName === 'node') {
// This is a special case for Node.js to group it together with other managers // This is a special case for Node.js to group it together with other managers
dep.commitMessageTopic = 'Node.js'; dep.commitMessageTopic = 'Node.js';

View file

@@ -139,11 +139,12 @@ export async function initRepo({
); );
config.defaultBranch = info.mainbranch; config.defaultBranch = info.mainbranch;
Object.assign(config, { config = {
...config,
owner: info.owner, owner: info.owner,
mergeMethod: info.mergeMethod, mergeMethod: info.mergeMethod,
has_issues: info.has_issues, has_issues: info.has_issues,
}); };
logger.debug(`${repository} owner = ${config.owner}`); logger.debug(`${repository} owner = ${config.owner}`);
} catch (err) /* istanbul ignore next */ { } catch (err) /* istanbul ignore next */ {

View file

@@ -16,7 +16,7 @@ export function getChildProcessEnv(
): NodeJS.ProcessEnv { ): NodeJS.ProcessEnv {
const env: NodeJS.ProcessEnv = {}; const env: NodeJS.ProcessEnv = {};
if (getAdminConfig().trustLevel === 'high') { if (getAdminConfig().trustLevel === 'high') {
return Object.assign(env, process.env); return { ...env, ...process.env };
} }
const envVars = [...basicEnvVars, ...customEnvVars]; const envVars = [...basicEnvVars, ...customEnvVars];
envVars.forEach((envVar) => { envVars.forEach((envVar) => {

View file

@@ -23,7 +23,7 @@ export async function getUpdatedPackageFiles(
logger.debug( logger.debug(
`manager.getUpdatedPackageFiles() reuseExistinbranch=${reuseExistingBranch}` `manager.getUpdatedPackageFiles() reuseExistinbranch=${reuseExistingBranch}`
); );
const updatedFileContents: Record<string, string> = {}; let updatedFileContents: Record<string, string> = {};
const nonUpdatedFileContents: Record<string, string> = {}; const nonUpdatedFileContents: Record<string, string> = {};
const packageFileManagers: Record<string, string> = {}; const packageFileManagers: Record<string, string> = {};
const packageFileUpdatedDeps: Record<string, string[]> = {}; const packageFileUpdatedDeps: Record<string, string[]> = {};
@@ -91,7 +91,7 @@ export async function getUpdatedPackageFiles(
reuseExistingBranch: false, reuseExistingBranch: false,
}); });
} }
Object.assign(updatedFileContents, files); updatedFileContents = { ...updatedFileContents, ...files };
} }
} else { } else {
const bumpPackageVersion = get(manager, 'bumpPackageVersion'); const bumpPackageVersion = get(manager, 'bumpPackageVersion');

View file

@@ -67,7 +67,7 @@ async function deleteBranchSilently(branchName: string): Promise<void> {
export async function processBranch( export async function processBranch(
branchConfig: BranchConfig branchConfig: BranchConfig
): Promise<ProcessBranchResult> { ): Promise<ProcessBranchResult> {
const config: BranchConfig = { ...branchConfig }; let config: BranchConfig = { ...branchConfig };
const dependencies = config.upgrades const dependencies = config.upgrades
.map((upgrade) => upgrade.depName) .map((upgrade) => upgrade.depName)
.filter((v) => v) // remove nulls (happens for lock file maintenance) .filter((v) => v) // remove nulls (happens for lock file maintenance)
@@ -310,7 +310,7 @@ export async function processBranch(
logger.debug('Manual rebase requested via Dependency Dashboard'); logger.debug('Manual rebase requested via Dependency Dashboard');
config.reuseExistingBranch = false; config.reuseExistingBranch = false;
} else { } else {
Object.assign(config, await shouldReuseExistingBranch(config)); config = { ...config, ...(await shouldReuseExistingBranch(config)) };
} }
logger.debug(`Using reuseExistingBranch: ${config.reuseExistingBranch}`); logger.debug(`Using reuseExistingBranch: ${config.reuseExistingBranch}`);
const res = await getUpdatedPackageFiles(config); const res = await getUpdatedPackageFiles(config);
@@ -318,7 +318,7 @@ export async function processBranch(
if (res.artifactErrors && config.artifactErrors) { if (res.artifactErrors && config.artifactErrors) {
res.artifactErrors = config.artifactErrors.concat(res.artifactErrors); res.artifactErrors = config.artifactErrors.concat(res.artifactErrors);
} }
Object.assign(config, res); config = { ...config, ...res };
if (config.updatedPackageFiles?.length) { if (config.updatedPackageFiles?.length) {
logger.debug( logger.debug(
`Updated ${config.updatedPackageFiles.length} package files` `Updated ${config.updatedPackageFiles.length} package files`

View file

@@ -176,7 +176,7 @@ export async function detectVulnerabilityAlerts(
datasource === datasourcePypi.id datasource === datasourcePypi.id
? `==${val.firstPatchedVersion}` ? `==${val.firstPatchedVersion}`
: val.firstPatchedVersion; : val.firstPatchedVersion;
const matchRule: PackageRule = { let matchRule: PackageRule = {
matchDatasources: [datasource], matchDatasources: [datasource],
matchPackageNames: [depName], matchPackageNames: [depName],
matchCurrentVersion, matchCurrentVersion,
@@ -202,14 +202,15 @@ export async function detectVulnerabilityAlerts(
matchRule.enabled = false; matchRule.enabled = false;
} else { } else {
// Remediate only direct dependencies // Remediate only direct dependencies
Object.assign(matchRule, { matchRule = {
...matchRule,
allowedVersions, allowedVersions,
prBodyNotes, prBodyNotes,
isVulnerabilityAlert: true, isVulnerabilityAlert: true,
force: { force: {
...config.vulnerabilityAlerts, ...config.vulnerabilityAlerts,
}, },
}); };
} }
alertPackageRules.push(matchRule); alertPackageRules.push(matchRule);
} }

View file

@@ -19,7 +19,7 @@ async function fetchDepUpdates(
packageFileConfig: ManagerConfig & PackageFile, packageFileConfig: ManagerConfig & PackageFile,
indep: PackageDependency indep: PackageDependency
): Promise<PackageDependency> { ): Promise<PackageDependency> {
const dep = clone(indep); let dep = clone(indep);
dep.updates = []; dep.updates = [];
if (dep.skipReason) { if (dep.skipReason) {
return dep; return dep;
@@ -39,7 +39,10 @@ async function fetchDepUpdates(
dep.skipReason = SkipReason.Disabled; dep.skipReason = SkipReason.Disabled;
} else { } else {
if (depConfig.datasource) { if (depConfig.datasource) {
Object.assign(dep, await lookupUpdates(depConfig as LookupUpdateConfig)); dep = {
...dep,
...(await lookupUpdates(depConfig as LookupUpdateConfig)),
};
} else { } else {
dep.updates = await getPackageUpdates(manager, depConfig); dep.updates = await getPackageUpdates(manager, depConfig);
} }

View file

@@ -326,10 +326,12 @@ export function generateBranchConfig(
config.blockedByPin = config.upgrades.every( config.blockedByPin = config.upgrades.every(
(upgrade) => upgrade.blockedByPin (upgrade) => upgrade.blockedByPin
); );
config.constraints = Object.assign( config.constraints = {};
{}, for (const upgrade of config.upgrades || []) {
...config.upgrades.map((upgrade) => upgrade.constraints) if (upgrade.constraints) {
); config.constraints = { ...config.constraints, ...upgrade.constraints };
}
}
const tableRows = config.upgrades const tableRows = config.upgrades
.map((upgrade) => getTableValues(upgrade)) .map((upgrade) => getTableValues(upgrade))
.filter(Boolean); .filter(Boolean);

View file

@@ -35,7 +35,7 @@ export interface BranchUpgradeConfig
excludeCommitPaths?: string[]; excludeCommitPaths?: string[];
githubName?: string; githubName?: string;
group?: GroupConfig; group?: GroupConfig;
constraints?: Record<string, string>;
groupName?: string; groupName?: string;
groupSlug?: string; groupSlug?: string;
language?: string; language?: string;
@@ -110,7 +110,6 @@ export interface BranchConfig
releaseTimestamp?: string; releaseTimestamp?: string;
forceCommit?: boolean; forceCommit?: boolean;
rebaseRequested?: boolean; rebaseRequested?: boolean;
res?: ProcessBranchResult; res?: ProcessBranchResult;
upgrades: BranchUpgradeConfig[]; upgrades: BranchUpgradeConfig[];
packageFiles?: Record<string, PackageFile[]>; packageFiles?: Record<string, PackageFile[]>;