refactor: Object.assign -> object spread (#9076)

This commit is contained in:
Rhys Arkins 2021-03-11 08:03:37 +01:00 committed by GitHub
parent dbd9d5c791
commit 3357e6333d
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
16 changed files with 43 additions and 37 deletions

View file

@ -57,7 +57,7 @@ export async function parseConfigs(
const cliConfig = await resolveConfigPresets(cliParser.getConfig(argv));
const envConfig = await resolveConfigPresets(envParser.getConfig(env));
let config = mergeChildConfig(fileConfig, envConfig);
let config: GlobalConfig = mergeChildConfig(fileConfig, envConfig);
config = mergeChildConfig(config, cliConfig);
const combinedConfig = config;
@ -73,7 +73,7 @@ export async function parseConfigs(
delete forcedCli.token;
delete forcedCli.hostRules;
if (config.force) {
config.force = Object.assign(config.force, forcedCli);
config.force = { ...config.force, ...forcedCli };
} else {
config.force = forcedCli;
}

View file

@ -57,10 +57,10 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
PackageRule
][]) {
if (updateTypes.includes(key)) {
const newRule = clone(rule);
let newRule = clone(rule);
newRule.matchUpdateTypes = rule.matchUpdateTypes || [];
newRule.matchUpdateTypes.push(key);
Object.assign(newRule, val);
newRule = { ...newRule, ...val };
newRules.push(newRule);
}
}

View file

@ -151,7 +151,7 @@ export interface RenovateConfig
defaultBranch?: string;
branchList?: string[];
description?: string | string[];
force?: RenovateConfig;
errors?: ValidationMessage[];
gitAuthor?: string;

View file

@ -22,10 +22,10 @@ export function mergeChildConfig<T, TChild>(
) {
logger.trace(`mergeable option: ${option.name}`);
if (option.name === 'constraints') {
config[option.name] = Object.assign(
parentConfig[option.name],
childConfig[option.name]
);
config[option.name] = {
...parentConfig[option.name],
...childConfig[option.name],
};
} else if (option.type === 'array') {
config[option.name] = (parentConfig[option.name] as unknown[]).concat(
config[option.name]
@ -42,5 +42,5 @@ export function mergeChildConfig<T, TChild>(
);
}
}
return Object.assign(config, config.force);
return { ...config, ...config.force };
}

View file

@ -19,13 +19,13 @@ const http = new Http(id);
// We calculate auth at this datasource layer so that we can know whether it's safe to cache or not
/**
 * Resolve the HTTP options (credentials) to use for the given registry URL.
 *
 * Looks up host rules for this datasource's host type; when both a username
 * and a password are configured, they are attached to the returned options.
 *
 * NOTE: the scraped diff contained both the pre- and post-refactor versions
 * of two lines (duplicate `opts` declarations); this is the post-refactor
 * (object-spread) version of the function.
 */
function getHostOpts(url: string): HttpOptions {
  let opts: HttpOptions = {};
  const { username, password } = hostRules.find({
    hostType: id,
    url,
  });
  // Only attach credentials when both parts are present — a lone username
  // or password is treated as no auth at all.
  if (username && password) {
    opts = { ...opts, username, password };
  }
  return opts;
}

View file

@ -27,14 +27,14 @@ export interface ParsedLine {
}
export function parseLine(line: string): ParsedLine {
const result: ParsedLine = {};
let result: ParsedLine = {};
if (!line) {
return result;
}
for (const regex of Object.values(regexMappings)) {
const match = regex.exec(line.replace(/#.*$/, ''));
if (match?.groups) {
Object.assign(result, match.groups);
result = { ...result, ...match.groups };
}
}

View file

@ -469,7 +469,7 @@ export function parseGradle(
initVars: PackageVariables = {},
packageFile?: string
): ParseGradleResult {
const vars: PackageVariables = { ...initVars };
let vars: PackageVariables = { ...initVars };
const deps: PackageDependency<ManagerData>[] = [];
const urls = [];
@ -481,7 +481,7 @@ export function parseGradle(
deps.push(...matchResult.deps);
}
if (matchResult?.vars) {
Object.assign(vars, matchResult.vars);
vars = { ...vars, ...matchResult.vars };
}
if (matchResult?.urls) {
urls.push(...matchResult.urls);

View file

@ -307,14 +307,14 @@ export async function extractPackageFile(
packageJson[depType] as NpmPackageDependency
)) {
const depName = parseDepName(depType, key);
const dep: PackageDependency = {
let dep: PackageDependency = {
depType,
depName,
};
if (depName !== key) {
dep.managerData = { key };
}
Object.assign(dep, extractDependency(depType, depName, val));
dep = { ...dep, ...extractDependency(depType, depName, val) };
if (depName === 'node') {
// This is a special case for Node.js to group it together with other managers
dep.commitMessageTopic = 'Node.js';

View file

@ -139,11 +139,12 @@ export async function initRepo({
);
config.defaultBranch = info.mainbranch;
Object.assign(config, {
config = {
...config,
owner: info.owner,
mergeMethod: info.mergeMethod,
has_issues: info.has_issues,
});
};
logger.debug(`${repository} owner = ${config.owner}`);
} catch (err) /* istanbul ignore next */ {

View file

@ -16,7 +16,7 @@ export function getChildProcessEnv(
): NodeJS.ProcessEnv {
const env: NodeJS.ProcessEnv = {};
if (getAdminConfig().trustLevel === 'high') {
return Object.assign(env, process.env);
return { ...env, ...process.env };
}
const envVars = [...basicEnvVars, ...customEnvVars];
envVars.forEach((envVar) => {

View file

@ -23,7 +23,7 @@ export async function getUpdatedPackageFiles(
logger.debug(
`manager.getUpdatedPackageFiles() reuseExistinbranch=${reuseExistingBranch}`
);
const updatedFileContents: Record<string, string> = {};
let updatedFileContents: Record<string, string> = {};
const nonUpdatedFileContents: Record<string, string> = {};
const packageFileManagers: Record<string, string> = {};
const packageFileUpdatedDeps: Record<string, string[]> = {};
@ -91,7 +91,7 @@ export async function getUpdatedPackageFiles(
reuseExistingBranch: false,
});
}
Object.assign(updatedFileContents, files);
updatedFileContents = { ...updatedFileContents, ...files };
}
} else {
const bumpPackageVersion = get(manager, 'bumpPackageVersion');

View file

@ -67,7 +67,7 @@ async function deleteBranchSilently(branchName: string): Promise<void> {
export async function processBranch(
branchConfig: BranchConfig
): Promise<ProcessBranchResult> {
const config: BranchConfig = { ...branchConfig };
let config: BranchConfig = { ...branchConfig };
const dependencies = config.upgrades
.map((upgrade) => upgrade.depName)
.filter((v) => v) // remove nulls (happens for lock file maintenance)
@ -310,7 +310,7 @@ export async function processBranch(
logger.debug('Manual rebase requested via Dependency Dashboard');
config.reuseExistingBranch = false;
} else {
Object.assign(config, await shouldReuseExistingBranch(config));
config = { ...config, ...(await shouldReuseExistingBranch(config)) };
}
logger.debug(`Using reuseExistingBranch: ${config.reuseExistingBranch}`);
const res = await getUpdatedPackageFiles(config);
@ -318,7 +318,7 @@ export async function processBranch(
if (res.artifactErrors && config.artifactErrors) {
res.artifactErrors = config.artifactErrors.concat(res.artifactErrors);
}
Object.assign(config, res);
config = { ...config, ...res };
if (config.updatedPackageFiles?.length) {
logger.debug(
`Updated ${config.updatedPackageFiles.length} package files`

View file

@ -176,7 +176,7 @@ export async function detectVulnerabilityAlerts(
datasource === datasourcePypi.id
? `==${val.firstPatchedVersion}`
: val.firstPatchedVersion;
const matchRule: PackageRule = {
let matchRule: PackageRule = {
matchDatasources: [datasource],
matchPackageNames: [depName],
matchCurrentVersion,
@ -202,14 +202,15 @@ export async function detectVulnerabilityAlerts(
matchRule.enabled = false;
} else {
// Remediate only direct dependencies
Object.assign(matchRule, {
matchRule = {
...matchRule,
allowedVersions,
prBodyNotes,
isVulnerabilityAlert: true,
force: {
...config.vulnerabilityAlerts,
},
});
};
}
alertPackageRules.push(matchRule);
}

View file

@ -19,7 +19,7 @@ async function fetchDepUpdates(
packageFileConfig: ManagerConfig & PackageFile,
indep: PackageDependency
): Promise<PackageDependency> {
const dep = clone(indep);
let dep = clone(indep);
dep.updates = [];
if (dep.skipReason) {
return dep;
@ -39,7 +39,10 @@ async function fetchDepUpdates(
dep.skipReason = SkipReason.Disabled;
} else {
if (depConfig.datasource) {
Object.assign(dep, await lookupUpdates(depConfig as LookupUpdateConfig));
dep = {
...dep,
...(await lookupUpdates(depConfig as LookupUpdateConfig)),
};
} else {
dep.updates = await getPackageUpdates(manager, depConfig);
}

View file

@ -326,10 +326,12 @@ export function generateBranchConfig(
config.blockedByPin = config.upgrades.every(
(upgrade) => upgrade.blockedByPin
);
config.constraints = Object.assign(
{},
...config.upgrades.map((upgrade) => upgrade.constraints)
);
config.constraints = {};
for (const upgrade of config.upgrades || []) {
if (upgrade.constraints) {
config.constraints = { ...config.constraints, ...upgrade.constraints };
}
}
const tableRows = config.upgrades
.map((upgrade) => getTableValues(upgrade))
.filter(Boolean);

View file

@ -35,7 +35,7 @@ export interface BranchUpgradeConfig
excludeCommitPaths?: string[];
githubName?: string;
group?: GroupConfig;
constraints?: Record<string, string>;
groupName?: string;
groupSlug?: string;
language?: string;
@ -110,7 +110,6 @@ export interface BranchConfig
releaseTimestamp?: string;
forceCommit?: boolean;
rebaseRequested?: boolean;
res?: ProcessBranchResult;
upgrades: BranchUpgradeConfig[];
packageFiles?: Record<string, PackageFile[]>;