feat(manager/azure-pipelines): Deployment jobs support (#26048)

Mike 2023-12-11 10:24:51 -05:00 committed by GitHub
parent f88353c671
commit c39aa7e5b1
7 changed files with 359 additions and 109 deletions

lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml

@@ -1,6 +0,0 @@
jobs:
  - job: job_one
    steps:
      - task: Bash@3
        inputs:
          script: 'echo Hello World'

lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml

@@ -1,8 +0,0 @@
stages:
  - stage: stage_one
    jobs:
      - job: job_one
        steps:
          - task: Bash@3
            inputs:
              script: 'echo Hello World'

lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml

@@ -1,4 +0,0 @@
steps:
  - task: Bash@3
    inputs:
      script: 'echo Hello World'

lib/modules/manager/azure-pipelines/extract.spec.ts

@@ -1,3 +1,4 @@
+import { codeBlock } from 'common-tags';
 import { Fixtures } from '../../../../test/fixtures';
 import { GlobalConfig } from '../../../config/global';
 import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks';
@@ -15,9 +16,6 @@ const azurePipelines = Fixtures.get('azure-pipelines.yaml');
 const azurePipelinesNoDependency = Fixtures.get(
   'azure-pipelines-no-dependency.yaml',
 );
-const azurePipelinesStages = Fixtures.get('azure-pipelines-stages.yaml');
-const azurePipelinesJobs = Fixtures.get('azure-pipelines-jobs.yaml');
-const azurePipelinesSteps = Fixtures.get('azure-pipelines-steps.yaml');

 describe('modules/manager/azure-pipelines/extract', () => {
   afterEach(() => {
@@ -58,12 +56,11 @@ describe('modules/manager/azure-pipelines/extract', () => {
       ).toBeNull();
     });

-    it('should return null when reference is not defined', () => {
+    it('should return null when reference is not specified', () => {
       expect(
         extractRepository({
           type: 'github',
           name: 'user/repo',
-          ref: null,
         }),
       ).toBeNull();
     });
@@ -138,10 +135,6 @@ describe('modules/manager/azure-pipelines/extract', () => {
         datasource: 'docker',
       });
     });
-
-    it('should return null if image field is missing', () => {
-      expect(extractContainer({ image: null })).toBeNull();
-    });
   });

   describe('extractAzurePipelinesTasks()', () => {
@@ -191,11 +184,196 @@ describe('modules/manager/azure-pipelines/extract', () => {
       ).toBeNull();
     });

+    it('should extract deployment jobs runonce', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              runOnce:
+                deploy:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs on failure', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              runOnce:
+                on:
+                  failure:
+                    steps:
+                      - task: Bash@3
+                        inputs:
+                          script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs on success', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              runOnce:
+                on:
+                  success:
+                    steps:
+                      - task: Bash@3
+                        inputs:
+                          script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs postroute', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              runOnce:
+                postRouteTraffic:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs predeploy', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              runOnce:
+                preDeploy:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs route', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              runOnce:
+                routeTraffic:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs rolling', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              rolling:
+                deploy:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs canary', () => {
+      const packageFile = codeBlock`
+        jobs:
+          - deployment: deployment_one
+            strategy:
+              canary:
+                deploy:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
     it('should extract stages', () => {
-      const res = extractPackageFile(
-        azurePipelinesStages,
-        azurePipelinesFilename,
-      );
+      const packageFile = codeBlock`
+        stages:
+          - stage: stage_one
+            jobs:
+              - job: job_one
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res?.deps).toEqual([
         {
           depName: 'Bash',
@@ -206,10 +384,15 @@ describe('modules/manager/azure-pipelines/extract', () => {
     });

     it('should extract jobs', () => {
-      const res = extractPackageFile(
-        azurePipelinesJobs,
-        azurePipelinesFilename,
-      );
+      const packageFile = codeBlock`
+        jobs:
+          - job: job_one
+            steps:
+              - task: Bash@3
+                inputs:
+                  script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res?.deps).toEqual([
         {
           depName: 'Bash',
@@ -220,10 +403,13 @@ describe('modules/manager/azure-pipelines/extract', () => {
     });

     it('should extract steps', () => {
-      const res = extractPackageFile(
-        azurePipelinesSteps,
-        azurePipelinesFilename,
-      );
+      const packageFile = codeBlock`
+        steps:
+          - task: Bash@3
+            inputs:
+              script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res?.deps).toEqual([
         {
           depName: 'Bash',
@@ -234,10 +420,11 @@ describe('modules/manager/azure-pipelines/extract', () => {
     });

     it('should return null when task alias used', () => {
-      const content = `
-      steps:
-      - bash: 'echo Hello World'`;
-      const res = extractPackageFile(content, azurePipelinesFilename);
+      const packageFile = codeBlock`
+        steps:
+          - bash: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res).toBeNull();
     });
   });
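A note on the fixture inlining above: `codeBlock` from `common-tags` strips the common leading indentation from a template literal, which is why YAML embedded in deeply indented test code still parses as top-level YAML. A rough sketch of that behavior (illustrative only, not part of this commit):

import { codeBlock } from 'common-tags';

// codeBlock removes the shared leading indentation and the surrounding
// newlines, so the literal below evaluates to valid top-level YAML.
const yaml = codeBlock`
  steps:
    - task: Bash@3
`;

console.log(yaml);
// Expected (roughly): "steps:\n  - task: Bash@3"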

lib/modules/manager/azure-pipelines/extract.ts

@@ -3,12 +3,21 @@ import { logger } from '../../../logger';
 import { coerceArray } from '../../../util/array';
 import { regEx } from '../../../util/regex';
 import { joinUrlParts } from '../../../util/url';
-import { parseSingleYaml } from '../../../util/yaml';
 import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks';
 import { GitTagsDatasource } from '../../datasource/git-tags';
 import { getDep } from '../dockerfile/extract';
 import type { PackageDependency, PackageFileContent } from '../types';
-import type { AzurePipelines, Container, Repository } from './types';
+import {
+  AzurePipelines,
+  AzurePipelinesYaml,
+  Container,
+  Deploy,
+  Deployment,
+  Job,
+  Jobs,
+  Repository,
+  Step,
+} from './schema';

 const AzurePipelinesTaskRegex = regEx(/^(?<name>[^@]+)@(?<version>.*)$/);
@@ -68,10 +77,6 @@
 export function extractContainer(
   container: Container,
 ): PackageDependency | null {
-  if (!container.image) {
-    return null;
-  }
-
   const dep = getDep(container.image);
   logger.debug(
     {
@@ -104,15 +109,60 @@
   content: string,
   packageFile: string,
 ): AzurePipelines | null {
-  let pkg: AzurePipelines | null = null;
-  try {
-    pkg = parseSingleYaml(content, { json: true }) as AzurePipelines;
-  } catch (err) /* istanbul ignore next */ {
-    logger.debug({ packageFile, err }, 'Error parsing azure-pipelines content');
-    return null;
-  }
-
-  return pkg;
-}
+  const res = AzurePipelinesYaml.safeParse(content);
+  if (res.success) {
+    return res.data;
+  } else {
+    logger.debug(
+      { err: res.error, packageFile },
+      'Error parsing azure-pipelines content.',
+    );
+  }
+  return null;
+}
+
+function extractSteps(
+  steps: Step[] | undefined,
+): PackageDependency<Record<string, any>>[] {
+  const deps = [];
+  for (const step of coerceArray(steps)) {
+    const task = extractAzurePipelinesTasks(step.task);
+    if (task) {
+      deps.push(task);
+    }
+  }
+  return deps;
+}
+
+function extractJob(job: Job | undefined): PackageDependency[] {
+  return extractSteps(job?.steps);
+}
+
+function extractDeploy(deploy: Deploy | undefined): PackageDependency[] {
+  const deps = extractJob(deploy?.deploy);
+  deps.push(...extractJob(deploy?.postRouteTraffic));
+  deps.push(...extractJob(deploy?.preDeploy));
+  deps.push(...extractJob(deploy?.routeTraffic));
+  deps.push(...extractJob(deploy?.on?.failure));
+  deps.push(...extractJob(deploy?.on?.success));
+  return deps;
+}
+
+function extractJobs(jobs: Jobs | undefined): PackageDependency[] {
+  const deps: PackageDependency[] = [];
+  for (const jobOrDeployment of coerceArray(jobs)) {
+    const deployment = jobOrDeployment as Deployment;
+    if (deployment.strategy) {
+      deps.push(...extractDeploy(deployment.strategy.canary));
+      deps.push(...extractDeploy(deployment.strategy.rolling));
+      deps.push(...extractDeploy(deployment.strategy.runOnce));
+      continue;
+    }
+
+    const job = jobOrDeployment as Job;
+    deps.push(...extractJob(job));
+  }
+  return deps;
+}

 export function extractPackageFile(
@@ -142,31 +192,11 @@
   }

   for (const { jobs } of coerceArray(pkg.stages)) {
-    for (const { steps } of coerceArray(jobs)) {
-      for (const step of coerceArray(steps)) {
-        const task = extractAzurePipelinesTasks(step.task);
-        if (task) {
-          deps.push(task);
-        }
-      }
-    }
+    deps.push(...extractJobs(jobs));
   }

-  for (const { steps } of coerceArray(pkg.jobs)) {
-    for (const step of coerceArray(steps)) {
-      const task = extractAzurePipelinesTasks(step.task);
-      if (task) {
-        deps.push(task);
-      }
-    }
-  }
-
-  for (const step of coerceArray(pkg.steps)) {
-    const task = extractAzurePipelinesTasks(step.task);
-    if (task) {
-      deps.push(task);
-    }
-  }
+  deps.push(...extractJobs(pkg.jobs));
+  deps.push(...extractSteps(pkg.steps));

   if (!deps.length) {
     return null;
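Because the stage loop now funnels through extractJobs, a deployment job is extracted whether it appears at the top level or nested inside a stage. A minimal sketch of the resulting behavior (hypothetical usage; the file name and expected output are assumptions, not taken from this commit):

import { codeBlock } from 'common-tags';
import { extractPackageFile } from './extract';

// A deployment job nested inside a stage; previously only plain `job`
// entries under stages were walked for tasks.
const content = codeBlock`
  stages:
    - stage: stage_one
      jobs:
        - deployment: deployment_one
          strategy:
            runOnce:
              deploy:
                steps:
                  - task: Bash@3
`;

// Expected to surface Bash@3 via the azure-pipelines-tasks datasource.
console.log(extractPackageFile(content, 'azure-pipelines.yaml')?.deps);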

lib/modules/manager/azure-pipelines/schema.ts

@@ -0,0 +1,81 @@
import { z } from 'zod';
import { LooseArray, Yaml } from '../../../util/schema-utils';

export const Step = z.object({
  task: z.string(),
});
export type Step = z.infer<typeof Step>;

export const Job = z.object({
  steps: LooseArray(Step),
});
export type Job = z.infer<typeof Job>;

export const Deploy = z
  .object({
    deploy: Job,
    preDeploy: Job,
    routeTraffic: Job,
    postRouteTraffic: Job,
    on: z
      .object({
        failure: Job,
        success: Job,
      })
      .partial(),
  })
  .partial();
export type Deploy = z.infer<typeof Deploy>;

export const Deployment = z
  .object({
    strategy: z
      .object({
        runOnce: Deploy,
        rolling: Deploy,
        canary: Deploy,
      })
      .partial(),
  })
  .partial();
export type Deployment = z.infer<typeof Deployment>;

export const Jobs = LooseArray(z.union([Job, Deployment]));
export type Jobs = z.infer<typeof Jobs>;

export const Stage = z.object({
  jobs: Jobs,
});
export type Stage = z.infer<typeof Stage>;

export const Container = z.object({
  image: z.string(),
});
export type Container = z.infer<typeof Container>;

export const Repository = z.object({
  type: z.enum(['git', 'github', 'bitbucket']),
  name: z.string(),
  ref: z.string().optional(),
});
export type Repository = z.infer<typeof Repository>;

export const Resources = z
  .object({
    repositories: LooseArray(Repository),
    containers: LooseArray(Container),
  })
  .partial();
export type Resources = z.infer<typeof Resources>;

export const AzurePipelines = z
  .object({
    resources: Resources,
    stages: LooseArray(Stage),
    jobs: Jobs,
    steps: LooseArray(Step),
  })
  .partial();
export type AzurePipelines = z.infer<typeof AzurePipelines>;

export const AzurePipelinesYaml = Yaml.pipe(AzurePipelines);
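For reference, a sketch of how the new schema behaves on partially invalid input, assuming LooseArray (from schema-utils) drops entries that fail validation and Yaml parses the raw string before validating:

import { AzurePipelinesYaml } from './schema';

// safeParse never throws: the string is parsed as YAML first, then
// validated against the AzurePipelines shape.
const res = AzurePipelinesYaml.safeParse(`
steps:
  - task: Bash@3
  - bash: 'echo no task key'
`);

if (res.success) {
  // The bare `bash` step has no `task` string, so LooseArray(Step)
  // drops it rather than rejecting the whole document.
  console.log(res.data.steps); // [{ task: 'Bash@3' }]
}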

lib/modules/manager/azure-pipelines/types.ts

@@ -1,30 +0,0 @@
export interface Container {
  image?: string | null;
}

export interface Repository {
  type: 'git' | 'github' | 'bitbucket';
  name: string;
  ref?: string | null;
}

export interface Resources {
  repositories?: Repository[];
  containers?: Container[];
}

export interface AzurePipelines {
  resources?: Resources;
  stages?: Stage[];
  jobs?: Job[];
  steps?: Step[];
}

export interface Stage {
  jobs?: Job[];
}

export interface Job {
  steps?: Step[];
}

export interface Step {
  task: string;
}