refactor(github): Remove old Github platform wrappers (#6203)

* refactor(github): Remove old Github platform wrappers

* Refactor 'util/cache/run' imports

* Fix pod http client

* Fix test

* refactor(pod): Split request functions

Co-authored-by: Michael Kriese <michael.kriese@visualon.de>
Co-authored-by: Rhys Arkins <rhys@arkins.net>
This commit is contained in:
Sergio Zharinov 2020-05-16 12:53:11 +04:00 committed by GitHub
parent f8a418f82a
commit fcced24a6a
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
20 changed files with 1333 additions and 1886 deletions

View file

@@ -27,3 +27,19 @@ Object {
"sourceUrl": "https://github.com/some/dep", "sourceUrl": "https://github.com/some/dep",
} }
`; `;
exports[`datasource/github-releases getReleases returns releases 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/releases?per_page=100",
},
]
`;

View file

@@ -1,26 +1,41 @@
import { api } from '../../platform/github/gh-got-wrapper'; import * as httpMock from '../../../test/httpMock';
import * as globalCache from '../../util/cache/global'; import * as globalCache from '../../util/cache/global';
import * as runCache from '../../util/cache/run';
import * as _hostRules from '../../util/host-rules';
import * as github from '.'; import * as github from '.';
jest.mock('../../platform/github/gh-got-wrapper');
jest.mock('../../util/got');
jest.mock('../../util/host-rules'); jest.mock('../../util/host-rules');
const hostRules: any = _hostRules;
const ghGot: any = api.get; const githubApiHost = 'https://api.github.com';
describe('datasource/github-releases', () => { describe('datasource/github-releases', () => {
beforeEach(() => globalCache.rmAll()); beforeEach(async () => {
await globalCache.rmAll();
hostRules.hosts = jest.fn(() => []);
hostRules.find.mockReturnValue({
token: 'some-token',
});
httpMock.setup();
});
afterEach(() => {
httpMock.reset();
runCache.clear();
});
describe('getReleases', () => { describe('getReleases', () => {
beforeAll(() => globalCache.rmAll());
it('returns releases', async () => { it('returns releases', async () => {
const body = [ httpMock
{ tag_name: 'a', published_at: '2020-03-09T13:00:00Z' }, .scope(githubApiHost)
{ tag_name: 'v', published_at: '2020-03-09T12:00:00Z' }, .get('/repos/some/dep/releases?per_page=100')
{ tag_name: '1.0.0', published_at: '2020-03-09T11:00:00Z' }, .reply(200, [
{ tag_name: 'v1.1.0', published_at: '2020-03-09T10:00:00Z' }, { tag_name: 'a', published_at: '2020-03-09T13:00:00Z' },
]; { tag_name: 'v', published_at: '2020-03-09T12:00:00Z' },
ghGot.mockReturnValueOnce({ headers: {}, body }); { tag_name: '1.0.0', published_at: '2020-03-09T11:00:00Z' },
{ tag_name: 'v1.1.0', published_at: '2020-03-09T10:00:00Z' },
]);
const res = await github.getReleases({ const res = await github.getReleases({
lookupName: 'some/dep', lookupName: 'some/dep',
}); });
@@ -29,6 +44,7 @@ describe('datasource/github-releases', () => {
expect( expect(
res.releases.find((release) => release.version === 'v1.1.0') res.releases.find((release) => release.version === 'v1.1.0')
).toBeDefined(); ).toBeDefined();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
}); });
}); });

View file

@@ -1,14 +1,14 @@
import { logger } from '../../logger'; import { logger } from '../../logger';
import { api } from '../../platform/github/gh-got-wrapper';
import * as globalCache from '../../util/cache/global'; import * as globalCache from '../../util/cache/global';
import { GithubHttp } from '../../util/http/github';
import { GetReleasesConfig, ReleaseResult } from '../common'; import { GetReleasesConfig, ReleaseResult } from '../common';
const { get: ghGot } = api;
export const id = 'github-releases'; export const id = 'github-releases';
const cacheNamespace = 'datasource-github-releases'; const cacheNamespace = 'datasource-github-releases';
const http = new GithubHttp();
type GithubRelease = { type GithubRelease = {
tag_name: string; tag_name: string;
published_at: string; published_at: string;
@@ -38,7 +38,7 @@ export async function getReleases({
} }
try { try {
const url = `https://api.github.com/repos/${repo}/releases?per_page=100`; const url = `https://api.github.com/repos/${repo}/releases?per_page=100`;
const res = await ghGot<GithubRelease[]>(url, { const res = await http.getJson<GithubRelease[]>(url, {
paginate: true, paginate: true,
}); });
githubReleases = res.body; githubReleases = res.body;

View file

@@ -1,5 +1,112 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP // Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`datasource/github-tags getDigest returns commit digest 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/git/refs/tags/v1.2.0",
},
]
`;
exports[`datasource/github-tags getDigest returns digest 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/commits?per_page=1",
},
]
`;
exports[`datasource/github-tags getDigest returns null for missed tagged digest 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/git/refs/tags/v1.2.0",
},
]
`;
exports[`datasource/github-tags getDigest returns null if no token 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/commits?per_page=1",
},
]
`;
exports[`datasource/github-tags getDigest returns tagged commit digest 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/git/refs/tags/v1.2.0",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/some-url",
},
]
`;
exports[`datasource/github-tags getDigest warns if unknown ref 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep/git/refs/tags/v1.2.0",
},
]
`;
exports[`datasource/github-tags getReleases returns tags 1`] = ` exports[`datasource/github-tags getReleases returns tags 1`] = `
Object { Object {
"releases": Array [ "releases": Array [
@@ -15,3 +122,19 @@ Object {
"sourceUrl": "https://github.com/some/dep2", "sourceUrl": "https://github.com/some/dep2",
} }
`; `;
exports[`datasource/github-tags getReleases returns tags 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token some-token",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/some/dep2/tags?per_page=100",
},
]
`;

View file

@@ -1,75 +1,111 @@
import { api } from '../../platform/github/gh-got-wrapper'; import * as httpMock from '../../../test/httpMock';
import * as globalCache from '../../util/cache/global'; import * as globalCache from '../../util/cache/global';
import * as runCache from '../../util/cache/run'; import * as runCache from '../../util/cache/run';
import * as _hostRules from '../../util/host-rules'; import * as _hostRules from '../../util/host-rules';
import * as github from '.'; import * as github from '.';
jest.mock('../../platform/github/gh-got-wrapper');
jest.mock('../../util/got');
jest.mock('../../util/host-rules'); jest.mock('../../util/host-rules');
const ghGot: any = api.get;
const hostRules: any = _hostRules; const hostRules: any = _hostRules;
const githubApiHost = 'https://api.github.com';
describe('datasource/github-tags', () => { describe('datasource/github-tags', () => {
beforeEach(() => globalCache.rmAll()); beforeEach(async () => {
httpMock.setup();
await globalCache.rmAll();
});
afterEach(() => {
runCache.clear();
httpMock.reset();
});
describe('getDigest', () => { describe('getDigest', () => {
const lookupName = 'some/dep';
const tag = 'v1.2.0';
beforeEach(() => { beforeEach(() => {
jest.resetAllMocks(); jest.resetAllMocks();
hostRules.hosts = jest.fn(() => []); hostRules.hosts = jest.fn(() => []);
runCache.clear(); hostRules.find.mockReturnValue({
token: 'some-token',
});
return globalCache.rmAll(); return globalCache.rmAll();
}); });
it('returns null if no token', async () => { it('returns null if no token', async () => {
ghGot.mockReturnValueOnce({ body: [] }); httpMock
const res = await github.getDigest({ lookupName: 'some/dep' }, null); .scope(githubApiHost)
.get(`/repos/${lookupName}/commits?per_page=1`)
.reply(200, []);
const res = await github.getDigest({ lookupName }, null);
expect(res).toBeNull(); expect(res).toBeNull();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('returns digest', async () => { it('returns digest', async () => {
ghGot.mockReturnValueOnce({ body: [{ sha: 'abcdef' }] }); httpMock
const res = await github.getDigest({ lookupName: 'some/dep' }, null); .scope(githubApiHost)
.get(`/repos/${lookupName}/commits?per_page=1`)
.reply(200, [{ sha: 'abcdef' }]);
const res = await github.getDigest({ lookupName }, null);
expect(res).toBe('abcdef'); expect(res).toBe('abcdef');
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('returns commit digest', async () => { it('returns commit digest', async () => {
ghGot.mockReturnValueOnce({ httpMock
body: { object: { type: 'commit', sha: 'ddd111' } }, .scope(githubApiHost)
}); .get(`/repos/${lookupName}/git/refs/tags/${tag}`)
const res = await github.getDigest({ lookupName: 'some/dep' }, 'v1.2.0'); .reply(200, { object: { type: 'commit', sha: 'ddd111' } });
const res = await github.getDigest({ lookupName }, tag);
expect(res).toBe('ddd111'); expect(res).toBe('ddd111');
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('returns tagged commit digest', async () => { it('returns tagged commit digest', async () => {
ghGot.mockReturnValueOnce({ httpMock
body: { object: { type: 'tag', url: 'some-url' } }, .scope(githubApiHost)
}); .get(`/repos/${lookupName}/git/refs/tags/${tag}`)
ghGot.mockReturnValueOnce({ .reply(200, {
body: { object: { type: 'commit', sha: 'ddd111' } }, object: { type: 'tag', url: `${githubApiHost}/some-url` },
}); })
const res = await github.getDigest({ lookupName: 'some/dep' }, 'v1.2.0'); .get('/some-url')
.reply(200, { object: { type: 'commit', sha: 'ddd111' } });
const res = await github.getDigest({ lookupName }, tag);
expect(res).toBe('ddd111'); expect(res).toBe('ddd111');
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('warns if unknown ref', async () => { it('warns if unknown ref', async () => {
ghGot.mockReturnValueOnce({ httpMock
body: { object: { sha: 'ddd111' } }, .scope(githubApiHost)
}); .get(`/repos/${lookupName}/git/refs/tags/${tag}`)
const res = await github.getDigest({ lookupName: 'some/dep' }, 'v1.2.0'); .reply(200, { object: { sha: 'ddd111' } });
const res = await github.getDigest({ lookupName }, tag);
expect(res).toBeNull(); expect(res).toBeNull();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('returns null for missed tagged digest', async () => { it('returns null for missed tagged digest', async () => {
ghGot.mockReturnValueOnce({}); httpMock
.scope(githubApiHost)
.get(`/repos/${lookupName}/git/refs/tags/${tag}`)
.reply(200, {});
const res = await github.getDigest({ lookupName: 'some/dep' }, 'v1.2.0'); const res = await github.getDigest({ lookupName: 'some/dep' }, 'v1.2.0');
expect(res).toBeNull(); expect(res).toBeNull();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
}); });
describe('getReleases', () => { describe('getReleases', () => {
const lookupName = 'some/dep2';
beforeAll(() => globalCache.rmAll()); beforeAll(() => globalCache.rmAll());
it('returns tags', async () => { it('returns tags', async () => {
const body = [{ name: 'v1.0.0' }, { name: 'v1.1.0' }]; const body = [{ name: 'v1.0.0' }, { name: 'v1.1.0' }];
ghGot.mockReturnValueOnce({ headers: {}, body }); httpMock
const res = await github.getReleases({ .scope(githubApiHost)
lookupName: 'some/dep2', .get(`/repos/${lookupName}/tags?per_page=100`)
}); .reply(200, body);
const res = await github.getReleases({ lookupName });
expect(res).toMatchSnapshot(); expect(res).toMatchSnapshot();
expect(res.releases).toHaveLength(2); expect(res.releases).toHaveLength(2);
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
}); });
}); });

View file

@@ -1,17 +1,25 @@
import { logger } from '../../logger'; import { logger } from '../../logger';
import { api } from '../../platform/github/gh-got-wrapper';
import * as globalCache from '../../util/cache/global'; import * as globalCache from '../../util/cache/global';
import { GithubHttp } from '../../util/http/github';
import { DigestConfig, GetReleasesConfig, ReleaseResult } from '../common'; import { DigestConfig, GetReleasesConfig, ReleaseResult } from '../common';
const { get: ghGot } = api;
export const id = 'github-tags'; export const id = 'github-tags';
const http = new GithubHttp();
const cacheNamespace = 'datasource-github-tags'; const cacheNamespace = 'datasource-github-tags';
function getCacheKey(repo: string, type: string): string { function getCacheKey(repo: string, type: string): string {
return `${repo}:${type}`; return `${repo}:${type}`;
} }
interface TagResponse {
object: {
type: string;
url: string;
sha: string;
};
}
async function getTagCommit( async function getTagCommit(
githubRepo: string, githubRepo: string,
tag: string tag: string
@@ -27,11 +35,11 @@ async function getTagCommit(
let digest: string; let digest: string;
try { try {
const url = `https://api.github.com/repos/${githubRepo}/git/refs/tags/${tag}`; const url = `https://api.github.com/repos/${githubRepo}/git/refs/tags/${tag}`;
const res = (await ghGot(url)).body.object; const res = (await http.getJson<TagResponse>(url)).body.object;
if (res.type === 'commit') { if (res.type === 'commit') {
digest = res.sha; digest = res.sha;
} else if (res.type === 'tag') { } else if (res.type === 'tag') {
digest = (await ghGot(res.url)).body.object.sha; digest = (await http.getJson<TagResponse>(res.url)).body.object.sha;
} else { } else {
logger.warn({ res }, 'Unknown git tag refs type'); logger.warn({ res }, 'Unknown git tag refs type');
} }
@@ -79,7 +87,8 @@ export async function getDigest(
let digest: string; let digest: string;
try { try {
const url = `https://api.github.com/repos/${githubRepo}/commits?per_page=1`; const url = `https://api.github.com/repos/${githubRepo}/commits?per_page=1`;
digest = (await ghGot(url)).body[0].sha; const res = await http.getJson<{ sha: string }[]>(url);
digest = res.body[0].sha;
} catch (err) { } catch (err) {
logger.debug( logger.debug(
{ githubRepo, err }, { githubRepo, err },
@@ -129,7 +138,7 @@ export async function getReleases({
}[]; }[];
versions = ( versions = (
await ghGot<GitHubTag>(url, { await http.getJson<GitHubTag>(url, {
paginate: true, paginate: true,
}) })
).body.map((o) => o.name); ).body.map((o) => o.name);

View file

@@ -0,0 +1,107 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP
exports[`datasource/cocoapods getReleases processes real data from CDN 1`] = `
Array [
Object {
"headers": Object {
"accept-encoding": "gzip, deflate",
"host": "cdn.cocoapods.org",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://cdn.cocoapods.org/all_pods_versions_a_c_b.txt",
},
]
`;
exports[`datasource/cocoapods getReleases processes real data from Github 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/Artsy/Specs/contents/Specs/foo",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/Artsy/Specs/contents/Specs/a/c/b/foo",
},
]
`;
exports[`datasource/cocoapods getReleases returns null for 401 1`] = `
Array [
Object {
"headers": Object {
"accept-encoding": "gzip, deflate",
"host": "cdn.cocoapods.org",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://cdn.cocoapods.org/all_pods_versions_a_c_b.txt",
},
]
`;
exports[`datasource/cocoapods getReleases returns null for 404 1`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/foo/bar/contents/Specs/foo",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/foo/bar/contents/Specs/a/c/b/foo",
},
]
`;
exports[`datasource/cocoapods getReleases returns null for unknown error 1`] = `
Array [
Object {
"headers": Object {
"accept-encoding": "gzip, deflate",
"host": "cdn.cocoapods.org",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://cdn.cocoapods.org/all_pods_versions_a_c_b.txt",
},
]
`;
exports[`datasource/cocoapods getReleases throws for 429 1`] = `
Array [
Object {
"headers": Object {
"accept-encoding": "gzip, deflate",
"host": "cdn.cocoapods.org",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://cdn.cocoapods.org/all_pods_versions_a_c_b.txt",
},
]
`;

View file

@@ -1,16 +1,10 @@
import { getPkgReleases } from '..'; import { getPkgReleases } from '..';
import { mocked } from '../../../test/util'; import * as httpMock from '../../../test/httpMock';
import { GotResponse } from '../../platform';
import { api as _api } from '../../platform/github/gh-got-wrapper';
import * as globalCache from '../../util/cache/global'; import * as globalCache from '../../util/cache/global';
import * as runCache from '../../util/cache/run'; import * as runCache from '../../util/cache/run';
import * as rubyVersioning from '../../versioning/ruby'; import * as rubyVersioning from '../../versioning/ruby';
import * as pod from '.'; import * as pod from '.';
const api = mocked(_api);
jest.mock('../../platform/github/gh-got-wrapper');
const config = { const config = {
versioning: rubyVersioning.id, versioning: rubyVersioning.id,
datasource: pod.id, datasource: pod.id,
@@ -18,16 +12,23 @@ const config = { const config = {
registryUrls: [], registryUrls: [],
}; };
const githubApiHost = 'https://api.github.com';
const cocoapodsHost = 'https://cdn.cocoapods.org';
describe('datasource/cocoapods', () => { describe('datasource/cocoapods', () => {
describe('getReleases', () => { describe('getReleases', () => {
beforeEach(() => { beforeEach(() => {
jest.resetAllMocks(); jest.resetAllMocks();
runCache.clear(); httpMock.setup();
return globalCache.rmAll(); return globalCache.rmAll();
}); });
afterEach(() => {
httpMock.reset();
runCache.clear();
});
it('returns null for invalid inputs', async () => { it('returns null for invalid inputs', async () => {
api.get.mockResolvedValueOnce(null);
expect( expect(
await getPkgReleases({ await getPkgReleases({
datasource: pod.id, datasource: pod.id,
@@ -37,77 +38,53 @@ describe('datasource/cocoapods', () => { describe('datasource/cocoapods', () => {
).toBeNull(); ).toBeNull();
}); });
it('returns null for empty result', async () => { it('returns null for empty result', async () => {
api.get.mockResolvedValueOnce(null);
expect(await getPkgReleases(config)).toBeNull();
});
it('returns null for missing fields', async () => {
api.get.mockResolvedValueOnce({} as GotResponse);
expect(await getPkgReleases(config)).toBeNull();
api.get.mockResolvedValueOnce({ body: '' } as GotResponse);
expect(await getPkgReleases(config)).toBeNull(); expect(await getPkgReleases(config)).toBeNull();
}); });
it('returns null for 404', async () => { it('returns null for 404', async () => {
api.get.mockImplementation(() => httpMock
Promise.reject({ .scope(githubApiHost)
statusCode: 404, .get('/repos/foo/bar/contents/Specs/foo')
}) .reply(404)
); .get('/repos/foo/bar/contents/Specs/a/c/b/foo')
expect( .reply(404);
await getPkgReleases({ const res = await getPkgReleases({
...config, ...config,
registryUrls: [ registryUrls: [...config.registryUrls, 'https://github.com/foo/bar'],
...config.registryUrls, });
'invalid', expect(res).toBeNull();
'https://github.com/foo/bar', expect(httpMock.getTrace()).toMatchSnapshot();
],
})
).toBeNull();
}); });
it('returns null for 401', async () => { it('returns null for 401', async () => {
api.get.mockImplementationOnce(() => httpMock
Promise.reject({ .scope(cocoapodsHost)
statusCode: 401, .get('/all_pods_versions_a_c_b.txt')
}) .reply(401);
);
expect(await getPkgReleases(config)).toBeNull(); expect(await getPkgReleases(config)).toBeNull();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('throws for 429', async () => { it('throws for 429', async () => {
api.get.mockImplementationOnce(() => httpMock
Promise.reject({ .scope(cocoapodsHost)
statusCode: 429, .get('/all_pods_versions_a_c_b.txt')
}) .reply(429);
await expect(getPkgReleases(config)).rejects.toThrowError(
'registry-failure'
); );
await expect( expect(httpMock.getTrace()).toMatchSnapshot();
getPkgReleases({
...config,
registryUrls: ['https://cdn.cocoapods.org'],
})
).rejects.toThrowError('registry-failure');
});
it('throws for 5xx', async () => {
api.get.mockImplementationOnce(() =>
Promise.reject({
statusCode: 502,
})
);
await expect(
getPkgReleases({
...config,
registryUrls: ['https://cdn.cocoapods.org'],
})
).rejects.toThrowError('registry-failure');
}); });
it('returns null for unknown error', async () => { it('returns null for unknown error', async () => {
api.get.mockImplementationOnce(() => { httpMock
throw new Error(); .scope(cocoapodsHost)
}); .get('/all_pods_versions_a_c_b.txt')
.replyWithError('foobar');
expect(await getPkgReleases(config)).toBeNull(); expect(await getPkgReleases(config)).toBeNull();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('processes real data from CDN', async () => { it('processes real data from CDN', async () => {
api.get.mockResolvedValueOnce({ httpMock
body: 'foo/1.2.3', .scope(cocoapodsHost)
} as GotResponse); .get('/all_pods_versions_a_c_b.txt')
.reply(200, 'foo/1.2.3');
expect( expect(
await getPkgReleases({ await getPkgReleases({
...config, ...config,
@@ -120,23 +97,27 @@ describe('datasource/cocoapods', () => { describe('datasource/cocoapods', () => {
}, },
], ],
}); });
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('processes real data from Github', async () => { it('processes real data from Github', async () => {
api.get.mockResolvedValueOnce({ httpMock
body: [{ name: '1.2.3' }], .scope(githubApiHost)
} as GotResponse); .get('/repos/Artsy/Specs/contents/Specs/foo')
expect( .reply(404)
await getPkgReleases({ .get('/repos/Artsy/Specs/contents/Specs/a/c/b/foo')
...config, .reply(200, [{ name: '1.2.3' }]);
registryUrls: ['https://github.com/Artsy/Specs'], const res = await getPkgReleases({
}) ...config,
).toEqual({ registryUrls: ['https://github.com/Artsy/Specs'],
});
expect(res).toEqual({
releases: [ releases: [
{ {
version: '1.2.3', version: '1.2.3',
}, },
], ],
}); });
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
}); });
}); });

View file

@@ -1,7 +1,8 @@
import crypto from 'crypto'; import crypto from 'crypto';
import { logger } from '../../logger'; import { logger } from '../../logger';
import { api } from '../../platform/github/gh-got-wrapper';
import * as globalCache from '../../util/cache/global'; import * as globalCache from '../../util/cache/global';
import { Http } from '../../util/http';
import { GithubHttp } from '../../util/http/github';
import { GetReleasesConfig, ReleaseResult } from '../common'; import { GetReleasesConfig, ReleaseResult } from '../common';
export const id = 'pod'; export const id = 'pod';
@@ -11,6 +12,9 @@ export const defaultRegistryUrls = ['https://cdn.cocoapods.org'];
const cacheNamespace = `datasource-${id}`; const cacheNamespace = `datasource-${id}`;
const cacheMinutes = 30; const cacheMinutes = 30;
const githubHttp = new GithubHttp();
const http = new Http(id);
function shardParts(lookupName: string): string[] { function shardParts(lookupName: string): string[] {
return crypto return crypto
.createHash('md5') .createHash('md5')
@@ -31,34 +35,53 @@ function releasesGithubUrl( return `${prefix}/${account}/${repo}/contents/Specs/${suffix}`; function handleError(lookupName: string, err: Error): void {
return `${prefix}/${account}/${repo}/contents/Specs/${suffix}`; return `${prefix}/${account}/${repo}/contents/Specs/${suffix}`;
} }
async function makeRequest<T = unknown>( function handleError(lookupName: string, err: Error): void {
const errorData = { lookupName, err };
if (
err.statusCode === 429 ||
(err.statusCode >= 500 && err.statusCode < 600)
) {
logger.warn({ lookupName, err }, `CocoaPods registry failure`);
throw new Error('registry-failure');
}
if (err.statusCode === 401) {
logger.debug(errorData, 'Authorization error');
} else if (err.statusCode === 404) {
logger.debug(errorData, 'Package lookup error');
} else {
logger.warn(errorData, 'CocoaPods lookup failure: Unknown error');
}
}
async function requestCDN(
url: string, url: string,
lookupName: string, lookupName: string
json = true ): Promise<string | null> {
): Promise<T | null> {
try { try {
const resp = await api.get(url, { json }); const resp = await http.get(url);
if (resp && resp.body) { if (resp && resp.body) {
return resp.body; return resp.body;
} }
} catch (err) { } catch (err) {
const errorData = { lookupName, err }; handleError(lookupName, err);
}
if ( return null;
err.statusCode === 429 || }
(err.statusCode >= 500 && err.statusCode < 600)
) {
logger.warn({ lookupName, err }, `CocoaPods registry failure`);
throw new Error('registry-failure');
}
if (err.statusCode === 401) { async function requestGithub<T = unknown>(
logger.debug(errorData, 'Authorization error'); url: string,
} else if (err.statusCode === 404) { lookupName: string
logger.debug(errorData, 'Package lookup error'); ): Promise<T | null> {
} else { try {
logger.warn(errorData, 'CocoaPods lookup failure: Unknown error'); const resp = await githubHttp.getJson<T>(url);
if (resp && resp.body) {
return resp.body;
} }
} catch (err) {
handleError(lookupName, err);
} }
return null; return null;
@@ -75,7 +98,7 @@ async function getReleasesFromGithub(
const { account, repo } = (match && match.groups) || {}; const { account, repo } = (match && match.groups) || {};
const opts = { account, repo, useShard }; const opts = { account, repo, useShard };
const url = releasesGithubUrl(lookupName, opts); const url = releasesGithubUrl(lookupName, opts);
const resp = await makeRequest<{ name: string }[]>(url, lookupName); const resp = await requestGithub<{ name: string }[]>(url, lookupName);
if (resp) { if (resp) {
const releases = resp.map(({ name }) => ({ version: name })); const releases = resp.map(({ name }) => ({ version: name }));
return { releases }; return { releases };
@@ -98,7 +121,7 @@ async function getReleasesFromCDN(
registryUrl: string registryUrl: string
): Promise<ReleaseResult | null> { ): Promise<ReleaseResult | null> {
const url = releasesCDNUrl(lookupName, registryUrl); const url = releasesCDNUrl(lookupName, registryUrl);
const resp = await makeRequest<string>(url, lookupName, false); const resp = await requestCDN(url, lookupName);
if (resp) { if (resp) {
const lines = resp.split('\n'); const lines = resp.split('\n');
for (let idx = 0; idx < lines.length; idx += 1) { for (let idx = 0; idx < lines.length; idx += 1) {

File diff suppressed because it is too large Load diff

View file

@@ -1,239 +0,0 @@
import delay from 'delay';
import { Response } from 'got';
import {
PLATFORM_BAD_CREDENTIALS,
PLATFORM_FAILURE,
PLATFORM_INTEGRATION_UNAUTHORIZED,
PLATFORM_RATE_LIMIT_EXCEEDED,
REPOSITORY_CHANGED,
} from '../../constants/error-messages';
import _got from '../../util/got';
import { api } from './gh-got-wrapper';
jest.mock('../../util/got');
jest.mock('delay');
const got: any = _got;
const get: <T extends object = any>(
path: string,
options?: any,
okToRetry?: boolean
) => Promise<Response<T>> = api as any;
async function getError(): Promise<Error> {
try {
await get('some-url', {}, false);
} catch (err) {
return err;
}
return null;
}
describe('platform/gh-got-wrapper', () => {
beforeEach(() => {
jest.resetAllMocks();
delete global.appMode;
(delay as any).mockImplementation(() => Promise.resolve());
});
it('supports app mode', async () => {
global.appMode = true;
await api.get('some-url', { headers: { accept: 'some-accept' } });
expect(got.mock.calls[0][1].headers.accept).toBe(
'application/vnd.github.machine-man-preview+json, some-accept'
);
});
it('strips v3 for graphql', async () => {
got.mockImplementationOnce(() => ({
body: '{"data":{',
}));
api.setBaseUrl('https://ghe.mycompany.com/api/v3/');
await api.post('graphql', {
body: 'abc',
});
expect(got.mock.calls[0][0].includes('/v3')).toBe(false);
});
it('paginates', async () => {
got.mockReturnValueOnce({
headers: {
link:
'<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=2>; rel="next", <https://api.github.com/search/code?q=addClass+user%3Amozilla&page=3>; rel="last"',
},
body: ['a'],
});
got.mockReturnValueOnce({
headers: {
link:
'<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=3>; rel="next", <https://api.github.com/search/code?q=addClass+user%3Amozilla&page=3>; rel="last"',
},
body: ['b', 'c'],
});
got.mockReturnValueOnce({
headers: {},
body: ['d'],
});
const res = await api.get('some-url', { paginate: true });
expect(res.body).toEqual(['a', 'b', 'c', 'd']);
expect(got).toHaveBeenCalledTimes(3);
});
it('attempts to paginate', async () => {
got.mockReturnValueOnce({
headers: {
link:
'<https://api.github.com/search/code?q=addClass+user%3Amozilla&page=34>; rel="last"',
},
body: ['a'],
});
got.mockReturnValueOnce({
headers: {},
body: ['b'],
});
const res = await api.get('some-url', { paginate: true });
expect(res.body).toHaveLength(1);
expect(got).toHaveBeenCalledTimes(1);
});
// The tests below exercise dispatchError's mapping from raw got/GitHub
// errors onto Renovate's platform error constants. The exact message
// strings matter: dispatchError matches on them.
it('should throw rate limit exceeded', async () => {
  got.mockImplementationOnce(() =>
    Promise.reject({
      statusCode: 403,
      message:
        'Error updating branch: API rate limit exceeded for installation ID 48411. (403)',
    })
  );
  await expect(api.get('some-url')).rejects.toThrow();
});
it('should throw Bad credentials', async () => {
  got.mockImplementationOnce(() =>
    Promise.reject({
      statusCode: 401,
      message: 'Bad credentials. (401)',
    })
  );
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_BAD_CREDENTIALS);
});
it('should throw platform failure', async () => {
  // 401 with x-ratelimit-limit of '60' is mapped to a generic platform
  // failure instead of bad credentials.
  got.mockImplementationOnce(() =>
    Promise.reject({
      statusCode: 401,
      message: 'Bad credentials. (401)',
      headers: {
        'x-ratelimit-limit': '60',
      },
    })
  );
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_FAILURE);
});
it('should throw platform failure for ENOTFOUND, ETIMEDOUT or EAI_AGAIN', async () => {
  // Low-level network errors (DNS, timeout) are all mapped the same way.
  const codes = ['ENOTFOUND', 'ETIMEDOUT', 'EAI_AGAIN'];
  for (let idx = 0; idx < codes.length; idx += 1) {
    const code = codes[idx];
    got.mockImplementationOnce(() =>
      Promise.reject({
        name: 'RequestError',
        code,
      })
    );
    const e = await getError();
    expect(e).toBeDefined();
    expect(e.message).toEqual(PLATFORM_FAILURE);
  }
});
it('should throw platform failure for 500', async () => {
  got.mockImplementationOnce(() =>
    Promise.reject({
      statusCode: 500,
      message: 'Internal Server Error',
    })
  );
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_FAILURE);
});
it('should throw platform failure ParseError', async () => {
  // Unparseable response bodies are also generic platform failures.
  got.mockImplementationOnce(() =>
    Promise.reject({
      name: 'ParseError',
    })
  );
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_FAILURE);
});
it('should throw for unauthorized integration', async () => {
  got.mockImplementationOnce(() =>
    Promise.reject({
      statusCode: 403,
      message: 'Resource not accessible by integration (403)',
    })
  );
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_INTEGRATION_UNAUTHORIZED);
});
// NOTE(review): duplicate test title with the case above; this one covers
// the "Upgrade to GitHub Pro" path, where the original error is rethrown.
it('should throw for unauthorized integration', async () => {
  const gotErr = {
    statusCode: 403,
    body: { message: 'Upgrade to GitHub Pro' },
  };
  got.mockRejectedValueOnce(gotErr);
  const e = await getError();
  expect(e).toBeDefined();
  expect(e).toBe(gotErr);
});
it('should throw on abuse', async () => {
  const gotErr = {
    statusCode: 403,
    message: 'You have triggered an abuse detection mechanism',
  };
  got.mockRejectedValueOnce(gotErr);
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_RATE_LIMIT_EXCEEDED);
});
it('should throw on repository change', async () => {
  // 422 with an 'invalid' error code signals the repo changed underneath us.
  const gotErr = {
    statusCode: 422,
    body: {
      message: 'foobar',
      errors: [{ code: 'invalid' }],
    },
  };
  got.mockRejectedValueOnce(gotErr);
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(REPOSITORY_CHANGED);
});
it('should throw platform failure on 422 response', async () => {
  // Plain 422 without recognizable details falls through to platform failure.
  const gotErr = {
    statusCode: 422,
    message: 'foobar',
  };
  got.mockRejectedValueOnce(gotErr);
  const e = await getError();
  expect(e).toBeDefined();
  expect(e.message).toEqual(PLATFORM_FAILURE);
});
it('should throw original error when failed to add reviewers', async () => {
  const gotErr = {
    statusCode: 422,
    message: 'Review cannot be requested from pull request author.',
  };
  got.mockRejectedValueOnce(gotErr);
  const e = await getError();
  expect(e).toBeDefined();
  expect(e).toStrictEqual(gotErr);
});
it('should throw original error of unknown type', async () => {
  // Unrecognized status codes are passed through untouched.
  const gotErr = {
    statusCode: 418,
    message: 'Sorry, this is a teapot',
  };
  got.mockRejectedValueOnce(gotErr);
  const e = await getError();
  expect(e).toBe(gotErr);
});
});

View file

@ -1,214 +0,0 @@
import URL from 'url';
import { GotError } from 'got';
import pAll from 'p-all';
import parseLinkHeader from 'parse-link-header';
import {
PLATFORM_BAD_CREDENTIALS,
PLATFORM_FAILURE,
PLATFORM_INTEGRATION_UNAUTHORIZED,
PLATFORM_RATE_LIMIT_EXCEEDED,
REPOSITORY_CHANGED,
} from '../../constants/error-messages';
import { PLATFORM_TYPE_GITHUB } from '../../constants/platforms';
import { logger } from '../../logger';
import got, { GotJSONOptions } from '../../util/got';
import { maskToken } from '../../util/mask';
import { GotApi, GotResponse } from '../common';
// Every request issued through this wrapper is attributed to the GitHub
// platform (used for host-rule lookups downstream).
const hostType = PLATFORM_TYPE_GITHUB;
export const getHostType = (): string => hostType;

// Mutable module-level API root; overridden via api.setBaseUrl for
// GitHub Enterprise endpoints.
let baseUrl = 'https://api.github.com/';
export const getBaseUrl = (): string => baseUrl;

// Error shape produced by got for failed GitHub requests: GitHub puts a
// human-readable message (and sometimes structured errors) in the body.
type GotRequestError<E = unknown, T = unknown> = GotError & {
  body: {
    message?: string;
    errors?: E[];
  };
  headers?: Record<string, T>;
};

// Request options, extended with an optional per-request token override.
type GotRequestOptions = GotJSONOptions & {
  token?: string;
};
/**
 * Translates a failed GitHub request into Renovate's platform error
 * vocabulary and always throws: either a well-known platform error
 * (rate limit, bad credentials, repository changed, …) or, when the
 * failure is not recognized, the original error unchanged.
 *
 * The match order below is significant — keep it as-is.
 */
export function dispatchError(
  err: GotRequestError,
  path: string,
  opts: GotRequestOptions
): never {
  // GitHub usually puts the useful message in the response body.
  const message = err.body?.message || err.message;
  const { statusCode } = err;

  // Transport-level failures (DNS, timeouts) are retriable platform failures.
  if (
    err.name === 'RequestError' &&
    (err.code === 'ENOTFOUND' ||
      err.code === 'ETIMEDOUT' ||
      err.code === 'EAI_AGAIN')
  ) {
    logger.debug({ err }, 'GitHub failure: RequestError');
    throw new Error(PLATFORM_FAILURE);
  }
  // Unparseable response body.
  if (err.name === 'ParseError') {
    logger.debug({ err }, 'GitHub failure: ParseError');
    throw new Error(PLATFORM_FAILURE);
  }
  // Any server-side error.
  if (statusCode >= 500 && statusCode < 600) {
    logger.debug({ err }, 'GitHub failure: 5xx');
    throw new Error(PLATFORM_FAILURE);
  }
  if (statusCode === 403) {
    if (message.startsWith('You have triggered an abuse detection mechanism')) {
      logger.debug({ err }, 'GitHub failure: abuse detection');
      throw new Error(PLATFORM_RATE_LIMIT_EXCEEDED);
    }
    if (message.includes('Upgrade to GitHub Pro')) {
      // Paid-plan endpoints: surface the original error to the caller.
      logger.debug({ path }, 'Endpoint needs paid GitHub plan');
      throw err;
    }
    if (message.includes('rate limit exceeded')) {
      logger.debug({ err }, 'GitHub failure: rate limit');
      throw new Error(PLATFORM_RATE_LIMIT_EXCEEDED);
    }
    if (message.startsWith('Resource not accessible by integration')) {
      logger.debug(
        { err },
        'GitHub failure: Resource not accessible by integration'
      );
      throw new Error(PLATFORM_INTEGRATION_UNAUTHORIZED);
    }
  }
  if (statusCode === 401 && message.includes('Bad credentials')) {
    const rateLimit = err.headers ? err.headers['x-ratelimit-limit'] : -1;
    logger.debug(
      {
        token: maskToken(opts.token),
        err,
      },
      'GitHub failure: Bad credentials'
    );
    // A limit of 60 means the request went out unauthenticated.
    if (rateLimit === '60') {
      throw new Error(PLATFORM_FAILURE);
    }
    throw new Error(PLATFORM_BAD_CREDENTIALS);
  }
  if (statusCode === 422) {
    if (
      message.includes('Review cannot be requested from pull request author')
    ) {
      // Caller handles this case itself.
      throw err;
    }
    if (err.body?.errors?.find((e: any) => e.code === 'invalid')) {
      throw new Error(REPOSITORY_CHANGED);
    }
    logger.debug({ err }, '422 Error thrown from GitHub');
    throw new Error(PLATFORM_FAILURE);
  }
  // Not a recognized GitHub failure mode: rethrow unchanged.
  throw err;
}
/**
 * Core request function for the GitHub REST API.
 *
 * Supports three extras over plain got:
 *  - GitHub App mode headers (machine-man preview accept + user-agent),
 *  - automatic Link-header pagination when `opts.paginate` is set,
 *  - a single retry of graphql POSTs with a halved page size (first: 25).
 *
 * Errors are translated by dispatchError, which always throws.
 */
async function get(
  path: string,
  options?: any,
  okToRetry = true
): Promise<GotResponse> {
  let result = null;
  const opts = {
    hostType,
    baseUrl,
    json: true,
    ...options,
  };
  const method = opts.method || 'get';
  if (method.toLowerCase() === 'post' && path === 'graphql') {
    // GitHub Enterprise uses unversioned graphql path
    opts.baseUrl = opts.baseUrl.replace('/v3/', '/');
  }
  logger.trace(`${method.toUpperCase()} ${path}`);
  try {
    if (global.appMode) {
      // Running as a GitHub App: request the machine-man preview media type
      // and merge it with any caller-supplied accept header.
      const appAccept = 'application/vnd.github.machine-man-preview+json';
      opts.headers = {
        accept: appAccept,
        'user-agent':
          process.env.RENOVATE_USER_AGENT ||
          'https://github.com/renovatebot/renovate',
        ...opts.headers,
      };
      if (opts.headers.accept !== appAccept) {
        opts.headers.accept = `${appAccept}, ${opts.headers.accept}`;
      }
    }
    result = await got(path, opts);
    if (opts.paginate) {
      // Check if result is paginated
      const pageLimit = opts.pageLimit || 10;
      const linkHeader = parseLinkHeader(result.headers.link as string);
      if (linkHeader && linkHeader.next && linkHeader.last) {
        let lastPage = +linkHeader.last.page;
        // Cap pagination unless explicitly asked for everything.
        if (!process.env.RENOVATE_PAGINATE_ALL && opts.paginate !== 'all') {
          lastPage = Math.min(pageLimit, lastPage);
        }
        // Build page numbers 2..lastPage (page 1 was already fetched).
        const pageNumbers = Array.from(
          new Array(lastPage),
          (x, i) => i + 1
        ).slice(1);
        // Fetch remaining pages with at most 5 requests in flight;
        // each recursive call has paginate disabled.
        const queue = pageNumbers.map((page) => (): Promise<GotResponse> => {
          const nextUrl = URL.parse(linkHeader.next.url, true);
          delete nextUrl.search;
          nextUrl.query.page = page.toString();
          return get(
            URL.format(nextUrl),
            { ...opts, paginate: false },
            okToRetry
          );
        });
        const pages = await pAll<{ body: any[] }>(queue, { concurrency: 5 });
        result.body = result.body.concat(
          ...pages.filter(Boolean).map((page) => page.body)
        );
      }
    }
    // istanbul ignore if
    if (method === 'POST' && path === 'graphql') {
      // graphql errors come back as 200s with a non-data body; retry once
      // with a smaller page size before giving up.
      const goodResult = '{"data":{';
      if (result.body.startsWith(goodResult)) {
        if (!okToRetry) {
          logger.debug('Recovered graphql query');
        }
      } else if (okToRetry) {
        logger.debug('Retrying graphql query');
        opts.body = opts.body.replace('first: 100', 'first: 25');
        return get(path, opts, !okToRetry);
      }
    }
  } catch (gotErr) {
    dispatchError(gotErr, path, opts);
  }
  return result;
}
// Attach verb-specific helpers (api.get, api.post, …) that delegate to the
// core request function with the HTTP method pre-filled.
['get', 'post', 'put', 'patch', 'head', 'delete'].forEach((verb) => {
  (get as any)[verb] = (url: string, opts: any): Promise<GotResponse> =>
    get(url, { ...opts, method: verb.toUpperCase() });
});

// Lets the platform layer point the wrapper at a GitHub Enterprise host.
get.setBaseUrl = (endpoint: string): void => {
  baseUrl = endpoint;
};

export const api: GotApi = get as any;
export default api;

View file

@ -1,158 +0,0 @@
import { getGraphqlNodes } from './gh-graphql-wrapper';
/** @type any */
const got = require('../../util/got').default;
jest.mock('../../util/got');
// Representative GraphQL query fixture: "testItem" is the paginated
// connection the wrapper is expected to rewrite with first/after arguments.
const query = `
query {
repository(owner: "testOwner", name: "testName") {
testItem (orderBy: {field: UPDATED_AT, direction: DESC}, filterBy: {createdBy: "someone"}) {
pageInfo {
endCursor
hasNextPage
}
nodes {
number state title body
}
}
}
}`;
// Runs getGraphqlNodes and captures whatever it throws; resolves to
// undefined when the call succeeds.
async function getError(q: string, f: string) {
  try {
    await getGraphqlNodes(q, f);
  } catch (err) {
    return err;
  }
  return undefined;
}
describe('platform/gh-graphql-wrapper', () => {
  beforeEach(() => {
    jest.resetAllMocks();
    // App mode is opt-in per test; make sure it does not leak between tests.
    delete global.appMode;
  });
  it('supports app mode', async () => {
    // In app mode the machine-man preview accept header is prepended.
    global.appMode = true;
    await getGraphqlNodes(query, 'testItem');
    expect(got.mock.calls[0][1].headers.accept).toEqual(
      'application/vnd.github.machine-man-preview+json, application/vnd.github.merge-info-preview+json'
    );
  });
  it('returns empty array for undefined data', async () => {
    // data without a repository key yields no nodes.
    got.mockReturnValue({
      body: {
        data: {
          someprop: 'someval',
        },
      },
    });
    expect(await getGraphqlNodes(query, 'testItem')).toEqual([]);
  });
  it('returns empty array for undefined data.', async () => {
    // repository present but missing the requested field also yields no nodes.
    got.mockReturnValue({
      body: {
        data: { repository: { otherField: 'someval' } },
      },
    });
    expect(await getGraphqlNodes(query, 'testItem')).toEqual([]);
  });
  it('throws errors for invalid responses', async () => {
    const gotErr = {
      statusCode: 418,
      message: 'Sorry, this is a teapot',
    };
    got.mockImplementationOnce(() => Promise.reject(gotErr));
    const e = await getError(query, 'someItem');
    expect(e).toBe(gotErr);
  });
  it('halves node count and retries request', async () => {
    // Unusable responses make the wrapper halve the page size and retry
    // until the count reaches zero: 100→50→25→12→6→3→1→0 = 7 requests.
    got.mockReturnValue({
      body: {
        data: {
          someprop: 'someval',
        },
      },
    });
    await getGraphqlNodes(query, 'testItem');
    expect(got).toHaveBeenCalledTimes(7);
  });
  it('retrieves all data from all pages', async () => {
    // Three pages: the first two advertise hasNextPage, the last does not.
    got.mockReturnValueOnce({
      body: {
        data: {
          repository: {
            testItem: {
              pageInfo: {
                endCursor: 'cursor1',
                hasNextPage: true,
              },
              nodes: [
                {
                  number: 1,
                  state: 'OPEN',
                  title: 'title-1',
                  body: 'the body 1',
                },
              ],
            },
          },
        },
      },
    });
    got.mockReturnValueOnce({
      body: {
        data: {
          repository: {
            testItem: {
              pageInfo: {
                endCursor: 'cursor2',
                hasNextPage: true,
              },
              nodes: [
                {
                  number: 2,
                  state: 'CLOSED',
                  title: 'title-2',
                  body: 'the body 2',
                },
              ],
            },
          },
        },
      },
    });
    got.mockReturnValueOnce({
      body: {
        data: {
          repository: {
            testItem: {
              pageInfo: {
                endCursor: 'cursor3',
                hasNextPage: false,
              },
              nodes: [
                {
                  number: 3,
                  state: 'OPEN',
                  title: 'title-3',
                  body: 'the body 3',
                },
              ],
            },
          },
        },
      },
    });
    const items = await getGraphqlNodes(query, 'testItem');
    expect(got).toHaveBeenCalledTimes(3);
    expect(items.length).toEqual(3);
  });
});

View file

@ -1,100 +0,0 @@
import { logger } from '../../logger';
import got, { GotJSONOptions } from '../../util/got';
import { dispatchError, getBaseUrl, getHostType } from './gh-got-wrapper';
// Merge-info preview media type, required for PR merge-state fields.
const accept = 'application/vnd.github.merge-info-preview+json';

// Base options shared by every GraphQL request: always a JSON POST with
// the preview accept header.
const gqlOpts: GotJSONOptions = {
  json: true,
  method: 'POST',
  headers: {
    accept,
  },
};

// Envelope shape of a GitHub GraphQL response; T is the repository payload.
interface GithubGraphqlResponse<T = unknown> {
  data?: {
    repository?: T;
  };
  errors?: { message: string; locations: unknown }[];
}
/**
 * POSTs a single GraphQL query to GitHub and returns the parsed response
 * envelope (or null when the transport yields nothing). Failed requests are
 * handed to dispatchError, which always throws.
 */
async function get<T = unknown>(
  query: string
): Promise<GithubGraphqlResponse<T>> {
  const path = 'graphql';
  const options: GotJSONOptions = {
    ...gqlOpts,
    hostType: getHostType(),
    // GitHub Enterprise serves GraphQL at an unversioned path.
    baseUrl: (getBaseUrl() || '').replace('/v3/', '/'),
    body: { query },
  };
  if (global.appMode) {
    // App mode: prepend the machine-man preview media type and identify
    // ourselves with an explicit user-agent.
    options.headers = {
      ...options.headers,
      accept: `application/vnd.github.machine-man-preview+json, ${accept}`,
      'user-agent':
        process.env.RENOVATE_USER_AGENT ||
        'https://github.com/renovatebot/renovate',
    };
  }
  logger.trace(`Performing Github GraphQL request`);
  let result = null;
  try {
    const response = await got('graphql', options);
    result = response && response.body;
  } catch (requestError) {
    dispatchError(requestError, path, options);
  }
  return result;
}
/**
 * Collects every node of a paginated GraphQL connection named `fieldName`.
 *
 * The query is rewritten on each iteration to inject `first`/`after`
 * pagination arguments into the connection's argument list. When a response
 * carries no usable data, the page size is halved and the request retried;
 * once the size reaches zero an error is logged and iteration stops.
 */
export async function getGraphqlNodes<T = Record<string, unknown>>(
  queryOrig: string,
  fieldName: string
): Promise<T[]> {
  const result: T[] = [];
  // Matches "<fieldName>(" (with optional whitespace) so pagination
  // arguments can be spliced in front of the existing ones.
  const regex = new RegExp(`(\\W)${fieldName}(\\s*)\\(`);
  let cursor = null;
  let count = 100;
  let canIterate = true;
  while (canIterate) {
    let replacement = `$1${fieldName}$2(first: ${count}`;
    if (cursor) {
      replacement += `, after: "${cursor}", `;
    }
    // NOTE(review): when no cursor is set, nothing separates "first: <count>"
    // from the query's own first argument after the "(" is consumed by the
    // regex — confirm callers' queries tolerate this (tests here mock got,
    // so the generated query string is never parsed).
    const query = queryOrig.replace(regex, replacement);
    const gqlRes = await get<T>(query);
    if (
      gqlRes &&
      gqlRes.data &&
      gqlRes.data.repository &&
      gqlRes.data.repository[fieldName]
    ) {
      const { nodes, pageInfo } = gqlRes.data.repository[fieldName];
      result.push(...nodes);
      // Follow the cursor while GitHub reports more pages.
      const { hasNextPage, endCursor } = pageInfo;
      if (hasNextPage && endCursor) {
        cursor = endCursor;
      } else {
        canIterate = false;
      }
    } else {
      // No usable data: halve the page size and retry; give up at zero.
      count = Math.floor(count / 2);
      if (count === 0) {
        logger.error('Error fetching GraphQL nodes');
        canIterate = false;
      }
    }
  }
  return result;
}

View file

@ -64,7 +64,7 @@ describe('platform/github', () => {
'lib/platform/github/__fixtures__/graphql/pullrequest-1.json', 'lib/platform/github/__fixtures__/graphql/pullrequest-1.json',
'utf8' 'utf8'
); );
const graphqlClosedPullrequests = fs.readFileSync( const graphqlClosedPullRequests = fs.readFileSync(
'lib/platform/github/__fixtures__/graphql/pullrequests-closed.json', 'lib/platform/github/__fixtures__/graphql/pullrequests-closed.json',
'utf8' 'utf8'
); );
@ -426,8 +426,8 @@ describe('platform/github', () => {
const scope = httpMock.scope(githubApiHost); const scope = httpMock.scope(githubApiHost);
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.persist()
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/pulls?per_page=100&state=all') .get('/repos/some/repo/pulls?per_page=100&state=all')
.reply(200, [ .reply(200, [
@ -468,8 +468,8 @@ describe('platform/github', () => {
const scope = httpMock.scope(githubApiHost); const scope = httpMock.scope(githubApiHost);
forkInitRepoMock(scope, 'some/repo', 'forked/repo'); forkInitRepoMock(scope, 'some/repo', 'forked/repo');
scope scope
.persist()
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/pulls?per_page=100&state=all') .get('/repos/some/repo/pulls?per_page=100&state=all')
.reply(200, [ .reply(200, [
@ -1329,7 +1329,6 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/issues/42/comments?per_page=100') .get('/repos/some/repo/issues/42/comments?per_page=100')
.reply(200, []) .reply(200, [])
@ -1351,8 +1350,7 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice() .reply(200, graphqlClosedPullRequests)
.reply(200, graphqlClosedPullrequests)
.post('/repos/some/repo/issues/2499/comments') .post('/repos/some/repo/issues/2499/comments')
.reply(200); .reply(200);
await github.initRepo({ await github.initRepo({
@ -1370,7 +1368,6 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/issues/42/comments?per_page=100') .get('/repos/some/repo/issues/42/comments?per_page=100')
.reply(200, [{ id: 1234, body: '### some-subject\n\nblablabla' }]) .reply(200, [{ id: 1234, body: '### some-subject\n\nblablabla' }])
@ -1391,7 +1388,6 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/issues/42/comments?per_page=100') .get('/repos/some/repo/issues/42/comments?per_page=100')
.reply(200, [{ id: 1234, body: '### some-subject\n\nsome\ncontent' }]); .reply(200, [{ id: 1234, body: '### some-subject\n\nsome\ncontent' }]);
@ -1410,7 +1406,6 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/issues/42/comments?per_page=100') .get('/repos/some/repo/issues/42/comments?per_page=100')
.reply(200, [{ id: 1234, body: '!merge' }]); .reply(200, [{ id: 1234, body: '!merge' }]);
@ -1431,7 +1426,6 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/issues/42/comments?per_page=100') .get('/repos/some/repo/issues/42/comments?per_page=100')
.reply(200, [{ id: 1234, body: '### some-subject\n\nblablabla' }]) .reply(200, [{ id: 1234, body: '### some-subject\n\nblablabla' }])
@ -1446,7 +1440,6 @@ describe('platform/github', () => {
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.post('/graphql') .post('/graphql')
.twice()
.reply(200, {}) .reply(200, {})
.get('/repos/some/repo/issues/42/comments?per_page=100') .get('/repos/some/repo/issues/42/comments?per_page=100')
.reply(200, [{ id: 1234, body: 'some-content' }]) .reply(200, [{ id: 1234, body: 'some-content' }])
@ -1596,7 +1589,6 @@ describe('platform/github', () => {
const scope = httpMock.scope(githubApiHost); const scope = httpMock.scope(githubApiHost);
initRepoMock(scope, 'some/repo'); initRepoMock(scope, 'some/repo');
scope scope
.persist()
.post('/graphql') .post('/graphql')
.reply(200, graphqlOpenPullRequests) .reply(200, graphqlOpenPullRequests)
.get('/repos/some/repo/git/refs/heads/master') .get('/repos/some/repo/git/refs/heads/master')
@ -1624,11 +1616,13 @@ describe('platform/github', () => {
.post('/graphql') .post('/graphql')
.reply(200, graphqlOpenPullRequests) .reply(200, graphqlOpenPullRequests)
.post('/graphql') .post('/graphql')
.times(2) .reply(200, graphqlClosedPullRequests)
.reply(200, {}) .get('/repos/some/repo/git/refs/heads/master')
.reply(200, {
.post('/graphql') object: {
.reply(200, graphqlClosedPullrequests); sha: '1234123412341234123412341234123412341234',
},
});
await github.initRepo({ await github.initRepo({
repository: 'some/repo', repository: 'some/repo',
} as any); } as any);
@ -2108,28 +2102,43 @@ describe('platform/github', () => {
}); });
describe('getVulnerabilityAlerts()', () => { describe('getVulnerabilityAlerts()', () => {
it('returns empty if error', async () => { it('returns empty if error', async () => {
httpMock.scope(githubApiHost).post('/graphql').twice().reply(200, {}); httpMock.scope(githubApiHost).post('/graphql').reply(200, {});
const res = await github.getVulnerabilityAlerts(); const res = await github.getVulnerabilityAlerts();
expect(res).toHaveLength(0); expect(res).toHaveLength(0);
expect(httpMock.getTrace()).toMatchSnapshot(); expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('returns array if found', async () => { it('returns array if found', async () => {
// prettier-ignore // prettier-ignore
httpMock.scope(githubApiHost).post('/graphql').reply(200, "{\"data\":{\"repository\":{\"vulnerabilityAlerts\":{\"edges\":[{\"node\":{\"externalIdentifier\":\"CVE-2018-1000136\",\"externalReference\":\"https://nvd.nist.gov/vuln/detail/CVE-2018-1000136\",\"affectedRange\":\">= 1.8, < 1.8.3\",\"fixedIn\":\"1.8.3\",\"id\":\"MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1MzE3NDk4MQ==\",\"packageName\":\"electron\"}}]}}}}"); httpMock.scope(githubApiHost).post('/graphql').reply(200, {
"data": {
"repository": {
"vulnerabilityAlerts": {
"edges": [{
"node": {
"externalIdentifier": "CVE-2018-1000136",
"externalReference": "https://nvd.nist.gov/vuln/detail/CVE-2018-1000136",
"affectedRange": ">= 1.8, < 1.8.3", "fixedIn": "1.8.3",
"id": "MDI4OlJlcG9zaXRvcnlWdWxuZXJhYmlsaXR5QWxlcnQ1MzE3NDk4MQ==", "packageName": "electron"
}
}]
}
}
}
});
const res = await github.getVulnerabilityAlerts(); const res = await github.getVulnerabilityAlerts();
expect(res).toHaveLength(1); expect(res).toHaveLength(1);
expect(httpMock.getTrace()).toMatchSnapshot(); expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('returns empty if disabled', async () => { it('returns empty if disabled', async () => {
// prettier-ignore // prettier-ignore
httpMock.scope(githubApiHost).post('/graphql').reply(200, "{\"data\":{\"repository\":{}}}"); httpMock.scope(githubApiHost).post('/graphql').reply(200, {data: { repository: {} }} );
const res = await github.getVulnerabilityAlerts(); const res = await github.getVulnerabilityAlerts();
expect(res).toHaveLength(0); expect(res).toHaveLength(0);
expect(httpMock.getTrace()).toMatchSnapshot(); expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('handles network error', async () => { it('handles network error', async () => {
// prettier-ignore // prettier-ignore
httpMock.scope(githubApiHost).persist().post('/graphql').replyWithError('unknown error'); httpMock.scope(githubApiHost).post('/graphql').replyWithError('unknown error');
const res = await github.getVulnerabilityAlerts(); const res = await github.getVulnerabilityAlerts();
expect(res).toHaveLength(0); expect(res).toHaveLength(0);
expect(httpMock.getTrace()).toMatchSnapshot(); expect(httpMock.getTrace()).toMatchSnapshot();

View file

@ -25,6 +25,7 @@ import {
import { logger } from '../../logger'; import { logger } from '../../logger';
import { BranchStatus } from '../../types'; import { BranchStatus } from '../../types';
import * as hostRules from '../../util/host-rules'; import * as hostRules from '../../util/host-rules';
import * as githubHttp from '../../util/http/github';
import { sanitize } from '../../util/sanitize'; import { sanitize } from '../../util/sanitize';
import { ensureTrailingSlash } from '../../util/url'; import { ensureTrailingSlash } from '../../util/url';
import { import {
@ -45,8 +46,6 @@ import {
} from '../common'; } from '../common';
import GitStorage, { StatusResult } from '../git/storage'; import GitStorage, { StatusResult } from '../git/storage';
import { smartTruncate } from '../utils/pr-body'; import { smartTruncate } from '../utils/pr-body';
import { api } from './gh-got-wrapper';
import { getGraphqlNodes } from './gh-graphql-wrapper';
import { import {
BranchProtection, BranchProtection,
CombinedBranchStatus, CombinedBranchStatus,
@ -57,6 +56,8 @@ import {
PrList, PrList,
} from './types'; } from './types';
const githubApi = new githubHttp.GithubHttp();
const defaultConfigFile = configFileNames[0]; const defaultConfigFile = configFileNames[0];
let config: LocalRepoConfig = {} as any; let config: LocalRepoConfig = {} as any;
@ -82,7 +83,7 @@ export async function initPlatform({
if (endpoint) { if (endpoint) {
defaults.endpoint = ensureTrailingSlash(endpoint); defaults.endpoint = ensureTrailingSlash(endpoint);
api.setBaseUrl(defaults.endpoint); githubHttp.setBaseUrl(defaults.endpoint);
} else { } else {
logger.debug('Using default github endpoint: ' + defaults.endpoint); logger.debug('Using default github endpoint: ' + defaults.endpoint);
} }
@ -90,9 +91,12 @@ export async function initPlatform({
let renovateUsername: string; let renovateUsername: string;
try { try {
const userData = ( const userData = (
await api.get(defaults.endpoint + 'user', { await githubApi.getJson<{ login: string; name: string }>(
token, defaults.endpoint + 'user',
}) {
token,
}
)
).body; ).body;
renovateUsername = userData.login; renovateUsername = userData.login;
gitAuthor = userData.name; gitAuthor = userData.name;
@ -102,9 +106,12 @@ export async function initPlatform({
} }
try { try {
const userEmail = ( const userEmail = (
await api.get(defaults.endpoint + 'user/emails', { await githubApi.getJson<{ email: string }[]>(
token, defaults.endpoint + 'user/emails',
}) {
token,
}
)
).body; ).body;
if (userEmail.length && userEmail[0].email) { if (userEmail.length && userEmail[0].email) {
gitAuthor += ` <${userEmail[0].email}>`; gitAuthor += ` <${userEmail[0].email}>`;
@ -131,8 +138,11 @@ export async function initPlatform({
export async function getRepos(): Promise<string[]> { export async function getRepos(): Promise<string[]> {
logger.debug('Autodiscovering GitHub repositories'); logger.debug('Autodiscovering GitHub repositories');
try { try {
const res = await api.get('user/repos?per_page=100', { paginate: true }); const res = await githubApi.getJson<{ full_name: string }[]>(
return res.body.map((repo: { full_name: string }) => repo.full_name); 'user/repos?per_page=100',
{ paginate: true }
);
return res.body.map((repo) => repo.full_name);
} catch (err) /* istanbul ignore next */ { } catch (err) /* istanbul ignore next */ {
logger.error({ err }, `GitHub getRepos error`); logger.error({ err }, `GitHub getRepos error`);
throw err; throw err;
@ -156,7 +166,7 @@ async function getBranchProtection(
if (config.parentRepo) { if (config.parentRepo) {
return {}; return {};
} }
const res = await api.get( const res = await githubApi.getJson<BranchProtection>(
`repos/${config.repository}/branches/${escapeHash(branchName)}/protection` `repos/${config.repository}/branches/${escapeHash(branchName)}/protection`
); );
return res.body; return res.body;
@ -165,7 +175,7 @@ async function getBranchProtection(
// Return the commit SHA for a branch // Return the commit SHA for a branch
async function getBranchCommit(branchName: string): Promise<string> { async function getBranchCommit(branchName: string): Promise<string> {
try { try {
const res = await api.get( const res = await githubApi.getJson<{ object: { sha: string } }>(
`repos/${config.repository}/git/refs/heads/${escapeHash(branchName)}` `repos/${config.repository}/git/refs/heads/${escapeHash(branchName)}`
); );
return res.body.object.sha; return res.body.object.sha;
@ -208,7 +218,7 @@ export async function initRepo({
// Necessary for Renovate Pro - do not remove // Necessary for Renovate Pro - do not remove
logger.debug('Overriding default GitHub endpoint'); logger.debug('Overriding default GitHub endpoint');
defaults.endpoint = endpoint; defaults.endpoint = endpoint;
api.setBaseUrl(endpoint); githubHttp.setBaseUrl(endpoint);
} }
const opts = hostRules.find({ const opts = hostRules.find({
hostType: PLATFORM_TYPE_GITHUB, hostType: PLATFORM_TYPE_GITHUB,
@ -222,7 +232,7 @@ export async function initRepo({
config.gitPrivateKey = gitPrivateKey; config.gitPrivateKey = gitPrivateKey;
let res; let res;
try { try {
res = await api.get(`repos/${repository}`); res = await githubApi.getJson<{ fork: boolean }>(`repos/${repository}`);
logger.trace({ repositoryDetails: res.body }, 'Repository details'); logger.trace({ repositoryDetails: res.body }, 'Repository details');
config.enterpriseVersion = config.enterpriseVersion =
res.headers && (res.headers['x-github-enterprise-version'] as string); res.headers && (res.headers['x-github-enterprise-version'] as string);
@ -232,7 +242,7 @@ export async function initRepo({
const renovateConfig = JSON.parse( const renovateConfig = JSON.parse(
Buffer.from( Buffer.from(
( (
await api.get( await githubApi.getJson<{ content: string }>(
`repos/${config.repository}/contents/${defaultConfigFile}` `repos/${config.repository}/contents/${defaultConfigFile}`
) )
).body.content, ).body.content,
@ -265,7 +275,7 @@ export async function initRepo({
renovateConfig = JSON.parse( renovateConfig = JSON.parse(
Buffer.from( Buffer.from(
( (
await api.get( await githubApi.getJson<{ content: string }>(
`repos/${config.repository}/contents/${defaultConfigFile}` `repos/${config.repository}/contents/${defaultConfigFile}`
) )
).body.content, ).body.content,
@ -344,16 +354,22 @@ export async function initRepo({
config.repository = null; config.repository = null;
// Get list of existing repos // Get list of existing repos
const existingRepos = ( const existingRepos = (
await api.get<{ full_name: string }[]>('user/repos?per_page=100', { await githubApi.getJson<{ full_name: string }[]>(
token: forkToken || opts.token, 'user/repos?per_page=100',
paginate: true, {
}) token: forkToken || opts.token,
paginate: true,
}
)
).body.map((r) => r.full_name); ).body.map((r) => r.full_name);
try { try {
config.repository = ( config.repository = (
await api.post(`repos/${repository}/forks`, { await githubApi.postJson<{ full_name: string }>(
token: forkToken || opts.token, `repos/${repository}/forks`,
}) {
token: forkToken || opts.token,
}
)
).body.full_name; ).body.full_name;
} catch (err) /* istanbul ignore next */ { } catch (err) /* istanbul ignore next */ {
logger.debug({ err }, 'Error forking repository'); logger.debug({ err }, 'Error forking repository');
@ -372,7 +388,7 @@ export async function initRepo({
// This is a lovely "hack" by GitHub that lets us force update our fork's master // This is a lovely "hack" by GitHub that lets us force update our fork's master
// with the base commit from the parent repository // with the base commit from the parent repository
try { try {
await api.patch( await githubApi.patchJson(
`repos/${config.repository}/git/refs/heads/${config.baseBranch}`, `repos/${config.repository}/git/refs/heads/${config.baseBranch}`,
{ {
body: { body: {
@ -576,7 +592,6 @@ async function getClosedPrs(): Promise<PrList> {
config.closedPrList = {}; config.closedPrList = {};
let query; let query;
try { try {
const url = 'graphql';
// prettier-ignore // prettier-ignore
query = ` query = `
query { query {
@ -598,21 +613,18 @@ async function getClosedPrs(): Promise<PrList> {
} }
} }
`; `;
const options = { const nodes = await githubApi.getGraphqlNodes<any>(
body: JSON.stringify({ query }), query,
json: false, 'pullRequests',
}; { paginate: false }
const res = JSON.parse((await api.post(url, options)).body); );
const prNumbers: number[] = []; const prNumbers: number[] = [];
// istanbul ignore if // istanbul ignore if
if (!res.data) { if (!nodes?.length) {
logger.debug( logger.debug({ query }, 'No graphql data, returning empty list');
{ query, res },
'No graphql res.data, returning empty list'
);
return {}; return {};
} }
for (const pr of res.data.repository.pullRequests.nodes) { for (const pr of nodes) {
// https://developer.github.com/v4/object/pullrequest/ // https://developer.github.com/v4/object/pullrequest/
pr.displayNumber = `Pull Request #${pr.number}`; pr.displayNumber = `Pull Request #${pr.number}`;
pr.state = pr.state.toLowerCase(); pr.state = pr.state.toLowerCase();
@ -649,11 +661,6 @@ async function getOpenPrs(): Promise<PrList> {
config.openPrList = {}; config.openPrList = {};
let query; let query;
try { try {
const url = 'graphql';
// https://developer.github.com/v4/previews/#mergeinfopreview---more-detailed-information-about-a-pull-requests-merge-state
const headers = {
accept: 'application/vnd.github.merge-info-preview+json',
};
// prettier-ignore // prettier-ignore
query = ` query = `
query { query {
@ -702,19 +709,18 @@ async function getOpenPrs(): Promise<PrList> {
} }
} }
`; `;
const options = { const nodes = await githubApi.getGraphqlNodes<any>(
headers, query,
body: JSON.stringify({ query }), 'pullRequests',
json: false, { paginate: false }
}; );
const res = JSON.parse((await api.post(url, options)).body);
const prNumbers: number[] = []; const prNumbers: number[] = [];
// istanbul ignore if // istanbul ignore if
if (!res.data) { if (!nodes?.length) {
logger.debug({ query, res }, 'No graphql res.data'); logger.debug({ query }, 'No graphql res.data');
return {}; return {};
} }
for (const pr of res.data.repository.pullRequests.nodes) { for (const pr of nodes) {
// https://developer.github.com/v4/object/pullrequest/ // https://developer.github.com/v4/object/pullrequest/
pr.displayNumber = `Pull Request #${pr.number}`; pr.displayNumber = `Pull Request #${pr.number}`;
pr.state = PR_STATE_OPEN; pr.state = PR_STATE_OPEN;
@ -816,12 +822,14 @@ export async function getPr(prNo: number): Promise<Pr | null> {
if (!prNo) { if (!prNo) {
return null; return null;
} }
const openPr = (await getOpenPrs())[prNo]; const openPrs = await getOpenPrs();
const openPr = openPrs[prNo];
if (openPr) { if (openPr) {
logger.debug('Returning from graphql open PR list'); logger.debug('Returning from graphql open PR list');
return openPr; return openPr;
} }
const closedPr = (await getClosedPrs())[prNo]; const closedPrs = await getClosedPrs();
const closedPr = closedPrs[prNo];
if (closedPr) { if (closedPr) {
logger.debug('Returning from graphql closed PR list'); logger.debug('Returning from graphql closed PR list');
return closedPr; return closedPr;
@ -831,7 +839,7 @@ export async function getPr(prNo: number): Promise<Pr | null> {
'PR not found in open or closed PRs list - trying to fetch it directly' 'PR not found in open or closed PRs list - trying to fetch it directly'
); );
const pr = ( const pr = (
await api.get( await githubApi.getJson<any>(
`repos/${config.parentRepo || config.repository}/pulls/${prNo}` `repos/${config.parentRepo || config.repository}/pulls/${prNo}`
) )
).body; ).body;
@ -858,7 +866,7 @@ export async function getPr(prNo: number): Promise<Pr | null> {
if (global.gitAuthor) { if (global.gitAuthor) {
// Check against gitAuthor // Check against gitAuthor
const commitAuthorEmail = ( const commitAuthorEmail = (
await api.get( await githubApi.getJson<{ commit: { author: { email } } }[]>(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/pulls/${prNo}/commits` }/pulls/${prNo}/commits`
@ -892,7 +900,9 @@ export async function getPr(prNo: number): Promise<Pr | null> {
// Check if only one author of all commits // Check if only one author of all commits
logger.debug({ prNo }, 'Checking all commits'); logger.debug({ prNo }, 'Checking all commits');
const prCommits = ( const prCommits = (
await api.get( await githubApi.getJson<
{ committer: { login: string }; commit: { message: string } }[]
>(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/pulls/${prNo}/commits` }/pulls/${prNo}/commits`
@ -900,10 +910,7 @@ export async function getPr(prNo: number): Promise<Pr | null> {
).body; ).body;
// Filter out "Update branch" presses // Filter out "Update branch" presses
const remainingCommits = prCommits.filter( const remainingCommits = prCommits.filter(
(commit: { (commit: { committer; commit }) => {
committer: { login: string };
commit: { message: string };
}) => {
const isWebflow = const isWebflow =
commit.committer && commit.committer.login === 'web-flow'; commit.committer && commit.committer.login === 'web-flow';
if (!isWebflow) { if (!isWebflow) {
@ -950,7 +957,15 @@ export async function getPrList(): Promise<Pr[]> {
logger.debug('Retrieving PR list'); logger.debug('Retrieving PR list');
let res; let res;
try { try {
res = await api.get( res = await githubApi.getJson<{
number: number;
head: { ref: string; sha: string; repo: { full_name: string } };
title: string;
state: string;
merged_at: string;
created_at: string;
closed_at: string;
}>(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/pulls?per_page=100&state=all`, }/pulls?per_page=100&state=all`,
@ -960,30 +975,19 @@ export async function getPrList(): Promise<Pr[]> {
logger.debug({ err }, 'getPrList err'); logger.debug({ err }, 'getPrList err');
throw new Error('platform-failure'); throw new Error('platform-failure');
} }
config.prList = res.body.map( config.prList = res.body.map((pr) => ({
(pr: { number: pr.number,
number: number; branchName: pr.head.ref,
head: { ref: string; sha: string; repo: { full_name: string } }; sha: pr.head.sha,
title: string; title: pr.title,
state: string; state:
merged_at: string; pr.state === PR_STATE_CLOSED && pr.merged_at && pr.merged_at.length
created_at: string; ? /* istanbul ignore next */ 'merged'
closed_at: string; : pr.state,
}) => ({ createdAt: pr.created_at,
number: pr.number, closed_at: pr.closed_at,
branchName: pr.head.ref, sourceRepo: pr.head && pr.head.repo ? pr.head.repo.full_name : undefined,
sha: pr.head.sha, }));
title: pr.title,
state:
pr.state === PR_STATE_CLOSED && pr.merged_at && pr.merged_at.length
? /* istanbul ignore next */ 'merged'
: pr.state,
createdAt: pr.created_at,
closed_at: pr.closed_at,
sourceRepo:
pr.head && pr.head.repo ? pr.head.repo.full_name : undefined,
})
);
logger.debug(`Retrieved ${config.prList.length} Pull Requests`); logger.debug(`Retrieved ${config.prList.length} Pull Requests`);
} }
return config.prList; return config.prList;
@ -1027,7 +1031,9 @@ async function getStatus(
branchName branchName
)}/status`; )}/status`;
return (await api.get(commitStatusUrl, { useCache })).body; return (
await githubApi.getJson<CombinedBranchStatus>(commitStatusUrl, { useCache })
).body;
} }
// Returns the combined status for a branch. // Returns the combined status for a branch.
@ -1074,15 +1080,17 @@ export async function getBranchStatus(
Accept: 'application/vnd.github.antiope-preview+json', Accept: 'application/vnd.github.antiope-preview+json',
}, },
}; };
const checkRunsRaw = (await api.get(checkRunsUrl, opts)).body; const checkRunsRaw = (
await githubApi.getJson<{
check_runs: { name: string; status: string; conclusion: string }[];
}>(checkRunsUrl, opts)
).body;
if (checkRunsRaw.check_runs && checkRunsRaw.check_runs.length) { if (checkRunsRaw.check_runs && checkRunsRaw.check_runs.length) {
checkRuns = checkRunsRaw.check_runs.map( checkRuns = checkRunsRaw.check_runs.map((run) => ({
(run: { name: string; status: string; conclusion: string }) => ({ name: run.name,
name: run.name, status: run.status,
status: run.status, conclusion: run.conclusion,
conclusion: run.conclusion, }));
})
);
logger.debug({ checkRuns }, 'check runs result'); logger.debug({ checkRuns }, 'check runs result');
} else { } else {
// istanbul ignore next // istanbul ignore next
@ -1136,7 +1144,7 @@ async function getStatusCheck(
const url = `repos/${config.repository}/commits/${branchCommit}/statuses`; const url = `repos/${config.repository}/commits/${branchCommit}/statuses`;
return (await api.get(url, { useCache })).body; return (await githubApi.getJson<GhBranchStatus[]>(url, { useCache })).body;
} }
const githubToRenovateStatusMapping = { const githubToRenovateStatusMapping = {
@ -1202,7 +1210,7 @@ export async function setBranchStatus({
if (targetUrl) { if (targetUrl) {
options.target_url = targetUrl; options.target_url = targetUrl;
} }
await api.post(url, { body: options }); await githubApi.postJson(url, { body: options });
// update status cache // update status cache
await getStatus(branchName, false); await getStatus(branchName, false);
@ -1237,7 +1245,7 @@ async function getIssues(): Promise<Issue[]> {
} }
`; `;
const result = await getGraphqlNodes<Issue>(query, 'issues'); const result = await githubApi.getGraphqlNodes<Issue>(query, 'issues');
logger.debug(`Retrieved ${result.length} issues`); logger.debug(`Retrieved ${result.length} issues`);
return result.map((issue) => ({ return result.map((issue) => ({
@ -1264,7 +1272,7 @@ export async function findIssue(title: string): Promise<Issue | null> {
} }
logger.debug('Found issue ' + issue.number); logger.debug('Found issue ' + issue.number);
const issueBody = ( const issueBody = (
await api.get( await githubApi.getJson<{ body: string }>(
`repos/${config.parentRepo || config.repository}/issues/${issue.number}` `repos/${config.parentRepo || config.repository}/issues/${issue.number}`
) )
).body.body; ).body.body;
@ -1276,7 +1284,7 @@ export async function findIssue(title: string): Promise<Issue | null> {
async function closeIssue(issueNumber: number): Promise<void> { async function closeIssue(issueNumber: number): Promise<void> {
logger.debug(`closeIssue(${issueNumber})`); logger.debug(`closeIssue(${issueNumber})`);
await api.patch( await githubApi.patchJson(
`repos/${config.parentRepo || config.repository}/issues/${issueNumber}`, `repos/${config.parentRepo || config.repository}/issues/${issueNumber}`,
{ {
body: { state: 'closed' }, body: { state: 'closed' },
@ -1314,7 +1322,7 @@ export async function ensureIssue({
} }
} }
const issueBody = ( const issueBody = (
await api.get( await githubApi.getJson<{ body: string }>(
`repos/${config.parentRepo || config.repository}/issues/${ `repos/${config.parentRepo || config.repository}/issues/${
issue.number issue.number
}` }`
@ -1326,7 +1334,7 @@ export async function ensureIssue({
} }
if (shouldReOpen) { if (shouldReOpen) {
logger.debug('Patching issue'); logger.debug('Patching issue');
await api.patch( await githubApi.patchJson(
`repos/${config.parentRepo || config.repository}/issues/${ `repos/${config.parentRepo || config.repository}/issues/${
issue.number issue.number
}`, }`,
@ -1338,12 +1346,15 @@ export async function ensureIssue({
return 'updated'; return 'updated';
} }
} }
await api.post(`repos/${config.parentRepo || config.repository}/issues`, { await githubApi.postJson(
body: { `repos/${config.parentRepo || config.repository}/issues`,
title, {
body, body: {
}, title,
}); body,
},
}
);
logger.info('Issue created'); logger.info('Issue created');
// reset issueList so that it will be fetched again as-needed // reset issueList so that it will be fetched again as-needed
delete config.issueList; delete config.issueList;
@ -1381,7 +1392,7 @@ export async function addAssignees(
): Promise<void> { ): Promise<void> {
logger.debug(`Adding assignees ${assignees} to #${issueNo}`); logger.debug(`Adding assignees ${assignees} to #${issueNo}`);
const repository = config.parentRepo || config.repository; const repository = config.parentRepo || config.repository;
await api.post(`repos/${repository}/issues/${issueNo}/assignees`, { await githubApi.postJson(`repos/${repository}/issues/${issueNo}/assignees`, {
body: { body: {
assignees, assignees,
}, },
@ -1399,7 +1410,7 @@ export async function addReviewers(
.filter((e) => e.startsWith('team:')) .filter((e) => e.startsWith('team:'))
.map((e) => e.replace(/^team:/, '')); .map((e) => e.replace(/^team:/, ''));
try { try {
await api.post( await githubApi.postJson(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/pulls/${prNo}/requested_reviewers`, }/pulls/${prNo}/requested_reviewers`,
@ -1422,7 +1433,7 @@ async function addLabels(
logger.debug(`Adding labels ${labels} to #${issueNo}`); logger.debug(`Adding labels ${labels} to #${issueNo}`);
const repository = config.parentRepo || config.repository; const repository = config.parentRepo || config.repository;
if (is.array(labels) && labels.length) { if (is.array(labels) && labels.length) {
await api.post(`repos/${repository}/issues/${issueNo}/labels`, { await githubApi.postJson(`repos/${repository}/issues/${issueNo}/labels`, {
body: labels, body: labels,
}); });
} }
@ -1435,7 +1446,9 @@ export async function deleteLabel(
logger.debug(`Deleting label ${label} from #${issueNo}`); logger.debug(`Deleting label ${label} from #${issueNo}`);
const repository = config.parentRepo || config.repository; const repository = config.parentRepo || config.repository;
try { try {
await api.delete(`repos/${repository}/issues/${issueNo}/labels/${label}`); await githubApi.deleteJson(
`repos/${repository}/issues/${issueNo}/labels/${label}`
);
} catch (err) /* istanbul ignore next */ { } catch (err) /* istanbul ignore next */ {
logger.warn({ err, issueNo, label }, 'Failed to delete label'); logger.warn({ err, issueNo, label }, 'Failed to delete label');
} }
@ -1443,7 +1456,7 @@ export async function deleteLabel(
async function addComment(issueNo: number, body: string): Promise<void> { async function addComment(issueNo: number, body: string): Promise<void> {
// POST /repos/:owner/:repo/issues/:number/comments // POST /repos/:owner/:repo/issues/:number/comments
await api.post( await githubApi.postJson(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/issues/${issueNo}/comments`, }/issues/${issueNo}/comments`,
@ -1455,7 +1468,7 @@ async function addComment(issueNo: number, body: string): Promise<void> {
async function editComment(commentId: number, body: string): Promise<void> { async function editComment(commentId: number, body: string): Promise<void> {
// PATCH /repos/:owner/:repo/issues/comments/:id // PATCH /repos/:owner/:repo/issues/comments/:id
await api.patch( await githubApi.patchJson(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/issues/comments/${commentId}`, }/issues/comments/${commentId}`,
@ -1467,7 +1480,7 @@ async function editComment(commentId: number, body: string): Promise<void> {
async function deleteComment(commentId: number): Promise<void> { async function deleteComment(commentId: number): Promise<void> {
// DELETE /repos/:owner/:repo/issues/comments/:id // DELETE /repos/:owner/:repo/issues/comments/:id
await api.delete( await githubApi.deleteJson(
`repos/${ `repos/${
config.parentRepo || config.repository config.parentRepo || config.repository
}/issues/comments/${commentId}` }/issues/comments/${commentId}`
@ -1487,7 +1500,7 @@ async function getComments(issueNo: number): Promise<Comment[]> {
}/issues/${issueNo}/comments?per_page=100`; }/issues/${issueNo}/comments?per_page=100`;
try { try {
const comments = ( const comments = (
await api.get<Comment[]>(url, { await githubApi.getJson<Comment[]>(url, {
paginate: true, paginate: true,
}) })
).body; ).body;
@ -1622,7 +1635,7 @@ export async function createPr({
} }
logger.debug({ title, head, base }, 'Creating PR'); logger.debug({ title, head, base }, 'Creating PR');
const pr = ( const pr = (
await api.post<GhPr>( await githubApi.postJson<GhPr>(
`repos/${config.parentRepo || config.repository}/pulls`, `repos/${config.parentRepo || config.repository}/pulls`,
options options
) )
@ -1656,11 +1669,11 @@ export async function getPrFiles(prNo: number): Promise<string[]> {
return []; return [];
} }
const files = ( const files = (
await api.get( await githubApi.getJson<{ filename: string }[]>(
`repos/${config.parentRepo || config.repository}/pulls/${prNo}/files` `repos/${config.parentRepo || config.repository}/pulls/${prNo}/files`
) )
).body; ).body;
return files.map((f: { filename: string }) => f.filename); return files.map((f) => f.filename);
} }
export async function updatePr( export async function updatePr(
@ -1682,7 +1695,7 @@ export async function updatePr(
options.token = config.forkToken; options.token = config.forkToken;
} }
try { try {
await api.patch( await githubApi.patchJson(
`repos/${config.parentRepo || config.repository}/pulls/${prNo}`, `repos/${config.parentRepo || config.repository}/pulls/${prNo}`,
options options
); );
@ -1715,9 +1728,11 @@ export async function mergePr(
'Branch protection: Attempting to merge PR when PR reviews are enabled' 'Branch protection: Attempting to merge PR when PR reviews are enabled'
); );
const repository = config.parentRepo || config.repository; const repository = config.parentRepo || config.repository;
const reviews = await api.get(`repos/${repository}/pulls/${prNo}/reviews`); const reviews = await githubApi.getJson<{ state: string }[]>(
`repos/${repository}/pulls/${prNo}/reviews`
);
const isApproved = reviews.body.some( const isApproved = reviews.body.some(
(review: { state: string }) => review.state === 'APPROVED' (review) => review.state === 'APPROVED'
); );
if (!isApproved) { if (!isApproved) {
logger.debug( logger.debug(
@ -1740,7 +1755,7 @@ export async function mergePr(
options.body.merge_method = config.mergeMethod; options.body.merge_method = config.mergeMethod;
try { try {
logger.debug({ options, url }, `mergePr`); logger.debug({ options, url }, `mergePr`);
await api.put(url, options); await githubApi.putJson(url, options);
automerged = true; automerged = true;
} catch (err) { } catch (err) {
if (err.statusCode === 404 || err.statusCode === 405) { if (err.statusCode === 404 || err.statusCode === 405) {
@ -1760,7 +1775,7 @@ export async function mergePr(
options.body.merge_method = 'rebase'; options.body.merge_method = 'rebase';
try { try {
logger.debug({ options, url }, `mergePr`); logger.debug({ options, url }, `mergePr`);
await api.put(url, options); await githubApi.putJson(url, options);
} catch (err1) { } catch (err1) {
logger.debug( logger.debug(
{ err: err1 }, { err: err1 },
@ -1769,7 +1784,7 @@ export async function mergePr(
try { try {
options.body.merge_method = 'squash'; options.body.merge_method = 'squash';
logger.debug({ options, url }, `mergePr`); logger.debug({ options, url }, `mergePr`);
await api.put(url, options); await githubApi.putJson(url, options);
} catch (err2) { } catch (err2) {
logger.debug( logger.debug(
{ err: err2 }, { err: err2 },
@ -1778,7 +1793,7 @@ export async function mergePr(
try { try {
options.body.merge_method = 'merge'; options.body.merge_method = 'merge';
logger.debug({ options, url }, `mergePr`); logger.debug({ options, url }, `mergePr`);
await api.put(url, options); await githubApi.putJson(url, options);
} catch (err3) { } catch (err3) {
logger.debug( logger.debug(
{ err: err3 }, { err: err3 },
@ -1811,10 +1826,6 @@ export function getPrBody(input: string): string {
} }
export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> { export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
const headers = {
accept: 'application/vnd.github.vixen-preview+json',
};
const url = 'graphql';
// prettier-ignore // prettier-ignore
const query = ` const query = `
query { query {
@ -1842,18 +1853,18 @@ export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
} }
} }
}`; }`;
const options = {
headers,
body: JSON.stringify({ query }),
json: false,
};
let alerts = []; let alerts = [];
try { try {
const res = JSON.parse((await api.post(url, options)).body); const vulnerabilityAlerts = await githubApi.getGraphqlNodes<{ node: any }>(
if (res?.data?.repository?.vulnerabilityAlerts) { query,
alerts = res.data.repository.vulnerabilityAlerts.edges.map( 'vulnerabilityAlerts',
(edge: { node: any }) => edge.node {
); paginate: false,
acceptHeader: 'application/vnd.github.vixen-preview+json',
}
);
if (vulnerabilityAlerts?.length) {
alerts = vulnerabilityAlerts.map((edge) => edge.node);
if (alerts.length) { if (alerts.length) {
logger.debug({ alerts }, 'Found GitHub vulnerability alerts'); logger.debug({ alerts }, 'Found GitHub vulnerability alerts');
} }

View file

@ -51,6 +51,110 @@ Object {
} }
`; `;
exports[`workers/pr/changelog getChangeLogJSON filters unnecessary warns 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/tags?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/releases?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/releases?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/releases?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/releases?per_page=100",
},
]
`;
exports[`workers/pr/changelog getChangeLogJSON supports github enterprise and github enterprise changelog 1`] = ` exports[`workers/pr/changelog getChangeLogJSON supports github enterprise and github enterprise changelog 1`] = `
Object { Object {
"hasReleaseNotes": true, "hasReleaseNotes": true,
@ -102,6 +206,44 @@ Object {
} }
`; `;
exports[`workers/pr/changelog getChangeLogJSON supports github enterprise and github enterprise changelog 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "github-enterprise.example.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://github-enterprise.example.com/repos/chalk/chalk/tags?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "github-enterprise.example.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://github-enterprise.example.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "github-enterprise.example.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://github-enterprise.example.com/repos/chalk/chalk/releases?per_page=100",
},
]
`;
exports[`workers/pr/changelog getChangeLogJSON supports github enterprise and github.com changelog 1`] = ` exports[`workers/pr/changelog getChangeLogJSON supports github enterprise and github.com changelog 1`] = `
Object { Object {
"hasReleaseNotes": true, "hasReleaseNotes": true,
@ -153,6 +295,44 @@ Object {
} }
`; `;
exports[`workers/pr/changelog getChangeLogJSON supports github enterprise and github.com changelog 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/tags?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/releases?per_page=100",
},
]
`;
exports[`workers/pr/changelog getChangeLogJSON supports github.com and github enterprise changelog 1`] = ` exports[`workers/pr/changelog getChangeLogJSON supports github.com and github enterprise changelog 1`] = `
Object { Object {
"hasReleaseNotes": true, "hasReleaseNotes": true,
@ -204,6 +384,44 @@ Object {
} }
`; `;
exports[`workers/pr/changelog getChangeLogJSON supports github.com and github enterprise changelog 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "github-enterprise.example.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://github-enterprise.example.com/repos/chalk/chalk/tags?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "github-enterprise.example.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://github-enterprise.example.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "github-enterprise.example.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://github-enterprise.example.com/repos/chalk/chalk/releases?per_page=100",
},
]
`;
exports[`workers/pr/changelog getChangeLogJSON supports node engines 1`] = ` exports[`workers/pr/changelog getChangeLogJSON supports node engines 1`] = `
Object { Object {
"hasReleaseNotes": true, "hasReleaseNotes": true,
@ -310,6 +528,44 @@ Object {
} }
`; `;
exports[`workers/pr/changelog getChangeLogJSON uses GitHub tags 2`] = `
Array [
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/tags?per_page=100",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/contents/",
},
Object {
"headers": Object {
"accept": "application/json",
"accept-encoding": "gzip, deflate",
"authorization": "token abc",
"host": "api.github.com",
"user-agent": "https://github.com/renovatebot/renovate",
},
"method": "GET",
"url": "https://api.github.com/repos/chalk/chalk/releases?per_page=100",
},
]
`;
exports[`workers/pr/changelog getChangeLogJSON works without Github 1`] = ` exports[`workers/pr/changelog getChangeLogJSON works without Github 1`] = `
Object { Object {
"hasReleaseNotes": true, "hasReleaseNotes": true,

View file

@ -1,17 +1,17 @@
import { mocked, partial } from '../../../../test/util'; import * as httpMock from '../../../../test/httpMock';
import { partial } from '../../../../test/util';
import { PLATFORM_TYPE_GITHUB } from '../../../constants/platforms'; import { PLATFORM_TYPE_GITHUB } from '../../../constants/platforms';
import { api } from '../../../platform/github/gh-got-wrapper';
import * as globalCache from '../../../util/cache/global'; import * as globalCache from '../../../util/cache/global';
import { clear } from '../../../util/cache/run';
import * as runCache from '../../../util/cache/run'; import * as runCache from '../../../util/cache/run';
import * as hostRules from '../../../util/host-rules'; import * as hostRules from '../../../util/host-rules';
import * as semverVersioning from '../../../versioning/semver'; import * as semverVersioning from '../../../versioning/semver';
import { BranchConfig } from '../../common'; import { BranchConfig } from '../../common';
import { ChangeLogError, getChangeLogJSON } from '.'; import { ChangeLogError, getChangeLogJSON } from '.';
jest.mock('../../../platform/github/gh-got-wrapper');
jest.mock('../../../datasource/npm'); jest.mock('../../../datasource/npm');
const ghGot = mocked(api).get; const githubApiHost = 'https://api.github.com';
const upgrade: BranchConfig = partial<BranchConfig>({ const upgrade: BranchConfig = partial<BranchConfig>({
endpoint: 'https://api.github.com/', endpoint: 'https://api.github.com/',
@ -37,7 +37,7 @@ const upgrade: BranchConfig = partial<BranchConfig>({
describe('workers/pr/changelog', () => { describe('workers/pr/changelog', () => {
describe('getChangeLogJSON', () => { describe('getChangeLogJSON', () => {
beforeEach(async () => { beforeEach(async () => {
ghGot.mockClear(); httpMock.setup();
hostRules.clear(); hostRules.clear();
hostRules.add({ hostRules.add({
hostType: PLATFORM_TYPE_GITHUB, hostType: PLATFORM_TYPE_GITHUB,
@ -47,25 +47,34 @@ describe('workers/pr/changelog', () => {
await globalCache.rmAll(); await globalCache.rmAll();
runCache.clear(); runCache.clear();
}); });
afterEach(() => {
clear();
httpMock.reset();
});
it('returns null if @types', async () => { it('returns null if @types', async () => {
httpMock.scope(githubApiHost);
expect( expect(
await getChangeLogJSON({ await getChangeLogJSON({
...upgrade, ...upgrade,
fromVersion: null, fromVersion: null,
}) })
).toBeNull(); ).toBeNull();
expect(ghGot).toHaveBeenCalledTimes(0); expect(httpMock.getTrace()).toHaveLength(0);
}); });
it('returns null if no fromVersion', async () => { it('returns null if no fromVersion', async () => {
httpMock.scope(githubApiHost);
expect( expect(
await getChangeLogJSON({ await getChangeLogJSON({
...upgrade, ...upgrade,
sourceUrl: 'https://github.com/DefinitelyTyped/DefinitelyTyped', sourceUrl: 'https://github.com/DefinitelyTyped/DefinitelyTyped',
}) })
).toBeNull(); ).toBeNull();
expect(ghGot).toHaveBeenCalledTimes(0); expect(httpMock.getTrace()).toHaveLength(0);
}); });
it('returns null if fromVersion equals toVersion', async () => { it('returns null if fromVersion equals toVersion', async () => {
httpMock.scope(githubApiHost);
expect( expect(
await getChangeLogJSON({ await getChangeLogJSON({
...upgrade, ...upgrade,
@ -73,50 +82,61 @@ describe('workers/pr/changelog', () => {
toVersion: '1.0.0', toVersion: '1.0.0',
}) })
).toBeNull(); ).toBeNull();
expect(ghGot).toHaveBeenCalledTimes(0); expect(httpMock.getTrace()).toHaveLength(0);
}); });
it('skips invalid repos', async () => { it('skips invalid repos', async () => {
httpMock.scope(githubApiHost);
expect( expect(
await getChangeLogJSON({ await getChangeLogJSON({
...upgrade, ...upgrade,
sourceUrl: 'https://github.com/about', sourceUrl: 'https://github.com/about',
}) })
).toBeNull(); ).toBeNull();
expect(httpMock.getTrace()).toHaveLength(0);
}); });
it('works without Github', async () => { it('works without Github', async () => {
httpMock.scope(githubApiHost);
expect( expect(
await getChangeLogJSON({ await getChangeLogJSON({
...upgrade, ...upgrade,
}) })
).toMatchSnapshot(); ).toMatchSnapshot();
expect(httpMock.getTrace()).toHaveLength(0);
}); });
it('uses GitHub tags', async () => { it('uses GitHub tags', async () => {
ghGot.mockResolvedValueOnce({ httpMock
body: [ .scope(githubApiHost)
.get('/repos/chalk/chalk/tags?per_page=100')
.reply(200, [
{ name: '0.9.0' }, { name: '0.9.0' },
{ name: '1.0.0' }, { name: '1.0.0' },
{ name: '1.4.0' }, { name: '1.4.0' },
{ name: 'v2.3.0' }, { name: 'v2.3.0' },
{ name: '2.2.2' }, { name: '2.2.2' },
{ name: 'v2.4.2' }, { name: 'v2.4.2' },
], ])
} as never); .persist()
.get(/.*/)
.reply(200, []);
expect( expect(
await getChangeLogJSON({ await getChangeLogJSON({
...upgrade, ...upgrade,
}) })
).toMatchSnapshot(); ).toMatchSnapshot();
expect(httpMock.getTrace()).toMatchSnapshot();
}); });
it('filters unnecessary warns', async () => { it('filters unnecessary warns', async () => {
ghGot.mockImplementation(() => { httpMock
throw new Error('Unknown Github Repo'); .scope(githubApiHost)
.persist()
.get(/.*/)
.replyWithError('Unknown Github Repo');
const res = await getChangeLogJSON({
...upgrade,
depName: '@renovate/no',
}); });
expect( expect(res).toMatchSnapshot();
await getChangeLogJSON({ expect(httpMock.getTrace()).toMatchSnapshot();
...upgrade,
depName: '@renovate/no',
})
).toMatchSnapshot();
}); });
it('supports node engines', async () => { it('supports node engines', async () => {
expect( expect(
@ -167,6 +187,7 @@ describe('workers/pr/changelog', () => {
).toBeNull(); ).toBeNull();
}); });
it('supports github enterprise and github.com changelog', async () => { it('supports github enterprise and github.com changelog', async () => {
httpMock.scope(githubApiHost).persist().get(/.*/).reply(200, []);
hostRules.add({ hostRules.add({
hostType: PLATFORM_TYPE_GITHUB, hostType: PLATFORM_TYPE_GITHUB,
token: 'super_secret', token: 'super_secret',
@ -178,21 +199,14 @@ describe('workers/pr/changelog', () => {
endpoint: 'https://github-enterprise.example.com/', endpoint: 'https://github-enterprise.example.com/',
}) })
).toMatchSnapshot(); ).toMatchSnapshot();
expect(ghGot).toHaveBeenNthCalledWith( expect(httpMock.getTrace()).toMatchSnapshot();
1,
'https://api.github.com/repos/chalk/chalk/tags?per_page=100',
{ paginate: true }
);
expect(ghGot).toHaveBeenNthCalledWith(
2,
'https://api.github.com/repos/chalk/chalk/contents/'
);
expect(ghGot).toHaveBeenNthCalledWith(
3,
'https://api.github.com/repos/chalk/chalk/releases?per_page=100'
);
}); });
it('supports github enterprise and github enterprise changelog', async () => { it('supports github enterprise and github enterprise changelog', async () => {
httpMock
.scope('https://github-enterprise.example.com')
.persist()
.get(/.*/)
.reply(200, []);
hostRules.add({ hostRules.add({
hostType: PLATFORM_TYPE_GITHUB, hostType: PLATFORM_TYPE_GITHUB,
baseUrl: 'https://github-enterprise.example.com/', baseUrl: 'https://github-enterprise.example.com/',
@ -206,22 +220,15 @@ describe('workers/pr/changelog', () => {
endpoint: 'https://github-enterprise.example.com/', endpoint: 'https://github-enterprise.example.com/',
}) })
).toMatchSnapshot(); ).toMatchSnapshot();
expect(ghGot).toHaveBeenNthCalledWith( expect(httpMock.getTrace()).toMatchSnapshot();
1,
'https://github-enterprise.example.com/repos/chalk/chalk/tags?per_page=100',
{ paginate: true }
);
expect(ghGot).toHaveBeenNthCalledWith(
2,
'https://github-enterprise.example.com/repos/chalk/chalk/contents/'
);
expect(ghGot).toHaveBeenNthCalledWith(
3,
'https://github-enterprise.example.com/repos/chalk/chalk/releases?per_page=100'
);
}); });
it('supports github.com and github enterprise changelog', async () => { it('supports github.com and github enterprise changelog', async () => {
httpMock
.scope('https://github-enterprise.example.com')
.persist()
.get(/.*/)
.reply(200, []);
hostRules.add({ hostRules.add({
hostType: PLATFORM_TYPE_GITHUB, hostType: PLATFORM_TYPE_GITHUB,
baseUrl: 'https://github-enterprise.example.com/', baseUrl: 'https://github-enterprise.example.com/',
@ -233,19 +240,7 @@ describe('workers/pr/changelog', () => {
sourceUrl: 'https://github-enterprise.example.com/chalk/chalk', sourceUrl: 'https://github-enterprise.example.com/chalk/chalk',
}) })
).toMatchSnapshot(); ).toMatchSnapshot();
expect(ghGot).toHaveBeenNthCalledWith( expect(httpMock.getTrace()).toMatchSnapshot();
1,
'https://github-enterprise.example.com/repos/chalk/chalk/tags?per_page=100',
{ paginate: true }
);
expect(ghGot).toHaveBeenNthCalledWith(
2,
'https://github-enterprise.example.com/repos/chalk/chalk/contents/'
);
expect(ghGot).toHaveBeenNthCalledWith(
3,
'https://github-enterprise.example.com/repos/chalk/chalk/releases?per_page=100'
);
}); });
}); });
}); });

View file

@ -4,15 +4,15 @@ import { linkify } from 'linkify-markdown';
import MarkdownIt from 'markdown-it'; import MarkdownIt from 'markdown-it';
import { logger } from '../../../logger'; import { logger } from '../../../logger';
import { api } from '../../../platform/github/gh-got-wrapper';
import * as globalCache from '../../../util/cache/global'; import * as globalCache from '../../../util/cache/global';
import { GithubHttp } from '../../../util/http/github';
import { ChangeLogNotes, ChangeLogResult } from './common'; import { ChangeLogNotes, ChangeLogResult } from './common';
const { get: ghGot } = api;
const markdown = new MarkdownIt('zero'); const markdown = new MarkdownIt('zero');
markdown.enable(['heading', 'lheading']); markdown.enable(['heading', 'lheading']);
const http = new GithubHttp();
export async function getReleaseList( export async function getReleaseList(
apiBaseUrl: string, apiBaseUrl: string,
repository: string repository: string
@ -25,7 +25,7 @@ export async function getReleaseList(
try { try {
let url = apiBaseUrl.replace(/\/?$/, '/'); let url = apiBaseUrl.replace(/\/?$/, '/');
url += `repos/${repository}/releases?per_page=100`; url += `repos/${repository}/releases?per_page=100`;
const res = await ghGot< const res = await http.getJson<
{ {
html_url: string; html_url: string;
id: number; id: number;
@ -161,7 +161,7 @@ export async function getReleaseNotesMd(
let apiPrefix = apiBaseUrl.replace(/\/?$/, '/'); let apiPrefix = apiBaseUrl.replace(/\/?$/, '/');
apiPrefix += `repos/${repository}/contents/`; apiPrefix += `repos/${repository}/contents/`;
const filesRes = await ghGot<{ name: string }[]>(apiPrefix); const filesRes = await http.getJson<{ name: string }[]>(apiPrefix);
const files = filesRes.body const files = filesRes.body
.map((f) => f.name) .map((f) => f.name)
.filter((f) => changelogFilenameRegex.test(f)); .filter((f) => changelogFilenameRegex.test(f));
@ -176,7 +176,7 @@ export async function getReleaseNotesMd(
`Multiple candidates for changelog file, using ${changelogFile}` `Multiple candidates for changelog file, using ${changelogFile}`
); );
} }
const fileRes = await ghGot<{ content: string }>( const fileRes = await http.getJson<{ content: string }>(
`${apiPrefix}/${changelogFile}` `${apiPrefix}/${changelogFile}`
); );
changelogMd = changelogMd =

View file

@ -2,15 +2,15 @@ import URL from 'url';
import { PLATFORM_TYPE_GITHUB } from '../../../constants/platforms'; import { PLATFORM_TYPE_GITHUB } from '../../../constants/platforms';
import { Release } from '../../../datasource'; import { Release } from '../../../datasource';
import { logger } from '../../../logger'; import { logger } from '../../../logger';
import { api } from '../../../platform/github/gh-got-wrapper';
import * as globalCache from '../../../util/cache/global'; import * as globalCache from '../../../util/cache/global';
import * as hostRules from '../../../util/host-rules'; import * as hostRules from '../../../util/host-rules';
import { GithubHttp } from '../../../util/http/github';
import * as allVersioning from '../../../versioning'; import * as allVersioning from '../../../versioning';
import { BranchUpgradeConfig } from '../../common'; import { BranchUpgradeConfig } from '../../common';
import { ChangeLogError, ChangeLogRelease, ChangeLogResult } from './common'; import { ChangeLogError, ChangeLogRelease, ChangeLogResult } from './common';
import { addReleaseNotes } from './release-notes'; import { addReleaseNotes } from './release-notes';
const { get: ghGot } = api; const http = new GithubHttp();
async function getTags( async function getTags(
endpoint: string, endpoint: string,
@ -18,7 +18,7 @@ async function getTags(
): Promise<string[]> { ): Promise<string[]> {
const url = `${endpoint}repos/${repository}/tags?per_page=100`; const url = `${endpoint}repos/${repository}/tags?per_page=100`;
try { try {
const res = await ghGot<{ name: string }[]>(url, { const res = await http.getJson<{ name: string }[]>(url, {
paginate: true, paginate: true,
}); });