From 9a2e37f5bd4998649aa5d59ff06cc9777603724a Mon Sep 17 00:00:00 2001
From: Sergei Zharinov <zharinov@users.noreply.github.com>
Date: Wed, 12 Jan 2022 14:31:36 +0300
Subject: [PATCH] fix(http/github): Preserve search params for paginated
 requests (#13506)

Co-authored-by: Michael Kriese <michael.kriese@visualon.de>
---
 lib/util/http/github.spec.ts | 20 ++++++++++----------
 lib/util/http/github.ts      | 21 +++++++--------------
 2 files changed, 17 insertions(+), 24 deletions(-)

diff --git a/lib/util/http/github.spec.ts b/lib/util/http/github.spec.ts
index 26d2714252..1a36c4efcf 100644
--- a/lib/util/http/github.spec.ts
+++ b/lib/util/http/github.spec.ts
@@ -80,21 +80,21 @@ describe('util/http/github', () => {
   });
 
   it('paginates', async () => {
-    const url = '/some-url';
+    const url = '/some-url?per_page=2';
     httpMock
       .scope(githubApiHost)
       .get(url)
-      .reply(200, ['a'], {
-        link: `<${url}?page=2>; rel="next", <${url}?page=3>; rel="last"`,
+      .reply(200, ['a', 'b'], {
+        link: `<${url}&page=2>; rel="next", <${url}&page=3>; rel="last"`,
       })
-      .get(`${url}?page=2`)
-      .reply(200, ['b', 'c'], {
-        link: `<${url}?page=3>; rel="next", <${url}?page=3>; rel="last"`,
+      .get(`${url}&page=2`)
+      .reply(200, ['c', 'd'], {
+        link: `<${url}&page=3>; rel="next", <${url}&page=3>; rel="last"`,
       })
-      .get(`${url}?page=3`)
-      .reply(200, ['d']);
-    const res = await githubApi.getJson('some-url', { paginate: true });
-    expect(res.body).toEqual(['a', 'b', 'c', 'd']);
+      .get(`${url}&page=3`)
+      .reply(200, ['e']);
+    const res = await githubApi.getJson(url, { paginate: true });
+    expect(res.body).toEqual(['a', 'b', 'c', 'd', 'e']);
     const trace = httpMock.getTrace();
     expect(trace).toHaveLength(3);
   });
diff --git a/lib/util/http/github.ts b/lib/util/http/github.ts
index ec0e949a68..dd5373602a 100644
--- a/lib/util/http/github.ts
+++ b/lib/util/http/github.ts
@@ -210,31 +210,24 @@ export class GithubHttp extends Http<GithubHttpOptions, GithubHttpOptions> {
       if (opts.paginate) {
         // Check if result is paginated
         const pageLimit = opts.pageLimit ?? 10;
-        const linkHeader =
-          result?.headers?.link &&
-          parseLinkHeader(result.headers.link as string);
+        const linkHeader = parseLinkHeader(result?.headers?.link);
         if (linkHeader?.next && linkHeader?.last) {
-          let lastPage = +linkHeader.last.page;
+          let lastPage = parseInt(linkHeader.last.page, 10);
           // istanbul ignore else: needs a test
           if (!process.env.RENOVATE_PAGINATE_ALL && opts.paginate !== 'all') {
             lastPage = Math.min(pageLimit, lastPage);
           }
-          const pageNumbers = Array.from(
-            new Array(lastPage),
-            (x, i) => i + 1
-          ).slice(1);
-          const queue = pageNumbers.map(
-            (page) => (): Promise<HttpResponse<T>> => {
+          const queue = Array.from(new Array(lastPage), (_, i) => `${i + 1}`)
+            .slice(1)
+            .map((pageNumber) => (): Promise<HttpResponse<T>> => {
               const nextUrl = new URL(linkHeader.next.url, baseUrl);
-              nextUrl.search = '';
-              nextUrl.searchParams.set('page', page.toString());
+              nextUrl.searchParams.set('page', pageNumber);
               return this.request<T>(
                 nextUrl,
                 { ...opts, paginate: false },
                 okToRetry
               );
-            }
-          );
+            });
           const pages = await pAll(queue, { concurrency: 5 });
           if (opts.paginationField && is.plainObject(result.body)) {
             const paginatedResult = result.body[opts.paginationField];
-- 
GitLab
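
Note (editorial, not part of the patch above): the core of the fix is that paginated follow-up requests now reuse the query string carried by the `next` URL from the Link header and only override the `page` parameter, instead of clearing `nextUrl.search` first. Below is a minimal standalone TypeScript sketch of the URL/URLSearchParams behaviour the fix relies on; the GitHub API URL and the `per_page`/`state` values are invented for illustration and do not come from the Renovate codebase.

// Hypothetical "next" URL as it might appear in a GitHub Link header.
const next = 'https://api.github.com/repos/some-org/some-repo/issues?per_page=2&state=open&page=2';

// Old behaviour: clearing `search` drops per_page and state before page is set.
const oldUrl = new URL(next);
oldUrl.search = '';
oldUrl.searchParams.set('page', '3');
console.log(oldUrl.toString()); // .../issues?page=3

// New behaviour: only `page` is replaced; the other params survive.
const newUrl = new URL(next);
newUrl.searchParams.set('page', '3');
console.log(newUrl.toString()); // .../issues?per_page=2&state=open&page=3

The sketch uses only the global WHATWG URL API available in Node.js, so it can be run as-is with ts-node or plain node.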