diff --git a/lib/platform/github/index.js b/lib/platform/github/index.js index ba376a113c6462daee614b6fdee6ac5302fff83d..fe914e50913730575dbf727f8808517765a3671f 100644 --- a/lib/platform/github/index.js +++ b/lib/platform/github/index.js @@ -4,6 +4,7 @@ const URL = require('url'); const get = require('./gh-got-wrapper'); const hostRules = require('../../util/host-rules'); +const Storage = require('./storage'); const GitStorage = require('../git/storage'); const { @@ -126,6 +127,7 @@ async function initRepo({ forkMode, forkToken, gitPrivateKey, + gitFs, localDir, includeForks, renovateUsername, @@ -152,7 +154,7 @@ async function initRepo({ res = await get(`repos/${repository}`); logger.trace({ repositoryDetails: res.body }, 'Repository details'); // istanbul ignore if - if (res.body.fork && !includeForks) { + if (res.body.fork && gitFs && !includeForks) { try { const renovateConfig = JSON.parse( Buffer.from( @@ -228,6 +230,8 @@ async function initRepo({ config.prList = null; config.openPrList = null; config.closedPrList = null; + config.storage = new Storage(); + await config.storage.initRepo(config); if (forkMode) { logger.info('Bot is in forkMode'); config.forkToken = forkToken; @@ -286,25 +290,31 @@ async function initRepo({ // Wait an arbitrary 30s to hopefully give GitHub enough time for forking to complete await delay(30000); } + await config.storage.initRepo(config); } - // gitFs - const parsedEndpoint = URL.parse(defaults.endpoint); - parsedEndpoint.auth = - config.forkToken || global.appMode - ? `x-access-token:${opts.token}` - : opts.token; - parsedEndpoint.host = parsedEndpoint.host.replace( - 'api.github.com', - 'github.com' - ); - parsedEndpoint.pathname = config.repository + '.git'; - const url = URL.format(parsedEndpoint); - config.storage = new GitStorage(); - await config.storage.initRepo({ - ...config, - url, - }); + // istanbul ignore if + if (gitFs) { + logger.debug('Enabling Git FS'); + let { host } = URL.parse(defaults.endpoint); + if (host === 'api.github.com') { + host = null; + } + host = host || 'github.com'; + const url = GitStorage.getUrl({ + gitFs, + auth: + config.forkToken || + (global.appMode ? 
`x-access-token:${opts.token}` : opts.token), + hostname: host, + repository: config.repository, + }); + config.storage = new GitStorage(); + await config.storage.initRepo({ + ...config, + url, + }); + } return platformConfig; } @@ -354,28 +364,11 @@ async function getRepoForceRebase() { return config.repoForceRebase; } -// Return the commit SHA for a branch -async function getBranchCommit(branchName) { - try { - const res = await get( - `repos/${config.repository}/git/refs/heads/${branchName}` - ); - return res.body.object.sha; - } catch (err) /* istanbul ignore next */ { - logger.debug({ err }, 'Error getting branch commit'); - if (err.statusCode === 404) { - throw new Error('repository-changed'); - } - if (err.statusCode === 409) { - throw new Error('empty'); - } - throw err; - } -} - async function getBaseCommitSHA() { if (!config.baseCommitSHA) { - config.baseCommitSHA = await getBranchCommit(config.baseBranch); + config.baseCommitSHA = await config.storage.getBranchCommit( + config.baseBranch + ); } return config.baseCommitSHA; } @@ -391,12 +384,12 @@ async function getBranchProtection(branchName) { return res.body; } -// istanbul ignore next async function setBaseBranch(branchName = config.baseBranch) { logger.debug(`Setting baseBranch to ${branchName}`); config.baseBranch = branchName; config.baseCommitSHA = null; await config.storage.setBaseBranch(branchName); + await getFileList(branchName); } // istanbul ignore next @@ -406,39 +399,34 @@ function setBranchPrefix(branchPrefix) { // Search -// istanbul ignore next +// Get full file list function getFileList(branchName = config.baseBranch) { return config.storage.getFileList(branchName); } // Branch -// istanbul ignore next +// Returns true if branch exists, otherwise false function branchExists(branchName) { return config.storage.branchExists(branchName); } -// istanbul ignore next function getAllRenovateBranches(branchPrefix) { return config.storage.getAllRenovateBranches(branchPrefix); } -// istanbul ignore next function isBranchStale(branchName) { return config.storage.isBranchStale(branchName); } -// istanbul ignore next function getFile(filePath, branchName) { return config.storage.getFile(filePath, branchName); } -// istanbul ignore next function deleteBranch(branchName) { return config.storage.deleteBranch(branchName); } -// istanbul ignore next function getBranchLastCommitTime(branchName) { return config.storage.getBranchLastCommitTime(branchName); } @@ -448,8 +436,8 @@ function getRepoStatus() { return config.storage.getRepoStatus(); } -// istanbul ignore next function mergeBranch(branchName) { + // istanbul ignore if if (config.pushProtection) { logger.info( { branch: branchName }, @@ -459,7 +447,6 @@ function mergeBranch(branchName) { return config.storage.mergeBranch(branchName); } -// istanbul ignore next function commitFilesToBranch( branchName, files, @@ -474,7 +461,6 @@ function commitFilesToBranch( ); } -// istanbul ignore next function getCommitMessages() { return config.storage.getCommitMessages(); } diff --git a/lib/platform/github/storage.js b/lib/platform/github/storage.js new file mode 100644 index 0000000000000000000000000000000000000000..7dbd19c6ec85119ce4b05fd3f846cec977549cc1 --- /dev/null +++ b/lib/platform/github/storage.js @@ -0,0 +1,530 @@ +const moment = require('moment'); +const openpgp = require('openpgp'); +const path = require('path'); +const get = require('./gh-got-wrapper'); + +class Storage { + constructor() { + // config + let config = {}; + // cache + let branchFiles = {}; + let branchList 
= null; + + Object.assign(this, { + initRepo, + cleanRepo, + getRepoStatus: () => ({}), + branchExists, + commitFilesToBranch, + createBranch, + deleteBranch, + getAllRenovateBranches, + getBranchCommit, + getBranchLastCommitTime, + getCommitMessages, + getFile, + getFileList, + isBranchStale, + mergeBranch, + setBaseBranch, + setBranchPrefix, + }); + + function initRepo(args) { + cleanRepo(); + config = { ...args }; + } + + function cleanRepo() { + branchFiles = {}; + branchList = null; + } + + async function getBranchList() { + if (!branchList) { + logger.debug('Retrieving branchList'); + branchList = (await get( + `repos/${config.repository}/branches?per_page=100`, + { + paginate: true, + } + )).body.map(branch => branch.name); + } + return branchList; + } + + // Returns true if branch exists, otherwise false + async function branchExists(branchName) { + const res = (await getBranchList()).includes(branchName); + logger.debug(`branchExists(${branchName})=${res}`); + return res; + } + + function setBaseBranch(branchName) { + if (branchName) { + logger.debug(`Setting baseBranch to ${branchName}`); + config.baseBranch = branchName; + } + } + + // istanbul ignore next + function setBranchPrefix() { + // Do nothing + } + + // Get full file list + async function getFileList(branchName) { + const branch = branchName || config.baseBranch; + if (branchFiles[branch]) { + return branchFiles[branch]; + } + try { + const res = await get( + `repos/${config.repository}/git/trees/${branch}?recursive=true` + ); + if (res.body.truncated) { + logger.warn( + { repository: config.repository }, + 'repository tree is truncated' + ); + } + const fileList = res.body.tree + .filter(item => item.type === 'blob' && item.mode !== '120000') + .map(item => item.path) + .sort(); + logger.debug(`Retrieved fileList with length ${fileList.length}`); + branchFiles[branch] = fileList; + return fileList; + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 409) { + logger.debug('Repository is not initiated'); + throw new Error('uninitiated'); + } + logger.info( + { branchName, err, repository: config.repository }, + 'Error retrieving git tree - no files detected' + ); + throw err; + } + } + + async function getAllRenovateBranches(branchPrefix) { + logger.trace('getAllRenovateBranches'); + const allBranches = await getBranchList(); + if (branchPrefix.endsWith('/')) { + const branchPrefixPrefix = branchPrefix.slice(0, -1); + if (allBranches.includes(branchPrefixPrefix)) { + logger.warn( + `Pruning branch "${branchPrefixPrefix}" so that it does not block PRs` + ); + await deleteBranch(branchPrefixPrefix); + } + } + return allBranches.filter(branchName => + branchName.startsWith(branchPrefix) + ); + } + + async function isBranchStale(branchName) { + // Check if branch's parent SHA = master SHA + logger.debug(`isBranchStale(${branchName})`); + const branchCommit = await getBranchCommit(branchName); + logger.debug(`branchCommit=${branchCommit}`); + const commitDetails = await getCommitDetails(branchCommit); + logger.trace({ commitDetails }, `commitDetails`); + const parentSha = commitDetails.parents[0].sha; + logger.debug(`parentSha=${parentSha}`); + const baseCommitSHA = await getBranchCommit(config.baseBranch); + logger.debug(`baseCommitSHA=${baseCommitSHA}`); + // Return true if the SHAs don't match + return parentSha !== baseCommitSHA; + } + + async function deleteBranch(branchName) { + delete branchFiles[branchName]; + const options = config.forkToken + ? 
{ token: config.forkToken } + : undefined; + try { + await get.delete( + `repos/${config.repository}/git/refs/heads/${branchName}`, + options + ); + } catch (err) /* istanbul ignore next */ { + if (err.message.startsWith('Reference does not exist')) { + logger.info( + { branch: branchName }, + 'Branch to delete does not exist' + ); + } else if (err.message.startsWith('Cannot delete protected branch')) { + logger.info({ branch: branchName }, 'Cannot delete protected branch'); + } else { + logger.warn({ err, branch: branchName }, 'Error deleting branch'); + } + } + } + + async function mergeBranch(branchName) { + logger.debug(`mergeBranch(${branchName})`); + const url = `repos/${config.repository}/git/refs/heads/${ + config.baseBranch + }`; + const options = { + body: { + sha: await getBranchCommit(branchName), + }, + }; + try { + await get.patch(url, options); + logger.debug({ branch: branchName }, 'Branch merged'); + } catch (err) { + if ( + err.message.startsWith('Required status check') || + err.message.includes('required status checks are expected') + ) { + logger.debug('Branch is not ready for merge: ' + err.message); + throw new Error('not ready'); + } + logger.info({ err }, `Error pushing branch merge for ${branchName}`); + throw new Error('Branch automerge failed'); + } + // Delete branch + await deleteBranch(branchName); + } + + async function getBranchLastCommitTime(branchName) { + try { + const res = await get( + `repos/${config.repository}/commits?sha=${branchName}` + ); + return new Date(res.body[0].commit.committer.date); + } catch (err) { + logger.error({ err }, `getBranchLastCommitTime error`); + return new Date(); + } + } + + // Generic File operations + + async function getFile(filePath, branchName) { + logger.trace(`getFile(filePath=${filePath}, branchName=${branchName})`); + if (!(await getFileList(branchName)).includes(filePath)) { + return null; + } + let res; + try { + res = await get( + `repos/${config.repository}/contents/${encodeURI( + filePath + )}?ref=${branchName || config.baseBranch}` + ); + } catch (error) { + if (error.statusCode === 404) { + // If file not found, then return null JSON + logger.info({ filePath, branch: branchName }, 'getFile 404'); + return null; + } + if ( + error.statusCode === 403 && + error.message && + error.message.startsWith('This API returns blobs up to 1 MB in size') + ) { + logger.info('Large file'); + // istanbul ignore if + if (branchName && branchName !== config.baseBranch) { + logger.info('Cannot retrieve large files from non-master branch'); + return null; + } + // istanbul ignore if + if (path.dirname(filePath) !== '.') { + logger.info( + 'Cannot retrieve large files from non-root directories' + ); + return null; + } + const treeUrl = `repos/${config.repository}/git/trees/${ + config.baseBranch + }`; + const baseName = path.basename(filePath); + let fileSha; + (await get(treeUrl)).body.tree.forEach(file => { + if (file.path === baseName) { + fileSha = file.sha; + } + }); + if (!fileSha) { + logger.warn('Could not locate file blob'); + throw error; + } + res = await get(`repos/${config.repository}/git/blobs/${fileSha}`); + } else { + // Propagate if it's any other error + throw error; + } + } + if (res && res.body) { + if (res.body.content) { + return Buffer.from(res.body.content, 'base64').toString(); + } + // istanbul ignore next + return ''; + } + return null; + } + + // Add a new commit, create branch if not existing + async function commitFilesToBranch( + branchName, + files, + message, + parentBranch = config.baseBranch + 
) { + logger.debug( + `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'` + ); + try { + delete branchFiles[branchName]; + const parentCommit = await getBranchCommit(parentBranch); + const parentTree = await getCommitTree(parentCommit); + const fileBlobs = []; + // Create blobs + for (const file of files) { + const blob = await createBlob(file.contents); + fileBlobs.push({ + name: file.name, + blob, + }); + } + // Create tree + const tree = await createTree(parentTree, fileBlobs); + const commit = await createCommit(parentCommit, tree, message); + const isBranchExisting = await branchExists(branchName); + if (isBranchExisting) { + await updateBranch(branchName, commit); + logger.debug({ branch: branchName }, 'Branch updated'); + return 'updated'; + } + await createBranch(branchName, commit); + logger.debug({ branch: branchName }, 'Branch created'); + // istanbul ignore if + if (branchList) { + branchList.push(branchName); + } + return 'created'; + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 404) { + throw new Error('repository-changed'); + } + throw err; + } + } + + // Internal branch operations + + // Creates a new branch with provided commit + async function createBranch(branchName, sha) { + logger.debug(`createBranch(${branchName})`); + const options = { + body: { + ref: `refs/heads/${branchName}`, + sha, + }, + }; + // istanbul ignore if + if (config.forkToken) { + options.token = config.forkToken; + } + try { + // istanbul ignore if + if (branchName.includes('/')) { + const [blockingBranch] = branchName.split('/'); + if (await branchExists(blockingBranch)) { + logger.warn({ blockingBranch }, 'Deleting blocking branch'); + await deleteBranch(blockingBranch); + } + } + logger.debug({ options, branch: branchName }, 'Creating branch'); + await get.post(`repos/${config.repository}/git/refs`, options); + branchList.push(branchName); + logger.debug('Created branch'); + } catch (err) /* istanbul ignore next */ { + const headers = err.response.req.getHeaders(); + delete headers.token; + logger.warn( + { + err, + options, + }, + 'Error creating branch' + ); + if (err.statusCode === 422) { + throw new Error('repository-changed'); + } + throw err; + } + } + + // Return the commit SHA for a branch + async function getBranchCommit(branchName) { + try { + const res = await get( + `repos/${config.repository}/git/refs/heads/${branchName}` + ); + return res.body.object.sha; + } catch (err) /* istanbul ignore next */ { + logger.debug({ err }, 'Error getting branch commit'); + if (err.statusCode === 404) { + throw new Error('repository-changed'); + } + if (err.statusCode === 409) { + throw new Error('empty'); + } + throw err; + } + } + + async function getCommitMessages() { + logger.debug('getCommitMessages'); + const res = await get(`repos/${config.repository}/commits`); + return res.body.map(commit => commit.commit.message); + } + + // Internal: Updates an existing branch to new commit sha + async function updateBranch(branchName, commit) { + logger.debug(`Updating branch ${branchName} with commit ${commit}`); + const options = { + body: { + sha: commit, + force: true, + }, + }; + // istanbul ignore if + if (config.forkToken) { + options.token = config.forkToken; + } + try { + await get.patch( + `repos/${config.repository}/git/refs/heads/${branchName}`, + options + ); + } catch (err) /* istanbul ignore next */ { + if (err.statusCode === 422) { + logger.info({ err }, 'Branch no longer exists - exiting'); + throw new Error('repository-changed'); + } + throw 
err; + } + } + // Low-level commit operations + + // Create a blob with fileContents and return sha + async function createBlob(fileContents) { + logger.debug('Creating blob'); + const options = { + body: { + encoding: 'base64', + content: Buffer.from(fileContents).toString('base64'), + }, + }; + // istanbul ignore if + if (config.forkToken) { + options.token = config.forkToken; + } + return (await get.post(`repos/${config.repository}/git/blobs`, options)) + .body.sha; + } + + // Return the tree SHA for a commit + async function getCommitTree(commit) { + logger.debug(`getCommitTree(${commit})`); + return (await get(`repos/${config.repository}/git/commits/${commit}`)) + .body.tree.sha; + } + + // Create a tree and return SHA + async function createTree(baseTree, files) { + logger.debug(`createTree(${baseTree}, files)`); + const body = { + base_tree: baseTree, + tree: [], + }; + files.forEach(file => { + body.tree.push({ + path: file.name, + mode: '100644', + type: 'blob', + sha: file.blob, + }); + }); + logger.trace({ body }, 'createTree body'); + const options = { body }; + // istanbul ignore if + if (config.forkToken) { + options.token = config.forkToken; + } + return (await get.post(`repos/${config.repository}/git/trees`, options)) + .body.sha; + } + + // Create a commit and return commit SHA + async function createCommit(parent, tree, message) { + logger.debug(`createCommit(${parent}, ${tree}, ${message})`); + const { gitPrivateKey } = config; + const now = moment(); + let author; + if (global.gitAuthor) { + logger.trace('Setting gitAuthor'); + author = { + name: global.gitAuthor.name, + email: global.gitAuthor.email, + date: now.format(), + }; + } + const body = { + message, + parents: [parent], + tree, + }; + if (author) { + body.author = author; + // istanbul ignore if + if (gitPrivateKey) { + logger.debug('Found gitPrivateKey'); + const privKeyObj = openpgp.key.readArmored(gitPrivateKey).keys[0]; + const commit = `tree ${tree}\nparent ${parent}\nauthor ${ + author.name + } <${author.email}> ${now.format('X ZZ')}\ncommitter ${ + author.name + } <${author.email}> ${now.format('X ZZ')}\n\n${message}`; + const { signature } = await openpgp.sign({ + data: openpgp.util.str2Uint8Array(commit), + privateKeys: privKeyObj, + detached: true, + armor: true, + }); + body.signature = signature; + } + } + const options = { + body, + }; + // istanbul ignore if + if (config.forkToken) { + options.token = config.forkToken; + } + return (await get.post(`repos/${config.repository}/git/commits`, options)) + .body.sha; + } + + async function getCommitDetails(commit) { + logger.debug(`getCommitDetails(${commit})`); + const results = await get( + `repos/${config.repository}/git/commits/${commit}` + ); + return results.body; + } + } +} + +module.exports = Storage; diff --git a/test/platform/github/__snapshots__/index.spec.js.snap b/test/platform/github/__snapshots__/index.spec.js.snap index ebecc55c24c1f1634069a09a051bd3a6e44ff9dd..0b903b4eca8c0b24b5baff4b4da1fac47e00a6f1 100644 --- a/test/platform/github/__snapshots__/index.spec.js.snap +++ b/test/platform/github/__snapshots__/index.spec.js.snap @@ -32,6 +32,46 @@ Array [ ] `; +exports[`platform/github commitFilesToBranch(branchName, files, message, parentBranch) should add a commit to a new branch if the branch does not already exist 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/refs/heads/master", + ], + Array [ + "repos/some/repo/git/commits/1111", + ], + Array [ + "repos/some/repo/branches?per_page=100", + Object 
{ + "paginate": true, + }, + ], +] +`; + +exports[`platform/github commitFilesToBranch(branchName, files, message, parentBranch) should add a new commit to the branch 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/refs/heads/master", + ], + Array [ + "repos/some/repo/git/commits/1111", + ], + Array [ + "repos/some/repo/branches?per_page=100", + Object { + "paginate": true, + }, + ], +] +`; + exports[`platform/github createPr() should create and return a PR object 1`] = ` Object { "branchName": "some-branch", @@ -63,7 +103,7 @@ Array [ }, ], Array [ - "repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e", + "repos/some/repo/statuses/some-sha", Object { "body": Object { "context": "renovate/verify", @@ -138,6 +178,8 @@ content", ] `; +exports[`platform/github getBranchLastCommitTime should return a Date 1`] = `2011-04-14T16:00:49.000Z`; + exports[`platform/github getBranchPr(branchName) should return the PR object 1`] = ` Array [ Array [ @@ -154,6 +196,86 @@ Array [ exports[`platform/github getBranchPr(branchName) should return the PR object 2`] = `null`; +exports[`platform/github getCommitMessages() returns commits messages 1`] = ` +Array [ + "foo", + "bar", +] +`; + +exports[`platform/github getFile() should return large file via git API 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/trees/master?recursive=true", + ], + Array [ + "repos/some/repo/contents/package-lock.json?ref=master", + ], + Array [ + "repos/some/repo/git/trees/master", + ], + Array [ + "repos/some/repo/git/blobs/some-sha", + ], +] +`; + +exports[`platform/github getFile() should return large file via git API 2`] = `"{\\"hello\\":\\"workd\\"}"`; + +exports[`platform/github getFile() should return null if GitHub returns a 404 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/trees/master?recursive=true", + ], + Array [ + "repos/some/repo/contents/package.json?ref=master", + ], +] +`; + +exports[`platform/github getFile() should return null if getFile returns nothing 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/trees/master?recursive=true", + ], + Array [ + "repos/some/repo/contents/package.json?ref=master", + ], +] +`; + +exports[`platform/github getFile() should return the encoded file content 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/trees/master?recursive=true", + ], + Array [ + "repos/some/repo/contents/package.json?ref=master", + ], +] +`; + +exports[`platform/github getFileList should return the files matching the fileName 1`] = ` +Array [ + "package.json", + "some-dir/package.json.some-thing-else", + "src/app/package.json", + "src/otherapp/package.json", +] +`; + exports[`platform/github getPr(prNo) should return PR from closed graphql result 1`] = ` Object { "body": "dummy body", @@ -405,6 +527,86 @@ Object { } `; +exports[`platform/github mergeBranch(branchName) should perform a branch merge 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/refs/heads/thebranchname", + ], +] +`; + +exports[`platform/github mergeBranch(branchName) should perform a branch merge 2`] = ` +Array [ + Array [ + "repos/some/repo/git/refs/heads/master", + Object { + "body": Object { + "sha": "1235", + }, + }, + ], +] +`; + +exports[`platform/github mergeBranch(branchName) should perform a branch merge 3`] = `Array []`; + +exports[`platform/github mergeBranch(branchName) should perform a branch 
merge 4`] = `Array []`; + +exports[`platform/github mergeBranch(branchName) should perform a branch merge 5`] = ` +Array [ + Array [ + "repos/some/repo/git/refs/heads/thebranchname", + undefined, + ], +] +`; + +exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 1`] = `[Error: Branch automerge failed]`; + +exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 2`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/refs/heads/thebranchname", + ], +] +`; + +exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 3`] = ` +Array [ + Array [ + "repos/some/repo/git/refs/heads/master", + Object { + "body": Object { + "sha": "1235", + }, + }, + ], +] +`; + +exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 4`] = `Array []`; + +exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 5`] = `Array []`; + +exports[`platform/github mergeBranch(branchName) should throw if branch merge throws 6`] = `Array []`; + +exports[`platform/github setBaseBranch(branchName) sets the base branch 1`] = ` +Array [ + Array [ + "repos/some/repo", + ], + Array [ + "repos/some/repo/git/trees/some-branch?recursive=true", + ], +] +`; + exports[`platform/github updatePr(prNo, title, body) should update the PR 1`] = ` Array [ Array [ diff --git a/test/platform/github/index.spec.js b/test/platform/github/index.spec.js index e2a2cd9ca4382475f9b0a2f4ba624a0f950fcb44..bccb006dc7b40c7c6a73f2b2ece21c7a5c81ee85 100644 --- a/test/platform/github/index.spec.js +++ b/test/platform/github/index.spec.js @@ -4,7 +4,6 @@ describe('platform/github', () => { let github; let get; let hostRules; - let GitStorage; beforeEach(() => { // reset module jest.resetModules(); @@ -14,27 +13,6 @@ describe('platform/github', () => { get = require('../../../lib/platform/github/gh-got-wrapper'); github = require('../../../lib/platform/github'); hostRules = require('../../../lib/util/host-rules'); - jest.mock('../../../lib/platform/git/storage'); - GitStorage = require('../../../lib/platform/git/storage'); - GitStorage.mockImplementation(() => ({ - initRepo: jest.fn(), - cleanRepo: jest.fn(), - getFileList: jest.fn(), - branchExists: jest.fn(() => true), - isBranchStale: jest.fn(() => false), - setBaseBranch: jest.fn(), - getBranchLastCommitTime: jest.fn(), - getAllRenovateBranches: jest.fn(), - getCommitMessages: jest.fn(), - getFile: jest.fn(), - commitFilesToBranch: jest.fn(), - mergeBranch: jest.fn(), - deleteBranch: jest.fn(), - getRepoStatus: jest.fn(), - getBranchCommit: jest.fn( - () => '0d9c7726c3d628b7e28af234595cfd20febdbf8e' - ), - })); delete global.gitAuthor; hostRules.find.mockReturnValue({ hostType: 'github', @@ -119,14 +97,7 @@ describe('platform/github', () => { allow_merge_commit: true, }, })); - if (args.length) { - return github.initRepo(...args); - } - return github.initRepo({ - endpoint: 'https://github.com', - repository: 'some/repo', - token: 'token', - }); + return github.initRepo(...args); } describe('initRepo', () => { @@ -374,6 +345,184 @@ describe('platform/github', () => { }); }); }); + describe('setBaseBranch(branchName)', () => { + it('sets the base branch', async () => { + await initRepo({ + repository: 'some/repo', + }); + get.mockImplementationOnce(() => ({ + body: { + truncated: true, + tree: [], + }, + })); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1238', + }, + }, + })); + await 
github.setBaseBranch('some-branch'); + expect(get.mock.calls).toMatchSnapshot(); + }); + }); + describe('getFileList', () => { + beforeEach(async () => { + await initRepo({ + repository: 'some/repo', + }); + }); + it('throws if error', async () => { + get.mockImplementationOnce(() => { + throw new Error('some error'); + }); + await expect(github.getFileList('error-branch')).rejects.toThrow(); + }); + it('warns if truncated result', async () => { + get.mockImplementationOnce(() => ({ + body: { + truncated: true, + tree: [], + }, + })); + const files = await github.getFileList('truncated-branch'); + expect(files).toHaveLength(0); + }); + it('caches the result', async () => { + get.mockImplementationOnce(() => ({ + body: { + truncated: true, + tree: [], + }, + })); + let files = await github.getFileList('cached-branch'); + expect(files).toHaveLength(0); + files = await github.getFileList('cached-branch'); + expect(files).toHaveLength(0); + }); + it('should return the files matching the fileName', async () => { + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { type: 'blob', path: 'symlinks/package.json', mode: '120000' }, + { type: 'blob', path: 'package.json' }, + { + type: 'blob', + path: 'some-dir/package.json.some-thing-else', + }, + { type: 'blob', path: 'src/app/package.json' }, + { type: 'blob', path: 'src/otherapp/package.json' }, + ], + }, + })); + const files = await github.getFileList('npm-branch'); + expect(files).toMatchSnapshot(); + }); + }); + describe('branchExists(branchName)', () => { + it('should return true if the branch exists (one result)', async () => { + await initRepo({ + repository: 'some/repo', + }); + get.mockImplementationOnce(() => ({ + body: [ + { + name: 'thebranchname', + }, + ], + })); + const exists = await github.branchExists('thebranchname'); + expect(exists).toBe(true); + }); + }); + describe('getAllRenovateBranches()', () => { + it('should return all renovate branches', async () => { + await initRepo({ + repository: 'some/repo', + }); + get.mockImplementationOnce(() => ({ + body: [ + { + name: 'thebranchname', + }, + { + name: 'renovate', + }, + { + name: 'renovate/abc-1.x', + }, + ], + })); + const res = await github.getAllRenovateBranches('renovate/'); + expect(res).toHaveLength(1); + }); + }); + describe('isBranchStale(branchName)', () => { + it('should return false if same SHA as master', async () => { + await initRepo({ + repository: 'some/repo', + }); // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); + // getCommitDetails - same as master + get.mockImplementationOnce(() => ({ + body: { + parents: [ + { + sha: '1234', + }, + ], + }, + })); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1234', + }, + }, + })); + expect(await github.isBranchStale('thebranchname')).toBe(false); + }); + it('should return true if SHA different from master', async () => { + await initRepo({ + repository: 'some/repo', + }); // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); + // getCommitDetails - different + get.mockImplementationOnce(() => ({ + body: { + parents: [ + { + sha: '12345678', + }, + ], + }, + })); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1234', + }, + }, + })); + expect(await github.isBranchStale('thebranchname')).toBe(true); + }); + }); describe('getBranchPr(branchName)', () => { it('should return null if no PR exists', async () => { await 
initRepo({ @@ -547,8 +696,14 @@ describe('platform/github', () => { it('returns state if found', async () => { await initRepo({ repository: 'some/repo', - token: 'token', - }); + }); // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); get.mockImplementationOnce(() => ({ body: [ { @@ -565,16 +720,21 @@ describe('platform/github', () => { }, ], })); - const res = await github.getBranchStatusCheck( - 'renovate/future_branch', - 'context-2' - ); + const res = await github.getBranchStatusCheck('somebranch', 'context-2'); expect(res).toEqual('state-2'); }); it('returns null', async () => { await initRepo({ repository: 'some/repo', }); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); get.mockImplementationOnce(() => ({ body: [ { @@ -600,6 +760,14 @@ describe('platform/github', () => { await initRepo({ repository: 'some/repo', }); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); get.mockImplementationOnce(() => ({ body: [ { @@ -621,6 +789,14 @@ describe('platform/github', () => { await initRepo({ repository: 'some/repo', }); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); get.mockImplementationOnce(() => ({ body: [ { @@ -655,6 +831,112 @@ describe('platform/github', () => { expect(get.post).toHaveBeenCalledTimes(1); }); }); + describe('mergeBranch(branchName)', () => { + it('should perform a branch merge', async () => { + await initRepo({ + repository: 'some/repo', + }); // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); + get.patch.mockImplementationOnce(); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); + // deleteBranch + get.delete.mockImplementationOnce(); + await github.mergeBranch('thebranchname', 'branch'); + expect(get.mock.calls).toMatchSnapshot(); + expect(get.patch.mock.calls).toMatchSnapshot(); + expect(get.post.mock.calls).toMatchSnapshot(); + expect(get.put.mock.calls).toMatchSnapshot(); + expect(get.delete.mock.calls).toMatchSnapshot(); + }); + it('should throw if branch merge throws', async () => { + await initRepo({ + repository: 'some/repo', + }); // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); + get.patch.mockImplementationOnce(() => { + throw new Error('branch failed'); + }); + let e; + try { + await github.mergeBranch('thebranchname', 'branch'); + } catch (err) { + e = err; + } + expect(e).toMatchSnapshot(); + expect(get.mock.calls).toMatchSnapshot(); + expect(get.patch.mock.calls).toMatchSnapshot(); + expect(get.post.mock.calls).toMatchSnapshot(); + expect(get.put.mock.calls).toMatchSnapshot(); + expect(get.delete.mock.calls).toMatchSnapshot(); + }); + it('should throw not ready', async () => { + await initRepo({ + repository: 'some/repo', + }); // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); + get.patch.mockImplementationOnce(() => { + throw new Error('3 of 3 required status checks are expected.'); + }); + await expect( + github.mergeBranch('thebranchname', 'branch') + ).rejects.toThrow(Error('not ready')); + }); + }); + describe('getBranchLastCommitTime', () => { + it('should return a Date', async () => { + await initRepo({ + repository: 'some/repo', + }); + 
get.mockReturnValueOnce({ + body: [ + { + commit: { + committer: { + date: '2011-04-14T16:00:49Z', + }, + }, + }, + ], + }); + const res = await github.getBranchLastCommitTime('some-branch'); + expect(res).toMatchSnapshot(); + }); + it('handles error', async () => { + await initRepo({ + repository: 'some/repo', + }); + get.mockReturnValueOnce({ + body: [], + }); + const res = await github.getBranchLastCommitTime('some-branch'); + expect(res).toBeDefined(); + }); + }); describe('findIssue()', () => { it('returns null if no issue', async () => { get.mockReturnValueOnce({ @@ -999,6 +1281,14 @@ describe('platform/github', () => { number: 123, }, })); + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1235', + }, + }, + })); get.mockImplementationOnce(() => ({ body: [], })); @@ -1355,6 +1645,7 @@ describe('platform/github', () => { }; expect(await github.mergePr(pr)).toBe(true); expect(get.put).toHaveBeenCalledTimes(1); + expect(get.delete).toHaveBeenCalledTimes(1); expect(get).toHaveBeenCalledTimes(1); }); it('should handle merge error', async () => { @@ -1370,6 +1661,7 @@ describe('platform/github', () => { }); expect(await github.mergePr(pr)).toBe(false); expect(get.put).toHaveBeenCalledTimes(1); + expect(get.delete).toHaveBeenCalledTimes(0); expect(get).toHaveBeenCalledTimes(1); }); }); @@ -1452,6 +1744,7 @@ describe('platform/github', () => { }; expect(await github.mergePr(pr)).toBe(true); expect(get.put).toHaveBeenCalledTimes(1); + expect(get.delete).toHaveBeenCalledTimes(1); }); it('should try squash after rebase', async () => { const pr = { @@ -1465,6 +1758,7 @@ describe('platform/github', () => { }); await github.mergePr(pr); expect(get.put).toHaveBeenCalledTimes(2); + expect(get.delete).toHaveBeenCalledTimes(1); }); it('should try merge after squash', async () => { const pr = { @@ -1481,6 +1775,7 @@ describe('platform/github', () => { }); expect(await github.mergePr(pr)).toBe(true); expect(get.put).toHaveBeenCalledTimes(3); + expect(get.delete).toHaveBeenCalledTimes(1); }); it('should give up', async () => { const pr = { @@ -1500,6 +1795,354 @@ describe('platform/github', () => { }); expect(await github.mergePr(pr)).toBe(false); expect(get.put).toHaveBeenCalledTimes(3); + expect(get.delete).toHaveBeenCalledTimes(0); + }); + }); + describe('getFile()', () => { + it('should return the encoded file content', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + get.mockImplementationOnce(() => ({ + body: { + content: Buffer.from('hello world').toString('base64'), + }, + })); + const content = await github.getFile('package.json'); + expect(get.mock.calls).toMatchSnapshot(); + expect(content).toBe('hello world'); + }); + it('should return null if not in file list', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + const content = await github.getFile('.npmrc'); + expect(content).toBeNull(); + }); + it('should return null if GitHub returns a 404', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + 
path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + get.mockImplementationOnce(() => + Promise.reject({ + statusCode: 404, + }) + ); + const content = await github.getFile('package.json'); + expect(get.mock.calls).toMatchSnapshot(); + expect(content).toBeNull(); + }); + it('should return large file via git API', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + get.mockImplementationOnce(() => + Promise.reject({ + statusCode: 403, + message: 'This API returns blobs up to 1 MB in size, OK?', + }) + ); + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + path: 'package-lock.json', + sha: 'some-sha', + }, + ], + }, + })); + get.mockImplementationOnce(() => ({ + body: { + content: Buffer.from('{"hello":"workd"}').toString('base64'), + }, + })); + const content = await github.getFile('package-lock.json'); + expect(get.mock.calls).toMatchSnapshot(); + expect(content).toMatchSnapshot(); + }); + it('should throw if cannot find large file via git API', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + get.mockImplementationOnce(() => + Promise.reject({ + statusCode: 403, + message: 'This API returns blobs up to 1 MB in size, OK?', + }) + ); + get.mockImplementationOnce(() => ({ + body: { + tree: [], + }, + })); + await expect(github.getFile('package-lock.json')).rejects.toEqual({ + statusCode: 403, + message: 'This API returns blobs up to 1 MB in size, OK?', + }); + }); + it('should return null if getFile returns nothing', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + get.mockImplementationOnce(() => ({})); + const content = await github.getFile('package.json'); + expect(get.mock.calls).toMatchSnapshot(); + expect(content).toBeNull(); + }); + it('should return propagate unknown errors', async () => { + await initRepo({ repository: 'some/repo', token: 'token' }); + // getFileList + get.mockImplementationOnce(() => ({ + body: { + tree: [ + { + type: 'blob', + path: 'package.json', + }, + { + type: 'blob', + path: 'package-lock.json', + }, + ], + }, + })); + get.mockImplementationOnce(() => { + throw new Error('Something went wrong'); + }); + await expect(github.getFile('package.json')).rejects.toThrow( + Error('Something went wrong') + ); + }); + }); + describe('commitFilesToBranch(branchName, files, message, parentBranch)', () => { + beforeEach(async () => { + global.gitAuthor = { + name: 'Renovate Bot', + email: 'bot@renovatebot.com', + }; + await initRepo({ + repository: 'some/repo', + }); + + // getBranchCommit + get.mockImplementationOnce(() => ({ + body: { + object: { + sha: '1111', + }, + }, + })); + + // getCommitTree + get.mockImplementationOnce(() => ({ + body: { + tree: { + sha: '2222', + }, + }, + })); + + // createBlob + get.post.mockImplementationOnce(() => ({ + body: { + sha: '3333', + }, + })); + + // createTree + get.post.mockImplementationOnce(() => ({ + body: { 
+ sha: '4444', + }, + })); + + // createCommit + get.post.mockImplementationOnce(() => ({ + body: { + sha: '5555', + }, + })); + }); + it('should add a new commit to the branch', async () => { + // branchExists + get.mockImplementationOnce(() => ({ + body: [ + { + name: 'master', + }, + { + name: 'the-branch', + }, + ], + })); + const files = [ + { + name: 'package.json', + contents: 'hello world', + }, + ]; + await github.commitFilesToBranch( + 'the-branch', + files, + 'my commit message' + ); + expect(get.mock.calls).toMatchSnapshot(); + expect(get.post).toHaveBeenCalledTimes(3); + expect(get.patch).toHaveBeenCalledTimes(1); + }); + it('should add a commit to a new branch if the branch does not already exist', async () => { + // branchExists + get.mockImplementationOnce(() => ({ + body: [ + { + name: 'master', + }, + ], + })); + const files = [ + { + name: 'package.json', + contents: 'hello world', + }, + ]; + await github.commitFilesToBranch( + 'the-branch', + files, + 'my other commit message' + ); + expect(get.mock.calls).toMatchSnapshot(); + expect(get.post).toHaveBeenCalledTimes(4); + expect(get.patch).toHaveBeenCalledTimes(0); + }); + it('should parse valid gitAuthor', async () => { + // branchExists + get.mockImplementationOnce(() => ({ + body: [ + { + name: 'master', + }, + ], + })); + const files = [ + { + name: 'package.json', + contents: 'hello world', + }, + ]; + global.gitAuthor = { + name: 'Renovate Bot', + email: 'bot@renovatebot.com', + }; + await github.commitFilesToBranch( + 'the-branch', + files, + 'my other commit message' + ); + expect(get.post.mock.calls[2][1].body.author.name).toEqual( + 'Renovate Bot' + ); + expect(get.post.mock.calls[2][1].body.author.email).toEqual( + 'bot@renovatebot.com' + ); + }); + }); + describe('getCommitMessages()', () => { + it('returns commits messages', async () => { + await initRepo({ + repository: 'some/repo', + gitAuthor: 'Renovate Bot <bot@renovatebot.com>', + }); + get.mockReturnValueOnce({ + body: [ + { + commit: { message: 'foo' }, + }, + { + commit: { message: 'bar' }, + }, + ], + }); + const res = await github.getCommitMessages(); + expect(res).toMatchSnapshot(); }); }); describe('getVulnerabilityAlerts()', () => { diff --git a/test/platform/github/storage.spec.js b/test/platform/github/storage.spec.js new file mode 100644 index 0000000000000000000000000000000000000000..c12b039ca3f1602066c26a01394eff4cdfc42a1d --- /dev/null +++ b/test/platform/github/storage.spec.js @@ -0,0 +1,25 @@ +describe('platform/github/storage', () => { + const GithubStorage = require('../../../lib/platform/github/storage'); + const GitStorage = require('../../../lib/platform/git/storage'); + + function getAllPropertyNames(obj) { + let props = []; + let obj2 = obj; + + while (obj2 != null) { + props = props.concat(Object.getOwnPropertyNames(obj2)); + obj2 = Object.getPrototypeOf(obj2); + } + + return props.filter(p => !p.startsWith('_')); + } + + it('has same API for git storage', () => { + const githubMethods = getAllPropertyNames(new GithubStorage()).sort(); + const gitMethods = getAllPropertyNames(new GitStorage()).sort(); + expect(githubMethods).toMatchObject(gitMethods); + }); + it('getRepoStatus exists', async () => { + expect((await new GithubStorage()).getRepoStatus()).toEqual({}); + }); +});
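
The hunks above replace the unconditional git-backed storage in `lib/platform/github/index.js` with an API-backed `Storage` class that `initRepo()` instantiates and delegates to for branch, file and commit operations; `GitStorage` is now only wired in when `gitFs` is enabled. A minimal, hypothetical usage sketch of the new class, assuming the global `logger` and the gh-got wrapper behave as elsewhere in the codebase (repository, branch and file contents are illustrative only):

```js
const Storage = require('./storage'); // lib/platform/github/storage.js

async function example() {
  const storage = new Storage();
  storage.initRepo({ repository: 'some/repo', baseBranch: 'master' });

  // File discovery and reads go through the GitHub trees/contents API
  const fileList = await storage.getFileList(); // defaults to baseBranch
  const pkg = await storage.getFile('package.json');

  // Commits are assembled via the low-level git data API:
  // blob -> tree -> commit -> create or fast-forward the branch ref
  const result = await storage.commitFilesToBranch(
    'renovate/example-branch',
    [{ name: 'package.json', contents: pkg || '{}' }],
    'chore: example commit'
  );
  return { fileList, result }; // result is 'created' or 'updated'
}
```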
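The `gitFs` branch of `initRepo()` now derives the clone URL via `GitStorage.getUrl()` instead of assembling it inline with `url.format()`. A hedged sketch of the inputs it receives, mirroring the hunk above; the endpoint and token values are placeholders, and the exact URL string returned is whatever `lib/platform/git/storage.js` produces:

```js
const URL = require('url');
const GitStorage = require('../git/storage');

// Placeholders standing in for defaults.endpoint, opts.token and config
const endpoint = 'https://api.github.com/';
const token = 'some-token';
const config = { repository: 'some/repo', forkToken: null };

let { host } = URL.parse(endpoint);
if (host === 'api.github.com') {
  host = null; // the API host is not a clone host
}
host = host || 'github.com';

const url = GitStorage.getUrl({
  gitFs: 'https', // the configured gitFs protocol, e.g. 'https' or 'ssh'
  auth: config.forkToken || (global.appMode ? `x-access-token:${token}` : token),
  hostname: host,
  repository: config.repository,
});
```

One behavioural difference visible in the diff: the removed inline code always used `opts.token` as the credential (prefixed with `x-access-token:` when fork mode or app mode applied), while the new call passes `config.forkToken` through directly when it is set.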
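On the test side, the jest mock of `lib/platform/git/storage` is gone, so the spec now stubs the gh-got wrapper for each API call the storage class makes, in call order, and snapshot-tests the recorded calls. A condensed, hypothetical sketch of that pattern, assuming the spec's existing `initRepo()` helper has already set up `some/repo` and that `get` and `github` come from the surrounding `describe` setup (SHA and branch names are illustrative):

```js
it('should perform a branch merge', async () => {
  // getBranchCommit('thebranchname') -> GET .../git/refs/heads/thebranchname
  get.mockImplementationOnce(() => ({ body: { object: { sha: '1235' } } }));
  // mergeBranch() -> PATCH .../git/refs/heads/master with that sha
  get.patch.mockImplementationOnce(() => ({}));
  // deleteBranch('thebranchname') -> DELETE .../git/refs/heads/thebranchname
  get.delete.mockImplementationOnce(() => ({}));

  await github.mergeBranch('thebranchname');

  expect(get.patch.mock.calls).toMatchSnapshot();
  expect(get.delete).toHaveBeenCalledTimes(1);
});
```

Because `branchList` and `branchFiles` are cached inside the storage instance until the next `initRepo()`, each test queues exactly the one-shot stubs its own call sequence needs.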