diff --git a/lib/config/index.js b/lib/config/index.js index 19d29baca8daf4b40d60bda9434c33e122ee6957..72557e675eef049af1e7ddc7f7514deb85f079f8 100644 --- a/lib/config/index.js +++ b/lib/config/index.js @@ -56,15 +56,13 @@ async function parseConfigs(env, argv) { // Check platforms and tokens if (config.platform === 'github') { - if (!config.githubAppId && !config.token && !env.GITHUB_TOKEN) { + if (!config.token && !env.GITHUB_TOKEN) { throw new Error('You need to supply a GitHub token.'); } - config.api = githubApi; } else if (config.platform === 'gitlab') { if (!config.token && !env.GITLAB_TOKEN) { throw new Error('You need to supply a GitLab token.'); } - config.api = gitlabApi; } else { throw new Error(`Unsupported platform: ${config.platform}.`); } diff --git a/lib/config/presets.js b/lib/config/presets.js index 703c39c5f50e81911930799e37f71267aa3fd446..9fa57f83db7e348623db2fb1f478db0a4e963f80 100644 --- a/lib/config/presets.js +++ b/lib/config/presets.js @@ -47,7 +47,7 @@ async function resolveConfigPresets( logger.trace({ config }, `Post-merge resolve config`); for (const key of Object.keys(config)) { const val = config[key]; - const ignoredKeys = ['api', 'content', 'logger']; + const ignoredKeys = ['content', 'logger']; if (isObject(val) && ignoredKeys.indexOf(key) === -1) { // Resolve nested objects logger.trace(`Resolving object "${key}"`); diff --git a/lib/config/validation.js b/lib/config/validation.js index 6e43cf17fa937eac9fd8bc4648d377a3d26042b3..12800e5318cd73961d83a9d6417a188dc14338f3 100644 --- a/lib/config/validation.js +++ b/lib/config/validation.js @@ -18,7 +18,7 @@ function validateConfig(config) { let warnings = []; function isIgnored(key) { - const ignoredNodes = ['api', 'depType', 'npmToken', 'packageFile']; + const ignoredNodes = ['depType', 'npmToken', 'packageFile']; return ignoredNodes.indexOf(key) !== -1; } diff --git a/lib/logger/config-serializer.js b/lib/logger/config-serializer.js index c8e51cd9eedfdfd7968f257278c95465e036bc39..5743337b647dd660d8b6c22dc1450de23ba029ab 100644 --- a/lib/logger/config-serializer.js +++ b/lib/logger/config-serializer.js @@ -11,7 +11,7 @@ function configSerializer(config) { 'yarnrc', 'privateKey', ]; - const functionFields = ['api', 'logger']; + const functionFields = ['logger']; const templateFields = ['commitMessage', 'prTitle', 'prBody']; // eslint-disable-next-line array-callback-return return traverse(config).map(function scrub(val) { diff --git a/lib/manager/docker/detect.js b/lib/manager/docker/detect.js index c59fed5fe06b467d1d69c4b40500866836864a8e..6d3c77888a383c904121ee5fca37358549ee08a0 100644 --- a/lib/manager/docker/detect.js +++ b/lib/manager/docker/detect.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + module.exports = { detectPackageFiles, }; @@ -9,7 +11,7 @@ async function detectPackageFiles(config, fileList) { if (config.docker.enabled) { for (const file of fileList) { if (file === 'Dockerfile' || file.endsWith('/Dockerfile')) { - const content = await config.api.getFileContent(file); + const content = await platform.getFileContent(file); if (content) { const strippedComment = content.replace(/^(#.*?\n)+/, ''); // This means we skip ones with ARG for now diff --git a/lib/manager/docker/resolve.js b/lib/manager/docker/resolve.js index 019b09bc0cf4fac6ef1fc1a3ad1a5a7d3587847c..c661f2f4bb8483098210ac10f0263374a098a008 100644 --- a/lib/manager/docker/resolve.js +++ b/lib/manager/docker/resolve.js @@ -1,4 +1,5 @@ const configParser = require('../../config'); +const platform = 
require('../../platform'); module.exports = { resolvePackageFile, @@ -10,9 +11,7 @@ async function resolvePackageFile(config, inputFile) { logger.debug( `Resolving packageFile ${JSON.stringify(packageFile.packageFile)}` ); - packageFile.content = await config.api.getFileContent( - packageFile.packageFile - ); + packageFile.content = await platform.getFileContent(packageFile.packageFile); if (!packageFile.content) { logger.debug('No packageFile content'); return null; diff --git a/lib/manager/index.js b/lib/manager/index.js index b3411b0687a032acb1453c15484e75155bdcd6e5..f125fa75f581f0b62aaeffa6890a3bfe963e22ad 100644 --- a/lib/manager/index.js +++ b/lib/manager/index.js @@ -11,6 +11,8 @@ const npmUpdater = require('./npm/update'); const meteorUpdater = require('./meteor/update'); const dockerfileHelper = require('./docker/update'); +const platform = require('../platform'); + module.exports = { detectPackageFiles, getPackageUpdates, @@ -22,7 +24,7 @@ async function detectPackageFiles(config) { logger.debug('detectPackageFiles()'); logger.trace({ config }); let packageFiles = []; - const fileList = (await config.api.getFileList()).filter( + const fileList = (await platform.getFileList()).filter( file => !config.ignorePaths.some( ignorePath => file.includes(ignorePath) || minimatch(file, ignorePath) @@ -66,7 +68,7 @@ async function getUpdatedPackageFiles(config) { if (upgrade.type !== 'lockFileMaintenance') { const existingContent = updatedPackageFiles[upgrade.packageFile] || - (await config.api.getFileContent( + (await platform.getFileContent( upgrade.packageFile, config.parentBranch )); diff --git a/lib/manager/meteor/detect.js b/lib/manager/meteor/detect.js index a4b4e63aa8122549c14de0c20ae98f987c009aa7..1df2158e5f6951f49ecf5809c204821135f90411 100644 --- a/lib/manager/meteor/detect.js +++ b/lib/manager/meteor/detect.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + module.exports = { detectPackageFiles, }; @@ -9,7 +11,7 @@ async function detectPackageFiles(config, fileList) { if (config.meteor.enabled) { for (const file of fileList) { if (file === 'package.js' || file.endsWith('/package.js')) { - const content = await config.api.getFileContent(file); + const content = await platform.getFileContent(file); if (content && content.replace(/\s/g, '').includes('Npm.depends({')) { packageFiles.push(file); } diff --git a/lib/manager/npm/monorepos.js b/lib/manager/npm/monorepos.js index a0a5870c0197eba0e0c2491622df587dc08ba5d5..0a8997934a038b3e086d4601fd215f9a7011124c 100644 --- a/lib/manager/npm/monorepos.js +++ b/lib/manager/npm/monorepos.js @@ -1,5 +1,6 @@ const minimatch = require('minimatch'); const path = require('path'); +const platform = require('../../platform'); module.exports = { checkMonorepos, @@ -38,7 +39,7 @@ async function checkMonorepos(config) { } } // lerna - const lernaJson = await config.api.getFileJson('lerna.json'); + const lernaJson = await platform.getFileJson('lerna.json'); if (lernaJson && lernaJson.packages) { logger.debug({ lernaJson }, 'Found lerna config'); for (const packageGlob of lernaJson.packages) { diff --git a/lib/manager/resolve.js b/lib/manager/resolve.js index d23c51f9cb51309fa2563d18e1c1002f4c2c3523..cc1e63a6719f31b9aa9790ddbae197a1904a1218 100644 --- a/lib/manager/resolve.js +++ b/lib/manager/resolve.js @@ -8,6 +8,8 @@ const dockerResolve = require('../manager/docker/resolve'); const { mergeChildConfig } = require('../config'); const { checkMonorepos } = require('../manager/npm/monorepos'); +const platform = require('../platform'); + 
module.exports = { resolvePackageFiles, }; @@ -25,7 +27,7 @@ async function resolvePackageFiles(config) { typeof packageFile === 'string' ? { packageFile } : packageFile; if (packageFile.packageFile.endsWith('package.json')) { logger.debug(`Resolving packageFile ${JSON.stringify(packageFile)}`); - const pFileRaw = await config.api.getFileContent(packageFile.packageFile); + const pFileRaw = await platform.getFileContent(packageFile.packageFile); if (!pFileRaw) { logger.info( { packageFile: packageFile.packageFile }, @@ -50,14 +52,14 @@ async function resolvePackageFiles(config) { } } if (!config.ignoreNpmrcFile) { - packageFile.npmrc = await config.api.getFileContent( + packageFile.npmrc = await platform.getFileContent( path.join(path.dirname(packageFile.packageFile), '.npmrc') ); } if (!packageFile.npmrc) { delete packageFile.npmrc; } - packageFile.yarnrc = await config.api.getFileContent( + packageFile.yarnrc = await platform.getFileContent( path.join(path.dirname(packageFile.packageFile), '.yarnrc') ); if (!packageFile.yarnrc) { @@ -102,9 +104,7 @@ async function resolvePackageFiles(config) { path.dirname(packageFile.packageFile), 'yarn.lock' ); - packageFile.yarnLock = await config.api.getFileContent( - yarnLockFileName - ); + packageFile.yarnLock = await platform.getFileContent(yarnLockFileName); if (packageFile.yarnLock) { logger.debug( { packageFile: packageFile.packageFile }, @@ -115,7 +115,7 @@ async function resolvePackageFiles(config) { path.dirname(packageFile.packageFile), 'package-lock.json' ); - packageFile.packageLock = await config.api.getFileContent( + packageFile.packageLock = await platform.getFileContent( packageLockFileName ); if (packageFile.packageLock) { diff --git a/lib/platform/index.js b/lib/platform/index.js new file mode 100644 index 0000000000000000000000000000000000000000..cb4b624f524fb5f04d1c976e3915a726d5cdbad2 --- /dev/null +++ b/lib/platform/index.js @@ -0,0 +1,19 @@ +const github = require('./github'); +const gitlab = require('./gitlab'); + +// istanbul ignore next +function platform() {} + +platform.init = function init(val) { + if (val === 'github') { + Object.keys(github).forEach(f => { + platform[f] = github[f]; + }); + } else if (val === 'gitlab') { + Object.keys(gitlab).forEach(f => { + platform[f] = gitlab[f]; + }); + } +}; + +module.exports = platform; diff --git a/lib/workers/branch/automerge.js b/lib/workers/branch/automerge.js index c4fcb845d58c7ea0b6b3c8843560f0ec4be7dd17..b14d9dcdb26121befca9425d53ea6aeee155fe0b 100644 --- a/lib/workers/branch/automerge.js +++ b/lib/workers/branch/automerge.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + module.exports = { tryBranchAutomerge, }; @@ -8,18 +10,18 @@ async function tryBranchAutomerge(config) { if (!config.automerge || config.automergeType === 'pr') { return 'no automerge'; } - const existingPr = await config.api.getBranchPr(config.branchName); + const existingPr = await platform.getBranchPr(config.branchName); if (existingPr) { return 'automerge aborted - PR exists'; } - const branchStatus = await config.api.getBranchStatus( + const branchStatus = await platform.getBranchStatus( config.branchName, config.requiredStatusChecks ); if (branchStatus === 'success') { logger.info(`Automerging branch`); try { - await config.api.mergeBranch(config.branchName, config.automergeType); + await platform.mergeBranch(config.branchName, config.automergeType); return 'automerged'; // Branch no longer exists } catch (err) { logger.info({ err }, `Failed to automerge branch`); diff --git 
a/lib/workers/branch/check-existing.js b/lib/workers/branch/check-existing.js index 4552d4f277cf72e1b1b536a752bc73050bdb096e..10d47398e9ce986fdf915cb8bce35a143ef448b5 100644 --- a/lib/workers/branch/check-existing.js +++ b/lib/workers/branch/check-existing.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + const moment = require('moment'); module.exports = { @@ -14,7 +16,7 @@ async function prAlreadyExisted(config) { logger.debug('recreateClosed is false'); // Return if same PR already existed // Check for current PR title format - let pr = await config.api.findPr(config.branchName, config.prTitle, 'closed'); + let pr = await platform.findPr(config.branchName, config.prTitle, 'closed'); if (pr) { logger.debug('Found closed PR with current title'); // this code exists to ignore mistakenly closed PRs which occurred due to a bug @@ -27,7 +29,7 @@ async function prAlreadyExisted(config) { { closedAt, problemStart, problemStopped }, 'Renaming mistakenly closed PR' ); - await config.api.updatePr(pr.number, `${pr.title} - autoclosed`); + await platform.updatePr(pr.number, `${pr.title} - autoclosed`); return null; } return pr; @@ -37,7 +39,7 @@ async function prAlreadyExisted(config) { const legacyPrTitle = config.prTitle .replace(/to v(\d+)$/, 'to version $1.x') // Major .replace(/to v(\d+)/, 'to version $1'); // Non-major - pr = await config.api.findPr(config.branchName, legacyPrTitle, 'closed'); + pr = await platform.findPr(config.branchName, legacyPrTitle, 'closed'); if (pr) { logger.info('Found closed PR with legacy title'); return pr; diff --git a/lib/workers/branch/commit.js b/lib/workers/branch/commit.js index 865de55430108ba6a36770b28ab64eb4730344ef..4d21dd8bb63a933669b899f59a19df7e3ce95db3 100644 --- a/lib/workers/branch/commit.js +++ b/lib/workers/branch/commit.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + const handlebars = require('handlebars'); module.exports = { @@ -18,7 +20,7 @@ async function commitFilesToBranch(config) { commitMessage = `${config.semanticPrefix} ${splitMessage.join('\n')}`; } // API will know whether to create new branch or not - await config.api.commitFilesToBranch( + await platform.commitFilesToBranch( config.branchName, updatedFiles, commitMessage, diff --git a/lib/workers/branch/index.js b/lib/workers/branch/index.js index 52c2192f5e61c6c9b12404b0044ecbe62207ec01..e7096e68811dc90278fd71fc710994988fa037bc 100644 --- a/lib/workers/branch/index.js +++ b/lib/workers/branch/index.js @@ -1,3 +1,4 @@ +const platform = require('../../platform'); const schedule = require('./schedule'); const { getUpdatedPackageFiles } = require('../../manager'); const { getUpdatedLockFiles } = require('./lock-files'); @@ -34,7 +35,7 @@ async function processBranch(branchConfig) { // Check schedule config.isScheduledNow = isScheduledNow(config); if (!config.isScheduledNow) { - if (!await config.api.branchExists(config.branchName)) { + if (!await platform.branchExists(config.branchName)) { logger.info('Skipping branch creation as not within schedule'); return 'not-scheduled'; } @@ -66,7 +67,7 @@ async function processBranch(branchConfig) { } content += '\n\nIf this PR was closed by mistake or you changed your mind, you can simply reopen or rename it to reactivate Renovate for this dependency version.'; - await config.api.ensureComment(pr.number, subject, content); + await platform.ensureComment(pr.number, subject, content); return 'already-existed'; } Object.assign(config, await getParentBranch(config)); @@ -92,7 +93,7 @@ async function 
processBranch(branchConfig) { await commitFilesToBranch(config); // Return now if no branch exists - if ((await config.api.branchExists(config.branchName)) === false) { + if ((await platform.branchExists(config.branchName)) === false) { logger.debug('Branch does not exist - returning'); return 'no-branch'; } @@ -151,9 +152,9 @@ async function processBranch(branchConfig) { content: `\`\`\`\n${error.stderr}\n\`\`\``, }); }); - await config.api.ensureComment(pr.number, topic, content, subtopics); + await platform.ensureComment(pr.number, topic, content, subtopics); } else { - await config.api.ensureCommentRemoval(pr.number, topic); + await platform.ensureCommentRemoval(pr.number, topic); const prAutomerged = await prWorker.checkAutoMerge(pr, config); if (prAutomerged) { return 'automerged'; diff --git a/lib/workers/branch/lock-files.js b/lib/workers/branch/lock-files.js index 4b1de0d28d0196ed6943f088a1601b1d5f96e7a1..03bbc3a053813cbda7ccda028c49775efea85834 100644 --- a/lib/workers/branch/lock-files.js +++ b/lib/workers/branch/lock-files.js @@ -3,6 +3,8 @@ const path = require('path'); const npm = require('./npm'); const yarn = require('./yarn'); +const platform = require('../../platform'); + module.exports = { hasPackageLock, hasYarnLock, @@ -198,7 +200,7 @@ async function getUpdatedLockFiles(config) { const updatedLockFiles = []; if ( config.type === 'lockFileMaintenance' && - (await config.api.branchExists(config.branchName)) + (await platform.branchExists(config.branchName)) ) { return { lockFileErrors, updatedLockFiles }; } @@ -220,7 +222,7 @@ async function getUpdatedLockFiles(config) { stderr: res.stderr, }); } else { - const existingContent = await config.api.getFileContent( + const existingContent = await platform.getFileContent( lockFileName, config.parentBranch ); @@ -249,7 +251,7 @@ async function getUpdatedLockFiles(config) { stderr: res.stderr, }); } else { - const existingContent = await config.api.getFileContent( + const existingContent = await platform.getFileContent( lockFileName, config.parentBranch ); diff --git a/lib/workers/branch/parent.js b/lib/workers/branch/parent.js index 0bff7920e0847cbd314d1ed0e78c03bf21d88e64..2e72954b8f6b54069140320eedff428bfea6de6c 100644 --- a/lib/workers/branch/parent.js +++ b/lib/workers/branch/parent.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + module.exports = { checkStale, getParentBranch, @@ -12,9 +14,9 @@ function checkStale(config) { } async function getParentBranch(config) { - const { api, branchName, logger } = config; + const { branchName, logger } = config; // Check if branch exists - const branchExists = await api.branchExists(branchName); + const branchExists = await platform.branchExists(branchName); if (!branchExists) { logger.info(`Branch needs creating`); return { parentBranch: undefined }; @@ -22,14 +24,14 @@ async function getParentBranch(config) { logger.info(`Branch already exists`); // Check for existing PR - const pr = await api.getBranchPr(branchName); + const pr = await platform.getBranchPr(branchName); if ( config.rebaseStalePrs || config.repoForceRebase || (config.automerge && config.automergeType === 'branch-push') ) { - const isBranchStale = await api.isBranchStale(branchName); + const isBranchStale = await platform.isBranchStale(branchName); if (isBranchStale) { logger.info(`Branch is stale and needs rebasing`); // We can rebase the branch only if no PR or PR can be rebased @@ -50,7 +52,7 @@ async function getParentBranch(config) { // TODO: Move this down to api library if (config.isGitLab) { 
logger.info(`Deleting unmergeable branch in order to recreate/rebase`); - await config.api.deleteBranch(branchName); + await platform.deleteBranch(branchName); } // Setting parentBranch back to undefined means that we'll use the default branch return { parentBranch: undefined }; diff --git a/lib/workers/branch/status-checks.js b/lib/workers/branch/status-checks.js index 2fc441812300326bdda8ba0cfcec1636b769734f..a33815968d139907f2f73cd65fb99257eafee6ef 100644 --- a/lib/workers/branch/status-checks.js +++ b/lib/workers/branch/status-checks.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + module.exports = { setUnpublishable, }; @@ -18,7 +20,7 @@ async function setUnpublishable(config) { unpublishable = true; } const context = 'renovate/unpublish-safe'; - const existingState = await config.api.getBranchStatusCheck( + const existingState = await platform.getBranchStatusCheck( config.branchName, context ); @@ -39,7 +41,7 @@ async function setUnpublishable(config) { logger.debug('Status check is already up-to-date'); } else { logger.debug(`Updating status check state to ${state}`); - await config.api.setBranchStatus( + await platform.setBranchStatus( config.branchName, context, description, diff --git a/lib/workers/package-file/index.js b/lib/workers/package-file/index.js index fce7aa8f112eacf430f4c362c223c803d2e4ac9c..3cd3a3541eeefc1d1b69a209bd4724db551c3e3b 100644 --- a/lib/workers/package-file/index.js +++ b/lib/workers/package-file/index.js @@ -3,6 +3,7 @@ const depTypeWorker = require('../dep-type'); const npmApi = require('../../manager/npm/registry'); let logger = require('../../logger'); +const platform = require('../../platform'); module.exports = { mightBeABrowserLibrary, @@ -109,9 +110,7 @@ async function renovateMeteorPackageFile(packageFileConfig) { logger.info('packageFile is disabled'); return upgrades; } - const content = await packageFileConfig.api.getFileContent( - packageFileConfig.packageFile - ); + const content = await platform.getFileContent(packageFileConfig.packageFile); upgrades = upgrades.concat( await depTypeWorker.renovateDepType(content, packageFileConfig) ); diff --git a/lib/workers/pr/index.js b/lib/workers/pr/index.js index 33431be4184b5f328e64c69e7ceb90eadbd0ed31..2339868d9d58a2dd8904c5eb5fce4ecfaacba070 100644 --- a/lib/workers/pr/index.js +++ b/lib/workers/pr/index.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + const handlebars = require('handlebars'); const changelogHelper = require('./changelog'); const showdown = require('showdown'); @@ -18,7 +20,7 @@ async function ensurePr(prConfig) { // If there is a group, it will use the config of the first upgrade in the array const { branchName, upgrades } = config; config.upgrades = []; - const branchStatus = await config.api.getBranchStatus( + const branchStatus = await platform.getBranchStatus( branchName, config.requiredStatusChecks ); @@ -45,9 +47,7 @@ async function ensurePr(prConfig) { logger.debug('Checking branch combined status'); if (branchStatus === 'pending' || branchStatus === 'running') { logger.debug(`Branch status is "${branchStatus}" - checking timeout`); - const lastCommitTime = await config.api.getBranchLastCommitTime( - branchName - ); + const lastCommitTime = await platform.getBranchLastCommitTime(branchName); const currentTime = new Date(); const millisecondsPerHour = 1000 * 60 * 60; const elapsedHours = Math.round( @@ -145,7 +145,7 @@ async function ensurePr(prConfig) { try { // Check if existing PR exists - const existingPr = await 
config.api.getBranchPr(branchName); + const existingPr = await platform.getBranchPr(branchName); if (existingPr) { if (config.automerge && branchStatus === 'failure') { logger.debug(`Setting assignees and reviewers as status checks failed`); @@ -157,22 +157,17 @@ async function ensurePr(prConfig) { return existingPr; } // PR must need updating - await config.api.updatePr(existingPr.number, prTitle, prBody); + await platform.updatePr(existingPr.number, prTitle, prBody); logger.info(`Updated ${existingPr.displayNumber}`); return existingPr; } logger.debug({ prTitle }, `Creating PR for branch ${branchName}`); let pr; try { - pr = await config.api.createPr( - branchName, - prTitle, - prBody, - config.labels - ); + pr = await platform.createPr(branchName, prTitle, prBody, config.labels); } catch (err) { logger.warn({ err }, `Failed to create PR - deleting branch`); - await config.api.deleteBranch(branchName); + await platform.deleteBranch(branchName); return null; } // Skip assign and review if automerging PR @@ -203,7 +198,7 @@ async function addAssigneesReviewers(config, pr) { assignee => assignee.length && assignee[0] === '@' ? assignee.slice(1) : assignee ); - await config.api.addAssignees(pr.number, assignees); + await platform.addAssignees(pr.number, assignees); logger.info({ assignees: config.assignees }, 'Added assignees'); } catch (err) { logger.info( @@ -218,7 +213,7 @@ async function addAssigneesReviewers(config, pr) { reviewer => reviewer.length && reviewer[0] === '@' ? reviewer.slice(1) : reviewer ); - await config.api.addReviewers(pr.number, reviewers); + await platform.addReviewers(pr.number, reviewers); logger.info({ reviewers: config.reviewers }, 'Added reviewers'); } catch (err) { logger.info( @@ -245,7 +240,7 @@ async function checkAutoMerge(pr, config) { return false; } // Check branch status - const branchStatus = await config.api.getBranchStatus( + const branchStatus = await platform.getBranchStatus( pr.head.ref, config.requiredStatusChecks ); @@ -261,7 +256,7 @@ async function checkAutoMerge(pr, config) { } // Let's merge this logger.info(`Automerging #${pr.number}`); - return config.api.mergePr(pr); + return platform.mergePr(pr); } logger.debug('No automerge'); return false; diff --git a/lib/workers/repository/cleanup.js b/lib/workers/repository/cleanup.js index 15ecb05abff6bbfe49c10cbdc37739ce2705098c..c502fdd03da51bb8a6a3c482741d7cf03c48917f 100644 --- a/lib/workers/repository/cleanup.js +++ b/lib/workers/repository/cleanup.js @@ -1,3 +1,5 @@ +const platform = require('../../platform'); + module.exports = { pruneStaleBranches, }; @@ -16,17 +18,17 @@ async function pruneStaleBranches(config) { logger.debug('Platform is not GitHub - returning'); return; } - let renovateBranches = await config.api.getAllRenovateBranches( + let renovateBranches = await platform.getAllRenovateBranches( config.branchPrefix ); logger.debug(`renovateBranches=${renovateBranches}`); const lockFileBranch = `${config.branchPrefix}lock-file-maintenance`; if (renovateBranches.includes(lockFileBranch)) { logger.debug('Checking lock file branch'); - const pr = await config.api.getBranchPr(lockFileBranch); + const pr = await platform.getBranchPr(lockFileBranch); if (pr && pr.isUnmergeable) { logger.info('Deleting lock file maintenance branch as it is unmergeable'); - await config.api.deleteBranch(lockFileBranch); + await platform.deleteBranch(lockFileBranch); } renovateBranches = renovateBranches.filter( branch => branch !== lockFileBranch @@ -42,11 +44,11 @@ async function pruneStaleBranches(config) { 
} for (const branchName of remainingBranches) { logger.debug({ branch: branchName }, `Deleting orphan branch`); - const pr = await config.api.findPr(branchName, null, 'open'); + const pr = await platform.findPr(branchName, null, 'open'); if (pr) { logger.info({ prNo: pr.number, prTitle: pr.title }, 'Autoclosing PR'); - await config.api.updatePr(pr.number, `${pr.title} - autoclosed`); + await platform.updatePr(pr.number, `${pr.title} - autoclosed`); } - await config.api.deleteBranch(branchName); + await platform.deleteBranch(branchName); } } diff --git a/lib/workers/repository/init/apis.js b/lib/workers/repository/init/apis.js index 6855575cb4993997a4f44d39da37b8bb0547a43c..e46668e4f76fafb3a2446424cb73f4ee09c6eca9 100644 --- a/lib/workers/repository/init/apis.js +++ b/lib/workers/repository/init/apis.js @@ -1,17 +1,15 @@ -const githubPlatform = require('../../../platform/github'); -const gitlabPlatform = require('../../../platform/gitlab'); +const platform = require('../../../platform'); const { detectSemanticCommits } = require('./semantic'); function assignPlatform(config) { - const platforms = { - github: githubPlatform, - gitlab: gitlabPlatform, - }; - return { ...config, api: platforms[config.platform] }; + const { logger } = config; + logger.debug('assignPlatform'); + platform.init(config.platform); + return config; } async function getPlatformConfig(config) { - const platformConfig = await config.api.initRepo( + const platformConfig = await platform.initRepo( config.repository, config.token, config.endpoint, diff --git a/lib/workers/repository/init/base.js b/lib/workers/repository/init/base.js index 314ac5633bf03c6c028eb7a27810dafc3d356085..770ff88dcc96374682acb727caffd8e1379994f0 100644 --- a/lib/workers/repository/init/base.js +++ b/lib/workers/repository/init/base.js @@ -1,10 +1,12 @@ +const platform = require('../../../platform'); + async function checkBaseBranch(config) { const { logger } = config; let error = []; if (config.baseBranch) { // Renovate should read content and target PRs here - if (await config.api.branchExists(config.baseBranch)) { - await config.api.setBaseBranch(config.baseBranch); + if (await platform.branchExists(config.baseBranch)) { + await platform.setBaseBranch(config.baseBranch); } else { // Warn and ignore setting (use default branch) const message = `The configured baseBranch "${config.baseBranch}" is not present. 
Ignoring`; diff --git a/lib/workers/repository/init/config.js b/lib/workers/repository/init/config.js index e6433014b3b151cd96bc107a483a1e1ace5de983..5a1a5ff3d5bb91eac27e9cd8e736c1e5d341e407 100644 --- a/lib/workers/repository/init/config.js +++ b/lib/workers/repository/init/config.js @@ -1,3 +1,4 @@ +const platform = require('../../../platform'); const jsonValidator = require('json-dup-key-validator'); const { mergeChildConfig } = require('../../../config'); @@ -9,7 +10,7 @@ const presets = require('../../../config/presets'); async function mergeRenovateJson(config) { const { logger } = config; let returnConfig = { ...config }; - const renovateJsonContent = await config.api.getFileContent('renovate.json'); + const renovateJsonContent = await platform.getFileContent('renovate.json'); if (!renovateJsonContent) { logger.debug('No renovate.json found'); return returnConfig; diff --git a/lib/workers/repository/init/semantic.js b/lib/workers/repository/init/semantic.js index 8e4f365ab1131de82991f15b9f28447ccc6e4b74..f334fc68ba67a9f5907d331cb19b239e68dd1d6f 100644 --- a/lib/workers/repository/init/semantic.js +++ b/lib/workers/repository/init/semantic.js @@ -1,3 +1,4 @@ +const platform = require('../../../platform'); const conventionalCommitsDetector = require('conventional-commits-detector'); async function detectSemanticCommits(config) { @@ -5,7 +6,7 @@ async function detectSemanticCommits(config) { if (config.semanticCommits !== null) { return config; } - const commitMessages = await config.api.getCommitMessages(); + const commitMessages = await platform.getCommitMessages(); logger.trace(`commitMessages=${JSON.stringify(commitMessages)}`); const type = conventionalCommitsDetector(commitMessages); if (type === 'unknown') { diff --git a/lib/workers/repository/onboarding/branch/check.js b/lib/workers/repository/onboarding/branch/check.js index 15165500f1c65ce138c66e4a44e3b14938b347f9..bd6b43489b09f05da95fad5e2f9fb68599b60909 100644 --- a/lib/workers/repository/onboarding/branch/check.js +++ b/lib/workers/repository/onboarding/branch/check.js @@ -1,15 +1,17 @@ +const platform = require('../../../../platform'); + const findFile = async (config, fileName) => { const { logger } = config; logger.debug('findFile()'); logger.trace({ config }); - const fileList = await config.api.getFileList(); + const fileList = await platform.getFileList(); return fileList.includes(fileName); }; const renovateJsonExists = config => findFile(config, 'renovate.json'); const closedPrExists = config => - config.api.findPr( + platform.findPr( `${config.branchPrefix}configure`, 'Configure Renovate', 'closed' @@ -32,7 +34,7 @@ const isOnboarded = async config => { }; const onboardingPrExists = config => - config.api.findPr( + platform.findPr( `${config.branchPrefix}configure`, 'Configure Renovate', 'open' diff --git a/lib/workers/repository/onboarding/branch/create.js b/lib/workers/repository/onboarding/branch/create.js index d957d5965832cfd5e33b00033c8f5df36c945def..fccbd478e6b8d327f554688bba856e5ff5349079 100644 --- a/lib/workers/repository/onboarding/branch/create.js +++ b/lib/workers/repository/onboarding/branch/create.js @@ -1,3 +1,5 @@ +const platform = require('../../../../platform'); + async function createOnboardingBranch(config) { const { logger } = config; logger.debug('Creating onboarding branch'); @@ -5,7 +7,7 @@ async function createOnboardingBranch(config) { extends: ['config:base'], }; logger.info({ renovateJson }, 'Creating onboarding branch'); - await config.api.commitFilesToBranch( + await 
platform.commitFilesToBranch( `${config.branchPrefix}configure`, [ { diff --git a/lib/workers/repository/onboarding/branch/index.js b/lib/workers/repository/onboarding/branch/index.js index a0c842e5f4e9472ca6f4a8fc4fc90d1ec54f81de..75a0e01c5a2a2270491dd1a7840d8d48c7edea1e 100644 --- a/lib/workers/repository/onboarding/branch/index.js +++ b/lib/workers/repository/onboarding/branch/index.js @@ -1,3 +1,4 @@ +const platform = require('../../../../platform'); const { detectPackageFiles } = require('../../../../manager'); const { createOnboardingBranch } = require('./create'); const { isOnboarded, onboardingPrExists } = require('./check'); @@ -22,7 +23,7 @@ async function checkOnboardingBranch(config) { logger.info('Need to create onboarding PR'); await createOnboardingBranch(config); } - await config.api.setBaseBranch(`${config.branchPrefix}configure`); + await platform.setBaseBranch(`${config.branchPrefix}configure`); const branchList = [`${config.branchPrefix}configure`]; return { ...config, repoIsOnboarded, branchList }; } diff --git a/lib/workers/repository/onboarding/pr/index.js b/lib/workers/repository/onboarding/pr/index.js index f89aebd74f08fa0853c54df68042048d4c210db2..5a0277de2dcb95f4f451164c5bcb5f2e41c2adf7 100644 --- a/lib/workers/repository/onboarding/pr/index.js +++ b/lib/workers/repository/onboarding/pr/index.js @@ -1,3 +1,4 @@ +const platform = require('../../../../platform'); const { getConfigDesc } = require('./config-description'); const { getErrors, getWarnings } = require('./errors-warnings'); const { getBaseBranchDesc } = require('./base-branch'); @@ -48,7 +49,7 @@ async function ensureOnboardingPr(config) { logger.trace('prBody:\n' + prBody); // Check if existing PR exists - const existingPr = await config.api.getBranchPr( + const existingPr = await platform.getBranchPr( `${config.branchPrefix}configure` ); if (existingPr) { @@ -58,13 +59,13 @@ async function ensureOnboardingPr(config) { return; } // PR must need updating - await config.api.updatePr(existingPr.number, onboardingPrTitle, prBody); + await platform.updatePr(existingPr.number, onboardingPrTitle, prBody); logger.info(`Updated ${existingPr.displayNumber}`); return; } const labels = []; const useDefaultBranch = true; - const pr = await config.api.createPr( + const pr = await platform.createPr( onboardingBranch, onboardingPrTitle, prBody, diff --git a/package.json b/package.json index 8228ab5381fed6a2cc2fee2cb981e564711955c6..7c5602587736eac2de11c1526e85f0f36d30d14b 100644 --- a/package.json +++ b/package.json @@ -110,6 +110,9 @@ "lcov", "text-summary" ], + "setupFiles": [ + "./test/platform.js" + ], "setupTestFrameworkScriptFile": "./test/chai.js" }, "prettier": { diff --git a/test/.eslintrc.js b/test/.eslintrc.js index 017c81540fd43417548069c72a5b48dce84d6ecb..d0977a46a3276abb2fd3bd5141d7fcc6dcddfa64 100644 --- a/test/.eslintrc.js +++ b/test/.eslintrc.js @@ -2,6 +2,9 @@ module.exports = { env: { jest: true, }, + globals: { + platform: true, + }, rules: { 'prefer-destructuring': 0, 'prefer-promise-reject-errors': 0, diff --git a/test/logger/__snapshots__/config-serializer.spec.js.snap b/test/logger/__snapshots__/config-serializer.spec.js.snap index a042be70395345ebf02fe289848a32498577d229..6e1cfb6036de16ff0a54a03567aa7590b2460622 100644 --- a/test/logger/__snapshots__/config-serializer.spec.js.snap +++ b/test/logger/__snapshots__/config-serializer.spec.js.snap @@ -10,7 +10,6 @@ Object { exports[`logger/config-serializer replaces functions 1`] = ` Object { - "api": "[Function]", "logger": "[Function]", 
"nottoken": "b", } @@ -18,7 +17,6 @@ Object { exports[`logger/config-serializer squashes templates 1`] = ` Object { - "api": "[Function]", "nottoken": "b", "prBody": "[Template]", } diff --git a/test/logger/config-serializer.spec.js b/test/logger/config-serializer.spec.js index 939e21fc1d10b9a8b6191bf14f74177eb3c96150..07e9496736faea2ca60aea08e8df827d6b46bb45 100644 --- a/test/logger/config-serializer.spec.js +++ b/test/logger/config-serializer.spec.js @@ -11,7 +11,6 @@ describe('logger/config-serializer', () => { }); it('replaces functions', () => { const config = { - api: 'a', nottoken: 'b', logger: {}, }; @@ -19,7 +18,6 @@ describe('logger/config-serializer', () => { }); it('squashes templates', () => { const config = { - api: 'a', nottoken: 'b', prBody: 'foo', }; diff --git a/test/manager/index.spec.js b/test/manager/index.spec.js index 55ca4b1d0c4c9e5a5d945d02e8e8723883b0777f..9365e10284d4bb2acdd4f5d915fbfe0ee7304a47 100644 --- a/test/manager/index.spec.js +++ b/test/manager/index.spec.js @@ -14,16 +14,12 @@ describe('manager', () => { beforeEach(() => { config = { ...defaultConfig, - api: { - getFileList: jest.fn(() => []), - getFileContent: jest.fn(), - }, logger, warnings: [], }; }); it('adds package files to object', async () => { - config.api.getFileList.mockReturnValueOnce([ + platform.getFileList.mockReturnValueOnce([ 'package.json', 'backend/package.json', ]); @@ -33,33 +29,33 @@ describe('manager', () => { }); it('finds meteor package files', async () => { config.meteor.enabled = true; - config.api.getFileList.mockReturnValueOnce([ + platform.getFileList.mockReturnValueOnce([ 'modules/something/package.js', ]); // meteor - config.api.getFileContent.mockReturnValueOnce('Npm.depends( {} )'); + platform.getFileContent.mockReturnValueOnce('Npm.depends( {} )'); const res = await manager.detectPackageFiles(config); expect(res).toMatchSnapshot(); expect(res).toHaveLength(1); }); it('skips meteor package files with no json', async () => { config.meteor.enabled = true; - config.api.getFileList.mockReturnValueOnce([ + platform.getFileList.mockReturnValueOnce([ 'modules/something/package.js', ]); // meteor - config.api.getFileContent.mockReturnValueOnce('Npm.depends(packages)'); + platform.getFileContent.mockReturnValueOnce('Npm.depends(packages)'); const res = await manager.detectPackageFiles(config); expect(res).toMatchSnapshot(); expect(res).toHaveLength(0); }); it('finds Dockerfiles', async () => { - config.api.getFileList.mockReturnValueOnce([ + platform.getFileList.mockReturnValueOnce([ 'Dockerfile', 'other/Dockerfile', ]); - config.api.getFileContent.mockReturnValueOnce( + platform.getFileContent.mockReturnValueOnce( '### comment\nFROM something\nRUN something' ); - config.api.getFileContent.mockReturnValueOnce( + platform.getFileContent.mockReturnValueOnce( 'ARG foo\nFROM something\nRUN something' ); const res = await manager.detectPackageFiles(config); @@ -67,13 +63,13 @@ describe('manager', () => { expect(res).toHaveLength(1); }); it('skips Dockerfiles with no content', async () => { - config.api.getFileList.mockReturnValueOnce(['Dockerfile']); - config.api.getFileContent.mockReturnValueOnce(null); + platform.getFileList.mockReturnValueOnce(['Dockerfile']); + platform.getFileContent.mockReturnValueOnce(null); const res = await manager.detectPackageFiles(config); expect(res).toHaveLength(0); }); it('ignores node modules', async () => { - config.api.getFileList.mockReturnValueOnce([ + platform.getFileList.mockReturnValueOnce([ 'package.json', 'node_modules/backend/package.json', 
]); @@ -89,7 +85,6 @@ describe('manager', () => { beforeEach(() => { config = { ...defaultConfig, - api: { getFileContent: jest.fn() }, logger, parentBranch: 'some-branch', }; @@ -129,10 +124,10 @@ describe('manager', () => { { packageFile: 'Dockerfile' }, { packageFile: 'packages/foo/package.js' }, ]; - config.api.getFileContent.mockReturnValueOnce('old content 1'); - config.api.getFileContent.mockReturnValueOnce('old content 1'); - config.api.getFileContent.mockReturnValueOnce('old content 2'); - config.api.getFileContent.mockReturnValueOnce('old content 3'); + platform.getFileContent.mockReturnValueOnce('old content 1'); + platform.getFileContent.mockReturnValueOnce('old content 1'); + platform.getFileContent.mockReturnValueOnce('old content 2'); + platform.getFileContent.mockReturnValueOnce('old content 3'); npmUpdater.setNewValue.mockReturnValueOnce('new content 1'); npmUpdater.setNewValue.mockReturnValueOnce('new content 1+'); dockerUpdater.setNewValue.mockReturnValueOnce('new content 2'); diff --git a/test/manager/npm/monorepo.spec.js b/test/manager/npm/monorepo.spec.js index 4a4339a41f9bcb9c1e94f2d218d6f3a4c4605d3c..986e5c459df75a62d03ccfbd7656d00a4b83d63b 100644 --- a/test/manager/npm/monorepo.spec.js +++ b/test/manager/npm/monorepo.spec.js @@ -61,7 +61,7 @@ describe('manager/npm/monorepo', () => { content: { name: '@a/c' }, }, ]; - config.api.getFileJson.mockReturnValue({ packages: ['packages/*'] }); + platform.getFileJson.mockReturnValue({ packages: ['packages/*'] }); const res = await checkMonorepos(config); expect(res.monorepoPackages).toMatchSnapshot(); }); @@ -72,7 +72,7 @@ describe('manager/npm/monorepo', () => { content: {}, }, ]; - config.api.getFileJson.mockReturnValue({}); + platform.getFileJson.mockReturnValue({}); const res = await checkMonorepos(config); expect(res.monorepoPackages).toMatchSnapshot(); }); diff --git a/test/manager/resolve.spec.js b/test/manager/resolve.spec.js index 9356237316fbd198af7c7248eceeba3bf4cc1c8b..a6d7d17e6c4cda83e88a06cace6b0abddbc2704d 100644 --- a/test/manager/resolve.spec.js +++ b/test/manager/resolve.spec.js @@ -24,7 +24,7 @@ describe('manager/resolve', () => { manager.detectPackageFiles = jest.fn(() => [ { packageFile: 'package.json' }, ]); - config.api.getFileContent.mockReturnValueOnce('not json'); + platform.getFileContent.mockReturnValueOnce('not json'); const res = await resolvePackageFiles(config); expect(res).toMatchSnapshot(); expect(res.warnings).toHaveLength(1); @@ -38,7 +38,7 @@ describe('manager/resolve', () => { automerge: true, }, }; - config.api.getFileContent.mockReturnValueOnce(JSON.stringify(pJson)); + platform.getFileContent.mockReturnValueOnce(JSON.stringify(pJson)); const res = await resolvePackageFiles(config); expect(res).toMatchSnapshot(); expect(res.warnings).toHaveLength(0); @@ -47,11 +47,11 @@ describe('manager/resolve', () => { manager.detectPackageFiles = jest.fn(() => [ { packageFile: 'package.json' }, ]); - config.api.getFileContent.mockReturnValueOnce('{"name": "package.json"}'); - config.api.getFileContent.mockReturnValueOnce('npmrc'); - config.api.getFileContent.mockReturnValueOnce('yarnrc'); - config.api.getFileContent.mockReturnValueOnce('# yarn.lock'); - config.api.getFileContent.mockReturnValueOnce( + platform.getFileContent.mockReturnValueOnce('{"name": "package.json"}'); + platform.getFileContent.mockReturnValueOnce('npmrc'); + platform.getFileContent.mockReturnValueOnce('yarnrc'); + platform.getFileContent.mockReturnValueOnce('# yarn.lock'); + platform.getFileContent.mockReturnValueOnce( 
'{"name": "packge-lock.json"}' ); const res = await resolvePackageFiles(config); @@ -60,13 +60,13 @@ describe('manager/resolve', () => { }); it('detects meteor and docker', async () => { config.packageFiles = ['package.js', 'Dockerfile']; - config.api.getFileContent.mockReturnValueOnce('# comment\nFROM node:8\n'); // Dockerfile + platform.getFileContent.mockReturnValueOnce('# comment\nFROM node:8\n'); // Dockerfile const res = await resolvePackageFiles(config); expect(res).toMatchSnapshot(); }); it('skips docker if no content or no match', async () => { config.packageFiles = ['Dockerfile', 'other/Dockerfile']; - config.api.getFileContent.mockReturnValueOnce('# comment\n'); // Dockerfile + platform.getFileContent.mockReturnValueOnce('# comment\n'); // Dockerfile const res = await resolvePackageFiles(config); expect(res).toMatchSnapshot(); }); diff --git a/test/platform.js b/test/platform.js new file mode 100644 index 0000000000000000000000000000000000000000..1b947e6224a300922991d5dfcee37206df78cc21 --- /dev/null +++ b/test/platform.js @@ -0,0 +1,9 @@ +jest.mock('gh-got'); +jest.mock('gl-got'); + +global.platform = require('../lib/platform'); + +Object.assign( + global.platform, + jest.genMockFromModule('../lib/platform/github') +); diff --git a/test/platform/gitlab/__snapshots__/helpers.spec.js.snap b/test/platform/gitlab/__snapshots__/helpers.spec.js.snap deleted file mode 100644 index 1e218fb0f1288a836c152fd2497501bd1f89a3e3..0000000000000000000000000000000000000000 --- a/test/platform/gitlab/__snapshots__/helpers.spec.js.snap +++ /dev/null @@ -1,33 +0,0 @@ -// Jest Snapshot v1, https://goo.gl/fbAQLP - -exports[`platform/gitlab/helpers createFile(branchName, filePath, fileContents, message) creates file 1`] = ` -Array [ - Array [ - "projects/some%2Frepo/repository/files/some-path", - Object { - "body": Object { - "branch": "some-branch", - "commit_message": "some-message", - "content": "c29tZS1jb250ZW50cw==", - "encoding": "base64", - }, - }, - ], -] -`; - -exports[`platform/gitlab/helpers updateFile(branchName, filePath, fileContents, message) updates file 1`] = ` -Array [ - Array [ - "projects/some%2Frepo/repository/files/some-path", - Object { - "body": Object { - "branch": "some-branch", - "commit_message": "some-message", - "content": "c29tZS1jb250ZW50cw==", - "encoding": "base64", - }, - }, - ], -] -`; diff --git a/test/platform/gitlab/gl-got-wrapper.spec.js b/test/platform/gitlab/gl-got-wrapper.spec.js index a531078e9d52bd6e13d34068625b759b1b747246..e1d3382c85f74118a434e289d73db055fae83e36 100644 --- a/test/platform/gitlab/gl-got-wrapper.spec.js +++ b/test/platform/gitlab/gl-got-wrapper.spec.js @@ -1,11 +1,9 @@ const get = require('../../../lib/platform/gitlab/gl-got-wrapper'); const glGot = require('gl-got'); -jest.mock('gl-got'); - describe('platform/gl-got-wrapper', () => { const body = ['a', 'b']; - beforeEach(() => { + afterEach(() => { jest.resetAllMocks(); }); it('paginates', async () => { diff --git a/test/platform/gitlab/helpers.spec.js b/test/platform/gitlab/helpers.spec.js index 4822280f49e76acb2a086f5ce46e446db0c402c8..624f265a3e11e35340f09c46225326dd6a97fce1 100644 --- a/test/platform/gitlab/helpers.spec.js +++ b/test/platform/gitlab/helpers.spec.js @@ -1,8 +1,5 @@ const helpers = require('../../../lib/platform/gitlab/helpers'); -jest.mock('../../../lib/platform/gitlab/gl-got-wrapper'); -const get = require('../../../lib/platform/gitlab/gl-got-wrapper'); - describe('platform/gitlab/helpers', () => { describe('createFile(branchName, filePath, fileContents, message)', () 
=> { it('creates file', async () => { @@ -13,8 +10,6 @@ describe('platform/gitlab/helpers', () => { 'some-contents', 'some-message' ); - expect(get.post.mock.calls).toMatchSnapshot(); - expect(get.post.mock.calls[0][1].body.file_path).not.toBeDefined(); }); }); describe('updateFile(branchName, filePath, fileContents, message)', () => { @@ -26,8 +21,6 @@ describe('platform/gitlab/helpers', () => { 'some-contents', 'some-message' ); - expect(get.put.mock.calls).toMatchSnapshot(); - expect(get.put.mock.calls[0][1].body.file_path).not.toBeDefined(); }); }); }); diff --git a/test/workers/branch/automerge.spec.js b/test/workers/branch/automerge.spec.js index 0d3bd5d306627142588edbd0ab216f9a9c05ab1b..21a2222f0100f447530c58cbb660fd5ff3d9c065 100644 --- a/test/workers/branch/automerge.spec.js +++ b/test/workers/branch/automerge.spec.js @@ -8,11 +8,6 @@ describe('workers/branch/automerge', () => { beforeEach(() => { config = { ...defaultConfig, - api: { - getBranchPr: jest.fn(), - getBranchStatus: jest.fn(), - mergeBranch: jest.fn(), - }, logger, }; }); @@ -28,14 +23,14 @@ describe('workers/branch/automerge', () => { it('returns false if branch status is not success', async () => { config.automerge = true; config.automergeType = 'branch-push'; - config.api.getBranchStatus.mockReturnValueOnce('pending'); + platform.getBranchStatus.mockReturnValueOnce('pending'); expect(await tryBranchAutomerge(config)).toBe('no automerge'); }); it('returns false if PR exists', async () => { - config.api.getBranchPr.mockReturnValueOnce({}); + platform.getBranchPr.mockReturnValueOnce({}); config.automerge = true; config.automergeType = 'branch-push'; - config.api.getBranchStatus.mockReturnValueOnce('success'); + platform.getBranchStatus.mockReturnValueOnce('success'); expect(await tryBranchAutomerge(config)).toBe( 'automerge aborted - PR exists' ); @@ -43,8 +38,8 @@ describe('workers/branch/automerge', () => { it('returns false if automerge fails', async () => { config.automerge = true; config.automergeType = 'branch-push'; - config.api.getBranchStatus.mockReturnValueOnce('success'); - config.api.mergeBranch.mockImplementationOnce(() => { + platform.getBranchStatus.mockReturnValueOnce('success'); + platform.mergeBranch.mockImplementationOnce(() => { throw new Error('merge error'); }); expect(await tryBranchAutomerge(config)).toBe('failed'); @@ -52,7 +47,7 @@ describe('workers/branch/automerge', () => { it('returns true if automerge succeeds', async () => { config.automerge = true; config.automergeType = 'branch-push'; - config.api.getBranchStatus.mockReturnValueOnce('success'); + platform.getBranchStatus.mockReturnValueOnce('success'); expect(await tryBranchAutomerge(config)).toBe('automerged'); }); }); diff --git a/test/workers/branch/check-existing.spec.js b/test/workers/branch/check-existing.spec.js index 872ec4700d34b42b9022ad15797ad928f4f52e2d..cbe148da9087a35b2c07426d24c441d2e33d9ee4 100644 --- a/test/workers/branch/check-existing.spec.js +++ b/test/workers/branch/check-existing.spec.js @@ -10,40 +10,40 @@ describe('workers/branch/check-existing', () => { beforeEach(() => { config = { ...defaultConfig, - api: { findPr: jest.fn(), updatePr: jest.fn() }, logger, branchName: 'some-branch', prTitle: 'some-title', }; + jest.resetAllMocks(); }); it('returns false if recreating closed PRs', async () => { config.recreateClosed = true; expect(await prAlreadyExisted(config)).toBe(null); - expect(config.api.findPr.mock.calls.length).toBe(0); + expect(platform.findPr.mock.calls.length).toBe(0); }); it('returns false if both 
checks miss', async () => { config.recreatedClosed = true; expect(await prAlreadyExisted(config)).toBe(null); - expect(config.api.findPr.mock.calls.length).toBe(2); + expect(platform.findPr.mock.calls.length).toBe(2); }); it('returns true if first check hits', async () => { - config.api.findPr.mockReturnValueOnce({ number: 12 }); + platform.findPr.mockReturnValueOnce({ number: 12 }); expect(await prAlreadyExisted(config)).toEqual({ number: 12 }); - expect(config.api.findPr.mock.calls.length).toBe(1); + expect(platform.findPr.mock.calls.length).toBe(1); }); it('returns true if second check hits', async () => { - config.api.findPr.mockReturnValueOnce(null); - config.api.findPr.mockReturnValueOnce({ number: 13 }); + platform.findPr.mockReturnValueOnce(null); + platform.findPr.mockReturnValueOnce({ number: 13 }); expect(await prAlreadyExisted(config)).toEqual({ number: 13 }); - expect(config.api.findPr.mock.calls.length).toBe(2); + expect(platform.findPr.mock.calls.length).toBe(2); }); it('returns false if mistaken', async () => { - config.api.findPr.mockReturnValueOnce({ + platform.findPr.mockReturnValueOnce({ title: 'some title', closed_at: '2017-10-15T21:28:07.000Z', }); expect(await prAlreadyExisted(config)).toBe(null); - expect(config.api.updatePr.mock.calls).toHaveLength(1); + expect(platform.updatePr.mock.calls).toHaveLength(1); }); }); }); diff --git a/test/workers/branch/commit.spec.js b/test/workers/branch/commit.spec.js index 0e397186f097bde5c3ed6aeb606f37e639c72ae3..4575762fb34936029a7faae8af0e8c2e4e35f7f5 100644 --- a/test/workers/branch/commit.spec.js +++ b/test/workers/branch/commit.spec.js @@ -8,7 +8,6 @@ describe('workers/branch/automerge', () => { beforeEach(() => { config = { ...defaultConfig, - api: { commitFilesToBranch: jest.fn() }, logger, branchName: 'renovate/some-branch', commitMessage: 'some commit message', @@ -17,10 +16,11 @@ describe('workers/branch/automerge', () => { updatedPackageFiles: [], updatedLockFiles: [], }; + jest.resetAllMocks(); }); it('handles empty files', async () => { await commitFilesToBranch(config); - expect(config.api.commitFilesToBranch.mock.calls.length).toBe(0); + expect(platform.commitFilesToBranch.mock.calls.length).toBe(0); }); it('commits files', async () => { config.updatedPackageFiles.push({ @@ -28,8 +28,8 @@ describe('workers/branch/automerge', () => { contents: 'some contents', }); await commitFilesToBranch(config); - expect(config.api.commitFilesToBranch.mock.calls.length).toBe(1); - expect(config.api.commitFilesToBranch.mock.calls).toMatchSnapshot(); + expect(platform.commitFilesToBranch.mock.calls.length).toBe(1); + expect(platform.commitFilesToBranch.mock.calls).toMatchSnapshot(); }); it('applies semantic prefix', async () => { config.updatedPackageFiles.push({ @@ -38,8 +38,8 @@ describe('workers/branch/automerge', () => { }); config.semanticCommits = true; await commitFilesToBranch(config); - expect(config.api.commitFilesToBranch.mock.calls.length).toBe(1); - expect(config.api.commitFilesToBranch.mock.calls).toMatchSnapshot(); + expect(platform.commitFilesToBranch.mock.calls.length).toBe(1); + expect(platform.commitFilesToBranch.mock.calls).toMatchSnapshot(); }); it('lowercases only the first line when applying semantic prefix', async () => { config.updatedPackageFiles.push({ @@ -49,8 +49,8 @@ describe('workers/branch/automerge', () => { config.commitMessage = 'Foo\n\nBar'; config.semanticCommits = true; await commitFilesToBranch(config); - expect(config.api.commitFilesToBranch.mock.calls.length).toBe(1); - 
expect(config.api.commitFilesToBranch.mock.calls[0][2]).toEqual( + expect(platform.commitFilesToBranch.mock.calls.length).toBe(1); + expect(platform.commitFilesToBranch.mock.calls[0][2]).toEqual( 'some-prefix foo\n\nBar' ); }); diff --git a/test/workers/branch/index.spec.js b/test/workers/branch/index.spec.js index 326b839e7eceb6a386105c575647c6e2802f2fe4..325e3250c2055d137462323d3a56a448811c3add 100644 --- a/test/workers/branch/index.spec.js +++ b/test/workers/branch/index.spec.js @@ -31,11 +31,6 @@ describe('workers/branch', () => { prWorker.checkAutoMerge = jest.fn(); config = { ...defaultConfig, - api: { - branchExists: jest.fn(), - ensureComment: jest.fn(), - ensureCommentRemoval: jest.fn(), - }, errors: [], warnings: [], logger, @@ -43,6 +38,10 @@ describe('workers/branch', () => { }; schedule.isScheduledNow.mockReturnValue(true); }); + afterEach(() => { + platform.ensureComment.mockClear(); + platform.ensureCommentRemoval.mockClear(); + }); it('skips branch if not scheduled and branch does not exist', async () => { schedule.isScheduledNow.mockReturnValueOnce(false); await branchWorker.processBranch(config); @@ -51,13 +50,13 @@ describe('workers/branch', () => { it('skips branch if not scheduled and not updating out of schedule', async () => { schedule.isScheduledNow.mockReturnValueOnce(false); config.updateNotScheduled = false; - config.api.branchExists.mockReturnValueOnce(true); + platform.branchExists.mockReturnValueOnce(true); await branchWorker.processBranch(config); expect(checkExisting.prAlreadyExisted.mock.calls).toHaveLength(0); }); it('skips branch if closed major PR found', async () => { schedule.isScheduledNow.mockReturnValueOnce(false); - config.api.branchExists.mockReturnValueOnce(true); + platform.branchExists.mockReturnValueOnce(true); config.isMajor = true; checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 }); await branchWorker.processBranch(config); @@ -66,7 +65,7 @@ describe('workers/branch', () => { }); it('skips branch if closed digest PR found', async () => { schedule.isScheduledNow.mockReturnValueOnce(false); - config.api.branchExists.mockReturnValueOnce(true); + platform.branchExists.mockReturnValueOnce(true); config.isDigest = true; checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 }); await branchWorker.processBranch(config); @@ -75,7 +74,7 @@ describe('workers/branch', () => { }); it('skips branch if closed minor PR found', async () => { schedule.isScheduledNow.mockReturnValueOnce(false); - config.api.branchExists.mockReturnValueOnce(true); + platform.branchExists.mockReturnValueOnce(true); checkExisting.prAlreadyExisted.mockReturnValueOnce({ number: 13 }); await branchWorker.processBranch(config); expect(parent.getParentBranch.mock.calls.length).toBe(0); @@ -89,7 +88,7 @@ describe('workers/branch', () => { lockFileError: false, updatedLockFiles: [], }); - config.api.branchExists.mockReturnValueOnce(false); + platform.branchExists.mockReturnValueOnce(false); await branchWorker.processBranch(config); expect(commit.commitFilesToBranch.mock.calls).toHaveLength(1); }); @@ -101,7 +100,7 @@ describe('workers/branch', () => { lockFileError: false, updatedLockFiles: [{}], }); - config.api.branchExists.mockReturnValueOnce(true); + platform.branchExists.mockReturnValueOnce(true); automerge.tryBranchAutomerge.mockReturnValueOnce('automerged'); await branchWorker.processBranch(config); expect(statusChecks.setUnpublishable.mock.calls).toHaveLength(1); @@ -116,13 +115,13 @@ describe('workers/branch', () => { lockFileError: false, 
         updatedLockFiles: [{}],
       });
-      config.api.branchExists.mockReturnValueOnce(true);
+      platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
       prWorker.ensurePr.mockReturnValueOnce({});
       prWorker.checkAutoMerge.mockReturnValueOnce(true);
       await branchWorker.processBranch(config);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
-      expect(config.api.ensureCommentRemoval.mock.calls).toHaveLength(1);
+      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(1);
     });
     it('ensures PR and adds lock file error comment', async () => {
@@ -133,14 +132,14 @@
         lockFileError: false,
         updatedLockFiles: [{}],
       });
-      config.api.branchExists.mockReturnValueOnce(true);
+      platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
       prWorker.ensurePr.mockReturnValueOnce({});
       prWorker.checkAutoMerge.mockReturnValueOnce(true);
       config.lockFileErrors = [{}];
       await branchWorker.processBranch(config);
-      expect(config.api.ensureComment.mock.calls).toHaveLength(1);
-      expect(config.api.ensureCommentRemoval.mock.calls).toHaveLength(0);
+      expect(platform.ensureComment.mock.calls).toHaveLength(1);
+      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
     });
@@ -153,14 +152,14 @@
         updatedLockFiles: [{}],
       });
       config.recreateClosed = true;
-      config.api.branchExists.mockReturnValueOnce(true);
+      platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
       prWorker.ensurePr.mockReturnValueOnce({});
       prWorker.checkAutoMerge.mockReturnValueOnce(true);
       config.lockFileErrors = [{}];
       await branchWorker.processBranch(config);
-      expect(config.api.ensureComment.mock.calls).toHaveLength(1);
-      expect(config.api.ensureCommentRemoval.mock.calls).toHaveLength(0);
+      expect(platform.ensureComment.mock.calls).toHaveLength(1);
+      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
     });
@@ -188,7 +187,7 @@
         lockFileError: false,
         updatedLockFiles: [{}],
       });
-      config.api.branchExists.mockReturnValueOnce(true);
+      platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce(false);
       prWorker.ensurePr.mockImplementationOnce(() => {
         throw new Error('some error');
diff --git a/test/workers/branch/lock-files.spec.js b/test/workers/branch/lock-files.spec.js
index 9f3cd7582f2e472bd0ea0b88c4abbe58e610e12c..824eb99c52cb0d21aa2521b620320b6f288bfa05 100644
--- a/test/workers/branch/lock-files.spec.js
+++ b/test/workers/branch/lock-files.spec.js
@@ -276,13 +276,10 @@ describe('workers/branch/lock-files', () => {
     beforeEach(() => {
       config = {
         ...defaultConfig,
-        api: {
-          branchExists: jest.fn(),
-          getFileContent: jest.fn(() => 'some lock file contents'),
-        },
         logger,
         tmpDir: { path: 'some-tmp-dir' },
       };
+      platform.getFileContent.mockReturnValue('some lock file contents');
       npm.generateLockFile = jest.fn();
       npm.generateLockFile.mockReturnValue({
         lockFile: 'some lock file contents',
@@ -293,9 +290,12 @@
       });
       lockFiles.determineLockFileDirs = jest.fn();
     });
+    afterEach(() => {
+      jest.resetAllMocks();
+    });
     it('returns no error and empty lockfiles if lock file maintenance exists', async () => {
       config.type = 'lockFileMaintenance';
-      config.api.branchExists.mockReturnValueOnce(true);
+      platform.branchExists.mockReturnValueOnce(true);
       const res = await getUpdatedLockFiles(config);
       expect(res).toMatchSnapshot();
       expect(res.lockFileErrors).toHaveLength(0);
@@ -322,7 +322,7 @@
       expect(res.updatedLockFiles).toHaveLength(0);
       expect(npm.generateLockFile.mock.calls).toHaveLength(2);
       expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
-      expect(config.api.getFileContent.mock.calls).toHaveLength(4);
+      expect(platform.getFileContent.mock.calls).toHaveLength(4);
     });
     it('sets error if receiving null', async () => {
       lockFiles.determineLockFileDirs.mockReturnValueOnce({
@@ -337,7 +337,7 @@
       expect(res.updatedLockFiles).toHaveLength(0);
       expect(npm.generateLockFile.mock.calls).toHaveLength(2);
       expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
-      expect(config.api.getFileContent.mock.calls).toHaveLength(2);
+      expect(platform.getFileContent.mock.calls).toHaveLength(2);
     });
     it('adds multiple lock files', async () => {
       lockFiles.determineLockFileDirs.mockReturnValueOnce({
@@ -352,7 +352,7 @@
       expect(res.updatedLockFiles).toHaveLength(2);
       expect(npm.generateLockFile.mock.calls).toHaveLength(2);
       expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
-      expect(config.api.getFileContent.mock.calls).toHaveLength(4);
+      expect(platform.getFileContent.mock.calls).toHaveLength(4);
     });
   });
 });
diff --git a/test/workers/branch/parent.spec.js b/test/workers/branch/parent.spec.js
index 11b77582bad64b1fc8156531ed58878a4db60c98..2562b760aeb609c7f2aa983a6535affa62fa045c 100644
--- a/test/workers/branch/parent.spec.js
+++ b/test/workers/branch/parent.spec.js
@@ -23,36 +23,35 @@ describe('workers/branch/parent', () => {
     let config;
     beforeEach(() => {
       config = {
-        api: {
-          branchExists: jest.fn(() => true),
-          deleteBranch: jest.fn(),
-          getBranchPr: jest.fn(),
-          getBranchStatus: jest.fn(),
-          isBranchStale: jest.fn(() => false),
-        },
         branchName: 'renovate/some-branch',
         logger,
       };
     });
+    afterEach(() => {
+      jest.resetAllMocks();
+    });
     it('returns undefined if branch does not exist', async () => {
-      config.api.branchExists.mockReturnValue(false);
+      platform.branchExists.mockReturnValue(false);
       const res = await getParentBranch(config);
       expect(res.parentBranch).toBe(undefined);
     });
     it('returns branchName if no PR', async () => {
-      config.api.getBranchPr.mockReturnValue(null);
+      platform.branchExists.mockReturnValue(true);
+      platform.getBranchPr.mockReturnValue(null);
       const res = await getParentBranch(config);
       expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns branchName if does not need rebaseing', async () => {
-      config.api.getBranchPr.mockReturnValue({
+      platform.branchExists.mockReturnValue(true);
+      platform.getBranchPr.mockReturnValue({
         isUnmergeable: false,
       });
       const res = await getParentBranch(config);
       expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns branchName if unmergeable and cannot rebase', async () => {
-      config.api.getBranchPr.mockReturnValue({
+      platform.branchExists.mockReturnValue(true);
+      platform.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: false,
       });
@@ -60,7 +59,8 @@
       expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns undefined if unmergeable and can rebase', async () => {
-      config.api.getBranchPr.mockReturnValue({
+      platform.branchExists.mockReturnValue(true);
+      platform.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: true,
       });
@@ -69,31 +69,35 @@
     });
     it('returns undefined if unmergeable and can rebase (gitlab)', async () => {
       config.isGitLab = true;
-      config.api.getBranchPr.mockReturnValue({
+      platform.branchExists.mockReturnValue(true);
+      platform.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: true,
       });
       const res = await getParentBranch(config);
       expect(res.parentBranch).toBe(undefined);
-      expect(config.api.deleteBranch.mock.calls.length).toBe(1);
+      expect(platform.deleteBranch.mock.calls.length).toBe(1);
     });
     it('returns branchName if automerge branch-push and not stale', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
+      platform.branchExists.mockReturnValue(true);
       const res = await getParentBranch(config);
       expect(res.parentBranch).toBe(config.branchName);
     });
     it('returns undefined if automerge branch-push and stale', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
-      config.api.isBranchStale.mockReturnValueOnce(true);
+      platform.branchExists.mockReturnValue(true);
+      platform.isBranchStale.mockReturnValueOnce(true);
       const res = await getParentBranch(config);
       expect(res.parentBranch).toBe(undefined);
     });
     it('returns branch if rebaseStalePrs enabled but cannot rebase', async () => {
       config.rebaseStalePrs = true;
-      config.api.isBranchStale.mockReturnValueOnce(true);
-      config.api.getBranchPr.mockReturnValue({
+      platform.branchExists.mockReturnValue(true);
+      platform.isBranchStale.mockReturnValueOnce(true);
+      platform.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: false,
       });
diff --git a/test/workers/branch/status-checks.spec.js b/test/workers/branch/status-checks.spec.js
index fc1ea200ac2cc64784367f5752c1d5ee3ef6ab53..a041a0b971601ccd2bad44323e0e9818317f0aae 100644
--- a/test/workers/branch/status-checks.spec.js
+++ b/test/workers/branch/status-checks.spec.js
@@ -10,43 +10,45 @@ describe('workers/branch/status-checks', () => {
     beforeEach(() => {
       config = {
         ...defaultConfig,
-        api: { getBranchStatusCheck: jest.fn(), setBranchStatus: jest.fn() },
         logger,
         upgrades: [],
       };
     });
+    afterEach(() => {
+      jest.resetAllMocks();
+    });
     it('defaults to unpublishable', async () => {
       await setUnpublishable(config);
-      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
-      expect(config.api.setBranchStatus.mock.calls.length).toBe(0);
+      expect(platform.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(platform.setBranchStatus.mock.calls.length).toBe(0);
     });
     it('finds unpublishable true', async () => {
       config.upgrades = [{ unpublishable: true }];
       await setUnpublishable(config);
-      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
-      expect(config.api.setBranchStatus.mock.calls.length).toBe(0);
+      expect(platform.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(platform.setBranchStatus.mock.calls.length).toBe(0);
     });
     it('removes status check', async () => {
       config.upgrades = [{ unpublishable: true }];
-      config.api.getBranchStatusCheck.mockReturnValueOnce('pending');
+      platform.getBranchStatusCheck.mockReturnValueOnce('pending');
       await setUnpublishable(config);
-      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
-      expect(config.api.setBranchStatus.mock.calls.length).toBe(1);
+      expect(platform.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(platform.setBranchStatus.mock.calls.length).toBe(1);
     });
     it('finds unpublishable false and sets status', async () => {
       config.unpublishSafe = true;
       config.upgrades = [{ unpublishable: true }, { unpublishable: false }];
       await setUnpublishable(config);
-      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
-      expect(config.api.setBranchStatus.mock.calls.length).toBe(1);
+      expect(platform.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(platform.setBranchStatus.mock.calls.length).toBe(1);
     });
     it('finds unpublishable false and skips status', async () => {
       config.unpublishSafe = true;
       config.upgrades = [{ unpublishable: true }, { unpublishable: false }];
-      config.api.getBranchStatusCheck.mockReturnValueOnce('pending');
+      platform.getBranchStatusCheck.mockReturnValueOnce('pending');
       await setUnpublishable(config);
-      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
-      expect(config.api.setBranchStatus.mock.calls.length).toBe(0);
+      expect(platform.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(platform.setBranchStatus.mock.calls.length).toBe(0);
     });
   });
 });
diff --git a/test/workers/package-file/index.spec.js b/test/workers/package-file/index.spec.js
index 9dae27fae2aabd2379866c5be90133082697b9a2..834d59a1eaff442b1a9304ca0cab2af281da0287 100644
--- a/test/workers/package-file/index.spec.js
+++ b/test/workers/package-file/index.spec.js
@@ -63,9 +63,6 @@ describe('packageFileWorker', () => {
     beforeEach(() => {
       config = {
         ...defaultConfig,
-        api: {
-          getFileContent: jest.fn(),
-        },
         packageFile: 'package.js',
         repoIsOnboarded: true,
         logger,
diff --git a/test/workers/pr/index.spec.js b/test/workers/pr/index.spec.js
index aa9d159b7e284be9a7cab85edac12781ffcecee7..5223fad9963cd4065da11c972f3c86d743ba88c3 100644
--- a/test/workers/pr/index.spec.js
+++ b/test/workers/pr/index.spec.js
@@ -35,10 +35,6 @@ describe('workers/pr', () => {
     beforeEach(() => {
       config = {
         ...defaultConfig,
-        api: {
-          mergePr: jest.fn(),
-          getBranchStatus: jest.fn(),
-        },
         logger,
       };
       pr = {
@@ -47,45 +43,48 @@
         },
       };
     });
+    afterEach(() => {
+      jest.clearAllMocks();
+    });
     it('should not automerge if not configured', async () => {
       await prWorker.checkAutoMerge(pr, config, logger);
-      expect(config.api.mergePr.mock.calls.length).toBe(0);
+      expect(platform.mergePr.mock.calls.length).toBe(0);
     });
     it('should automerge if enabled and pr is mergeable', async () => {
       config.automerge = true;
       pr.canRebase = true;
       pr.mergeable = true;
-      config.api.getBranchStatus.mockReturnValueOnce('success');
+      platform.getBranchStatus.mockReturnValueOnce('success');
       await prWorker.checkAutoMerge(pr, config, logger);
-      expect(config.api.mergePr.mock.calls.length).toBe(1);
+      expect(platform.mergePr.mock.calls.length).toBe(1);
     });
     it('should not automerge if enabled and pr is mergeable but cannot rebase', async () => {
       config.automerge = true;
       pr.canRebase = false;
       pr.mergeable = true;
-      config.api.getBranchStatus.mockReturnValueOnce('success');
+      platform.getBranchStatus.mockReturnValueOnce('success');
       await prWorker.checkAutoMerge(pr, config, logger);
-      expect(config.api.mergePr.mock.calls.length).toBe(0);
+      expect(platform.mergePr.mock.calls.length).toBe(0);
     });
     it('should not automerge if enabled and pr is mergeable but branch status is not success', async () => {
       config.automerge = true;
       pr.mergeable = true;
-      config.api.getBranchStatus.mockReturnValueOnce('pending');
+      platform.getBranchStatus.mockReturnValueOnce('pending');
       await prWorker.checkAutoMerge(pr, config, logger);
-      expect(config.api.mergePr.mock.calls.length).toBe(0);
+      expect(platform.mergePr.mock.calls.length).toBe(0);
     });
     it('should not automerge if enabled and pr is mergeable but unstable', async () => {
       config.automerge = true;
       pr.mergeable = true;
       pr.mergeable_state = 'unstable';
       await prWorker.checkAutoMerge(pr, config, logger);
-      expect(config.api.mergePr.mock.calls.length).toBe(0);
+      expect(platform.mergePr.mock.calls.length).toBe(0);
    });
     it('should not automerge if enabled and pr is unmergeable', async () => {
       config.automerge = true;
       pr.mergeable = false;
       await prWorker.checkAutoMerge(pr, config, logger);
-      expect(config.api.mergePr.mock.calls.length).toBe(0);
+      expect(platform.mergePr.mock.calls.length).toBe(0);
     });
   });
   describe('ensurePr', () => {
@@ -94,14 +93,9 @@
     beforeEach(() => {
       config = {
         ...defaultConfig,
-        api: {
-          addAssignees: jest.fn(),
-          addReviewers: jest.fn(),
-          createPr: jest.fn(() => ({ displayNumber: 'New Pull Request' })),
-          getBranchStatus: jest.fn(),
-        },
         logger,
       };
+      platform.createPr.mockReturnValue({ displayNumber: 'New Pull Request' });
       config.upgrades = [config];
       existingPr = {
         title: 'Update dependency dummy to v1.1.0',
@@ -119,104 +113,94 @@
         displayNumber: 'Existing PR',
       };
     });
+    afterEach(() => {
+      jest.clearAllMocks();
+    });
     it('should return null if check fails', async () => {
-      config.api.getBranchPr = jest.fn(() => {
+      platform.getBranchPr.mockImplementationOnce(() => {
        throw new Error('oops');
       });
       const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('should return null if waiting for success', async () => {
-      config.api.getBranchStatus = jest.fn(() => 'failed');
+      platform.getBranchStatus.mockReturnValueOnce('failed');
       config.prCreation = 'status-success';
       const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('should create PR if success', async () => {
-      config.api.getBranchStatus = jest.fn(() => 'success');
-      config.api.getBranchPr = jest.fn();
+      platform.getBranchStatus.mockReturnValueOnce('success');
       config.prCreation = 'status-success';
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should delete branch and return null if creating PR fails', async () => {
-      config.api.getBranchStatus = jest.fn(() => 'success');
-      config.api.getBranchPr = jest.fn();
-      config.api.createPr = jest.fn(() => {
+      platform.getBranchStatus.mockReturnValueOnce('success');
+      platform.createPr.mockImplementationOnce(() => {
         throw new Error('failed to create PR');
       });
-      config.api.deleteBranch = jest.fn();
       config.prCreation = 'status-success';
       const pr = await prWorker.ensurePr(config);
-      expect(config.api.deleteBranch.mock.calls).toHaveLength(1);
+      expect(platform.deleteBranch.mock.calls).toHaveLength(1);
       expect(pr).toBe(null);
     });
     it('should return null if waiting for not pending', async () => {
-      config.api.getBranchStatus = jest.fn(() => 'pending');
-      config.api.getBranchLastCommitTime = jest.fn(() => new Date());
+      platform.getBranchStatus.mockReturnValueOnce('pending');
+      platform.getBranchLastCommitTime.mockImplementationOnce(() => new Date());
       config.prCreation = 'not-pending';
       const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('should create PR if pending timeout hit', async () => {
-      config.api.getBranchStatus = jest.fn(() => 'pending');
-      config.api.getBranchLastCommitTime = jest.fn(
+      platform.getBranchStatus.mockReturnValueOnce('pending');
+      platform.getBranchLastCommitTime.mockImplementationOnce(
         () => new Date('2017-01-01')
       );
       config.prCreation = 'not-pending';
-      config.api.getBranchPr = jest.fn();
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should create PR if no longer pending', async () => {
-      config.api.getBranchStatus = jest.fn(() => 'failed');
-      config.api.getBranchPr = jest.fn();
+      platform.getBranchStatus.mockReturnValueOnce('failed');
       config.prCreation = 'not-pending';
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should create new branch if none exists', async () => {
-      config.api.getBranchPr = jest.fn();
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
+      expect(platform.createPr.mock.calls[0][2].indexOf('Errors</h3>')).toEqual(
+        -1
+      );
       expect(
-        config.api.createPr.mock.calls[0][2].indexOf('Errors</h3>')
-      ).toEqual(-1);
-      expect(
-        config.api.createPr.mock.calls[0][2].indexOf('Warnings</h3>')
+        platform.createPr.mock.calls[0][2].indexOf('Warnings</h3>')
       ).toEqual(-1);
     });
     it('should add assignees and reviewers to new PR', async () => {
-      config.api.getBranchPr = jest.fn();
-      config.api.addAssignees = jest.fn();
-      config.api.addReviewers = jest.fn();
       config.assignees = ['@foo', 'bar'];
       config.reviewers = ['baz', '@boo'];
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
-      expect(config.api.addAssignees.mock.calls.length).toBe(1);
-      expect(config.api.addAssignees.mock.calls).toMatchSnapshot();
-      expect(config.api.addReviewers.mock.calls.length).toBe(1);
-      expect(config.api.addReviewers.mock.calls).toMatchSnapshot();
+      expect(platform.addAssignees.mock.calls.length).toBe(1);
+      expect(platform.addAssignees.mock.calls).toMatchSnapshot();
+      expect(platform.addReviewers.mock.calls.length).toBe(1);
+      expect(platform.addReviewers.mock.calls).toMatchSnapshot();
     });
     it('should add reviewers even if assignees fails', async () => {
-      config.api.getBranchPr = jest.fn();
-      config.api.addAssignees = jest.fn(() => {
+      platform.addAssignees.mockImplementationOnce(() => {
         throw new Error('some error');
       });
-      config.api.addReviewers = jest.fn();
       config.assignees = ['@foo', 'bar'];
       config.reviewers = ['baz', '@boo'];
       config.logger = logger;
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
-      expect(config.api.addAssignees.mock.calls.length).toBe(1);
-      expect(config.api.addReviewers.mock.calls.length).toBe(1);
+      expect(platform.addAssignees.mock.calls.length).toBe(1);
+      expect(platform.addReviewers.mock.calls.length).toBe(1);
     });
     it('should handled failed reviewers add', async () => {
-      config.api.getBranchPr = jest.fn();
-      config.api.addAssignees = jest.fn();
-      config.api.addReviewers = jest.fn(() => {
+      platform.addReviewers.mockImplementationOnce(() => {
         throw new Error('some error');
       });
       config.assignees = ['@foo', 'bar'];
@@ -224,33 +208,29 @@
       config.logger = logger;
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
-      expect(config.api.addAssignees.mock.calls.length).toBe(1);
-      expect(config.api.addReviewers.mock.calls.length).toBe(1);
+      expect(platform.addAssignees.mock.calls.length).toBe(1);
+      expect(platform.addReviewers.mock.calls.length).toBe(1);
     });
     it('should display errors and warnings', async () => {
-      config.api.getBranchPr = jest.fn();
       config.errors = [{}];
       config.warnings = [{}];
       const pr = await prWorker.ensurePr(config);
       expect(
-        config.api.createPr.mock.calls[0][2].indexOf('Errors</h3>')
+        platform.createPr.mock.calls[0][2].indexOf('Errors</h3>')
       ).not.toEqual(-1);
       expect(
-        config.api.createPr.mock.calls[0][2].indexOf('Warnings</h3>')
+        platform.createPr.mock.calls[0][2].indexOf('Warnings</h3>')
       ).not.toEqual(-1);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should not add assignees and reviewers to new PR if automerging enabled', async () => {
-      config.api.getBranchPr = jest.fn();
-      config.api.addAssignees = jest.fn();
-      config.api.addReviewers = jest.fn();
       config.assignees = ['bar'];
       config.reviewers = ['baz'];
       config.automerge = true;
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
-      expect(config.api.addAssignees.mock.calls.length).toBe(0);
-      expect(config.api.addReviewers.mock.calls.length).toBe(0);
+      expect(platform.addAssignees.mock.calls.length).toBe(0);
+      expect(platform.addReviewers.mock.calls.length).toBe(0);
     });
     it('should add assignees and reviewers to existing PR', async () => {
       config.depName = 'dummy';
@@ -262,15 +242,14 @@
       config.currentVersion = '1.0.0';
       config.newVersion = '1.1.0';
       config.repositoryUrl = 'https://github.com/renovateapp/dummy';
-      config.api.getBranchPr = jest.fn(() => existingPr);
-      config.api.getBranchStatus.mockReturnValueOnce('failure');
-      config.api.updatePr = jest.fn();
+      platform.getBranchPr.mockReturnValueOnce(existingPr);
+      platform.getBranchStatus.mockReturnValueOnce('failure');
      config.semanticPrefix = '';
       const pr = await prWorker.ensurePr(config);
-      expect(config.api.updatePr.mock.calls).toMatchSnapshot();
-      expect(config.api.updatePr.mock.calls.length).toBe(0);
-      expect(config.api.addAssignees.mock.calls.length).toBe(1);
-      expect(config.api.addReviewers.mock.calls.length).toBe(1);
+      expect(platform.updatePr.mock.calls).toMatchSnapshot();
+      expect(platform.updatePr.mock.calls.length).toBe(0);
+      expect(platform.addAssignees.mock.calls.length).toBe(1);
+      expect(platform.addReviewers.mock.calls.length).toBe(1);
       expect(pr).toMatchObject(existingPr);
     });
     it('should return unmodified existing PR', async () => {
@@ -280,12 +259,11 @@
       config.currentVersion = '1.0.0';
       config.newVersion = '1.1.0';
       config.repositoryUrl = 'https://github.com/renovateapp/dummy';
-      config.api.getBranchPr = jest.fn(() => existingPr);
-      config.api.updatePr = jest.fn();
+      platform.getBranchPr.mockReturnValueOnce(existingPr);
       config.semanticPrefix = '';
       const pr = await prWorker.ensurePr(config);
-      expect(config.api.updatePr.mock.calls).toMatchSnapshot();
-      expect(config.api.updatePr.mock.calls.length).toBe(0);
+      expect(platform.updatePr.mock.calls).toMatchSnapshot();
+      expect(platform.updatePr.mock.calls.length).toBe(0);
       expect(pr).toMatchObject(existingPr);
     });
     it('should return modified existing PR', async () => {
@@ -293,37 +271,33 @@
       config.currentVersion = '1.0.0';
       config.newVersion = '1.2.0';
       config.isGitHub = true;
-      config.api.getBranchPr = jest.fn(() => existingPr);
-      config.api.updatePr = jest.fn();
+      platform.getBranchPr.mockReturnValueOnce(existingPr);
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchSnapshot();
     });
     it('should create PR if branch tests failed', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
-      config.api.getBranchStatus.mockReturnValueOnce('failure');
-      config.api.getBranchPr = jest.fn();
+      platform.getBranchStatus.mockReturnValueOnce('failure');
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should create PR if branch automerging failed', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
-      config.api.getBranchStatus.mockReturnValueOnce('success');
+      platform.getBranchStatus.mockReturnValueOnce('success');
       config.forcePr = true;
-      config.api.getBranchPr = jest.fn();
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should return null if branch automerging not failed', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
-      config.api.getBranchStatus.mockReturnValueOnce('pending');
+      platform.getBranchStatus.mockReturnValueOnce('pending');
       const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('handles duplicate upgrades', async () => {
-      config.api.getBranchPr = jest.fn();
       config.upgrades.push(config.upgrades[0]);
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
diff --git a/test/workers/repository/cleanup.spec.js b/test/workers/repository/cleanup.spec.js
index 0d8f2f96b68191d87da502ae3b6c86205cac2491..ec722f903c4f3b48828a58ad91fc6b8be2cf13d2 100644
--- a/test/workers/repository/cleanup.spec.js
+++ b/test/workers/repository/cleanup.spec.js
@@ -14,53 +14,53 @@ describe('workers/repository/cleanup', () => {
     it('returns if no branchList', async () => {
       delete config.branchList;
       await cleanup.pruneStaleBranches(config, config.branchList);
-      expect(config.api.getAllRenovateBranches.mock.calls).toHaveLength(0);
+      expect(platform.getAllRenovateBranches.mock.calls).toHaveLength(0);
     });
     it('returns if config is not github', async () => {
       config.branchList = [];
       config.platform = 'gitlab';
       await cleanup.pruneStaleBranches(config, config.branchList);
-      expect(config.api.getAllRenovateBranches.mock.calls).toHaveLength(0);
+      expect(platform.getAllRenovateBranches.mock.calls).toHaveLength(0);
     });
     it('returns if no remaining branches', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
-      config.api.getAllRenovateBranches.mockReturnValueOnce(config.branchList);
+      platform.getAllRenovateBranches.mockReturnValueOnce(config.branchList);
       await cleanup.pruneStaleBranches(config, config.branchList);
-      expect(config.api.getAllRenovateBranches.mock.calls).toHaveLength(1);
-      expect(config.api.deleteBranch.mock.calls).toHaveLength(0);
+      expect(platform.getAllRenovateBranches.mock.calls).toHaveLength(1);
+      expect(platform.deleteBranch.mock.calls).toHaveLength(0);
     });
     it('renames deletes remaining branch', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
-      config.api.getAllRenovateBranches.mockReturnValueOnce(
+      platform.getAllRenovateBranches.mockReturnValueOnce(
         config.branchList.concat(['renovate/c'])
       );
-      config.api.findPr.mockReturnValueOnce({});
+      platform.findPr.mockReturnValueOnce({});
       await cleanup.pruneStaleBranches(config, config.branchList);
-      expect(config.api.getAllRenovateBranches.mock.calls).toHaveLength(1);
-      expect(config.api.deleteBranch.mock.calls).toHaveLength(1);
-      expect(config.api.updatePr.mock.calls).toHaveLength(1);
+      expect(platform.getAllRenovateBranches.mock.calls).toHaveLength(1);
+      expect(platform.deleteBranch.mock.calls).toHaveLength(1);
+      expect(platform.updatePr.mock.calls).toHaveLength(1);
     });
     it('deletes lock file maintenance if pr is unmergeable', async () => {
       config.branchList = ['renovate/lock-file-maintenance'];
-      config.api.getAllRenovateBranches.mockReturnValueOnce([
+      platform.getAllRenovateBranches.mockReturnValueOnce([
         'renovate/lock-file-maintenance',
       ]);
-      config.api.getBranchPr = jest.fn(() => ({ isUnmergeable: true }));
+      platform.getBranchPr = jest.fn(() => ({ isUnmergeable: true }));
       await cleanup.pruneStaleBranches(config, [
         'renovate/lock-file-maintenance',
       ]);
-      expect(config.api.getAllRenovateBranches.mock.calls).toHaveLength(1);
-      expect(config.api.deleteBranch.mock.calls).toHaveLength(1);
+      expect(platform.getAllRenovateBranches.mock.calls).toHaveLength(1);
+      expect(platform.deleteBranch.mock.calls).toHaveLength(1);
     });
     it('calls delete only once', async () => {
       config.branchList = ['renovate/lock-file-maintenance'];
-      config.api.getAllRenovateBranches.mockReturnValueOnce([
+      platform.getAllRenovateBranches.mockReturnValueOnce([
         'renovate/lock-file-maintenance',
       ]);
-      config.api.getBranchPr = jest.fn(() => ({ isUnmergeable: true }));
+      platform.getBranchPr = jest.fn(() => ({ isUnmergeable: true }));
       await cleanup.pruneStaleBranches(config, []);
-      expect(config.api.getAllRenovateBranches.mock.calls).toHaveLength(1);
-      expect(config.api.deleteBranch.mock.calls).toHaveLength(1);
+      expect(platform.getAllRenovateBranches.mock.calls).toHaveLength(1);
+      expect(platform.deleteBranch.mock.calls).toHaveLength(1);
     });
   });
 });
diff --git a/test/workers/repository/init/apis.spec.js b/test/workers/repository/init/apis.spec.js
index 682fbfa2f3e4c5b7a73755640915e542ea5917e3..6b0feabfe991a8e9bff9b616d2481e88fc0c1aed 100644
--- a/test/workers/repository/init/apis.spec.js
+++ b/test/workers/repository/init/apis.spec.js
@@ -1,18 +1,29 @@
-let config;
-beforeEach(() => {
-  jest.resetAllMocks();
-  config = require('../../../_fixtures/config');
-  config.errors = [];
-  config.warnings = [];
-});
-
 const { initApis } = require('../../../../lib/workers/repository/init/apis');
 
-jest.mock('../../../../lib/platform/github');
+const ghGot = require('gh-got');
+const glGot = require('gl-got');
 
 describe('workers/repository/init/apis', () => {
   describe('initApis', () => {
+    let config;
+    beforeEach(() => {
+      config = require('../../../_fixtures/config');
+      config.errors = [];
+      config.warnings = [];
+    });
     it('runs', async () => {
+      // initRepo
+      ghGot.mockReturnValueOnce({ body: { owner: {} } });
+      ghGot.mockReturnValueOnce({ body: { object: {} } });
+      ghGot.mockReturnValueOnce({ body: {} });
+      await initApis(config, 'some-token');
+    });
+    it('runs gitlab', async () => {
+      config.platform = 'gitlab';
+      config.repository = 'some/name';
+      glGot.mockReturnValueOnce({ body: {} });
+      glGot.mockReturnValueOnce({ body: {} });
+      glGot.mockReturnValueOnce({ body: {} });
       await initApis(config, 'some-token');
     });
   });
diff --git a/test/workers/repository/init/base.spec.js b/test/workers/repository/init/base.spec.js
index 601ade331d5265f6e38a2efac0623d4a5be93ddf..3451ae6feb2d4a1901b30da722697019d1138344 100644
--- a/test/workers/repository/init/base.spec.js
+++ b/test/workers/repository/init/base.spec.js
@@ -19,10 +19,10 @@ describe('workers/repository/init/base', () => {
     });
     it('sets baseBranch', async () => {
       config.baseBranch = 'ssome-base';
-      config.api.branchExists.mockReturnValue(true);
+      platform.branchExists.mockReturnValue(true);
       const res = await checkBaseBranch(config);
       expect(res.errors).toHaveLength(0);
-      expect(config.api.setBaseBranch.mock.calls).toHaveLength(1);
+      expect(platform.setBaseBranch.mock.calls).toHaveLength(1);
     });
   });
 });
diff --git a/test/workers/repository/init/config.spec.js b/test/workers/repository/init/config.spec.js
index 8a97de5210ac24e698f9c38189bb18293169cb59..db13dd9b8622854efda753171c2368fec74234f8 100644
--- a/test/workers/repository/init/config.spec.js
+++ b/test/workers/repository/init/config.spec.js
@@ -17,13 +17,13 @@ describe('workers/repository/init/config', () => {
       expect(res).toMatchObject(config);
     });
     it('returns error if cannot parse', async () => {
-      config.api.getFileContent.mockReturnValue('cannot parse');
+      platform.getFileContent.mockReturnValue('cannot parse');
       const res = await mergeRenovateJson(config);
       expect(res.errors).toHaveLength(1);
       expect(res.errors[0]).toMatchSnapshot();
     });
     it('returns error if duplicate keys', async () => {
-      config.api.getFileContent.mockReturnValue(
+      platform.getFileContent.mockReturnValue(
         '{ "enabled": true, "enabled": false }'
       );
       const res = await mergeRenovateJson(config);
diff --git a/test/workers/repository/init/semantic.spec.js b/test/workers/repository/init/semantic.spec.js
index 944103753d194769073320a112c6a024001a08ba..d8ee66a72b7ecbc7e8e84ac59d6bd4186bb84c2e 100644
--- a/test/workers/repository/init/semantic.spec.js
+++ b/test/workers/repository/init/semantic.spec.js
@@ -19,16 +19,13 @@ describe('workers/repository/init/semantic', () => {
     });
     it('detects false if unknown', async () => {
       config.semanticCommits = null;
-      config.api.getCommitMessages.mockReturnValue(['foo', 'bar']);
+      platform.getCommitMessages.mockReturnValue(['foo', 'bar']);
       const res = await detectSemanticCommits(config);
       expect(res.semanticCommits).toBe(false);
     });
     it('detects true if known', async () => {
       config.semanticCommits = null;
-      config.api.getCommitMessages.mockReturnValue([
-        'fix: foo',
-        'refactor: bar',
-      ]);
+      platform.getCommitMessages.mockReturnValue(['fix: foo', 'refactor: bar']);
       const res = await detectSemanticCommits(config);
       expect(res.semanticCommits).toBe(true);
     });
diff --git a/test/workers/repository/onboarding/branch/index.spec.js b/test/workers/repository/onboarding/branch/index.spec.js
index 7efcef64417f46963f4412780fd92b1b7be4b228..e7f782e8a40d1f56e4b4461b7a5ac663d46ebf07 100644
--- a/test/workers/repository/onboarding/branch/index.spec.js
+++ b/test/workers/repository/onboarding/branch/index.spec.js
@@ -12,13 +12,8 @@ describe('workers/repository/onboarding/branch', () => {
       config = {
         ...defaultConfig,
         logger,
-        api: {
-          commitFilesToBranch: jest.fn(),
-          findPr: jest.fn(),
-          getFileList: jest.fn(() => []),
-          setBaseBranch: jest.fn(),
-        },
       };
+      platform.getFileList.mockReturnValue([]);
     });
     it('throws if no package files', async () => {
       let e;
@@ -40,22 +35,21 @@
       expect(e).toBeDefined();
     });
     it('detects repo is onboarded via file', async () => {
-      config.api.getFileList.mockReturnValueOnce(['renovate.json']);
+      platform.getFileList.mockReturnValueOnce(['renovate.json']);
       const res = await checkOnboardingBranch(config);
       expect(res.repoIsOnboarded).toBe(true);
     });
     it('detects repo is onboarded via PR', async () => {
-      config.api.findPr.mockReturnValue(true);
+      platform.findPr.mockReturnValue(true);
       const res = await checkOnboardingBranch(config);
       expect(res.repoIsOnboarded).toBe(true);
     });
     it('creates onboaring branch', async () => {
-      config.api.getFileList.mockReturnValue(['package.json']);
-      config.api.commitFilesToBranch = jest.fn();
+      platform.getFileList.mockReturnValue(['package.json']);
       const res = await checkOnboardingBranch(config);
       expect(res.repoIsOnboarded).toBe(false);
       expect(res.branchList).toEqual(['renovate/configure']);
-      expect(config.api.setBaseBranch.mock.calls).toHaveLength(1);
+      expect(platform.setBaseBranch.mock.calls).toHaveLength(1);
     });
   });
 });
diff --git a/test/workers/repository/onboarding/pr/index.spec.js b/test/workers/repository/onboarding/pr/index.spec.js
index d44adf6b0ac6464df7bc06dd0390625310e5d385..e8648d8623f3cf0c23f7fadb4419d6a372c5e5f8 100644
--- a/test/workers/repository/onboarding/pr/index.spec.js
+++ b/test/workers/repository/onboarding/pr/index.spec.js
@@ -13,41 +13,37 @@ describe('workers/repository/onboarding/pr', () => {
       config = {
         ...defaultConfig,
         logger,
-        api: {
-          createPr: jest.fn(() => ({})),
-          getBranchPr: jest.fn(),
-          updatePr: jest.fn(),
-        },
         errors: [],
         warnings: [],
         description: [],
         branches: [],
       };
+      platform.createPr.mockReturnValue({});
     });
     let createPrBody;
     it('creates PR', async () => {
       await ensureOnboardingPr(config);
-      expect(config.api.createPr.mock.calls).toHaveLength(1);
-      createPrBody = config.api.createPr.mock.calls[0][2];
+      expect(platform.createPr.mock.calls).toHaveLength(1);
+      createPrBody = platform.createPr.mock.calls[0][2];
     });
     it('returns if PR does not need updating', async () => {
-      config.api.getBranchPr.mockReturnValue({
+      platform.getBranchPr.mockReturnValue({
         title: 'Configure Renovate',
         body: createPrBody,
       });
       await ensureOnboardingPr(config);
-      expect(config.api.createPr.mock.calls).toHaveLength(0);
-      expect(config.api.updatePr.mock.calls).toHaveLength(0);
+      expect(platform.createPr.mock.calls).toHaveLength(0);
+      expect(platform.updatePr.mock.calls).toHaveLength(0);
     });
     it('updates PR', async () => {
       config.baseBranch = 'some-branch';
-      config.api.getBranchPr.mockReturnValue({
+      platform.getBranchPr.mockReturnValue({
         title: 'Configure Renovate',
         body: createPrBody,
       });
       await ensureOnboardingPr(config);
-      expect(config.api.createPr.mock.calls).toHaveLength(0);
-      expect(config.api.updatePr.mock.calls).toHaveLength(1);
+      expect(platform.createPr.mock.calls).toHaveLength(0);
+      expect(platform.updatePr.mock.calls).toHaveLength(1);
     });
   });
 });