diff --git a/.eslintrc.js b/.eslintrc.js
index bd494fb666cab1378c22e28b257494f587686ace..75b8430fec92b54e4fc3f1dfc4b32ba021d43944 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -15,7 +15,7 @@ module.exports = {
     'no-use-before-define': 0,
     'no-restricted-syntax': 0,
     'no-await-in-loop': 0,
-    'prettier/prettier': ['error', { 'trailingComma': 'all', 'singleQuote': true }],
+    'prettier/prettier': ['error', { 'trailingComma': 'es5', 'singleQuote': true }],
     'promise/always-return': 'error',
     'promise/no-return-wrap': 'error',
     'promise/param-names': 'error',
diff --git a/bin/update-configuration-table.js b/bin/update-configuration-table.js
index c26cefcff69ff3fbda8d7986f5ae938e5d83df6e..a1b342392ac79497b62c4ffd44f80c83712d02ec 100644
--- a/bin/update-configuration-table.js
+++ b/bin/update-configuration-table.js
@@ -11,10 +11,10 @@ const envParser = require('../lib/config/env');
 console.log('## Configuration Options');
 console.log('');
 console.log(
-  '| Name | Description | Type | Default value | Environment | CLI |',
+  '| Name | Description | Type | Default value | Environment | CLI |'
 );
 console.log(
-  '|------|-------------|------|---------------|-------------|-----|',
+  '|------|-------------|------|---------------|-------------|-----|'
 );
 
 const options = definitions.getOptions();
@@ -32,7 +32,7 @@ options.forEach(option => {
     cliName = `\`${cliName}\``;
   }
   console.log(
-    `| \`${option.name}\` | ${option.description} | ${option.type} | ${optionDefault} | ${envName} | ${cliName} |`,
+    `| \`${option.name}\` | ${option.description} | ${option.type} | ${optionDefault} | ${envName} | ${cliName} |`
   );
 });
 /* eslint-enable no-console */
diff --git a/lib/api/github.js b/lib/api/github.js
index 27f550e1a70fd94831c2d59f1d71d88998ce78d5..abcdf7a4f8914291df06d0d4d66e84741b502a84 100644
--- a/lib/api/github.js
+++ b/lib/api/github.js
@@ -89,7 +89,7 @@ async function initRepo(repoName, token, endpoint) {
 // Returns an array of file paths in current repo matching the fileName
 async function findFilePaths(fileName) {
   const res = await ghGot(
-    `search/code?q=repo:${config.repoName}+filename:${fileName}`,
+    `search/code?q=repo:${config.repoName}+filename:${fileName}`
   );
   const exactMatches = res.body.items.filter(item => item.name === fileName);
@@ -105,14 +105,14 @@ async function branchExists(branchName) {
   logger.debug(`Checking if branch exists: ${branchName}`);
   try {
     const res = await ghGot(
-      `repos/${config.repoName}/git/refs/heads/${branchName}`,
+      `repos/${config.repoName}/git/refs/heads/${branchName}`
     );
     if (res.statusCode === 200) {
       logger.debug(JSON.stringify(res.body));
       if (Array.isArray(res.body)) {
         // This seems to happen if GitHub has partial matches, so we check ref
         const matchedBranch = res.body.some(
-          branch => branch.ref === `refs/heads/${branchName}`,
+          branch => branch.ref === `refs/heads/${branchName}`
         );
         if (matchedBranch) {
           logger.debug('Branch exists');
@@ -183,7 +183,7 @@ async function addReviewers(issueNo, reviewers) {
       body: {
         reviewers,
       },
-    },
+    }
   );
 }
@@ -220,7 +220,7 @@ async function checkForClosedPr(branchName, prTitle) {
   // Return true if any of the titles match exactly
   return res.body.some(
     pr =>
-      pr.title === prTitle && pr.head.label === `${config.owner}:${branchName}`,
+      pr.title === prTitle && pr.head.label === `${config.owner}:${branchName}`
   );
 }
@@ -258,7 +258,7 @@ async function getPr(prNo) {
   // Check if only one author of all commits
   logger.debug('Checking all commits');
   const prCommits = (await ghGot(
-    `repos/${config.repoName}/pulls/${prNo}/commits`,
+    `repos/${config.repoName}/pulls/${prNo}/commits`
   )).body;
   const authors = prCommits.reduce((arr, commit) => {
     const author = commit.author.login;
@@ -299,7 +299,7 @@ async function mergePr(pr) {
 
 async function getFile(filePath, branchName = config.defaultBranch) {
   const res = await ghGot(
-    `repos/${config.repoName}/contents/${filePath}?ref=${branchName}`,
+    `repos/${config.repoName}/contents/${filePath}?ref=${branchName}`
   );
   return res.body.content;
 }
@@ -327,10 +327,10 @@ async function commitFilesToBranch(
   branchName,
   files,
   message,
-  parentBranch = config.defaultBranch,
+  parentBranch = config.defaultBranch
 ) {
   logger.debug(
-    `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`,
+    `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`
   );
   const parentCommit = await getBranchCommit(parentBranch);
   const parentTree = await getCommitTree(parentCommit);
diff --git a/lib/api/gitlab.js b/lib/api/gitlab.js
index 521c33d70d88424910ea14fbad96832c030bdcfc..859f5f1fe8bdec646e31aae9119bd59379944fd7 100644
--- a/lib/api/gitlab.js
+++ b/lib/api/gitlab.js
@@ -268,7 +268,7 @@ async function mergePr(pr) {
       body: {
         should_remove_source_branch: true,
       },
-    },
+    }
   );
 }
@@ -276,7 +276,7 @@ async function mergePr(pr) {
 
 async function getFile(filePath, branchName = config.defaultBranch) {
   const res = await glGot(
-    `projects/${config.repoName}/repository/files?file_path=${filePath}&ref=${branchName}`,
+    `projects/${config.repoName}/repository/files?file_path=${filePath}&ref=${branchName}`
   );
   return res.body.content;
 }
@@ -338,10 +338,10 @@ async function commitFilesToBranch(
   branchName,
   files,
   message,
-  parentBranch = config.defaultBranch,
+  parentBranch = config.defaultBranch
 ) {
   logger.debug(
-    `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`,
+    `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`
   );
   if (branchName !== parentBranch) {
     const isBranchExisting = await branchExists(branchName);
diff --git a/lib/api/npm.js b/lib/api/npm.js
index 39305ab186f78723bbfea87bc4d4847c2695a4bd..fd54e203d265ef27fbb8cd085c6070b0c293b8cf 100644
--- a/lib/api/npm.js
+++ b/lib/api/npm.js
@@ -14,7 +14,7 @@ async function getDependency(name) {
   const regUrl = registryUrl(scope);
   const pkgUrl = url.resolve(
     regUrl,
-    encodeURIComponent(name).replace(/^%40/, '@'),
+    encodeURIComponent(name).replace(/^%40/, '@')
   );
   const authInfo = registryAuthToken(regUrl);
   const headers = {};
diff --git a/lib/config/cli.js b/lib/config/cli.js
index 820416c7531bf416860e3f187c45d251d7aeb172..9830857e8d0f80a78921f9c6cb90140ad3f56975 100644
--- a/lib/config/cli.js
+++ b/lib/config/cli.js
@@ -34,7 +34,7 @@ function getConfig(argv) {
       program = program.option(
         optionString,
         option.description,
-        coersions[option.type],
+        coersions[option.type]
       );
     }
   });
@@ -46,7 +46,7 @@ function getConfig(argv) {
     console.log('');
     console.log(' $ renovate --token abc123 singapore/lint-condo');
     console.log(
-      ' $ renovate --ignore-unstable=false --log-level verbose singapore/lint-condo',
+      ' $ renovate --ignore-unstable=false --log-level verbose singapore/lint-condo'
     );
     console.log(' $ renovate singapore/lint-condo singapore/package-test');
     /* eslint-enable no-console */
diff --git a/lib/config/index.js b/lib/config/index.js
index 357acad0ca40204883d9b084e9fa4092b307f956..626eb29140c64c2be38d32663e68d0f828dcb9a1 100644
--- a/lib/config/index.js
+++ b/lib/config/index.js
@@ -59,26 +59,26 @@ async function parseConfigs(env, argv) {
       logger.info('Autodiscovering GitHub repositories');
       config.repositories = await githubApi.getRepos(
         config.token,
-        config.endpoint,
+        config.endpoint
       );
     } else if (config.platform === 'gitlab') {
       logger.info('Autodiscovering GitLab repositories');
       config.repositories = await gitlabApi.getRepos(
         config.token,
-        config.endpoint,
+        config.endpoint
       );
     }
     if (!config.repositories || config.repositories.length === 0) {
       // Soft fail (no error thrown) if no accessible repositories
       logger.info(
-        'The account associated with your token does not have access to any repos',
+        'The account associated with your token does not have access to any repos'
       );
       return;
     }
   } else if (!config.repositories || config.repositories.length === 0) {
     // We need at least one repository defined
     throw new Error(
-      'At least one repository must be configured, or use --autodiscover',
+      'At least one repository must be configured, or use --autodiscover'
     );
   }
diff --git a/lib/helpers/package-json.js b/lib/helpers/package-json.js
index f824b2c99f48c2d3c1f03be7e3cc5081f80571e4..44092b2e7809f0c1747bb26d66c354185b79d49d 100644
--- a/lib/helpers/package-json.js
+++ b/lib/helpers/package-json.js
@@ -19,7 +19,7 @@ function extractDependencies(packageJson, sections) {
         depType,
         depName,
         currentVersion: packageJson[depType][depName],
-      })),
+      }))
     );
   }, []);
 }
@@ -52,7 +52,7 @@ function setNewValue(currentFileContent, depType, depName, newVersion) {
       currentFileContent,
       searchIndex,
       searchString,
-      newString,
+      newString
     );
     // Compare the parsed JSON structure of old and new
     if (_.isEqual(parsedContents, JSON.parse(testContent))) {
diff --git a/lib/helpers/versions.js b/lib/helpers/versions.js
index ad44e0738afdd216f9a0c3f0cc3e283a02342ec3..55de05b094f71e431600360710693320a1b8dc25 100644
--- a/lib/helpers/versions.js
+++ b/lib/helpers/versions.js
@@ -44,21 +44,21 @@ function determineUpgrades(dep, currentVersion, config) {
       version =>
         config.ignoreUnstable &&
         stable.is(changeLogFromVersion) &&
-        !stable.is(version),
+        !stable.is(version)
     )
     // Ignore future versions, unless the current version is marked as future
     .reject(
       version =>
         config.ignoreFuture &&
         !isFuture(versions[changeLogFromVersion]) &&
-        isFuture(versions[version]),
+        isFuture(versions[version])
     )
     // Ignore versions newer than "latest", unless current version is newer than the "latest"
     .reject(
       version =>
         config.respectLatest &&
         isPastLatest(dep, version) &&
-        !isPastLatest(dep, changeLogFromVersion),
+        !isPastLatest(dep, changeLogFromVersion)
     )
     // Loop through all possible versions
     .forEach(newVersion => {
@@ -119,7 +119,7 @@ function determineUpgrades(dep, currentVersion, config) {
       // Utilise that a.b is the same as ~a.b.0
       const minSatisfying = semver.minSatisfying(
         versionList,
-        `${major}.${minor}`,
+        `${major}.${minor}`
       );
       // Add a tilde before that version number
       return Object.assign(upgrade, { newVersion: `~${minSatisfying}` });
diff --git a/lib/helpers/yarn.js b/lib/helpers/yarn.js
index d5c60bb784866cc38fb2f189a4c08db57396c09d..4ce02b9d575e1d6f552cc339ac025534f2f8ec9b 100644
--- a/lib/helpers/yarn.js
+++ b/lib/helpers/yarn.js
@@ -50,7 +50,7 @@ async function getLockFile(packageFile, packageContent, api) {
   const newYarnLockContent = await module.exports.generateLockFile(
     packageContent,
     npmrcContent,
-    yarnrcContent,
+    yarnrcContent
   );
   // Return file object
   return {
@@ -62,15 +62,15 @@ async function getLockFile(packageFile, packageContent, api) {
 async function maintainLockFile(inputConfig) {
   logger.debug(`maintainYarnLock(${JSON.stringify(inputConfig)})`);
   const packageContent = await inputConfig.api.getFileContent(
-    inputConfig.packageFile,
+    inputConfig.packageFile
   );
   const yarnLockFileName = path.join(
     path.dirname(inputConfig.packageFile),
-    'yarn.lock',
+    'yarn.lock'
   );
   logger.debug(`Checking for ${yarnLockFileName}`);
   const existingYarnLock = await inputConfig.api.getFileContent(
-    yarnLockFileName,
+    yarnLockFileName
   );
   logger.silly(`existingYarnLock:\n${existingYarnLock}`);
   if (!existingYarnLock) {
@@ -80,7 +80,7 @@ async function maintainLockFile(inputConfig) {
   const newYarnLock = await module.exports.getLockFile(
     inputConfig.packageFile,
     packageContent,
-    inputConfig.api,
+    inputConfig.api
   );
   logger.silly(`newYarnLock:\n${newYarnLock.contents}`);
   if (existingYarnLock.toString() === newYarnLock.contents.toString()) {
diff --git a/lib/index.js b/lib/index.js
index d3862845aae06b53e32d50ff142666f1683855b3..5bc1228e7a5443da5fe11279957dcc354b2aa500 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -40,7 +40,7 @@ async function processRepo(repo) {
     api = gitlabApi;
   } else {
     logger.error(
-      `Unknown platform ${config.platform} for repository ${repo.repository}`,
+      `Unknown platform ${config.platform} for repository ${repo.repository}`
     );
     return;
   }
@@ -121,12 +121,12 @@ If the default settings are all suitable for you, simply close this Pull Request
           contents: defaultConfigString,
         },
       ],
-      'Add renovate.json',
+      'Add renovate.json'
     );
     const pr = await api.createPr(
       'renovate/configure',
       'Configure Renovate',
-      prBody,
+      prBody
     );
     logger.info(`Created ${pr.displayNumber} for configuration`);
   }
@@ -150,8 +150,8 @@ async function getAllRepoUpgrades(repo) {
       await worker.processPackageFile(
         repo.repository,
         packageFile.fileName,
-        cascadedConfig,
-      ),
+        cascadedConfig
+      )
     );
   }
   return upgrades;
diff --git a/lib/worker.js b/lib/worker.js
index 4c108dc3f604626fcd1b8fdec780a723b64e8177..78bb6a5fd013c78f65cc6fe4b1cb725a1069f2f3 100644
--- a/lib/worker.js
+++ b/lib/worker.js
@@ -30,7 +30,7 @@ async function processPackageFile(repoName, packageFile, packageConfig) {
   // Check for renovate config inside the package.json
   if (packageContent.renovate) {
     logger.debug(
-      `package.json>renovate config: ${stringify(packageContent.renovate)}`,
+      `package.json>renovate config: ${stringify(packageContent.renovate)}`
     );
     Object.assign(config, packageContent.renovate, { repoConfigured: true });
   }
@@ -50,11 +50,11 @@ async function processPackageFile(repoName, packageFile, packageConfig) {
   // Extract all dependencies from the package.json
   let dependencies = await packageJson.extractDependencies(
     packageContent,
-    depTypes,
+    depTypes
   );
   // Filter out ignored dependencies
   dependencies = dependencies.filter(
-    dependency => config.ignoreDeps.indexOf(dependency.depName) === -1,
+    dependency => config.ignoreDeps.indexOf(dependency.depName) === -1
   );
   dependencies = assignDepConfigs(config, dependencies);
   // Find all upgrades for remaining dependencies
@@ -80,7 +80,7 @@ function assignDepConfigs(inputConfig, deps) {
     returnDep.config = Object.assign(
       {},
       inputConfig,
-      getDepTypeConfig(inputConfig.depTypes, dep.depType),
+      getDepTypeConfig(inputConfig.depTypes, dep.depType)
     );
     let packageRuleApplied = false;
     if (returnDep.config.packages) {
@@ -139,7 +139,7 @@ async function findUpgrades(dependencies) {
     const upgrades = await versionsHelper.determineUpgrades(
       npmDependency,
       dep.currentVersion,
-      dep.config,
+      dep.config
     );
     if (upgrades.length > 0) {
       logger.verbose(`${dep.depName}: Upgrades = ${JSON.stringify(upgrades)}`);
@@ -213,7 +213,7 @@ async function updateBranch(upgrades) {
     (await upgrade0.api.checkForClosedPr(branchName, prTitle))
   ) {
     logger.verbose(
-      `Skipping ${branchName} upgrade as matching closed PR already existed`,
+      `Skipping ${branchName} upgrade as matching closed PR already existed`
     );
     return;
   }
diff --git a/lib/workers/branch.js b/lib/workers/branch.js
index 3547724b21df95d0415c40e903a0111294f9bbd0..509074048ba70c5fd819aba4631f9312b595f0e5 100644
--- a/lib/workers/branch.js
+++ b/lib/workers/branch.js
@@ -58,10 +58,10 @@ async function ensureBranch(upgrades) {
   // If undefined, this will mean the defaultBranch
   const parentBranch = await module.exports.getParentBranch(
     branchName,
-    upgrades[0],
+    upgrades[0]
   );
   const commitMessage = handlebars.compile(upgrades[0].commitMessage)(
-    upgrades[0],
+    upgrades[0]
   );
   const api = upgrades[0].api;
   const packageFiles = {};
@@ -78,14 +78,14 @@ async function ensureBranch(upgrades) {
       // If we are rebasing then existing content will be from master
       packageFiles[upgrade.packageFile] = await api.getFileContent(
         upgrade.packageFile,
-        parentBranch,
+        parentBranch
       );
     }
     const newContent = packageJsonHelper.setNewValue(
       packageFiles[upgrade.packageFile],
       upgrade.depType,
       upgrade.depName,
-      upgrade.newVersion,
+      upgrade.newVersion
     );
     if (packageFiles[upgrade.packageFile] === newContent) {
       logger.debug('packageFile content unchanged');
@@ -98,7 +98,7 @@ async function ensureBranch(upgrades) {
   }
   if (Object.keys(packageFiles).length > 0) {
     logger.debug(
-      `${Object.keys(packageFiles).length} package file(s) need updating.`,
+      `${Object.keys(packageFiles).length} package file(s) need updating.`
     );
     for (const packageFile of Object.keys(packageFiles)) {
       logger.debug(`Adding ${packageFile}`);
@@ -109,7 +109,7 @@ async function ensureBranch(upgrades) {
       const yarnLockFile = await yarnHelper.getLockFile(
         packageFile,
         packageFiles[packageFile],
-        api,
+        api
       );
       if (yarnLockFile) {
         // Add new yarn.lock file too
@@ -125,7 +125,7 @@ async function ensureBranch(upgrades) {
     branchName,
     commitFiles,
     commitMessage,
-    parentBranch,
+    parentBranch
   );
   return true;
 }
diff --git a/lib/workers/pr.js b/lib/workers/pr.js
index 0bd878f0ae6698f9d583ee9883a99fe9d4176d74..b381106def0087d84c8c75712ad3b5a0d93888ca 100644
--- a/lib/workers/pr.js
+++ b/lib/workers/pr.js
@@ -36,7 +36,7 @@ async function ensurePr(upgradeConfig) {
   config.changelog = await getChangeLog(
     config.depName,
     config.changeLogFromVersion,
-    config.changeLogToVersion,
+    config.changeLogToVersion
   );
   const prTitle = handlebars.compile(config.prTitle)(config);
   const prBody = handlebars.compile(config.prBody)(config);
@@ -73,7 +73,7 @@ async function ensurePr(upgradeConfig) {
       }
     } else {
       logger.debug(
-        `Skipping assignees and reviewers as automerge=${config.automerge}`,
+        `Skipping assignees and reviewers as automerge=${config.automerge}`
       );
     }
     logger.info(`Created ${pr.displayNumber}`);
diff --git a/test/api/github.spec.js b/test/api/github.spec.js
index 12ba13aab7a4a7d19f3ad6463912b16ebb6b5d16..4a80b35369f64da1ec18d59956b982d0bb483b00 100644
--- a/test/api/github.spec.js
+++ b/test/api/github.spec.js
@@ -111,7 +111,7 @@ describe('api/github', () => {
         err = e;
       }
       expect(err.message).toBe(
-        'No token found for GitHub repository some/repo',
+        'No token found for GitHub repository some/repo'
       );
     });
     it('should squash', async () => {
@@ -277,7 +277,7 @@ describe('api/github', () => {
       ghGot.mockImplementationOnce(() =>
         Promise.reject({
           statusCode: 404,
-        }),
+        })
      );
       const exists = await github.branchExists('thebranchname');
       expect(ghGot.mock.calls).toMatchSnapshot();
@@ -286,7 +286,7 @@ describe('api/github', () => {
     it('should propagate unknown errors', async () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() =>
-        Promise.reject(new Error('Something went wrong')),
+        Promise.reject(new Error('Something went wrong'))
       );
       let err;
       try {
@@ -432,7 +432,7 @@ describe('api/github', () => {
       const pr = await github.createPr(
         'some-branch',
         'The Title',
-        'Hello world',
+        'Hello world'
       );
       expect(pr).toMatchSnapshot();
       expect(ghGot.post.mock.calls).toMatchSnapshot();
@@ -667,7 +667,7 @@ describe('api/github', () => {
       await github.commitFilesToBranch(
         'package.json',
         files,
-        'my commit message',
+        'my commit message'
       );
       expect(ghGot.mock.calls).toMatchSnapshot();
       expect(ghGot.post.mock.calls).toMatchSnapshot();
@@ -687,7 +687,7 @@ describe('api/github', () => {
       await github.commitFilesToBranch(
         'package.json',
         files,
-        'my other commit message',
+        'my other commit message'
       );
       expect(ghGot.mock.calls).toMatchSnapshot();
       expect(ghGot.post.mock.calls).toMatchSnapshot();
diff --git a/test/config/index.spec.js b/test/config/index.spec.js
index 58683f701fd60c5064787d1ae4def5c4ac5a78c1..9d5cb1a5c817295168f94ccd51e771201326cdf7 100644
--- a/test/config/index.spec.js
+++ b/test/config/index.spec.js
@@ -56,7 +56,7 @@ describe('config/index', () => {
       err = e;
     }
     expect(err.message).toBe(
-      'At least one repository must be configured, or use --autodiscover',
+      'At least one repository must be configured, or use --autodiscover'
     );
   });
   it('supports token in CLI options', async () => {
@@ -69,7 +69,7 @@ describe('config/index', () => {
       err = e;
     }
     expect(err.message).toBe(
-      'At least one repository must be configured, or use --autodiscover',
+      'At least one repository must be configured, or use --autodiscover'
     );
   });
   it('autodiscovers github platform', async () => {
diff --git a/test/helpers/changelog.spec.js b/test/helpers/changelog.spec.js
index 7b088be069a2e3874d64be3195834b927837a754..db357670581506982ce27cb3d0eebe09c278f184 100644
--- a/test/helpers/changelog.spec.js
+++ b/test/helpers/changelog.spec.js
@@ -18,7 +18,7 @@ describe('helpers/changelog', () => {
     it('returns header if generated markdown is valid', async () => {
       changelog.markdown.mockReturnValueOnce('dummy');
       expect(await getChangeLog('renovate', '1.0.0', '2.0.0')).toBe(
-        '### Changelog\n\ndummy',
+        '### Changelog\n\ndummy'
       );
     });
     it('returns empty if error thrown', async () => {
diff --git a/test/helpers/package-json.spec.js b/test/helpers/package-json.spec.js
index e2dc18f49ccb4637868280f8517096f90b75ac8e..6d8befe7b19ae2c36c081e1fabbf1fe36e398b2e 100644
--- a/test/helpers/package-json.spec.js
+++ b/test/helpers/package-json.spec.js
@@ -11,7 +11,7 @@ const defaultTypes = [
 function readFixture(fixture) {
   return fs.readFileSync(
     path.resolve(__dirname, `../_fixtures/package-json/${fixture}`),
-    'utf8',
+    'utf8'
   );
 }
@@ -23,7 +23,7 @@ describe('helpers/package-json', () => {
     it('returns an array of correct length', () => {
       const extractedDependencies = packageJson.extractDependencies(
         JSON.parse(input01Content),
-        defaultTypes,
+        defaultTypes
       );
       extractedDependencies.should.be.instanceof(Array);
       extractedDependencies.should.have.length(10);
@@ -31,7 +31,7 @@ describe('helpers/package-json', () => {
     it('each element contains non-null depType, depName, currentVersion', () => {
       const extractedDependencies = packageJson.extractDependencies(
         JSON.parse(input01Content),
-        defaultTypes,
+        defaultTypes
       );
       extractedDependencies
         .every(dep => dep.depType && dep.depName && dep.currentVersion)
@@ -40,7 +40,7 @@ describe('helpers/package-json', () => {
     it('supports null devDependencies', () => {
       const extractedDependencies = packageJson.extractDependencies(
         JSON.parse(input02Content),
-        defaultTypes,
+        defaultTypes
       );
       extractedDependencies.should.be.instanceof(Array);
       extractedDependencies.should.have.length(6);
@@ -53,7 +53,7 @@ describe('helpers/package-json', () => {
         input01Content,
         'dependencies',
         'cheerio',
-        '0.22.1',
+        '0.22.1'
       );
       testContent.should.equal(outputContent);
     });
@@ -63,7 +63,7 @@ describe('helpers/package-json', () => {
         input01Content,
         'devDependencies',
         'angular-touch',
-        '1.6.1',
+        '1.6.1'
       );
       testContent.should.equal(outputContent);
     });
@@ -73,7 +73,7 @@ describe('helpers/package-json', () => {
         input01Content,
         'devDependencies',
         'angular-sanitize',
-        '1.6.1',
+        '1.6.1'
       );
       testContent.should.equal(outputContent);
     });
@@ -82,7 +82,7 @@ describe('helpers/package-json', () => {
         input01Content,
         'devDependencies',
         'angular-touch',
-        '1.5.8',
+        '1.5.8'
       );
       testContent.should.equal(input01Content);
     });
diff --git a/test/helpers/versions.spec.js b/test/helpers/versions.spec.js
index 7734b10eb9b04476c1de504cbfa2181928280d41..046096233c5ae07484fec43c101c01003a27619b 100644
--- a/test/helpers/versions.spec.js
+++ b/test/helpers/versions.spec.js
@@ -55,19 +55,19 @@ describe('helpers/versions', () => {
     it('returns only one update if grouping', () => {
       defaultConfig.groupName = 'somegroup';
       expect(
-        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig),
+        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig)
       ).toMatchSnapshot();
     });
     it('returns only one update if automerging any', () => {
       defaultConfig.automerge = 'any';
       expect(
-        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig),
+        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig)
       ).toMatchSnapshot();
     });
     it('returns both updates if automerging minor', () => {
       defaultConfig.automerge = 'minor';
       expect(
-        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig),
+        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig)
       ).toMatchSnapshot();
     });
     it('disables major release separation (major)', () => {
@@ -136,7 +136,7 @@ describe('helpers/versions', () => {
       },
     ];
     expect(
-      versionsHelper.determineUpgrades(qJson, '~0.9.0', defaultConfig),
+      versionsHelper.determineUpgrades(qJson, '~0.9.0', defaultConfig)
     ).toEqual(pinVersions);
   });
   it('upgrades minor ranged versions', () => {
@@ -150,7 +150,7 @@ describe('helpers/versions', () => {
       },
     ];
     expect(
-      versionsHelper.determineUpgrades(qJson, '~1.0.0', defaultConfig),
+      versionsHelper.determineUpgrades(qJson, '~1.0.0', defaultConfig)
     ).toEqual(upgradeVersions);
   });
   it('pins minor ranged versions', () => {
@@ -162,13 +162,13 @@ describe('helpers/versions', () => {
       },
     ];
     expect(
-      versionsHelper.determineUpgrades(qJson, '^1.0.0', defaultConfig),
+      versionsHelper.determineUpgrades(qJson, '^1.0.0', defaultConfig)
     ).toEqual(upgradeVersions);
   });
   it('ignores minor ranged versions when not pinning', () => {
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(
-      versionsHelper.determineUpgrades(qJson, '^1.0.0', config),
+      versionsHelper.determineUpgrades(qJson, '^1.0.0', config)
     ).toHaveLength(0);
   });
   it('upgrades tilde ranges', () => {
@@ -182,7 +182,7 @@ describe('helpers/versions', () => {
       },
     ];
     expect(
-      versionsHelper.determineUpgrades(qJson, '~1.3.0', defaultConfig),
+      versionsHelper.determineUpgrades(qJson, '~1.3.0', defaultConfig)
     ).toEqual(upgradeVersions);
   });
   it('upgrades .x minor ranges', () => {
@@ -196,7 +196,7 @@ describe('helpers/versions', () => {
       },
     ];
     expect(
-      versionsHelper.determineUpgrades(qJson, '1.3.x', defaultConfig),
+      versionsHelper.determineUpgrades(qJson, '1.3.x', defaultConfig)
     ).toEqual(upgradeVersions);
   });
   it('upgrades tilde ranges without pinning', () => {
@@ -212,7 +212,7 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(versionsHelper.determineUpgrades(qJson, '~1.3.0', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('upgrades .x major ranges without pinning', () => {
@@ -228,7 +228,7 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
    expect(versionsHelper.determineUpgrades(qJson, '0.x', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('upgrades .x minor ranges without pinning', () => {
@@ -244,7 +244,7 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(versionsHelper.determineUpgrades(qJson, '1.3.x', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('upgrades shorthand major ranges without pinning', () => {
@@ -260,7 +260,7 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(versionsHelper.determineUpgrades(qJson, '0', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('upgrades shorthand minor ranges without pinning', () => {
@@ -276,7 +276,7 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(versionsHelper.determineUpgrades(qJson, '1.3', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('upgrades multiple tilde ranges without pinning', () => {
@@ -300,7 +300,7 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(versionsHelper.determineUpgrades(qJson, '~0.7.0', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('upgrades multiple caret ranges without pinning', () => {
@@ -324,19 +324,19 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(versionsHelper.determineUpgrades(qJson, '^0.7.0', config)).toEqual(
-      upgradeVersions,
+      upgradeVersions
     );
   });
   it('ignores complex ranges when not pinning', () => {
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(
-      versionsHelper.determineUpgrades(qJson, '^0.7.0 || ^0.8.0', config),
+      versionsHelper.determineUpgrades(qJson, '^0.7.0 || ^0.8.0', config)
     ).toHaveLength(0);
   });
   it('returns nothing for greater than ranges', () => {
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(
-      versionsHelper.determineUpgrades(qJson, '>= 0.7.0', config),
+      versionsHelper.determineUpgrades(qJson, '>= 0.7.0', config)
     ).toHaveLength(0);
   });
   it('upgrades less than equal ranges without pinning', () => {
@@ -360,13 +360,13 @@ describe('helpers/versions', () => {
     ];
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(
-      versionsHelper.determineUpgrades(qJson, '<= 0.7.2', config),
+      versionsHelper.determineUpgrades(qJson, '<= 0.7.2', config)
     ).toEqual(upgradeVersions);
   });
   it('rejects less than ranges without pinning', () => {
     const config = Object.assign({}, defaultConfig, { pinVersions: false });
     expect(
-      versionsHelper.determineUpgrades(qJson, '< 0.7.2', config),
+      versionsHelper.determineUpgrades(qJson, '< 0.7.2', config)
     ).toEqual([]);
   });
   it('supports > latest versions if configured', () => {
@@ -425,7 +425,7 @@ describe('helpers/versions', () => {
           },
         },
         '1.0.0',
-        defaultConfig,
+        defaultConfig
       )
       .should.eql([]);
   });
@@ -449,7 +449,7 @@ describe('helpers/versions', () => {
           },
         },
         '1.0.0-beta',
-        defaultConfig,
+        defaultConfig
       )
       .should.eql(upgradeVersions);
   });
diff --git a/test/helpers/yarn.spec.js b/test/helpers/yarn.spec.js
index 2a7fdf128db058eabbdc29cb323d48842837cb88..06cc201a200fe9ffe467e382c97a6e0a2a0fc72f 100644
--- a/test/helpers/yarn.spec.js
+++ b/test/helpers/yarn.spec.js
@@ -21,7 +21,7 @@ describe('generateLockFile(newPackageJson, npmrcContent, yarnrcContent)', () =>
     const yarnLock = await yarnHelper.generateLockFile(
       'package-json-contents',
       'npmrc-contents',
-      'yarnrc-contents',
+      'yarnrc-contents'
     );
     expect(tmp.dirSync.mock.calls.length).toEqual(1);
     expect(fs.writeFileSync.mock.calls.length).toEqual(3);
@@ -51,7 +51,7 @@ describe('getLockFile(packageJson, config)', () => {
       contents: 'New yarn.lock',
     };
     expect(await yarnHelper.getLockFile('package.json', '', api)).toMatchObject(
-      yarnLockFile,
+      yarnLockFile
     );
   });
 });
diff --git a/test/workers/branch.spec.js b/test/workers/branch.spec.js
index 1cfb47cdb0ffc050d38b046ddc6e532836b7c2d3..5fd76ec25c61c01a7943fb1bc4c78f25e5e4ed0d 100644
--- a/test/workers/branch.spec.js
+++ b/test/workers/branch.spec.js
@@ -21,13 +21,13 @@ describe('workers/branch', () => {
     it('returns undefined if branch does not exist', async () => {
       config.api.branchExists.mockReturnValue(false);
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined,
+        undefined
       );
     });
     it('returns branchName if no PR', async () => {
       config.api.getBranchPr.mockReturnValue(null);
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName,
+        branchName
       );
     });
     it('returns false if does not need rebaseing', async () => {
@@ -35,7 +35,7 @@ describe('workers/branch', () => {
         isUnmergeable: false,
       });
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName,
+        branchName
       );
     });
     it('returns false if unmergeable and cannot rebase', async () => {
@@ -44,7 +44,7 @@ describe('workers/branch', () => {
         isUnmergeable: true,
         canRebase: false,
       });
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName,
+        branchName
       );
     });
     it('returns true if unmergeable and can rebase', async () => {
@@ -53,7 +53,7 @@ describe('workers/branch', () => {
         isUnmergeable: true,
         canRebase: true,
       });
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined,
+        undefined
       );
     });
     it('returns false if stale but not configured to rebase', async () => {
@@ -64,7 +64,7 @@ describe('workers/branch', () => {
       });
       config.rebaseStalePrs = false;
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName,
+        branchName
       );
     });
     it('returns false if stale but cannot rebase', async () => {
@@ -75,7 +75,7 @@ describe('workers/branch', () => {
       });
       config.rebaseStalePrs = true;
       expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName,
+        branchName
       );
     });
     it('returns true if stale and can rebase', async () => {
@@ -86,7 +86,7 @@ describe('workers/branch', () => {
       });
       config.rebaseStalePrs = true;
      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined,
+        undefined
       );
     });
   });