diff --git a/.eslintrc.js b/.eslintrc.js
index 5b3557beef9cf5fba574c66430843f2b040b1f84..bd494fb666cab1378c22e28b257494f587686ace 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -4,15 +4,18 @@ module.exports = {
     },
     'extends': [
       'airbnb-base',
+      'prettier',
     ],
     'plugins': [
-        'import',
-        'promise',
+      'import',
+      'promise',
+      'prettier',
     ],
     'rules': {
       'no-use-before-define': 0,
       'no-restricted-syntax': 0,
       'no-await-in-loop': 0,
+      'prettier/prettier': ['error', { 'trailingComma': 'all', 'singleQuote': true }],
       'promise/always-return': 'error',
       'promise/no-return-wrap': 'error',
       'promise/param-names': 'error',
diff --git a/bin/heroku/web.js b/bin/heroku/web.js
index e9742cf37293c05c82bb10d22a849f2ede8ec328..730f6b1589a169edb804d6f2945fd8a43a31c78a 100644
--- a/bin/heroku/web.js
+++ b/bin/heroku/web.js
@@ -6,11 +6,13 @@ const port = process.env.PORT || '3000';
 const requestHandler = (request, response) => {
   // Redirect users to Heroku dashboard
   const appName = request.headers.host.split(':')[0].split('.')[0];
-  response.writeHead(302, { Location: `https://dashboard.heroku.com/apps/${appName}/logs` });
+  response.writeHead(302, {
+    Location: `https://dashboard.heroku.com/apps/${appName}/logs`,
+  });
   response.end();
 };
 
-http.createServer(requestHandler).listen(port, (err) => {
+http.createServer(requestHandler).listen(port, err => {
   if (err) {
     console.log('Failed to start web server', err);
     return;
diff --git a/bin/update-configuration-table.js b/bin/update-configuration-table.js
index d68521c0508296c2199879b4c6fc9b6076b9c5a3..c26cefcff69ff3fbda8d7986f5ae938e5d83df6e 100644
--- a/bin/update-configuration-table.js
+++ b/bin/update-configuration-table.js
@@ -10,11 +10,15 @@ const envParser = require('../lib/config/env');
 // Print table header
 console.log('## Configuration Options');
 console.log('');
-console.log('| Name | Description | Type | Default value | Environment | CLI |');
-console.log('|------|-------------|------|---------------|-------------|-----|');
+console.log(
+  '| Name | Description | Type | Default value | Environment | CLI |',
+);
+console.log(
+  '|------|-------------|------|---------------|-------------|-----|',
+);
 
 const options = definitions.getOptions();
-options.forEach((option) => {
+options.forEach(option => {
   let optionDefault = defaultsParser.getDefault(option);
   if (optionDefault !== '') {
     optionDefault = `\`${stringify(optionDefault)}\``;
@@ -27,6 +31,8 @@ options.forEach((option) => {
   if (cliName.length) {
     cliName = `\`${cliName}\``;
   }
-  console.log(`| \`${option.name}\` | ${option.description} | ${option.type} | ${optionDefault} | ${envName} | ${cliName} |`);
+  console.log(
+    `| \`${option.name}\` | ${option.description} | ${option.type} | ${optionDefault} | ${envName} | ${cliName} |`,
+  );
 });
 /* eslint-enable no-console */
diff --git a/contributing.md b/contributing.md
index 31b8b4c9a3b9ab5a9c24387061017d301ef5251b..ee7e2033f1750ac9c843f06f6b07e438ced4c507 100644
--- a/contributing.md
+++ b/contributing.md
@@ -13,7 +13,9 @@ Before you submit any code, it's recommended that you raise an issue first if yo
 
 After you have cloned the project, first check that it's running OK locally.
 
-`renovate` supports node versions 6 and above. It is written using async/await so either needs `babel` transpilation for node 6, or a harmony flag for node 7.
+First you will need to install dependencies. We use [yarn](https://github.com/yarnpkg/yarn), so run `yarn` instead of `npm install`.
+
+`renovate` supports node versions 6.9 and above. It is written using async/await, so it needs `babel` transpilation for node 6.
 
 If running in node 6, you need to run a transpiled version of the code. You can do this without an explicit transpilation step by running `yarn run start-babel`.
 Examples:
@@ -29,10 +31,12 @@ If running on node 7, you can run just like the above, but use the `yarn run sta
 ## Adding configuration options
 
 We wish to keep backwards-compatibility as often as possible, as well as make the code configurable, so most new functionality should be controllable via configuration options.
-Please see [Cofiguration docs](docs/configuration.md) for a list of current options.
+Please see [Configuration docs](docs/configuration.md) for a list of current options.
 
 If you wish to add one, add it to `lib/config/definitions.js` and then run `yarn run update-docs`.
 
 ## Running tests
 
-You can run `yarn test` locally to test your code. We don't have 100% coverage so new tests are desirable. We also run Continuous Integration using CircleCI.
+You can run `yarn test` locally to test your code. We also run Continuous Integration using CircleCI.
+
+We use [Prettier](https://github.com/prettier/prettier) for code formatting. If your code fails `yarn test` due to a `prettier` rule in `eslint`, it can be fixed by running `yarn run eslint-fix`.
diff --git a/lib/api/github.js b/lib/api/github.js
index b7ab35fced8c6c2489220ecd79a2ca5291a638e3..27f550e1a70fd94831c2d59f1d71d88998ce78d5 100644
--- a/lib/api/github.js
+++ b/lib/api/github.js
@@ -88,7 +88,9 @@ async function initRepo(repoName, token, endpoint) {
 
 // Returns an array of file paths in current repo matching the fileName
 async function findFilePaths(fileName) {
-  const res = await ghGot(`search/code?q=repo:${config.repoName}+filename:${fileName}`);
+  const res = await ghGot(
+    `search/code?q=repo:${config.repoName}+filename:${fileName}`,
+  );
   const exactMatches = res.body.items.filter(item => item.name === fileName);
 
   // GitHub seems to return files in the root with a leading `/`
@@ -102,12 +104,16 @@ async function findFilePaths(fileName) {
 async function branchExists(branchName) {
   logger.debug(`Checking if branch exists: ${branchName}`);
   try {
-    const res = await ghGot(`repos/${config.repoName}/git/refs/heads/${branchName}`);
+    const res = await ghGot(
+      `repos/${config.repoName}/git/refs/heads/${branchName}`,
+    );
     if (res.statusCode === 200) {
       logger.debug(JSON.stringify(res.body));
       if (Array.isArray(res.body)) {
         // This seems to happen if GitHub has partial matches, so we check ref
-        const matchedBranch = res.body.some(branch => branch.ref === `refs/heads/${branchName}`);
+        const matchedBranch = res.body.some(
+          branch => branch.ref === `refs/heads/${branchName}`,
+        );
         if (matchedBranch) {
           logger.debug('Branch exists');
         } else {
@@ -119,12 +125,12 @@ async function branchExists(branchName) {
       return res.body.ref === `refs/heads/${branchName}`;
     }
     // This probably shouldn't happen
-    logger.debug('Branch doesn\'t exist');
+    logger.debug("Branch doesn't exist");
     return false;
   } catch (error) {
     if (error.statusCode === 404) {
       // If file not found, then return false
-      logger.debug('Branch doesn\'t exist');
+      logger.debug("Branch doesn't exist");
       return false;
     }
     // Propagate if it's any other error
@@ -135,7 +141,8 @@ async function branchExists(branchName) {
 // Returns the Pull Request for a branch. Null if not exists.
 async function getBranchPr(branchName) {
   logger.debug(`getBranchPr(${branchName})`);
-  const gotString = `repos/${config.repoName}/pulls?` +
+  const gotString =
+    `repos/${config.repoName}/pulls?` +
     `state=open&base=${config.defaultBranch}&head=${config.owner}:${branchName}`;
   const res = await ghGot(gotString);
   if (!res.body.length) {
@@ -167,14 +174,17 @@ async function addAssignees(issueNo, assignees) {
 
 async function addReviewers(issueNo, reviewers) {
   logger.debug(`Adding reviewers ${reviewers} to #${issueNo}`);
-  await ghGot.post(`repos/${config.repoName}/pulls/${issueNo}/requested_reviewers`, {
-    headers: {
-      accept: 'application/vnd.github.black-cat-preview+json',
+  await ghGot.post(
+    `repos/${config.repoName}/pulls/${issueNo}/requested_reviewers`,
+    {
+      headers: {
+        accept: 'application/vnd.github.black-cat-preview+json',
+      },
+      body: {
+        reviewers,
+      },
     },
-    body: {
-      reviewers,
-    },
-  });
+  );
 }
 
 async function addLabels(issueNo, labels) {
@@ -190,7 +200,7 @@ async function findPr(branchName, prTitle, state = 'all') {
   logger.debug(`findPr urlString: ${urlString}`);
   const res = await ghGot(urlString);
   let pr = null;
-  res.body.forEach((result) => {
+  res.body.forEach(result => {
     if (!prTitle || result.title === prTitle) {
       pr = result;
       if (pr.state === 'closed') {
@@ -208,7 +218,10 @@ async function checkForClosedPr(branchName, prTitle) {
   const url = `repos/${config.repoName}/pulls?state=closed&head=${config.owner}:${branchName}`;
   const res = await ghGot(url);
   // Return true if any of the titles match exactly
-  return res.body.some(pr => pr.title === prTitle && pr.head.label === `${config.owner}:${branchName}`);
+  return res.body.some(
+    pr =>
+      pr.title === prTitle && pr.head.label === `${config.owner}:${branchName}`,
+  );
 }
 
 // Creates PR and returns PR number
@@ -244,7 +257,9 @@ async function getPr(prNo) {
     } else {
       // Check if only one author of all commits
       logger.debug('Checking all commits');
-      const prCommits = (await ghGot(`repos/${config.repoName}/pulls/${prNo}/commits`)).body;
+      const prCommits = (await ghGot(
+        `repos/${config.repoName}/pulls/${prNo}/commits`,
+      )).body;
       const authors = prCommits.reduce((arr, commit) => {
         const author = commit.author.login;
         if (arr.indexOf(author) === -1) {
@@ -283,7 +298,9 @@ async function mergePr(pr) {
 // Generic File operations
 
 async function getFile(filePath, branchName = config.defaultBranch) {
-  const res = await ghGot(`repos/${config.repoName}/contents/${filePath}?ref=${branchName}`);
+  const res = await ghGot(
+    `repos/${config.repoName}/contents/${filePath}?ref=${branchName}`,
+  );
   return res.body.content;
 }
 
@@ -310,8 +327,11 @@ async function commitFilesToBranch(
   branchName,
   files,
   message,
-  parentBranch = config.defaultBranch) {
-  logger.debug(`commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`);
+  parentBranch = config.defaultBranch,
+) {
+  logger.debug(
+    `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`,
+  );
   const parentCommit = await getBranchCommit(parentBranch);
   const parentTree = await getCommitTree(parentCommit);
   const fileBlobs = [];
@@ -372,13 +392,15 @@ async function createBlob(fileContents) {
 
 // Return the commit SHA for a branch
 async function getBranchCommit(branchName) {
-  return (await ghGot(`repos/${config.repoName}/git/refs/heads/${branchName}`)).body.object.sha;
+  return (await ghGot(`repos/${config.repoName}/git/refs/heads/${branchName}`))
+    .body.object.sha;
 }
 
 // Return the tree SHA for a commit
 async function getCommitTree(commit) {
   logger.debug(`getCommitTree(${commit})`);
-  return (await ghGot(`repos/${config.repoName}/git/commits/${commit}`)).body.tree.sha;
+  return (await ghGot(`repos/${config.repoName}/git/commits/${commit}`)).body
+    .tree.sha;
 }
 
 // Create a tree and return SHA
@@ -388,7 +410,7 @@ async function createTree(baseTree, files) {
     base_tree: baseTree,
     tree: [],
   };
-  files.forEach((file) => {
+  files.forEach(file => {
     body.tree.push({
       path: file.name,
       mode: '100644',
@@ -397,7 +419,8 @@ async function createTree(baseTree, files) {
     });
   });
   logger.debug(body);
-  return (await ghGot.post(`repos/${config.repoName}/git/trees`, { body })).body.sha;
+  return (await ghGot.post(`repos/${config.repoName}/git/trees`, { body })).body
+    .sha;
 }
 
 // Create a commit and return commit SHA
diff --git a/lib/api/gitlab.js b/lib/api/gitlab.js
index b884235c758587167ef57f7d7e21158a90092257..521c33d70d88424910ea14fbad96832c030bdcfc 100644
--- a/lib/api/gitlab.js
+++ b/lib/api/gitlab.js
@@ -80,7 +80,7 @@ async function initRepo(repoName, token, endpoint) {
 
 // Returns an array of file paths in current repo matching the fileName
 async function findFilePaths(fileName) {
-  logger.verbose('Can\'t find multiple package.json files in GitLab');
+  logger.verbose("Can't find multiple package.json files in GitLab");
   return [fileName];
 }
 
@@ -97,12 +97,12 @@ async function branchExists(branchName) {
       return true;
     }
     // This probably shouldn't happen
-    logger.debug('Branch doesn\'t exist');
+    logger.debug("Branch doesn't exist");
     return false;
   } catch (error) {
     if (error.statusCode === 404) {
       // If file not found, then return false
-      logger.debug('Branch doesn\'t exist');
+      logger.debug("Branch doesn't exist");
       return false;
     }
     // Propagate if it's any other error
@@ -117,7 +117,7 @@ async function getBranchPr(branchName) {
   const res = await glGot(urlString);
   logger.debug(`Got res with ${res.body.length} results`);
   let pr = null;
-  res.body.forEach((result) => {
+  res.body.forEach(result => {
     if (result.source_branch === branchName) {
       pr = result;
     }
@@ -145,7 +145,7 @@ async function getBranchStatus(branchName) {
   }
   let status = 'success';
   // Return 'success' if all are success
-  res.body.forEach((check) => {
+  res.body.forEach(check => {
     // If one is failed then don't overwrite that
     if (status !== 'failed') {
       if (check.status === 'failed') {
@@ -187,8 +187,11 @@ async function findPr(branchName, prTitle, state = 'all') {
   const urlString = `projects/${config.repoName}/merge_requests?state=${state}`;
   const res = await glGot(urlString);
   let pr = null;
-  res.body.forEach((result) => {
-    if ((!prTitle || result.title === prTitle) && result.source_branch === branchName) {
+  res.body.forEach(result => {
+    if (
+      (!prTitle || result.title === prTitle) &&
+      result.source_branch === branchName
+    ) {
       pr = result;
       // GitHub uses number, GitLab uses iid
       pr.number = pr.id;
@@ -259,17 +262,22 @@ async function updatePr(prNo, title, body) {
 }
 
 async function mergePr(pr) {
-  await glGot.put(`projects/${config.repoName}/merge_requests/${pr.number}/merge`, {
-    body: {
-      should_remove_source_branch: true,
+  await glGot.put(
+    `projects/${config.repoName}/merge_requests/${pr.number}/merge`,
+    {
+      body: {
+        should_remove_source_branch: true,
+      },
     },
-  });
+  );
 }
 
 // Generic File operations
 
 async function getFile(filePath, branchName = config.defaultBranch) {
-  const res = await glGot(`projects/${config.repoName}/repository/files?file_path=${filePath}&ref=${branchName}`);
+  const res = await glGot(
+    `projects/${config.repoName}/repository/files?file_path=${filePath}&ref=${branchName}`,
+  );
   return res.body.content;
 }
 
@@ -330,8 +338,11 @@ async function commitFilesToBranch(
   branchName,
   files,
   message,
-  parentBranch = config.defaultBranch) {
-  logger.debug(`commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`);
+  parentBranch = config.defaultBranch,
+) {
+  logger.debug(
+    `commitFilesToBranch('${branchName}', files, message, '${parentBranch})'`,
+  );
   if (branchName !== parentBranch) {
     const isBranchExisting = await branchExists(branchName);
     if (isBranchExisting) {
diff --git a/lib/api/npm.js b/lib/api/npm.js
index 529ea3c7371afc29408166c07d66b866c3c990cb..39305ab186f78723bbfea87bc4d4847c2695a4bd 100644
--- a/lib/api/npm.js
+++ b/lib/api/npm.js
@@ -12,7 +12,10 @@ module.exports = {
 async function getDependency(name) {
   const scope = name.split('/')[0];
   const regUrl = registryUrl(scope);
-  const pkgUrl = url.resolve(regUrl, encodeURIComponent(name).replace(/^%40/, '@'));
+  const pkgUrl = url.resolve(
+    regUrl,
+    encodeURIComponent(name).replace(/^%40/, '@'),
+  );
   const authInfo = registryAuthToken(regUrl);
   const headers = {};
 
diff --git a/lib/config/cli.js b/lib/config/cli.js
index aa6856e9a63507a64402da5b958bbaf804ebcef2..820416c7531bf416860e3f187c45d251d7aeb172 100644
--- a/lib/config/cli.js
+++ b/lib/config/cli.js
@@ -20,18 +20,22 @@ function getConfig(argv) {
   const config = {};
 
   const coersions = {
-    boolean: val => (val === 'true'),
+    boolean: val => val === 'true',
     list: val => val.split(',').map(el => el.trim()),
     string: val => val,
   };
 
   let program = new commander.Command().arguments('[repositories...]');
 
-  options.forEach((option) => {
+  options.forEach(option => {
     if (option.cli !== false) {
       const param = `<${option.type}>`.replace('<boolean>', '[boolean]');
       const optionString = `${getCliName(option)} ${param}`;
-      program = program.option(optionString, option.description, coersions[option.type]);
+      program = program.option(
+        optionString,
+        option.description,
+        coersions[option.type],
+      );
     }
   });
 
@@ -41,19 +45,21 @@ function getConfig(argv) {
     console.log('  Examples:');
     console.log('');
     console.log('    $ renovate --token abc123 singapore/lint-condo');
-    console.log('    $ renovate --ignore-unstable=false --log-level verbose singapore/lint-condo');
+    console.log(
+      '    $ renovate --ignore-unstable=false --log-level verbose singapore/lint-condo',
+    );
     console.log('    $ renovate singapore/lint-condo singapore/package-test');
     /* eslint-enable no-console */
   }
 
   program = program
     .on('--help', helpConsole)
-    .action((repositories) => {
+    .action(repositories => {
       config.repositories = repositories;
     })
     .parse(argv);
 
-  options.forEach((option) => {
+  options.forEach(option => {
     if (option.cli !== false) {
       if (program[option.name] !== undefined) {
         config[option.name] = program[option.name];
diff --git a/lib/config/defaults.js b/lib/config/defaults.js
index 103213c7a7a64028a228732c7a8a34c906831db5..576644d9e1f2722c6375a7a5b1528a2e6041670e 100644
--- a/lib/config/defaults.js
+++ b/lib/config/defaults.js
@@ -12,14 +12,16 @@ const defaultValues = {
 };
 
 function getDefault(option) {
-  return option.default === undefined ? defaultValues[option.type] : option.default;
+  return option.default === undefined
+    ? defaultValues[option.type]
+    : option.default;
 }
 
 function getConfig() {
   const options = configDefinitions.getOptions();
   const config = {};
 
-  options.forEach((option) => {
+  options.forEach(option => {
     config[option.name] = getDefault(option);
   });
 
diff --git a/lib/config/env.js b/lib/config/env.js
index 133e96fe83b75c03c0c0977ebf95b4b30a0426ac..1de8a97f3f75764a451465d2bc94427e4d45f30f 100644
--- a/lib/config/env.js
+++ b/lib/config/env.js
@@ -22,12 +22,12 @@ function getConfig(env) {
   const config = {};
 
   const coersions = {
-    boolean: val => (val === 'true'),
+    boolean: val => val === 'true',
     list: val => val.split(',').map(el => el.trim()),
     string: val => val,
   };
 
-  options.forEach((option) => {
+  options.forEach(option => {
     if (option.env !== false) {
       const envName = getEnvName(option);
       if (env[envName]) {
diff --git a/lib/config/index.js b/lib/config/index.js
index 5993002ab627e29558ce25e6363e442946fdb3fc..357acad0ca40204883d9b084e9fa4092b307f956 100644
--- a/lib/config/index.js
+++ b/lib/config/index.js
@@ -57,23 +57,33 @@ async function parseConfigs(env, argv) {
   if (config.autodiscover) {
     if (config.platform === 'github') {
       logger.info('Autodiscovering GitHub repositories');
-      config.repositories = await githubApi.getRepos(config.token, config.endpoint);
+      config.repositories = await githubApi.getRepos(
+        config.token,
+        config.endpoint,
+      );
     } else if (config.platform === 'gitlab') {
       logger.info('Autodiscovering GitLab repositories');
-      config.repositories = await gitlabApi.getRepos(config.token, config.endpoint);
+      config.repositories = await gitlabApi.getRepos(
+        config.token,
+        config.endpoint,
+      );
     }
     if (!config.repositories || config.repositories.length === 0) {
       // Soft fail (no error thrown) if no accessible repositories
-      logger.info('The account associated with your token does not have access to any repos');
+      logger.info(
+        'The account associated with your token does not have access to any repos',
+      );
       return;
     }
   } else if (!config.repositories || config.repositories.length === 0) {
     // We need at least one repository defined
-    throw new Error('At least one repository must be configured, or use --autodiscover');
+    throw new Error(
+      'At least one repository must be configured, or use --autodiscover',
+    );
   }
 
   // Configure each repository
-  config.repositories = config.repositories.map((item) => {
+  config.repositories = config.repositories.map(item => {
     // Convert any repository strings to objects
     const repo = typeof item === 'string' ? { repository: item } : item;
 
@@ -89,7 +99,7 @@ async function parseConfigs(env, argv) {
     }
 
     // Expand packageFile format
-    repo.packageFiles = repo.packageFiles.map((packageFile) => {
+    repo.packageFiles = repo.packageFiles.map(packageFile => {
       if (typeof packageFile === 'string') {
         return { fileName: packageFile };
       }
@@ -119,8 +129,7 @@ function getRepositories() {
 function redact(inputConfig) {
   const tokenConfig = {};
   if (inputConfig.token) {
-    tokenConfig.token =
-      `${inputConfig.token.substr(0, 4)}${new Array(inputConfig.token.length - 3).join('*')}`;
+    tokenConfig.token = `${inputConfig.token.substr(0, 4)}${new Array(inputConfig.token.length - 3).join('*')}`;
   }
   const redactedConfig = Object.assign({}, inputConfig, tokenConfig);
   return stringify(redactedConfig);
diff --git a/lib/helpers/package-json.js b/lib/helpers/package-json.js
index 14a26adeafedb8b4c80ca26fd5a4aad0b38494d2..f824b2c99f48c2d3c1f03be7e3cc5081f80571e4 100644
--- a/lib/helpers/package-json.js
+++ b/lib/helpers/package-json.js
@@ -11,12 +11,16 @@ function extractDependencies(packageJson, sections) {
   // loop through dependency types
   return sections.reduce((allDeps, depType) => {
     // loop through each dependency within a type
-    const depNames = packageJson[depType] ? Object.keys(packageJson[depType]) : [];
-    return allDeps.concat(depNames.map(depName => ({
-      depType,
-      depName,
-      currentVersion: packageJson[depType][depName],
-    })));
+    const depNames = packageJson[depType]
+      ? Object.keys(packageJson[depType])
+      : [];
+    return allDeps.concat(
+      depNames.map(depName => ({
+        depType,
+        depName,
+        currentVersion: packageJson[depType][depName],
+      })),
+    );
   }, []);
 }
 
@@ -44,7 +48,12 @@ function setNewValue(currentFileContent, depType, depName, newVersion) {
     if (matchAt(currentFileContent, searchIndex, searchString)) {
       logger.debug(`Found match at index ${searchIndex}`);
       // Now test if the result matches
-      const testContent = replaceAt(currentFileContent, searchIndex, searchString, newString);
+      const testContent = replaceAt(
+        currentFileContent,
+        searchIndex,
+        searchString,
+        newString,
+      );
       // Compare the parsed JSON structure of old and new
       if (_.isEqual(parsedContents, JSON.parse(testContent))) {
         newFileContent = testContent;
@@ -67,5 +76,9 @@ function matchAt(content, index, match) {
 // Replace oldString with newString at location index of content
 function replaceAt(content, index, oldString, newString) {
   logger.debug(`Replacing ${oldString} with ${newString} at index ${index}`);
-  return content.substr(0, index) + newString + content.substr(index + oldString.length);
+  return (
+    content.substr(0, index) +
+    newString +
+    content.substr(index + oldString.length)
+  );
 }
diff --git a/lib/helpers/versions.js b/lib/helpers/versions.js
index 9c4f418b5103282c9874edb8bb463774c5b19307..ad44e0738afdd216f9a0c3f0cc3e283a02342ec3 100644
--- a/lib/helpers/versions.js
+++ b/lib/helpers/versions.js
@@ -39,30 +39,45 @@ function determineUpgrades(dep, currentVersion, config) {
   _(versionList)
     // Filter out older versions as we can't upgrade to those
     .filter(version => semver.gt(version, changeLogFromVersion))
-
     // Ignore unstable versions, unless the current version is unstable
-    .reject(version => config.ignoreUnstable &&
-            stable.is(changeLogFromVersion) && !stable.is(version))
-
+    .reject(
+      version =>
+        config.ignoreUnstable &&
+        stable.is(changeLogFromVersion) &&
+        !stable.is(version),
+    )
     // Ignore future versions, unless the current version is marked as future
-    .reject(version => config.ignoreFuture &&
-            !isFuture(versions[changeLogFromVersion]) && isFuture(versions[version]))
-
+    .reject(
+      version =>
+        config.ignoreFuture &&
+        !isFuture(versions[changeLogFromVersion]) &&
+        isFuture(versions[version]),
+    )
     // Ignore versions newer than "latest", unless current version is newer than the "latest"
-    .reject(version => config.respectLatest &&
-            isPastLatest(dep, version) && !isPastLatest(dep, changeLogFromVersion))
-
+    .reject(
+      version =>
+        config.respectLatest &&
+        isPastLatest(dep, version) &&
+        !isPastLatest(dep, changeLogFromVersion),
+    )
     // Loop through all possible versions
-    .forEach((newVersion) => {
+    .forEach(newVersion => {
       // Group by major versions
       const newVersionMajor = semver.major(newVersion);
       // Only split majors if configured to do so, and no group or 'any' automerge
-      const separateMajors = config.separateMajorReleases && !config.groupName && config.automerge !== 'any';
+      const separateMajors =
+        config.separateMajorReleases &&
+        !config.groupName &&
+        config.automerge !== 'any';
       const upgradeKey = separateMajors ? newVersionMajor : 'latest';
       // Save this, if it's a new major version or greater than the previous greatest
-      if (!allUpgrades[upgradeKey] ||
-          semver.gt(newVersion, allUpgrades[upgradeKey].newVersion)) {
-        const upgradeType = newVersionMajor > semver.major(changeLogFromVersion) ? 'major' : 'minor';
+      if (
+        !allUpgrades[upgradeKey] ||
+        semver.gt(newVersion, allUpgrades[upgradeKey].newVersion)
+      ) {
+        const upgradeType = newVersionMajor > semver.major(changeLogFromVersion)
+          ? 'major'
+          : 'minor';
         const changeLogToVersion = newVersion;
         allUpgrades[upgradeKey] = {
           upgradeType,
@@ -96,48 +111,53 @@ function determineUpgrades(dep, currentVersion, config) {
   const currentSemver = semverParsed[0];
   // Loop through all upgrades and convert to ranges
   return _(upgrades)
-  .reject(upgrade => upgrade.upgradeType === 'pin')
-  .map(upgrade => Object.assign(upgrade, { isRange: true }))
-  .map((upgrade) => {
-    const { major, minor } = semverUtils.parse(upgrade.newVersion);
-    if (currentSemver.operator === '~') {
-      // Utilise that a.b is the same as ~a.b.0
-      const minSatisfying = semver.minSatisfying(versionList, `${major}.${minor}`);
-      // Add a tilde before that version number
-      return Object.assign(upgrade, { newVersion: `~${minSatisfying}` });
-    } else if (currentSemver.operator === '^') {
-      // If version is < 1, then semver treats ^ same as ~
-      const newRange = major === '0' ? `${major}.${minor}` : `${major}`;
-      const minSatisfying = semver.minSatisfying(versionList, newRange);
-      // Add in the caret
-      return Object.assign(upgrade, { newVersion: `^${minSatisfying}` });
-    } else if (currentSemver.operator === '<=') {
-      // Example: <= 1.2.0
-      return Object.assign(upgrade, { newVersion: `<= ${upgrade.newVersion}` });
-    } else if (currentSemver.minor === undefined) {
-      // Example: 1
-      return Object.assign(upgrade, { newVersion: `${major}` });
-    } else if (currentSemver.minor === 'x') {
-      // Example: 1.x
-      return Object.assign(upgrade, { newVersion: `${major}.x` });
-    } else if (currentSemver.patch === undefined) {
-      // Example: 1.2
-      return Object.assign(upgrade, { newVersion: `${major}.${minor}` });
-    } else if (currentSemver.patch === 'x') {
-      // Example: 1.2.x
-      return Object.assign(upgrade, { newVersion: `${major}.${minor}.x` });
-    }
-    logger.warn(`Unsupported semver type: ${currentSemver}`);
-    return null;
-  })
-  .compact()
-  .value();
+    .reject(upgrade => upgrade.upgradeType === 'pin')
+    .map(upgrade => Object.assign(upgrade, { isRange: true }))
+    .map(upgrade => {
+      const { major, minor } = semverUtils.parse(upgrade.newVersion);
+      if (currentSemver.operator === '~') {
+        // Utilise that a.b is the same as ~a.b.0
+        const minSatisfying = semver.minSatisfying(
+          versionList,
+          `${major}.${minor}`,
+        );
+        // Add a tilde before that version number
+        return Object.assign(upgrade, { newVersion: `~${minSatisfying}` });
+      } else if (currentSemver.operator === '^') {
+        // If version is < 1, then semver treats ^ same as ~
+        const newRange = major === '0' ? `${major}.${minor}` : `${major}`;
+        const minSatisfying = semver.minSatisfying(versionList, newRange);
+        // Add in the caret
+        return Object.assign(upgrade, { newVersion: `^${minSatisfying}` });
+      } else if (currentSemver.operator === '<=') {
+        // Example: <= 1.2.0
+        return Object.assign(upgrade, {
+          newVersion: `<= ${upgrade.newVersion}`,
+        });
+      } else if (currentSemver.minor === undefined) {
+        // Example: 1
+        return Object.assign(upgrade, { newVersion: `${major}` });
+      } else if (currentSemver.minor === 'x') {
+        // Example: 1.x
+        return Object.assign(upgrade, { newVersion: `${major}.x` });
+      } else if (currentSemver.patch === undefined) {
+        // Example: 1.2
+        return Object.assign(upgrade, { newVersion: `${major}.${minor}` });
+      } else if (currentSemver.patch === 'x') {
+        // Example: 1.2.x
+        return Object.assign(upgrade, { newVersion: `${major}.${minor}.x` });
+      }
+      logger.warn(`Unsupported semver type: ${currentSemver}`);
+      return null;
+    })
+    .compact()
+    .value();
 }
 
 function isRange(input) {
   // Pinned versions also return true for semver.validRange
   // We need to check first that they're not 'valid' to get only ranges
-  return (semver.valid(input) === null && semver.validRange(input) !== null);
+  return semver.valid(input) === null && semver.validRange(input) !== null;
 }
 
 function isValidVersion(input) {
@@ -145,7 +165,9 @@ function isValidVersion(input) {
 }
 
 function isFuture(version) {
-  return version && version.publishConfig && version.publishConfig.tag === 'future';
+  return (
+    version && version.publishConfig && version.publishConfig.tag === 'future'
+  );
 }
 
 function isPastLatest(dep, version) {
diff --git a/lib/helpers/yarn.js b/lib/helpers/yarn.js
index 43d3190616c82a97879b126bad69299d71dfc5ae..d5c60bb784866cc38fb2f189a4c08db57396c09d 100644
--- a/lib/helpers/yarn.js
+++ b/lib/helpers/yarn.js
@@ -23,7 +23,10 @@ async function generateLockFile(newPackageJson, npmrcContent, yarnrcContent) {
       fs.writeFileSync(path.join(tmpDir.name, '.yarnrc'), yarnrcContent);
     }
     logger.debug('Spawning yarn install');
-    const result = cp.spawnSync('yarn', ['install'], { cwd: tmpDir.name, shell: true });
+    const result = cp.spawnSync('yarn', ['install'], {
+      cwd: tmpDir.name,
+      shell: true,
+    });
     logger.debug(String(result.stdout));
     logger.debug(String(result.stderr));
     yarnLock = fs.readFileSync(path.join(tmpDir.name, 'yarn.lock'));
@@ -44,28 +47,41 @@ async function getLockFile(packageFile, packageContent, api) {
   const npmrcContent = await api.getFileContent('.npmrc');
   const yarnrcContent = await api.getFileContent('.yarnrc');
   // Generate yarn.lock using shell command
-  const newYarnLockContent =
-    await module.exports.generateLockFile(packageContent, npmrcContent, yarnrcContent);
+  const newYarnLockContent = await module.exports.generateLockFile(
+    packageContent,
+    npmrcContent,
+    yarnrcContent,
+  );
   // Return file object
-  return ({
+  return {
     name: yarnLockFileName,
     contents: newYarnLockContent,
-  });
+  };
 }
 
 async function maintainLockFile(inputConfig) {
   logger.debug(`maintainYarnLock(${JSON.stringify(inputConfig)})`);
-  const packageContent = await inputConfig.api.getFileContent(inputConfig.packageFile);
-  const yarnLockFileName = path.join(path.dirname(inputConfig.packageFile), 'yarn.lock');
+  const packageContent = await inputConfig.api.getFileContent(
+    inputConfig.packageFile,
+  );
+  const yarnLockFileName = path.join(
+    path.dirname(inputConfig.packageFile),
+    'yarn.lock',
+  );
   logger.debug(`Checking for ${yarnLockFileName}`);
-  const existingYarnLock = await inputConfig.api.getFileContent(yarnLockFileName);
+  const existingYarnLock = await inputConfig.api.getFileContent(
+    yarnLockFileName,
+  );
   logger.silly(`existingYarnLock:\n${existingYarnLock}`);
   if (!existingYarnLock) {
     return null;
   }
   logger.debug('Found existing yarn.lock file');
-  const newYarnLock =
-    await module.exports.getLockFile(inputConfig.packageFile, packageContent, inputConfig.api);
+  const newYarnLock = await module.exports.getLockFile(
+    inputConfig.packageFile,
+    packageContent,
+    inputConfig.api,
+  );
   logger.silly(`newYarnLock:\n${newYarnLock.contents}`);
   if (existingYarnLock.toString() === newYarnLock.contents.toString()) {
     logger.debug('Yarn lock file does not need updating');
diff --git a/lib/index.js b/lib/index.js
index 5595905d32c7d399c233e1ce4dff493b3dd73e56..d3862845aae06b53e32d50ff142666f1683855b3 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -39,7 +39,9 @@ async function processRepo(repo) {
   } else if (config.platform === 'gitlab') {
     api = gitlabApi;
   } else {
-    logger.error(`Unknown platform ${config.platform} for repository ${repo.repository}`);
+    logger.error(
+      `Unknown platform ${config.platform} for repository ${repo.repository}`,
+    );
     return;
   }
   logger.debug(`Processing repository: ${stringify(config)}`);
@@ -113,12 +115,19 @@ If the default settings are all suitable for you, simply close this Pull Request
   const defaultConfigString = `${stringify(defaultConfig)}\n`;
   await api.commitFilesToBranch(
     'renovate/configure',
-    [{
-      name: 'renovate.json',
-      contents: defaultConfigString,
-    }],
-    'Add renovate.json');
-  const pr = await api.createPr('renovate/configure', 'Configure Renovate', prBody);
+    [
+      {
+        name: 'renovate.json',
+        contents: defaultConfigString,
+      },
+    ],
+    'Add renovate.json',
+  );
+  const pr = await api.createPr(
+    'renovate/configure',
+    'Configure Renovate',
+    prBody,
+  );
   logger.info(`Created ${pr.displayNumber} for configuration`);
 }
 
@@ -137,8 +146,13 @@ async function getAllRepoUpgrades(repo) {
   let upgrades = [];
   for (const packageFile of repo.packageFiles) {
     const cascadedConfig = configParser.getCascadedConfig(repo, packageFile);
-    upgrades = upgrades.concat(await worker.processPackageFile(repo.repository,
-      packageFile.fileName, cascadedConfig));
+    upgrades = upgrades.concat(
+      await worker.processPackageFile(
+        repo.repository,
+        packageFile.fileName,
+        cascadedConfig,
+      ),
+    );
   }
   return upgrades;
 }
diff --git a/lib/logger.js b/lib/logger.js
index 6a23307bd06419802fd8a9534674454137f0314d..793d1ef3d592a431baf64cdc41f45d091b98b9a7 100644
--- a/lib/logger.js
+++ b/lib/logger.js
@@ -3,9 +3,7 @@ const logger = require('winston');
 // Colorize console logs
 logger.configure({
   level: process.env.LOG_LEVEL || 'info',
-  transports: [
-    new (logger.transports.Console)({ colorize: true }),
-  ],
+  transports: [new logger.transports.Console({ colorize: true })],
 });
 
 module.exports = logger;
diff --git a/lib/worker.js b/lib/worker.js
index b63d39cddcf45bbe10839595917e9c85e4d68452..4c108dc3f604626fcd1b8fdec780a723b64e8177 100644
--- a/lib/worker.js
+++ b/lib/worker.js
@@ -29,7 +29,9 @@ async function processPackageFile(repoName, packageFile, packageConfig) {
   const packageContent = await config.api.getFileJson(packageFile);
   // Check for renovate config inside the package.json
   if (packageContent.renovate) {
-    logger.debug(`package.json>renovate config: ${stringify(packageContent.renovate)}`);
+    logger.debug(
+      `package.json>renovate config: ${stringify(packageContent.renovate)}`,
+    );
     Object.assign(config, packageContent.renovate, { repoConfigured: true });
   }
   // Now check if config is disabled
@@ -38,7 +40,7 @@ async function processPackageFile(repoName, packageFile, packageConfig) {
     return [];
   }
 
-  const depTypes = config.depTypes.map((depType) => {
+  const depTypes = config.depTypes.map(depType => {
     if (typeof depType === 'string') {
       return depType;
     }
@@ -46,16 +48,22 @@ async function processPackageFile(repoName, packageFile, packageConfig) {
   });
 
   // Extract all dependencies from the package.json
-  let dependencies = await packageJson.extractDependencies(packageContent, depTypes);
+  let dependencies = await packageJson.extractDependencies(
+    packageContent,
+    depTypes,
+  );
   // Filter out ignored dependencies
-  dependencies =
-    dependencies.filter(dependency => config.ignoreDeps.indexOf(dependency.depName) === -1);
+  dependencies = dependencies.filter(
+    dependency => config.ignoreDeps.indexOf(dependency.depName) === -1,
+  );
   dependencies = assignDepConfigs(config, dependencies);
   // Find all upgrades for remaining dependencies
   const upgrades = await findUpgrades(dependencies);
   // Process all upgrades sequentially
   if (config.maintainYarnLock) {
-    const upgrade = Object.assign({}, config, { upgradeType: 'maintainYarnLock' });
+    const upgrade = Object.assign({}, config, {
+      upgradeType: 'maintainYarnLock',
+    });
     upgrade.upgradeType = 'maintainYarnLock';
     upgrade.branchName = upgrade.yarnMaintenanceBranchName;
     upgrade.prTitle = upgrade.yarnMaintenancePrTitle;
@@ -67,17 +75,21 @@ async function processPackageFile(repoName, packageFile, packageConfig) {
 
 // Add custom config for each dep
 function assignDepConfigs(inputConfig, deps) {
-  return deps.map((dep) => {
+  return deps.map(dep => {
     const returnDep = Object.assign({}, dep);
-    returnDep.config =
-      Object.assign({}, inputConfig, getDepTypeConfig(inputConfig.depTypes, dep.depType));
+    returnDep.config = Object.assign(
+      {},
+      inputConfig,
+      getDepTypeConfig(inputConfig.depTypes, dep.depType),
+    );
     let packageRuleApplied = false;
     if (returnDep.config.packages) {
       // Loop through list looking for match
       // Exit after first match
-      returnDep.config.packages.forEach((packageConfig) => {
+      returnDep.config.packages.forEach(packageConfig => {
         if (!packageRuleApplied) {
-          const pattern = packageConfig.packagePattern || `^${packageConfig.packageName}$`;
+          const pattern =
+            packageConfig.packagePattern || `^${packageConfig.packageName}$`;
           const packageRegex = new RegExp(pattern);
           if (dep.depName.match(packageRegex)) {
             packageRuleApplied = true;
@@ -110,7 +122,7 @@ function assignDepConfigs(inputConfig, deps) {
 function getDepTypeConfig(depTypes, depTypeName) {
   let depTypeConfig = {};
   if (depTypes) {
-    depTypes.forEach((depType) => {
+    depTypes.forEach(depType => {
       if (typeof depType !== 'string' && depType.depType === depTypeName) {
         depTypeConfig = depType;
       }
@@ -124,11 +136,14 @@ async function findUpgrades(dependencies) {
   // findDepUpgrades can add more than one upgrade to allUpgrades
   async function findDepUpgrades(dep) {
     const npmDependency = await npmApi.getDependency(dep.depName);
-    const upgrades =
-      await versionsHelper.determineUpgrades(npmDependency, dep.currentVersion, dep.config);
+    const upgrades = await versionsHelper.determineUpgrades(
+      npmDependency,
+      dep.currentVersion,
+      dep.config,
+    );
     if (upgrades.length > 0) {
       logger.verbose(`${dep.depName}: Upgrades = ${JSON.stringify(upgrades)}`);
-      upgrades.forEach((upgrade) => {
+      upgrades.forEach(upgrade => {
         allUpgrades.push(Object.assign({}, dep, upgrade));
       });
     } else {
@@ -163,7 +178,8 @@ async function processUpgrades(upgrades) {
     // Check whether to use a group name
     if (flattened.groupName) {
       flattened.groupSlug =
-        flattened.groupSlug || flattened.groupName.toLowerCase().replace(/[^a-z0-9+]+/g, '-');
+        flattened.groupSlug ||
+        flattened.groupName.toLowerCase().replace(/[^a-z0-9+]+/g, '-');
       flattened.branchName = flattened.groupBranchName;
       flattened.commitMessage = flattened.groupCommitMessage;
       flattened.prTitle = flattened.groupPrTitle;
@@ -190,10 +206,15 @@ async function updateBranch(upgrades) {
   logger.verbose(`branchName '${branchName}' length is ${upgrades.length}`);
 
   try {
-    if (upgrade0.upgradeType !== 'maintainYarnLock' &&
+    if (
+      upgrade0.upgradeType !== 'maintainYarnLock' &&
       upgrade0.groupName === null &&
-      !upgrade0.recreateClosed && await upgrade0.api.checkForClosedPr(branchName, prTitle)) {
-      logger.verbose(`Skipping ${branchName} upgrade as matching closed PR already existed`);
+      !upgrade0.recreateClosed &&
+      (await upgrade0.api.checkForClosedPr(branchName, prTitle))
+    ) {
+      logger.verbose(
+        `Skipping ${branchName} upgrade as matching closed PR already existed`,
+      );
       return;
     }
     const branchCreated = await branchWorker.ensureBranch(upgrades);
diff --git a/lib/workers/branch.js b/lib/workers/branch.js
index 5838551e779c075a16d25fceaac1e4d180724e00..3547724b21df95d0415c40e903a0111294f9bbd0 100644
--- a/lib/workers/branch.js
+++ b/lib/workers/branch.js
@@ -10,7 +10,7 @@ module.exports = {
 
 async function getParentBranch(branchName, config) {
   // Check if branch exists
-  if (await config.api.branchExists(branchName) === false) {
+  if ((await config.api.branchExists(branchName)) === false) {
     logger.verbose(`Creating new branch ${branchName}`);
     return undefined;
   }
@@ -56,8 +56,13 @@ async function ensureBranch(upgrades) {
   const branchName = handlebars.compile(upgrades[0].branchName)(upgrades[0]);
   // parentBranch is the branch we will base off
   // If undefined, this will mean the defaultBranch
-  const parentBranch = await module.exports.getParentBranch(branchName, upgrades[0]);
-  const commitMessage = handlebars.compile(upgrades[0].commitMessage)(upgrades[0]);
+  const parentBranch = await module.exports.getParentBranch(
+    branchName,
+    upgrades[0],
+  );
+  const commitMessage = handlebars.compile(upgrades[0].commitMessage)(
+    upgrades[0],
+  );
   const api = upgrades[0].api;
   const packageFiles = {};
   const commitFiles = [];
@@ -71,14 +76,17 @@ async function ensureBranch(upgrades) {
       // See if this is the first time editing this file
       if (!packageFiles[upgrade.packageFile]) {
         // If we are rebasing then existing content will be from master
-        packageFiles[upgrade.packageFile] =
-          await api.getFileContent(upgrade.packageFile, parentBranch);
+        packageFiles[upgrade.packageFile] = await api.getFileContent(
+          upgrade.packageFile,
+          parentBranch,
+        );
       }
       const newContent = packageJsonHelper.setNewValue(
         packageFiles[upgrade.packageFile],
         upgrade.depType,
         upgrade.depName,
-        upgrade.newVersion);
+        upgrade.newVersion,
+      );
       if (packageFiles[upgrade.packageFile] === newContent) {
         logger.debug('packageFile content unchanged');
         delete packageFiles[upgrade.packageFile];
@@ -89,15 +97,20 @@ async function ensureBranch(upgrades) {
     }
   }
   if (Object.keys(packageFiles).length > 0) {
-    logger.debug(`${Object.keys(packageFiles).length} package file(s) need updating.`);
+    logger.debug(
+      `${Object.keys(packageFiles).length} package file(s) need updating.`,
+    );
     for (const packageFile of Object.keys(packageFiles)) {
       logger.debug(`Adding ${packageFile}`);
       commitFiles.push({
         name: packageFile,
         contents: packageFiles[packageFile],
       });
-      const yarnLockFile =
-        await yarnHelper.getLockFile(packageFile, packageFiles[packageFile], api);
+      const yarnLockFile = await yarnHelper.getLockFile(
+        packageFile,
+        packageFiles[packageFile],
+        api,
+      );
       if (yarnLockFile) {
         // Add new yarn.lock file too
         logger.debug(`Adding ${yarnLockFile.name}`);
@@ -108,7 +121,12 @@ async function ensureBranch(upgrades) {
   if (commitFiles.length) {
     logger.debug(`Commit ${commitFiles.length} files to branch ${branchName}`);
     // API will know whether to create new branch or not
-    await api.commitFilesToBranch(branchName, commitFiles, commitMessage, parentBranch);
+    await api.commitFilesToBranch(
+      branchName,
+      commitFiles,
+      commitMessage,
+      parentBranch,
+    );
     return true;
   }
   logger.debug(`No files to commit to branch ${branchName}`);
diff --git a/lib/workers/pr.js b/lib/workers/pr.js
index 8432e1263de1f8ae33f305d1aa4e6f332dd4b902..0bd878f0ae6698f9d583ee9883a99fe9d4176d74 100644
--- a/lib/workers/pr.js
+++ b/lib/workers/pr.js
@@ -33,8 +33,11 @@ async function ensurePr(upgradeConfig) {
   }
 
   // Get changelog and then generate template strings
-  config.changelog =
-    await getChangeLog(config.depName, config.changeLogFromVersion, config.changeLogToVersion);
+  config.changelog = await getChangeLog(
+    config.depName,
+    config.changeLogFromVersion,
+    config.changeLogToVersion,
+  );
   const prTitle = handlebars.compile(config.prTitle)(config);
   const prBody = handlebars.compile(config.prBody)(config);
 
@@ -58,8 +61,10 @@ async function ensurePr(upgradeConfig) {
       await config.api.addLabels(pr.number, config.labels);
     }
     // Don't assign or review if automerging
-    if (config.automerge === 'none' ||
-      (config.automerge === 'minor' && config.upgradeType !== 'minor')) {
+    if (
+      config.automerge === 'none' ||
+      (config.automerge === 'minor' && config.upgradeType !== 'minor')
+    ) {
       if (config.assignees.length > 0) {
         await config.api.addAssignees(pr.number, config.assignees);
       }
@@ -67,7 +72,9 @@ async function ensurePr(upgradeConfig) {
         await config.api.addReviewers(pr.number, config.reviewers);
       }
     } else {
-      logger.debug(`Skipping assignees and reviewers as automerge=${config.automerge}`);
+      logger.debug(
+        `Skipping assignees and reviewers as automerge=${config.automerge}`,
+      );
     }
     logger.info(`Created ${pr.displayNumber}`);
     return pr;
@@ -79,8 +86,10 @@ async function ensurePr(upgradeConfig) {
 
 async function checkAutoMerge(pr, config) {
   logger.debug(`Checking #${pr.number} for automerge`);
-  if (config.automerge === 'any' ||
-    (config.automerge === 'minor' && config.upgradeType === 'minor')) {
+  if (
+    config.automerge === 'any' ||
+    (config.automerge === 'minor' && config.upgradeType === 'minor')
+  ) {
     logger.verbose('PR is configured for automerge');
     logger.debug(JSON.stringify(pr));
     // Return if PR not ready for automerge
diff --git a/package.json b/package.json
index bbb46075feef8074e700871610dbe62e2eb8bfe4..a82f003569e732bedeaab53ad6ecb0878bb918cc 100644
--- a/package.json
+++ b/package.json
@@ -63,10 +63,13 @@
     "chai": "3.5.0",
     "eslint": "3.19.0",
     "eslint-config-airbnb-base": "11.1.3",
+    "eslint-config-prettier": "1.7.0",
     "eslint-plugin-import": "2.2.0",
+    "eslint-plugin-prettier": "2.0.1",
     "eslint-plugin-promise": "3.5.0",
     "mkdirp": "0.5.1",
     "np": "2.13.2",
+    "prettier": "1.2.2",
     "rimraf": "2.6.1"
   },
   "babel": {
diff --git a/test/api/github.spec.js b/test/api/github.spec.js
index f8d74a5cbfea7ce573543b07119c6ba20ceb997f..12ba13aab7a4a7d19f3ad6463912b16ebb6b5d16 100644
--- a/test/api/github.spec.js
+++ b/test/api/github.spec.js
@@ -84,7 +84,12 @@ describe('api/github', () => {
   describe('initRepo', () => {
     [
       [undefined, ['mytoken'], 'mytoken', undefined],
-      [undefined, ['mytoken', 'https://my.custom.endpoint/'], 'mytoken', 'https://my.custom.endpoint/'],
+      [
+        undefined,
+        ['mytoken', 'https://my.custom.endpoint/'],
+        'mytoken',
+        'https://my.custom.endpoint/',
+      ],
       ['myenvtoken', [], 'myenvtoken', undefined],
     ].forEach(([envToken, args, token, endpoint], i) => {
       it(`should initialise the config for the repo - ${i}`, async () => {
@@ -105,7 +110,9 @@ describe('api/github', () => {
       } catch (e) {
         err = e;
       }
-      expect(err.message).toBe('No token found for GitHub repository some/repo');
+      expect(err.message).toBe(
+        'No token found for GitHub repository some/repo',
+      );
     });
     it('should squash', async () => {
       async function squashInitRepo(...args) {
@@ -182,10 +189,13 @@ describe('api/github', () => {
       ghGot.mockImplementationOnce(() => ({
         body: {
           items: [
-              { name: 'package.json', path: '/package.json' },
-              { name: 'package.json.something-else', path: 'some-dir/package.json.some-thing-else' },
-              { name: 'package.json', path: 'src/app/package.json' },
-              { name: 'package.json', path: 'src/otherapp/package.json' },
+            { name: 'package.json', path: '/package.json' },
+            {
+              name: 'package.json.something-else',
+              path: 'some-dir/package.json.some-thing-else',
+            },
+            { name: 'package.json', path: 'src/app/package.json' },
+            { name: 'package.json', path: 'src/otherapp/package.json' },
           ],
         },
       }));
@@ -211,11 +221,14 @@ describe('api/github', () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() => ({
         statusCode: 200,
-        body: [{
-          ref: 'refs/heads/notthebranchname',
-        }, {
-          ref: 'refs/heads/thebranchname',
-        }],
+        body: [
+          {
+            ref: 'refs/heads/notthebranchname',
+          },
+          {
+            ref: 'refs/heads/thebranchname',
+          },
+        ],
       }));
       const exists = await github.branchExists('thebranchname');
       expect(ghGot.mock.calls).toMatchSnapshot();
@@ -237,11 +250,14 @@ describe('api/github', () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() => ({
         statusCode: 200,
-        body: [{
-          ref: 'refs/heads/notthebranchname',
-        }, {
-          ref: 'refs/heads/alsonotthebranchname',
-        }],
+        body: [
+          {
+            ref: 'refs/heads/notthebranchname',
+          },
+          {
+            ref: 'refs/heads/alsonotthebranchname',
+          },
+        ],
       }));
       const exists = await github.branchExists('thebranchname');
       expect(ghGot.mock.calls).toMatchSnapshot();
@@ -258,16 +274,20 @@ describe('api/github', () => {
     });
     it('should return false if a 404 is returned', async () => {
       await initRepo('some/repo', 'token');
-      ghGot.mockImplementationOnce(() => Promise.reject({
-        statusCode: 404,
-      }));
+      ghGot.mockImplementationOnce(() =>
+        Promise.reject({
+          statusCode: 404,
+        }),
+      );
       const exists = await github.branchExists('thebranchname');
       expect(ghGot.mock.calls).toMatchSnapshot();
       expect(exists).toBe(false);
     });
     it('should propagate unknown errors', async () => {
       await initRepo('some/repo', 'token');
-      ghGot.mockImplementationOnce(() => Promise.reject(new Error('Something went wrong')));
+      ghGot.mockImplementationOnce(() =>
+        Promise.reject(new Error('Something went wrong')),
+      );
       let err;
       try {
         await github.branchExists('thebranchname');
@@ -291,9 +311,7 @@ describe('api/github', () => {
     it('should return the PR object', async () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() => ({
-        body: [
-            { number: 91 },
-        ],
+        body: [{ number: 91 }],
       }));
       ghGot.mockImplementationOnce(() => ({
         body: {
@@ -358,15 +376,13 @@ describe('api/github', () => {
     it('should return a PR object', async () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() => ({
-        body: [
-            { title: 'PR Title', state: 'open', number: 42 },
-        ],
+        body: [{ title: 'PR Title', state: 'open', number: 42 }],
       }));
       const pr = await github.findPr('master', 'PR Title');
       expect(ghGot.mock.calls).toMatchSnapshot();
       expect(pr).toMatchSnapshot();
     });
-    it('should return null if no PR\'s are found', async () => {
+    it("should return null if no PR's are found", async () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() => ({
         body: [],
@@ -378,9 +394,7 @@ describe('api/github', () => {
     it('should set the isClosed attribute of the PR to true if the PR is closed', async () => {
       await initRepo('some/repo', 'token');
       ghGot.mockImplementationOnce(() => ({
-        body: [
-            { title: 'PR Title', state: 'closed', number: 42 },
-        ],
+        body: [{ title: 'PR Title', state: 'closed', number: 42 }],
       }));
       const pr = await github.findPr('master');
       expect(ghGot.mock.calls).toMatchSnapshot();
@@ -397,9 +411,9 @@ describe('api/github', () => {
         await initRepo('some/repo', 'token');
         ghGot.mockImplementationOnce(() => ({
           body: [
-              { title: 'foo', head: { label: 'theowner:some-branch' } },
-              { title: 'bar', head: { label: 'theowner:some-other-branch' } },
-              { title: 'baz', head: { label: 'theowner:some-branch' } },
+            { title: 'foo', head: { label: 'theowner:some-branch' } },
+            { title: 'bar', head: { label: 'theowner:some-other-branch' } },
+            { title: 'baz', head: { label: 'theowner:some-branch' } },
           ],
         }));
         const res = await github.checkForClosedPr(branch, title);
@@ -415,7 +429,11 @@ describe('api/github', () => {
           number: 123,
         },
       }));
-      const pr = await github.createPr('some-branch', 'The Title', 'Hello world');
+      const pr = await github.createPr(
+        'some-branch',
+        'The Title',
+        'Hello world',
+      );
       expect(pr).toMatchSnapshot();
       expect(ghGot.post.mock.calls).toMatchSnapshot();
     });
@@ -435,7 +453,13 @@ describe('api/github', () => {
     });
     [
       { number: 1, state: 'closed', base: { sha: '1234' } },
-      { number: 1, state: 'open', mergeable_state: 'dirty', base: { sha: '1234' }, commits: 1 },
+      {
+        number: 1,
+        state: 'open',
+        mergeable_state: 'dirty',
+        base: { sha: '1234' },
+        commits: 1,
+      },
       { number: 1, state: 'open', base: { sha: '5678' }, commits: 1 },
     ].forEach((body, i) => {
       it(`should return a PR object - ${i}`, async () => {
@@ -459,11 +483,13 @@ describe('api/github', () => {
         },
       }));
       ghGot.mockImplementationOnce(() => ({
-        body: [{
-          author: {
-            login: 'foo',
+        body: [
+          {
+            author: {
+              login: 'foo',
+            },
           },
-        }],
+        ],
       }));
       const pr = await github.getPr(1234);
       expect(pr).toMatchSnapshot();
@@ -480,15 +506,18 @@ describe('api/github', () => {
         },
       }));
       ghGot.mockImplementationOnce(() => ({
-        body: [{
-          author: {
-            login: 'foo',
+        body: [
+          {
+            author: {
+              login: 'foo',
+            },
           },
-        }, {
-          author: {
-            login: 'bar',
+          {
+            author: {
+              login: 'bar',
+            },
           },
-        }],
+        ],
       }));
       const pr = await github.getPr(1234);
       expect(pr).toMatchSnapshot();
@@ -629,11 +658,17 @@ describe('api/github', () => {
           ref: 'refs/heads/package.json',
         },
       }));
-      const files = [{
-        name: 'package.json',
-        contents: 'hello world',
-      }];
-      await github.commitFilesToBranch('package.json', files, 'my commit message');
+      const files = [
+        {
+          name: 'package.json',
+          contents: 'hello world',
+        },
+      ];
+      await github.commitFilesToBranch(
+        'package.json',
+        files,
+        'my commit message',
+      );
       expect(ghGot.mock.calls).toMatchSnapshot();
       expect(ghGot.post.mock.calls).toMatchSnapshot();
       expect(ghGot.patch.mock.calls).toMatchSnapshot();
@@ -643,11 +678,17 @@ describe('api/github', () => {
       ghGot.mockImplementationOnce(() => ({
         statusCode: 404,
       }));
-      const files = [{
-        name: 'package.json',
-        contents: 'hello world',
-      }];
-      await github.commitFilesToBranch('package.json', files, 'my other commit message');
+      const files = [
+        {
+          name: 'package.json',
+          contents: 'hello world',
+        },
+      ];
+      await github.commitFilesToBranch(
+        'package.json',
+        files,
+        'my other commit message',
+      );
       expect(ghGot.mock.calls).toMatchSnapshot();
       expect(ghGot.post.mock.calls).toMatchSnapshot();
       expect(ghGot.patch.mock.calls).toMatchSnapshot();
diff --git a/test/api/npm.spec.js b/test/api/npm.spec.js
index fe9b5ac4e796cb3566800d287a891e6ef55e1923..919121320e89c63a1f9f4bdd1fce42474fc2d370 100644
--- a/test/api/npm.spec.js
+++ b/test/api/npm.spec.js
@@ -17,18 +17,29 @@ describe('api/npm', () => {
     const res = await npm.getDependency('foobar');
     expect(res).toMatchObject({ some: 'data' });
     const call = got.mock.calls[0];
-    expect(call).toMatchObject(['https://npm.mycustomregistry.com/foobar', { json: true, headers: {} }]);
+    expect(call).toMatchObject([
+      'https://npm.mycustomregistry.com/foobar',
+      { json: true, headers: {} },
+    ]);
   });
   it('should send an authorization header if provided', async () => {
     registryUrl.mockImplementation(() => 'https://npm.mycustomregistry.com/');
-    registryAuthToken.mockImplementation(() => ({ type: 'Basic', token: '1234' }));
+    registryAuthToken.mockImplementation(() => ({
+      type: 'Basic',
+      token: '1234',
+    }));
     got.mockImplementation(() => Promise.resolve({ body: { some: 'data' } }));
     const res = await npm.getDependency('foobar');
     expect(res).toMatchObject({ some: 'data' });
     const call = got.mock.calls[0];
-    expect(call).toMatchObject(['https://npm.mycustomregistry.com/foobar', { json: true,
-      headers: {
-        authorization: 'Basic 1234',
-      } }]);
+    expect(call).toMatchObject([
+      'https://npm.mycustomregistry.com/foobar',
+      {
+        json: true,
+        headers: {
+          authorization: 'Basic 1234',
+        },
+      },
+    ]);
   });
 });
diff --git a/test/config/file.spec.js b/test/config/file.spec.js
index 007adadc71b7e5bc1ef2921456dafe872c58662a..0573dda9ca59c2142de4d7de6f112d2259aaa56d 100644
--- a/test/config/file.spec.js
+++ b/test/config/file.spec.js
@@ -9,7 +9,9 @@ describe('config/file', () => {
     });
     it('parses custom config file', () => {
       const configFile = path.resolve(__dirname, '../_fixtures/config/file.js');
-      file.getConfig({ RENOVATE_CONFIG_FILE: configFile }).should.eql(customConfig);
+      file
+        .getConfig({ RENOVATE_CONFIG_FILE: configFile })
+        .should.eql(customConfig);
     });
   });
 });
diff --git a/test/config/index.spec.js b/test/config/index.spec.js
index 05d3b198d5aef0174a9d1f2858f7d74262ea438c..58683f701fd60c5064787d1ae4def5c4ac5a78c1 100644
--- a/test/config/index.spec.js
+++ b/test/config/index.spec.js
@@ -55,7 +55,9 @@ describe('config/index', () => {
       } catch (e) {
         err = e;
       }
-      expect(err.message).toBe('At least one repository must be configured, or use --autodiscover');
+      expect(err.message).toBe(
+        'At least one repository must be configured, or use --autodiscover',
+      );
     });
     it('supports token in CLI options', async () => {
       defaultArgv = defaultArgv.concat(['--token=abc']);
@@ -66,7 +68,9 @@ describe('config/index', () => {
       } catch (e) {
         err = e;
       }
-      expect(err.message).toBe('At least one repository must be configured, or use --autodiscover');
+      expect(err.message).toBe(
+        'At least one repository must be configured, or use --autodiscover',
+      );
     });
     it('autodiscovers github platform', async () => {
       const env = {};
@@ -87,7 +91,11 @@ describe('config/index', () => {
     });
     it('autodiscovers gitlab platform', async () => {
       const env = {};
-      defaultArgv = defaultArgv.concat(['--autodiscover', '--platform=gitlab', '--token=abc']);
+      defaultArgv = defaultArgv.concat([
+        '--autodiscover',
+        '--platform=gitlab',
+        '--token=abc',
+      ]);
       glGot.mockImplementationOnce(() => ({
         body: [
           {
diff --git a/test/helpers/changelog.spec.js b/test/helpers/changelog.spec.js
index fba9c98e32170a191ca00f411cd55ee9a48ab887..7b088be069a2e3874d64be3195834b927837a754 100644
--- a/test/helpers/changelog.spec.js
+++ b/test/helpers/changelog.spec.js
@@ -17,7 +17,9 @@ describe('helpers/changelog', () => {
     });
     it('returns header if generated markdown is valid', async () => {
       changelog.markdown.mockReturnValueOnce('dummy');
-      expect(await getChangeLog('renovate', '1.0.0', '2.0.0')).toBe('### Changelog\n\ndummy');
+      expect(await getChangeLog('renovate', '1.0.0', '2.0.0')).toBe(
+        '### Changelog\n\ndummy',
+      );
     });
     it('returns empty if error thrown', async () => {
       changelog.markdown = jest.fn(() => {
diff --git a/test/helpers/package-json.spec.js b/test/helpers/package-json.spec.js
index 3d00eac92dd0ff24f48f39d937e1c6ba353f30b0..e2dc18f49ccb4637868280f8517096f90b75ac8e 100644
--- a/test/helpers/package-json.spec.js
+++ b/test/helpers/package-json.spec.js
@@ -2,10 +2,17 @@ const fs = require('fs');
 const path = require('path');
 const packageJson = require('../../lib/helpers/package-json');
 
-const defaultTypes = ['dependencies', 'devDependencies', 'optionalDependencies'];
+const defaultTypes = [
+  'dependencies',
+  'devDependencies',
+  'optionalDependencies',
+];
 
 function readFixture(fixture) {
-  return fs.readFileSync(path.resolve(__dirname, `../_fixtures/package-json/${fixture}`), 'utf8');
+  return fs.readFileSync(
+    path.resolve(__dirname, `../_fixtures/package-json/${fixture}`),
+    'utf8',
+  );
 }
 
 const input01Content = readFixture('inputs/01.json');
@@ -14,20 +21,27 @@ const input02Content = readFixture('inputs/02.json');
 describe('helpers/package-json', () => {
   describe('.extractDependencies(packageJson, sections)', () => {
     it('returns an array of correct length', () => {
-      const extractedDependencies =
-        packageJson.extractDependencies(JSON.parse(input01Content), defaultTypes);
+      const extractedDependencies = packageJson.extractDependencies(
+        JSON.parse(input01Content),
+        defaultTypes,
+      );
       extractedDependencies.should.be.instanceof(Array);
       extractedDependencies.should.have.length(10);
     });
     it('each element contains non-null depType, depName, currentVersion', () => {
-      const extractedDependencies =
-        packageJson.extractDependencies(JSON.parse(input01Content), defaultTypes);
-      extractedDependencies.every(dep => dep.depType && dep.depName && dep.currentVersion)
+      const extractedDependencies = packageJson.extractDependencies(
+        JSON.parse(input01Content),
+        defaultTypes,
+      );
+      extractedDependencies
+        .every(dep => dep.depType && dep.depName && dep.currentVersion)
         .should.eql(true);
     });
     it('supports null devDependencies', () => {
-      const extractedDependencies =
-        packageJson.extractDependencies(JSON.parse(input02Content), defaultTypes);
+      const extractedDependencies = packageJson.extractDependencies(
+        JSON.parse(input02Content),
+        defaultTypes,
+      );
       extractedDependencies.should.be.instanceof(Array);
       extractedDependencies.should.have.length(6);
     });
@@ -35,25 +49,41 @@ describe('helpers/package-json', () => {
   describe('.setNewValue(currentFileContent, depType, depName, newVersion)', () => {
     it('replaces a dependency value', () => {
       const outputContent = readFixture('outputs/011.json');
-      const testContent =
-        packageJson.setNewValue(input01Content, 'dependencies', 'cheerio', '0.22.1');
+      const testContent = packageJson.setNewValue(
+        input01Content,
+        'dependencies',
+        'cheerio',
+        '0.22.1',
+      );
       testContent.should.equal(outputContent);
     });
     it('replaces only the first instance of a value', () => {
       const outputContent = readFixture('outputs/012.json');
-      const testContent =
-        packageJson.setNewValue(input01Content, 'devDependencies', 'angular-touch', '1.6.1');
+      const testContent = packageJson.setNewValue(
+        input01Content,
+        'devDependencies',
+        'angular-touch',
+        '1.6.1',
+      );
       testContent.should.equal(outputContent);
     });
     it('replaces only the second instance of a value', () => {
       const outputContent = readFixture('outputs/013.json');
-      const testContent =
-        packageJson.setNewValue(input01Content, 'devDependencies', 'angular-sanitize', '1.6.1');
+      const testContent = packageJson.setNewValue(
+        input01Content,
+        'devDependencies',
+        'angular-sanitize',
+        '1.6.1',
+      );
       testContent.should.equal(outputContent);
     });
     it('handles the case where the desired version is already supported', () => {
-      const testContent =
-        packageJson.setNewValue(input01Content, 'devDependencies', 'angular-touch', '1.5.8');
+      const testContent = packageJson.setNewValue(
+        input01Content,
+        'devDependencies',
+        'angular-touch',
+        '1.5.8',
+      );
       testContent.should.equal(input01Content);
     });
   });
diff --git a/test/helpers/versions.spec.js b/test/helpers/versions.spec.js
index c5f23210bfd91db080622af6fb864290096cae3a..7734b10eb9b04476c1de504cbfa2181928280d41 100644
--- a/test/helpers/versions.spec.js
+++ b/test/helpers/versions.spec.js
@@ -10,20 +10,26 @@ describe('helpers/versions', () => {
 
   describe('.determineUpgrades(dep, currentVersion, defaultConfig)', () => {
     it('return empty if invalid current version', () => {
-      versionsHelper.determineUpgrades(qJson, 'invalid', defaultConfig).should.have.length(0);
+      versionsHelper
+        .determineUpgrades(qJson, 'invalid', defaultConfig)
+        .should.have.length(0);
     });
     it('return empty if null versions', () => {
       const testDep = {
         name: 'q',
       };
-      versionsHelper.determineUpgrades(testDep, '1.0.0', defaultConfig).should.have.length(0);
+      versionsHelper
+        .determineUpgrades(testDep, '1.0.0', defaultConfig)
+        .should.have.length(0);
     });
     it('return empty if empty versions', () => {
       const testDep = {
         name: 'q',
         versions: [],
       };
-      versionsHelper.determineUpgrades(testDep, '1.0.0', defaultConfig).should.have.length(0);
+      versionsHelper
+        .determineUpgrades(testDep, '1.0.0', defaultConfig)
+        .should.have.length(0);
     });
     it('supports minor and major upgrades for tilde ranges', () => {
       const upgradeVersions = [
@@ -42,22 +48,32 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '^0.4.0', defaultConfig)
+        .should.eql(upgradeVersions);
     });
     it('returns only one update if grouping', () => {
       defaultConfig.groupName = 'somegroup';
-      expect(versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig)).toMatchSnapshot();
+      expect(
+        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig),
+      ).toMatchSnapshot();
     });
     it('returns only one update if automerging any', () => {
       defaultConfig.automerge = 'any';
-      expect(versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig)).toMatchSnapshot();
+      expect(
+        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig),
+      ).toMatchSnapshot();
     });
     it('returns both updates if automerging minor', () => {
       defaultConfig.automerge = 'minor';
-      expect(versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig)).toMatchSnapshot();
+      expect(
+        versionsHelper.determineUpgrades(qJson, '^0.4.0', defaultConfig),
+      ).toMatchSnapshot();
     });
     it('disables major release separation (major)', () => {
-      const config = Object.assign({}, defaultConfig, { separateMajorReleases: false });
+      const config = Object.assign({}, defaultConfig, {
+        separateMajorReleases: false,
+      });
       const upgradeVersions = [
         {
           newVersion: '1.4.1',
@@ -67,10 +83,14 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '^0.4.0', config).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '^0.4.0', config)
+        .should.eql(upgradeVersions);
     });
     it('disables major release separation (minor)', () => {
-      const config = Object.assign({}, defaultConfig, { separateMajorReleases: false });
+      const config = Object.assign({}, defaultConfig, {
+        separateMajorReleases: false,
+      });
       const upgradeVersions = [
         {
           newVersion: '1.4.1',
@@ -80,7 +100,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '1.0.0', config).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '1.0.0', config)
+        .should.eql(upgradeVersions);
     });
     it('supports minor and major upgrades for ranged versions', () => {
       const pinVersions = [
@@ -99,7 +121,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '~0.4.0', defaultConfig).should.eql(pinVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '~0.4.0', defaultConfig)
+        .should.eql(pinVersions);
     });
     it('ignores pinning for ranges when other upgrade exists', () => {
       const pinVersions = [
@@ -111,7 +135,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      expect(versionsHelper.determineUpgrades(qJson, '~0.9.0', defaultConfig)).toEqual(pinVersions);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '~0.9.0', defaultConfig),
+      ).toEqual(pinVersions);
     });
     it('upgrades minor ranged versions', () => {
       const upgradeVersions = [
@@ -123,7 +149,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      expect(versionsHelper.determineUpgrades(qJson, '~1.0.0', defaultConfig)).toEqual(upgradeVersions);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '~1.0.0', defaultConfig),
+      ).toEqual(upgradeVersions);
     });
     it('pins minor ranged versions', () => {
       const upgradeVersions = [
@@ -133,11 +161,15 @@ describe('helpers/versions', () => {
           upgradeType: 'pin',
         },
       ];
-      expect(versionsHelper.determineUpgrades(qJson, '^1.0.0', defaultConfig)).toEqual(upgradeVersions);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '^1.0.0', defaultConfig),
+      ).toEqual(upgradeVersions);
     });
     it('ignores minor ranged versions when not pinning', () => {
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '^1.0.0', config)).toHaveLength(0);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '^1.0.0', config),
+      ).toHaveLength(0);
     });
     it('upgrades tilde ranges', () => {
       const upgradeVersions = [
@@ -149,7 +181,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      expect(versionsHelper.determineUpgrades(qJson, '~1.3.0', defaultConfig)).toEqual(upgradeVersions);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '~1.3.0', defaultConfig),
+      ).toEqual(upgradeVersions);
     });
     it('upgrades .x minor ranges', () => {
       const upgradeVersions = [
@@ -161,7 +195,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.4.1',
         },
       ];
-      expect(versionsHelper.determineUpgrades(qJson, '1.3.x', defaultConfig)).toEqual(upgradeVersions);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '1.3.x', defaultConfig),
+      ).toEqual(upgradeVersions);
     });
     it('upgrades tilde ranges without pinning', () => {
       const upgradeVersions = [
@@ -175,7 +211,9 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '~1.3.0', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '~1.3.0', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('upgrades .x major ranges without pinning', () => {
       const upgradeVersions = [
@@ -189,7 +227,9 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '0.x', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '0.x', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('upgrades .x minor ranges without pinning', () => {
       const upgradeVersions = [
@@ -203,7 +243,9 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '1.3.x', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '1.3.x', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('upgrades shorthand major ranges without pinning', () => {
       const upgradeVersions = [
@@ -217,7 +259,9 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '0', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '0', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('upgrades shorthand minor ranges without pinning', () => {
       const upgradeVersions = [
@@ -231,7 +275,9 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '1.3', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '1.3', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('upgrades multiple tilde ranges without pinning', () => {
       const upgradeVersions = [
@@ -253,7 +299,9 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '~0.7.0', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '~0.7.0', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('upgrades multiple caret ranges without pinning', () => {
       const upgradeVersions = [
@@ -275,15 +323,21 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '^0.7.0', config)).toEqual(upgradeVersions);
+      expect(versionsHelper.determineUpgrades(qJson, '^0.7.0', config)).toEqual(
+        upgradeVersions,
+      );
     });
     it('ignores complex ranges when not pinning', () => {
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '^0.7.0 || ^0.8.0', config)).toHaveLength(0);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '^0.7.0 || ^0.8.0', config),
+      ).toHaveLength(0);
     });
     it('returns nothing for greater than ranges', () => {
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '>= 0.7.0', config)).toHaveLength(0);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '>= 0.7.0', config),
+      ).toHaveLength(0);
     });
     it('upgrades less than equal ranges without pinning', () => {
       const upgradeVersions = [
@@ -305,11 +359,15 @@ describe('helpers/versions', () => {
         },
       ];
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '<= 0.7.2', config)).toEqual(upgradeVersions);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '<= 0.7.2', config),
+      ).toEqual(upgradeVersions);
     });
     it('rejects less than ranges without pinning', () => {
       const config = Object.assign({}, defaultConfig, { pinVersions: false });
-      expect(versionsHelper.determineUpgrades(qJson, '< 0.7.2', config)).toEqual([]);
+      expect(
+        versionsHelper.determineUpgrades(qJson, '< 0.7.2', config),
+      ).toEqual([]);
     });
     it('supports > latest versions if configured', () => {
       const config = Object.assign({}, defaultConfig);
@@ -323,7 +381,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '2.0.1',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '1.4.1', config).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '1.4.1', config)
+        .should.eql(upgradeVersions);
     });
     it('supports future versions if configured', () => {
       const config = Object.assign({}, defaultConfig);
@@ -338,7 +398,9 @@ describe('helpers/versions', () => {
           changeLogToVersion: '2.0.3',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '1.4.1', config).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '1.4.1', config)
+        .should.eql(upgradeVersions);
     });
     it('supports future versions if already future', () => {
       const upgradeVersions = [
@@ -348,16 +410,24 @@ describe('helpers/versions', () => {
           upgradeType: 'pin',
         },
       ];
-      versionsHelper.determineUpgrades(qJson, '^2.0.0', defaultConfig).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(qJson, '^2.0.0', defaultConfig)
+        .should.eql(upgradeVersions);
     });
     it('should ignore unstable versions if the current version is stable', () => {
-      versionsHelper.determineUpgrades({
-        name: 'amazing-package',
-        versions: {
-          '1.0.0': {},
-          '1.1.0-beta': {},
-        },
-      }, '1.0.0', defaultConfig).should.eql([]);
+      versionsHelper
+        .determineUpgrades(
+          {
+            name: 'amazing-package',
+            versions: {
+              '1.0.0': {},
+              '1.1.0-beta': {},
+            },
+          },
+          '1.0.0',
+          defaultConfig,
+        )
+        .should.eql([]);
     });
     it('should allow unstable versions if the current version is unstable', () => {
       const upgradeVersions = [
@@ -369,13 +439,19 @@ describe('helpers/versions', () => {
           changeLogToVersion: '1.1.0-beta',
         },
       ];
-      versionsHelper.determineUpgrades({
-        name: 'amazing-package',
-        versions: {
-          '1.0.0-beta': {},
-          '1.1.0-beta': {},
-        },
-      }, '1.0.0-beta', defaultConfig).should.eql(upgradeVersions);
+      versionsHelper
+        .determineUpgrades(
+          {
+            name: 'amazing-package',
+            versions: {
+              '1.0.0-beta': {},
+              '1.1.0-beta': {},
+            },
+          },
+          '1.0.0-beta',
+          defaultConfig,
+        )
+        .should.eql(upgradeVersions);
     });
   });
   describe('.isRange(input)', () => {
@@ -406,8 +482,12 @@ describe('helpers/versions', () => {
     });
     it('should reject github repositories', () => {
       versionsHelper.isValidVersion('singapore/renovate').should.eql(false);
-      versionsHelper.isValidVersion('singapore/renovate#master').should.eql(false);
-      versionsHelper.isValidVersion('https://github.com/singapore/renovate.git').should.eql(false);
+      versionsHelper
+        .isValidVersion('singapore/renovate#master')
+        .should.eql(false);
+      versionsHelper
+        .isValidVersion('https://github.com/singapore/renovate.git')
+        .should.eql(false);
     });
   });
   describe('.isPastLatest(dep, version)', () => {
diff --git a/test/helpers/yarn.spec.js b/test/helpers/yarn.spec.js
index 65152ea3eb3eb65478a4cfeb9bc4a86b41f74a56..2a7fdf128db058eabbdc29cb323d48842837cb88 100644
--- a/test/helpers/yarn.spec.js
+++ b/test/helpers/yarn.spec.js
@@ -18,8 +18,11 @@ describe('generateLockFile(newPackageJson, npmrcContent, yarnrcContent)', () =>
     stderror: '',
   }));
   it('generates lock files', async () => {
-    const yarnLock =
-      await yarnHelper.generateLockFile('package-json-contents', 'npmrc-contents', 'yarnrc-contents');
+    const yarnLock = await yarnHelper.generateLockFile(
+      'package-json-contents',
+      'npmrc-contents',
+      'yarnrc-contents',
+    );
     expect(tmp.dirSync.mock.calls.length).toEqual(1);
     expect(fs.writeFileSync.mock.calls.length).toEqual(3);
     expect(fs.readFileSync.mock.calls.length).toEqual(1);
@@ -47,7 +50,9 @@ describe('getLockFile(packageJson, config)', () => {
       name: 'yarn.lock',
       contents: 'New yarn.lock',
     };
-    expect(await yarnHelper.getLockFile('package.json', '', api)).toMatchObject(yarnLockFile);
+    expect(await yarnHelper.getLockFile('package.json', '', api)).toMatchObject(
+      yarnLockFile,
+    );
   });
 });
 
@@ -69,14 +74,18 @@ describe('maintainLockFile(inputConfig)', () => {
   });
   it('returns null if contents match', async () => {
     config.api.getFileContent.mockReturnValueOnce('oldYarnLockContent');
-    yarnHelper.getLockFile.mockReturnValueOnce({ contents: 'oldYarnLockContent' });
+    yarnHelper.getLockFile.mockReturnValueOnce({
+      contents: 'oldYarnLockContent',
+    });
     const yarnLock = await yarnHelper.maintainLockFile(config);
     expect(config.api.getFileContent.mock.calls.length).toBe(2);
     expect(yarnLock).toEqual(null);
   });
   it('returns new yarn lock if contents differ', async () => {
     config.api.getFileContent.mockReturnValueOnce('oldYarnLockContent');
-    yarnHelper.getLockFile.mockReturnValueOnce({ contents: 'newYarnLockContent' });
+    yarnHelper.getLockFile.mockReturnValueOnce({
+      contents: 'newYarnLockContent',
+    });
     const yarnLock = await yarnHelper.maintainLockFile(config);
     expect(config.api.getFileContent.mock.calls.length).toBe(2);
     expect(yarnLock).toEqual({ contents: 'newYarnLockContent' });
diff --git a/test/worker.spec.js b/test/worker.spec.js
index c82f0026e98f7b2edc3459c05b867629b674cb12..c766a587a86bf9df6f75af5d2191de1e75a9f68c 100644
--- a/test/worker.spec.js
+++ b/test/worker.spec.js
@@ -136,10 +136,12 @@ describe('worker', () => {
     });
     it('handles depType config without override', () => {
       config.foo = 'bar';
-      config.depTypes = [{
-        depType: 'dependencies',
-        alpha: 'beta',
-      }];
+      config.depTypes = [
+        {
+          depType: 'dependencies',
+          alpha: 'beta',
+        },
+      ];
       deps.push({
         depName: 'a',
         depType: 'dependencies',
@@ -149,10 +151,12 @@ describe('worker', () => {
     });
     it('handles depType config with override', () => {
       config.foo = 'bar';
-      config.depTypes = [{
-        depType: 'dependencies',
-        foo: 'beta',
-      }];
+      config.depTypes = [
+        {
+          depType: 'dependencies',
+          foo: 'beta',
+        },
+      ];
       deps.push({
         depName: 'a',
         depType: 'dependencies',
@@ -162,10 +166,12 @@ describe('worker', () => {
     });
     it('handles package config', () => {
       config.foo = 'bar';
-      config.packages = [{
-        packageName: 'a',
-        labels: ['renovate'],
-      }];
+      config.packages = [
+        {
+          packageName: 'a',
+          labels: ['renovate'],
+        },
+      ];
       deps.push({
         depName: 'a',
       });
@@ -174,14 +180,18 @@ describe('worker', () => {
     });
     it('package config overrides depType and general config', () => {
       config.foo = 'bar';
-      config.depTypes = [{
-        depType: 'dependencies',
-        foo: 'beta',
-      }];
-      config.packages = [{
-        packageName: 'a',
-        foo: 'gamma',
-      }];
+      config.depTypes = [
+        {
+          depType: 'dependencies',
+          foo: 'beta',
+        },
+      ];
+      config.packages = [
+        {
+          packageName: 'a',
+          foo: 'gamma',
+        },
+      ];
       deps.push({
         depName: 'a',
         depType: 'dependencies',
@@ -191,14 +201,18 @@ describe('worker', () => {
     });
     it('nested package config overrides depType and general config', () => {
       config.foo = 'bar';
-      config.depTypes = [{
-        depType: 'dependencies',
-        foo: 'beta',
-        packages: [{
-          packageName: 'a',
-          foo: 'gamma',
-        }],
-      }];
+      config.depTypes = [
+        {
+          depType: 'dependencies',
+          foo: 'beta',
+          packages: [
+            {
+              packageName: 'a',
+              foo: 'gamma',
+            },
+          ],
+        },
+      ];
       deps.push({
         depName: 'a',
         depType: 'dependencies',
@@ -208,10 +222,12 @@ describe('worker', () => {
     });
     it('handles regex package pattern', () => {
       config.foo = 'bar';
-      config.packages = [{
-        packagePattern: 'eslint',
-        labels: ['eslint'],
-      }];
+      config.packages = [
+        {
+          packagePattern: 'eslint',
+          labels: ['eslint'],
+        },
+      ];
       deps.push({
         depName: 'eslint',
       });
@@ -229,10 +245,12 @@ describe('worker', () => {
     });
     it('handles regex wildcard package pattern', () => {
       config.foo = 'bar';
-      config.packages = [{
-        packagePattern: '^eslint',
-        labels: ['eslint'],
-      }];
+      config.packages = [
+        {
+          packagePattern: '^eslint',
+          labels: ['eslint'],
+        },
+      ];
       deps.push({
         depName: 'eslint',
       });
@@ -250,10 +268,12 @@ describe('worker', () => {
     });
     it('handles non-regex package name', () => {
       config.foo = 'bar';
-      config.packages = [{
-        packageName: 'eslint',
-        labels: ['eslint'],
-      }];
+      config.packages = [
+        {
+          packageName: 'eslint',
+          labels: ['eslint'],
+        },
+      ];
       deps.push({
         depName: 'eslint',
       });
diff --git a/test/workers/branch.spec.js b/test/workers/branch.spec.js
index 45d75729d5c9708f06592b78bcf2373eaba8f5af..1cfb47cdb0ffc050d38b046ddc6e532836b7c2d3 100644
--- a/test/workers/branch.spec.js
+++ b/test/workers/branch.spec.js
@@ -20,31 +20,41 @@ describe('workers/branch', () => {
     });
     it('returns undefined if branch does not exist', async () => {
       config.api.branchExists.mockReturnValue(false);
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(undefined);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        undefined,
+      );
     });
     it('returns branchName if no PR', async () => {
       config.api.getBranchPr.mockReturnValue(null);
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(branchName);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        branchName,
+      );
     });
     it('returns false if does not need rebaseing', async () => {
       config.api.getBranchPr.mockReturnValue({
         isUnmergeable: false,
       });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(branchName);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        branchName,
+      );
     });
     it('returns false if unmergeable and cannot rebase', async () => {
       config.api.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: false,
       });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(branchName);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        branchName,
+      );
     });
     it('returns true if unmergeable and can rebase', async () => {
       config.api.getBranchPr.mockReturnValue({
         isUnmergeable: true,
         canRebase: true,
       });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(undefined);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        undefined,
+      );
     });
     it('returns false if stale but not configured to rebase', async () => {
       config.api.getBranchPr.mockReturnValue({
@@ -53,7 +63,9 @@ describe('workers/branch', () => {
         canRebase: true,
       });
       config.rebaseStalePrs = false;
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(branchName);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        branchName,
+      );
     });
     it('returns false if stale but cannot rebase', async () => {
       config.api.getBranchPr.mockReturnValueOnce({
@@ -62,7 +74,9 @@ describe('workers/branch', () => {
         canRebase: false,
       });
       config.rebaseStalePrs = true;
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(branchName);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        branchName,
+      );
     });
     it('returns true if stale and can rebase', async () => {
       config.api.getBranchPr.mockReturnValueOnce({
@@ -71,7 +85,9 @@ describe('workers/branch', () => {
         canRebase: true,
       });
       config.rebaseStalePrs = true;
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(undefined);
+      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
+        undefined,
+      );
     });
   });
   describe('ensureBranch(config)', () => {
diff --git a/yarn.lock b/yarn.lock
index af4c42be1101f774bb81c10c190c4e312dc6e67d..2bb9395738e05da79595343b55f0e2b702b8228b 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -171,6 +171,10 @@ assertion-error@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.0.2.tgz#13ca515d86206da0bac66e834dd397d87581094c"
 
+ast-types@0.9.8:
+  version "0.9.8"
+  resolved "https://registry.yarnpkg.com/ast-types/-/ast-types-0.9.8.tgz#6cb6a40beba31f49f20928e28439fc14a3dab078"
+
 async-each@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.1.tgz#19d386a1d9edc6e7c1c85d388aedbcc56d33602d"
@@ -222,7 +226,7 @@ babel-cli@6.24.1:
   optionalDependencies:
     chokidar "^1.6.1"
 
-babel-code-frame@^6.16.0, babel-code-frame@^6.22.0:
+babel-code-frame@6.22.0, babel-code-frame@^6.16.0, babel-code-frame@^6.22.0:
   version "6.22.0"
   resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4"
   dependencies:
@@ -405,6 +409,10 @@ babel-types@^6.18.0, babel-types@^6.24.1:
     lodash "^4.2.0"
     to-fast-properties "^1.0.1"
 
+babylon@7.0.0-beta.8:
+  version "7.0.0-beta.8"
+  resolved "https://registry.yarnpkg.com/babylon/-/babylon-7.0.0-beta.8.tgz#2bdc5ae366041442c27e068cce6f0d7c06ea9949"
+
 babylon@^6.11.0, babylon@^6.13.0, babylon@^6.15.0:
   version "6.17.0"
   resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.0.tgz#37da948878488b9c4e3c4038893fa3314b3fc932"
@@ -993,6 +1001,12 @@ eslint-config-airbnb-base@11.1.3:
   version "11.1.3"
   resolved "https://registry.yarnpkg.com/eslint-config-airbnb-base/-/eslint-config-airbnb-base-11.1.3.tgz#0e8db71514fa36b977fbcf977c01edcf863e0cf0"
 
+eslint-config-prettier@1.7.0:
+  version "1.7.0"
+  resolved "https://registry.yarnpkg.com/eslint-config-prettier/-/eslint-config-prettier-1.7.0.tgz#cda3ce22df1e852daa9370f1f3446e8b8a02ce44"
+  dependencies:
+    get-stdin "^5.0.1"
+
 eslint-import-resolver-node@^0.2.0:
   version "0.2.3"
   resolved "https://registry.yarnpkg.com/eslint-import-resolver-node/-/eslint-import-resolver-node-0.2.3.tgz#5add8106e8c928db2cba232bcd9efa846e3da16c"
@@ -1023,6 +1037,12 @@ eslint-plugin-import@2.2.0:
     minimatch "^3.0.3"
     pkg-up "^1.0.0"
 
+eslint-plugin-prettier@2.0.1:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/eslint-plugin-prettier/-/eslint-plugin-prettier-2.0.1.tgz#2ae1216cf053dd728360ca8560bf1aabc8af3fa9"
+  dependencies:
+    requireindex "~1.1.0"
+
 eslint-plugin-promise@3.5.0:
   version "3.5.0"
   resolved "https://registry.yarnpkg.com/eslint-plugin-promise/-/eslint-plugin-promise-3.5.0.tgz#78fbb6ffe047201627569e85a6c5373af2a68fca"
@@ -1107,7 +1127,7 @@ estraverse@~4.1.0:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.1.1.tgz#f6caca728933a850ef90661d0e17982ba47111a2"
 
-esutils@^2.0.2:
+esutils@2.0.2, esutils@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b"
 
@@ -1270,6 +1290,10 @@ flat-cache@^1.2.1:
     graceful-fs "^4.1.2"
     write "^0.2.1"
 
+flow-parser@0.43.0:
+  version "0.43.0"
+  resolved "https://registry.yarnpkg.com/flow-parser/-/flow-parser-0.43.0.tgz#e2b8eb1ac83dd53f7b6b04a7c35b6a52c33479b7"
+
 for-in@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80"
@@ -1355,6 +1379,10 @@ get-caller-file@^1.0.1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-1.0.2.tgz#f702e63127e7e231c160a80c1554acb70d5047e5"
 
+get-stdin@5.0.1, get-stdin@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-5.0.1.tgz#122e161591e21ff4c52530305693f20e6393a398"
+
 get-stdin@^4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-4.0.1.tgz#b968c6b0a04384324902e8bf1a5df32579a450fe"
@@ -1396,7 +1424,7 @@ glob-parent@^2.0.0:
   dependencies:
     is-glob "^2.0.0"
 
-glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1:
+glob@7.1.1, glob@^7.0.0, glob@^7.0.3, glob@^7.0.5, glob@^7.1.1:
   version "7.1.1"
   resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.1.tgz#805211df04faaf1c63a3600306cdf5ade50b2ec8"
   dependencies:
@@ -2051,6 +2079,15 @@ jest-util@^19.0.2:
     leven "^2.0.0"
     mkdirp "^0.5.1"
 
+jest-validate@19.0.0:
+  version "19.0.0"
+  resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-19.0.0.tgz#8c6318a20ecfeaba0ba5378bfbb8277abded4173"
+  dependencies:
+    chalk "^1.1.1"
+    jest-matcher-utils "^19.0.0"
+    leven "^2.0.0"
+    pretty-format "^19.0.0"
+
 jest-validate@^19.0.2:
   version "19.0.2"
   resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-19.0.2.tgz#dc534df5f1278d5b63df32b14241d4dbf7244c0c"
@@ -2383,7 +2420,7 @@ minimist@0.0.8, minimist@~0.0.1:
   version "0.0.8"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-0.0.8.tgz#857fcabfc3397d2625b8228262e86aa7a011b05d"
 
-minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0:
+minimist@1.2.0, minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
 
@@ -2742,6 +2779,21 @@ preserve@^0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/preserve/-/preserve-0.2.0.tgz#815ed1f6ebc65926f865b310c0713bcb3315ce4b"
 
+prettier@1.2.2:
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.2.2.tgz#22d17c1132faaaea1f1d4faea31f19f7a1959f3e"
+  dependencies:
+    ast-types "0.9.8"
+    babel-code-frame "6.22.0"
+    babylon "7.0.0-beta.8"
+    chalk "1.1.3"
+    esutils "2.0.2"
+    flow-parser "0.43.0"
+    get-stdin "5.0.1"
+    glob "7.1.1"
+    jest-validate "19.0.0"
+    minimist "1.2.0"
+
 pretty-format@^19.0.0:
   version "19.0.0"
   resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-19.0.0.tgz#56530d32acb98a3fa4851c4e2b9d37b420684c84"
@@ -2984,6 +3036,10 @@ require-uncached@^1.0.2:
     caller-path "^0.1.0"
     resolve-from "^1.0.0"
 
+requireindex@~1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/requireindex/-/requireindex-1.1.0.tgz#e5404b81557ef75db6e49c5a72004893fe03e162"
+
 resolve-from@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226"