From ea9ace2a76a1843473c92dbc8330b9c96cd27f26 Mon Sep 17 00:00:00 2001
From: Rhys Arkins <rhys@keylocation.sg>
Date: Sat, 26 Aug 2017 16:10:18 +0200
Subject: [PATCH] feat: branch worker updates (#736)

This is a major refactor of branch code to prepare for Yarn workspaces plus creating PRs for branches with failing lockfiles. Marked as "feature" to cause a minor version bump due to the moderate chance of accidentally breaking something.
---
 lib/api/npm.js                                |   3 +-
 lib/config/definitions.js                     |   3 +-
 lib/logger/config-serializer.js               |   2 +-
 lib/logger/pretty-stdout.js                   |   1 +
 lib/workers/branch/automerge.js               |  27 +
 lib/workers/branch/check-existing.js          |  30 +
 lib/workers/branch/commit.js                  |  28 +
 lib/workers/branch/index.js                   | 362 +++---------
 lib/workers/branch/lock-files.js              | 190 +++++++
 lib/workers/branch/npm.js                     | 113 +---
 lib/workers/branch/package-files.js           |  36 ++
 lib/workers/branch/parent.js                  |  64 +++
 lib/workers/branch/status-checks.js           |  50 ++
 lib/workers/branch/yarn.js                    | 102 +---
 lib/workers/package-file/index.js             |   6 +-
 lib/workers/pr/index.js                       |  17 +-
 lib/workers/repository/apis.js                |  12 +-
 lib/workers/repository/index.js               |   2 +-
 package.json                                  |   1 -
 test/config/__snapshots__/index.spec.js.snap  |   1 -
 .../branch/__snapshots__/commit.spec.js.snap  |  33 ++
 .../branch/__snapshots__/index.spec.js.snap   |  75 ---
 .../__snapshots__/lock-files.spec.js.snap     | 108 ++++
 .../branch/__snapshots__/npm.spec.js.snap     |   5 -
 test/workers/branch/automerge.spec.js         |  46 ++
 test/workers/branch/check-existing.spec.js    |  41 ++
 test/workers/branch/commit.spec.js            |  45 ++
 test/workers/branch/index.spec.js             | 529 +++---------------
 test/workers/branch/lock-files.spec.js        | 320 +++++++++++
 test/workers/branch/npm.spec.js               | 120 +---
 test/workers/branch/package-files.spec.js     |  34 ++
 test/workers/branch/parent.spec.js            |  95 ++++
 test/workers/branch/status-checks.spec.js     |  52 ++
 test/workers/branch/yarn.spec.js              |  95 +---
 test/workers/package-file/index.spec.js       |  11 +-
 .../package/__snapshots__/index.spec.js.snap  |   4 +
 .../pr/__snapshots__/index.spec.js.snap       |   8 +-
 test/workers/pr/index.spec.js                 |  64 ++-
 .../__snapshots__/apis.spec.js.snap           |   4 +
 test/workers/repository/apis.spec.js          |  14 +-
 test/workers/repository/index.spec.js         |  10 +-
 yarn.lock                                     |   4 -
 42 files changed, 1508 insertions(+), 1259 deletions(-)
 create mode 100644 lib/workers/branch/automerge.js
 create mode 100644 lib/workers/branch/check-existing.js
 create mode 100644 lib/workers/branch/commit.js
 create mode 100644 lib/workers/branch/lock-files.js
 create mode 100644 lib/workers/branch/package-files.js
 create mode 100644 lib/workers/branch/parent.js
 create mode 100644 lib/workers/branch/status-checks.js
 create mode 100644 test/workers/branch/__snapshots__/commit.spec.js.snap
 delete mode 100644 test/workers/branch/__snapshots__/index.spec.js.snap
 create mode 100644 test/workers/branch/__snapshots__/lock-files.spec.js.snap
 delete mode 100644 test/workers/branch/__snapshots__/npm.spec.js.snap
 create mode 100644 test/workers/branch/automerge.spec.js
 create mode 100644 test/workers/branch/check-existing.spec.js
 create mode 100644 test/workers/branch/commit.spec.js
 create mode 100644 test/workers/branch/lock-files.spec.js
 create mode 100644 test/workers/branch/package-files.spec.js
 create mode 100644 test/workers/branch/parent.spec.js
 create mode 100644 test/workers/branch/status-checks.spec.js

diff --git a/lib/api/npm.js b/lib/api/npm.js
index aafe80f7b6..52b16396d6 100644
--- a/lib/api/npm.js
+++ b/lib/api/npm.js
@@ -2,6 +2,7 @@
 
 const got = require('got');
 const url = require('url');
+const ini = require('ini');
 const registryUrl = require('registry-url');
 const registryAuthToken = require('registry-auth-token');
 const parse = require('github-url-from-git');
@@ -20,7 +21,7 @@ function resetCache() {
 }
 
 async function setNpmrc(input) {
-  npmrc = input;
+  npmrc = ini.parse(input);
 }
 
 async function getDependency(name, logger) {
diff --git a/lib/config/definitions.js b/lib/config/definitions.js
index 2a43aa7754..20a9f07f0d 100644
--- a/lib/config/definitions.js
+++ b/lib/config/definitions.js
@@ -150,7 +150,7 @@ const options = [
     name: 'packageFiles',
     description: 'Package file paths',
     type: 'list',
-    stage: 'repository',
+    stage: 'branch',
   },
   {
     name: 'ignoreNodeModules',
@@ -436,7 +436,6 @@ const options = [
     type: 'json',
     default: {
       enabled: true,
-      groupName: 'Lock File Maintenance',
       recreateClosed: true,
       branchName: template('branchName', 'lock-file-maintenance'),
       commitMessage: template('commitMessage', 'lock-file-maintenance'),
diff --git a/lib/logger/config-serializer.js b/lib/logger/config-serializer.js
index 703ea55701..136b13b077 100644
--- a/lib/logger/config-serializer.js
+++ b/lib/logger/config-serializer.js
@@ -17,7 +17,7 @@ function configSerializer(config) {
     if (val && templateFields.indexOf(this.key) !== -1) {
       this.update('[Template]');
     }
-    if (this.key === 'content') {
+    if (this.key === 'content' || this.key === 'contents') {
       this.update('[content]');
     }
   });
diff --git a/lib/logger/pretty-stdout.js b/lib/logger/pretty-stdout.js
index 40b98665a5..a48055940f 100644
--- a/lib/logger/pretty-stdout.js
+++ b/lib/logger/pretty-stdout.js
@@ -21,6 +21,7 @@ const metaFields = [
   'packageFile',
   'depType',
   'dependency',
+  'dependencies',
   'branch',
 ];
 
diff --git a/lib/workers/branch/automerge.js b/lib/workers/branch/automerge.js
new file mode 100644
index 0000000000..feb7dd25b8
--- /dev/null
+++ b/lib/workers/branch/automerge.js
@@ -0,0 +1,27 @@
+module.exports = {
+  tryBranchAutomerge,
+};
+
+async function tryBranchAutomerge(config) {
+  const { logger } = config;
+  logger.debug('Checking if we can automerge branch');
+  if (!config.automerge || config.automergeType === 'pr') {
+    return false;
+  }
+  const branchStatus = await config.api.getBranchStatus(
+    config.branchName,
+    config.requiredStatusChecks
+  );
+  if (branchStatus === 'success') {
+    logger.info(`Automerging branch`);
+    try {
+      await config.api.mergeBranch(config.branchName, config.automergeType);
+      return true; // Branch no longer exists
+    } catch (err) {
+      logger.error({ err }, `Failed to automerge branch`);
+    }
+  } else {
+    logger.debug(`Branch status is "${branchStatus}" - skipping automerge`);
+  }
+  return false;
+}
diff --git a/lib/workers/branch/check-existing.js b/lib/workers/branch/check-existing.js
new file mode 100644
index 0000000000..3060b6c865
--- /dev/null
+++ b/lib/workers/branch/check-existing.js
@@ -0,0 +1,30 @@
+module.exports = {
+  prAlreadyExisted,
+};
+
+async function prAlreadyExisted(config) {
+  const { logger } = config;
+  logger.trace({ config }, 'prAlreadyExisted');
+  if (config.recreateClosed) {
+    logger.debug('recreateClosed is true');
+    return false;
+  }
+  logger.debug('recreateClosed is false');
+  // Return if same PR already existed
+  // Check for current PR title format
+  if (await config.api.checkForClosedPr(config.branchName, config.prTitle)) {
+    logger.debug('Found closed PR with current title');
+    return true;
+  }
+  // Check for legacy PR title format
+  // TODO: remove this in v10
+  const legacyPrTitle = config.prTitle
+    .replace(/to v(\d+)$/, 'to version $1.x') // Major
+    .replace(/to v(\d+)/, 'to version $1'); // Non-major
+  if (await config.api.checkForClosedPr(config.branchName, legacyPrTitle)) {
+    logger.debug('Found closed PR with legacy title');
+    return true;
+  }
+  logger.debug('prAlreadyExisted=false');
+  return false;
+}
diff --git a/lib/workers/branch/commit.js b/lib/workers/branch/commit.js
new file mode 100644
index 0000000000..e00460011a
--- /dev/null
+++ b/lib/workers/branch/commit.js
@@ -0,0 +1,28 @@
+const handlebars = require('handlebars');
+
+module.exports = {
+  commitFilesToBranch,
+};
+
+async function commitFilesToBranch(config) {
+  const { logger } = config;
+  const updatedFiles = config.updatedPackageFiles.concat(
+    config.updatedLockFiles
+  );
+  if (updatedFiles.length) {
+    logger.debug(`${updatedFiles.length} file(s) to commit`);
+    let commitMessage = handlebars.compile(config.commitMessage)(config);
+    if (config.semanticCommits) {
+      commitMessage = `${config.semanticPrefix} ${commitMessage.toLowerCase()}`;
+    }
+    // API will know whether to create new branch or not
+    await config.api.commitFilesToBranch(
+      config.branchName,
+      updatedFiles,
+      commitMessage,
+      config.parentBranch
+    );
+  } else {
+    logger.debug(`No files to commit`);
+  }
+}
diff --git a/lib/workers/branch/index.js b/lib/workers/branch/index.js
index 9afba6e97a..4a22d0d8f4 100644
--- a/lib/workers/branch/index.js
+++ b/lib/workers/branch/index.js
@@ -1,310 +1,100 @@
-const path = require('path');
-const handlebars = require('handlebars');
-const packageJsonHelper = require('./package-json');
-const npm = require('./npm');
-const yarn = require('./yarn');
 const schedule = require('./schedule');
+const { getUpdatedPackageFiles } = require('./package-files');
+const { getUpdatedLockFiles } = require('./lock-files');
+const { commitFilesToBranch } = require('./commit');
+const { getParentBranch } = require('./parent');
+const { tryBranchAutomerge } = require('./automerge');
+const { setUnpublishable } = require('./status-checks');
+const { prAlreadyExisted } = require('./check-existing');
 const prWorker = require('../pr');
-let logger = require('../../logger');
+
+const { isScheduledNow } = schedule;
 
 module.exports = {
-  checkStale,
-  getParentBranch,
-  ensureBranch,
-  processBranchUpgrades,
+  processBranch,
 };
 
-function checkStale(config) {
-  // Manually configured
-  if (config.rebaseStalePrs || config.repoForceRebase) {
-    return true;
-  }
-  // Commits can't be pushed to a branch unless they are up-to-date
-  if (config.automerge === true && config.automergeType === 'branch-push') {
-    return true;
-  }
-  return false;
-}
-
-async function getParentBranch(branchName, config) {
-  // Check if branch exists
-  const branchExists = await config.api.branchExists(branchName);
-  if (!branchExists) {
-    logger.info(`Branch needs creating`);
-    return undefined;
-  }
-  logger.info(`Branch already exists`);
-
-  // Check for existing PR
-  const pr = await config.api.getBranchPr(branchName);
-
-  if (checkStale(config)) {
-    const isBranchStale = await config.api.isBranchStale(branchName);
-    if (isBranchStale) {
-      logger.info(`Branch is stale and needs rebasing`);
-      // We can rebase the branch only if no PR or PR can be rebased
-      if (!pr || pr.canRebase) {
-        return undefined;
-      }
-      // TODO: Warn here so that it appears in PR body
-      logger.info('Cannot rebase branch');
-      return branchName;
-    }
-  }
-
-  // Now check if PR is unmergeable. If so then we also rebase
-  if (pr && pr.isUnmergeable) {
-    logger.debug('PR is unmergeable');
-    if (pr.canRebase) {
-      logger.info(`Branch is not mergeable and needs rebasing`);
-      // TODO: Move this down to api library
-      if (config.isGitLab) {
-        logger.info(`Deleting unmergeable branch in order to recreate/rebase`);
-        await config.api.deleteBranch(branchName);
-      }
-      // Setting parentBranch back to undefined means that we'll use the default branch
-      return undefined;
+async function processBranch(branchConfig) {
+  const config = { ...branchConfig };
+  const dependencies = config.upgrades
+    .map(upgrade => upgrade.depName)
+    .filter(v => v); // remove nulls (happens for lock file maintenance)
+  const logger = config.logger.child({
+    repository: config.repository,
+    branch: config.branchName,
+    dependencies,
+  });
+  config.logger = logger;
+  logger.trace({ config }, 'processBranch');
+  try {
+    // Check schedule
+    if (!isScheduledNow(config)) {
+      logger.info('Skipping branch as it is not scheduled');
+      return;
     }
-    // Don't do anything different, but warn
-    // TODO: Add warning to PR
-    logger.warn(`Branch is not mergeable but can't be rebased`);
-  }
-  logger.debug(`Branch does not need rebasing`);
-  return branchName;
-}
 
-// Ensure branch exists with appropriate content
-async function ensureBranch(config) {
-  logger.trace({ config }, 'ensureBranch');
-  // Use the first upgrade for all the templates
-  const branchName = handlebars.compile(config.branchName)(config);
-  // parentBranch is the branch we will base off
-  // If undefined, this will mean the defaultBranch
-  const parentBranch = await module.exports.getParentBranch(branchName, config);
+    logger.info(`Branch has ${dependencies.length} upgrade(s)`);
 
-  let commitMessage = handlebars.compile(config.commitMessage)(config);
-  if (config.semanticCommits) {
-    commitMessage = `${config.semanticPrefix} ${commitMessage.toLowerCase()}`;
-  }
-  const api = config.api;
-  const packageFiles = {};
-  const commitFiles = [];
-  let unpublishable;
-  for (const upgrade of config.upgrades) {
-    if (typeof upgrade.unpublishable !== 'undefined') {
-      if (typeof unpublishable !== 'undefined') {
-        unpublishable = unpublishable && upgrade.unpublishable;
-      } else {
-        unpublishable = upgrade.unpublishable;
-      }
+    if (await prAlreadyExisted(config)) {
+      logger.info('Closed PR already exists. Skipping branch.');
+      return;
     }
-    if (upgrade.type === 'lockFileMaintenance') {
-      logger.debug('branch lockFileMaintenance');
-      try {
-        if (upgrade.hasYarnLock) {
-          const newYarnLock = await yarn.maintainLockFile(upgrade);
-          if (newYarnLock) {
-            commitFiles.push(newYarnLock);
-          }
-        }
-        if (upgrade.hasPackageLock) {
-          const newPackageLock = await npm.maintainLockFile(upgrade);
-          if (newPackageLock) {
-            commitFiles.push(newPackageLock);
-          }
-        }
-      } catch (err) {
-        logger.debug({ err }, 'Error maintaining lock files');
-        throw new Error('Error maintaining lock files');
-      }
-    } else {
-      // See if this is the first time editing this file
-      if (!packageFiles[upgrade.packageFile]) {
-        // If we are rebasing then existing content will be from master
-        packageFiles[upgrade.packageFile] = await api.getFileContent(
-          upgrade.packageFile,
-          parentBranch
-        );
-      }
-      const newContent = packageJsonHelper.setNewValue(
-        packageFiles[upgrade.packageFile],
-        upgrade.depType,
-        upgrade.depName,
-        upgrade.newVersion,
-        logger
+    config.parentBranch = await getParentBranch(config);
+    logger.debug(`Using parentBranch: ${config.parentBranch}`);
+    config.updatedPackageFiles = await getUpdatedPackageFiles(config);
+    if (config.updatedPackageFiles.length) {
+      logger.debug(
+        { updatedPackageFiles: config.updatedPackageFiles },
+        `Updated ${config.updatedPackageFiles.length} package files`
       );
-      if (packageFiles[upgrade.packageFile] === newContent) {
-        logger.debug('packageFile content unchanged');
-        delete packageFiles[upgrade.packageFile];
-      } else {
-        logger.debug('Updating packageFile content');
-        packageFiles[upgrade.packageFile] = newContent;
-      }
+    } else {
+      logger.debug('No package files need updating');
     }
-  }
-  if (Object.keys(packageFiles).length > 0) {
-    logger.info(
-      `${Object.keys(packageFiles).length} package file(s) need updating.`
-    );
-    for (const packageFile of Object.keys(packageFiles)) {
-      logger.debug(`Adding ${packageFile}`);
-      commitFiles.push({
-        name: packageFile,
-        contents: packageFiles[packageFile],
-      });
-      try {
-        const yarnLockFile = await yarn.getLockFile(
-          path.join(config.tmpDir.name, path.dirname(packageFile)),
-          packageFile,
-          packageFiles[packageFile],
-          api,
-          logger
-        );
-        if (yarnLockFile) {
-          // Add new yarn.lock file too
-          logger.info(`Adding ${yarnLockFile.name}`);
-          commitFiles.push(yarnLockFile);
-        }
-        const packageLockFile = await npm.getLockFile(
-          path.join(config.tmpDir.name, path.dirname(packageFile)),
-          packageFile,
-          packageFiles[packageFile],
-          api,
-          config.versions.npm,
-          logger
-        );
-        if (packageLockFile) {
-          // Add new package-lock.json file too
-          logger.info(`Adding ${packageLockFile.name}`);
-          commitFiles.push(packageLockFile);
-        }
-      } catch (err) {
-        logger.info('Could not generate necessary lock file');
-        throw err;
-      }
+    Object.assign(config, await getUpdatedLockFiles(config));
+    if (config.lockFileError) {
+      throw new Error('lockFileError');
     }
-  }
-  if (commitFiles.length) {
-    logger.debug(`${commitFiles.length} file(s) to commit`);
-    // API will know whether to create new branch or not
-    await api.commitFilesToBranch(
-      branchName,
-      commitFiles,
-      commitMessage,
-      parentBranch
-    );
-  } else {
-    logger.debug(`No files to commit`);
-  }
-  if ((await api.branchExists(branchName)) === false) {
-    // Return now if no branch exists
-    return false;
-  }
-  const context = 'renovate/unpublish-safe';
-  const existingState = await api.getBranchStatusCheck(branchName, context);
-  // If status check was enabled and then is disabled, any "pending" status check needs to be set to "success"
-  const removeStatusCheck =
-    existingState === 'pending' && !config.unpublishSafe;
-  if (
-    (config.unpublishSafe || removeStatusCheck) &&
-    typeof unpublishable !== 'undefined'
-  ) {
-    // Set unpublishable status check
-    const state = unpublishable || removeStatusCheck ? 'success' : 'pending';
-    const description = unpublishable
-      ? 'Packages are at least 24 hours old'
-      : 'Packages < 24 hours old can be unpublished';
-    // Check if state needs setting
-    if (existingState === state) {
-      logger.debug('Status check is already up-to-date');
-    } else {
-      logger.debug(`Updating status check state to ${state}`);
-      await api.setBranchStatus(
-        branchName,
-        context,
-        description,
-        state,
-        'https://github.com/singapore/renovate/blob/master/docs/status-checks.md#unpublish-safe'
+    if (config.updatedLockFiles.length) {
+      logger.debug(
+        { updatedLockFiles: config.updatedLockFiles },
+        `Updated ${config.updatedLockFiles.length} lock files`
       );
+    } else {
+      logger.debug('No updated lock files in branch');
     }
-  }
-  if (config.automerge === false || config.automergeType === 'pr') {
-    // No branch automerge
-    return true;
-  }
-  logger.debug('Checking if we can automerge branch');
-  const branchStatus = await api.getBranchStatus(
-    branchName,
-    config.requiredStatusChecks
-  );
-  if (branchStatus === 'success') {
-    logger.info(`Automerging branch`);
-    try {
-      await api.mergeBranch(branchName, config.automergeType);
-      return false; // Branch no longer exists
-    } catch (err) {
-      logger.error({ err }, `Failed to automerge branch`);
-      throw err;
+    await commitFilesToBranch(config);
+
+    // Return now if no branch exists
+    if ((await config.api.branchExists(config.branchName)) === false) {
+      logger.debug('Branch does not exist - returning');
+      return;
     }
-  } else {
-    logger.debug(`Branch status is "${branchStatus}" - skipping automerge`);
-  }
-  // Return true as branch exists
-  return true;
-}
 
-async function processBranchUpgrades(branchUpgrades, errors, warnings) {
-  logger = branchUpgrades.logger || logger;
-  const config = { ...branchUpgrades };
-  logger = logger.child({
-    repository: config.repository,
-    branch: config.branchName,
-  });
-  config.logger = logger;
-  logger.trace({ config: branchUpgrades }, 'processBranchUpgrades');
-  // Check schedule
-  if (
-    config.schedule &&
-    config.schedule.length &&
-    schedule.isScheduledNow(config) === false
-  ) {
-    logger.info('Skipping branch as it is not scheduled');
+    // Set branch statuses
+    await setUnpublishable(config);
+
+    // Try to automerge branch and finish if successful
+    logger.debug('Checking if we should automerge the branch');
+    const branchMerged = await tryBranchAutomerge(config);
+    if (branchMerged) {
+      logger.debug('Branch is automerged - returning');
+      return;
+    }
+  } catch (err) {
+    logger.error({ err }, `Error updating branch: ${err.message}`);
     return;
   }
-
-  const packageNames = config.upgrades.map(upgrade => upgrade.depName);
-  logger.info(`Branch has ${packageNames.length} upgrade(s): ${packageNames}`);
-
   try {
-    // Groups and lock file maintenance should set this to true
-    if (config.recreateClosed === false) {
-      if (
-        // Check for current PR title format
-        await config.api.checkForClosedPr(config.branchName, config.prTitle)
-      ) {
-        return;
-      }
-      // Check for legacy PR title format
-      const legacyPrTitle = config.prTitle
-        .replace(/to v(\d+)$/, 'to version $1.x') // Major
-        .replace(/to v(\d+)/, 'to version $1'); // Non-major
-      if (await config.api.checkForClosedPr(config.branchName, legacyPrTitle)) {
-        return;
-      }
-    }
-    const branchCreated = await module.exports.ensureBranch(config);
-    if (branchCreated) {
-      const pr = await prWorker.ensurePr(config, logger, errors, warnings);
-      if (pr) {
-        await prWorker.checkAutoMerge(pr, config, logger);
-      }
+    logger.debug('Ensuring PR');
+    logger.trace({ config }, 'test');
+    const pr = await prWorker.ensurePr(config);
+    // TODO: ensurePr should check for automerge itself
+    if (pr) {
+      await prWorker.checkAutoMerge(pr, config);
     }
   } catch (err) {
-    if (err.message !== 'Error generating lock file') {
-      logger.error({ err }, `Error updating branch: ${err.message}`);
-    } else {
-      logger.info('Error updating branch');
-    }
-    // Don't throw here - we don't want to stop the other renovations
+    logger.error({ err }, `Error ensuring PR: ${err.message}`);
   }
+
+  // Don't throw here - we don't want to stop the other renovations
 }
diff --git a/lib/workers/branch/lock-files.js b/lib/workers/branch/lock-files.js
new file mode 100644
index 0000000000..28829ee739
--- /dev/null
+++ b/lib/workers/branch/lock-files.js
@@ -0,0 +1,190 @@
+const fs = require('fs-extra');
+const path = require('path');
+const npm = require('./npm');
+const yarn = require('./yarn');
+
+module.exports = {
+  hasPackageLock,
+  hasYarnLock,
+  determineLockFileDirs,
+  writeExistingFiles,
+  writeUpdatedPackageFiles,
+  getUpdatedLockFiles,
+};
+
+function hasPackageLock(config, packageFile) {
+  config.logger.trace(
+    { packageFiles: config.packageFiles, packageFile },
+    'hasPackageLock'
+  );
+  for (const p of config.packageFiles) {
+    if (p.packageFile === packageFile) {
+      return p.hasPackageLock === true;
+    }
+  }
+  throw new Error(`hasPackageLock cannot find ${packageFile}`);
+}
+
+function hasYarnLock(config, packageFile) {
+  config.logger.trace(
+    { packageFiles: config.packageFiles, packageFile },
+    'hasYarnLock'
+  );
+  for (const p of config.packageFiles) {
+    if (p.packageFile === packageFile) {
+      return p.hasYarnLock === true;
+    }
+  }
+  throw new Error(`hasYarnLock cannot find ${packageFile}`);
+}
+
+function determineLockFileDirs(config) {
+  const packageLockFileDirs = [];
+  const yarnLockFileDirs = [];
+
+  for (const upgrade of config.upgrades) {
+    if (upgrade.type === 'lockFileMaintenance') {
+      // Return every directory that contains a lockfile
+      for (const packageFile of config.packageFiles) {
+        const dirname = path.dirname(packageFile.packageFile);
+        if (packageFile.hasYarnLock) {
+          yarnLockFileDirs.push(dirname);
+        }
+        if (packageFile.hasPackageLock) {
+          packageLockFileDirs.push(dirname);
+        }
+      }
+      return { packageLockFileDirs, yarnLockFileDirs };
+    }
+  }
+
+  for (const packageFile of config.updatedPackageFiles) {
+    if (module.exports.hasYarnLock(config, packageFile.name)) {
+      yarnLockFileDirs.push(path.dirname(packageFile.name));
+    }
+    if (module.exports.hasPackageLock(config, packageFile.name)) {
+      packageLockFileDirs.push(path.dirname(packageFile.name));
+    }
+  }
+
+  return { yarnLockFileDirs, packageLockFileDirs };
+}
+
+async function writeExistingFiles(config) {
+  const { logger } = config;
+  if (!config.packageFiles) {
+    return;
+  }
+  for (const packageFile of config.packageFiles) {
+    const basedir = path.join(
+      config.tmpDir.name,
+      path.dirname(packageFile.packageFile)
+    );
+    logger.debug(`Writing package.json to ${basedir}`);
+    await fs.outputFile(
+      path.join(basedir, 'package.json'),
+      JSON.stringify(packageFile.content)
+    );
+    if (packageFile.npmrc) {
+      logger.debug(`Writing .npmrc to ${basedir}`);
+      await fs.outputFile(path.join(basedir, '.npmrc'), packageFile.npmrc);
+    }
+    if (packageFile.yarnrc) {
+      logger.debug(`Writing .yarnrc to ${basedir}`);
+      await fs.outputFile(
+        path.join(basedir, '.yarnrc'),
+        packageFile.yarnrc.replace('--install.pure-lockfile true', '')
+      );
+    }
+    logger.debug('Removing any previous lock files');
+    await fs.remove(path.join(basedir, 'yarn.lock'));
+    await fs.remove(path.join(basedir, 'package-lock.json'));
+  }
+}
+
+async function writeUpdatedPackageFiles(config) {
+  const { logger } = config;
+  logger.trace({ config }, 'writeUpdatedPackageFiles');
+  logger.debug('Writing any updated package files');
+  if (!config.updatedPackageFiles) {
+    logger.debug('No files found');
+    return;
+  }
+  for (const packageFile of config.updatedPackageFiles) {
+    logger.debug(`Writing ${packageFile.name}`);
+    await fs.outputFile(
+      path.join(config.tmpDir.name, packageFile.name),
+      packageFile.contents
+    );
+  }
+}
+
+async function getUpdatedLockFiles(config) {
+  const { logger } = config;
+  logger.trace({ config }, 'getUpdatedLockFiles');
+  logger.debug('Getting updated lock files');
+  let lockFileError = false;
+  const updatedLockFiles = [];
+  try {
+    const dirs = module.exports.determineLockFileDirs(config);
+    logger.debug({ dirs }, 'lock file dirs');
+    await module.exports.writeExistingFiles(config);
+    await module.exports.writeUpdatedPackageFiles(config);
+
+    for (const lockFileDir of dirs.packageLockFileDirs) {
+      logger.debug(`Generating package-lock.json for ${lockFileDir}`);
+      const newContent = await npm.generateLockFile(
+        path.join(config.tmpDir.name, lockFileDir),
+        logger
+      );
+      if (newContent) {
+        const lockFileName = path.join(lockFileDir, 'package-lock.json');
+        const existingContent = await config.api.getFileContent(
+          lockFileName,
+          config.parentBranch
+        );
+        if (newContent !== existingContent) {
+          logger.debug('package-lock.json needs updating');
+          updatedLockFiles.push({
+            name: lockFileName,
+            contents: newContent,
+          });
+        } else {
+          logger.debug("package-lock.json hasn't changed");
+        }
+      } else {
+        lockFileError = true;
+      }
+    }
+
+    for (const lockFileDir of dirs.yarnLockFileDirs) {
+      logger.debug(`Generating yarn.lock for ${lockFileDir}`);
+      const newContent = await yarn.generateLockFile(
+        path.join(config.tmpDir.name, lockFileDir),
+        logger
+      );
+      if (newContent) {
+        const lockFileName = path.join(lockFileDir, 'yarn.lock');
+        const existingContent = await config.api.getFileContent(
+          lockFileName,
+          config.parentBranch
+        );
+        if (newContent !== existingContent) {
+          logger.debug('yarn.lock needs updating');
+          updatedLockFiles.push({
+            name: lockFileName,
+            contents: newContent,
+          });
+        } else {
+          logger.debug("yarn.lock hasn't changed");
+        }
+      } else {
+        lockFileError = true;
+      }
+    }
+  } catch (err) {
+    logger.error({ err }, 'getUpdatedLockFiles error');
+    lockFileError = true;
+  }
+  return { lockFileError, updatedLockFiles };
+}
diff --git a/lib/workers/branch/npm.js b/lib/workers/branch/npm.js
index 0a6b23d924..6b157a028f 100644
--- a/lib/workers/branch/npm.js
+++ b/lib/workers/branch/npm.js
@@ -1,130 +1,33 @@
-const eol = require('eol');
 const fs = require('fs-extra');
 const cp = require('child_process');
 const path = require('path');
 
 module.exports = {
   generateLockFile,
-  getLockFile,
-  maintainLockFile,
 };
 
-async function generateLockFile(tmpDir, newPackageJson, npmrcContent, logger) {
-  logger.debug('Generating new package-lock.json file');
-  let packageLock;
+async function generateLockFile(tmpDir, logger) {
+  logger.debug(`Spawning npm install to create ${tmpDir}/package-lock.json`);
+  let lockFile = null;
   let result = {};
   try {
-    await fs.outputFile(path.join(tmpDir, 'package.json'), newPackageJson);
-    if (npmrcContent) {
-      await fs.outputFile(path.join(tmpDir, '.npmrc'), npmrcContent);
-    }
-    await fs.remove(path.join(tmpDir, 'package-lock.json'));
-    logger.debug(
-      `Spawning npm install to generate ${tmpDir}/package-lock.json`
-    );
     result = cp.spawnSync('npm', ['install', '--ignore-scripts'], {
       cwd: tmpDir,
       shell: true,
       env: { ...process.env, ...{ NODE_ENV: 'dev' } },
     });
-    logger.debug(
-      { stdout: String(result.stdout), stderr: String(result.stderr) },
-      'npm install complete'
-    );
-    packageLock = fs.readFileSync(path.join(tmpDir, 'package-lock.json'));
+    logger.debug(`npm stdout:\n${String(result.stdout)}`);
+    logger.debug(`pm stderr:\n${String(result.stderr)}`);
+    lockFile = fs.readFileSync(path.join(tmpDir, 'package-lock.json'), 'utf8');
   } catch (err) /* istanbul ignore next */ {
     logger.warn(
       {
         err,
-        packageJson: JSON.parse(newPackageJson),
-        npmrc: npmrcContent,
         stdout: String(result.stdout),
         stderr: String(result.stderr),
       },
-      'Error generating package-lock.json'
-    );
-    throw Error('Error generating lock file');
-  }
-  return packageLock;
-}
-
-async function getLockFile(
-  tmpDir,
-  packageFile,
-  packageContent,
-  api,
-  npmVersion,
-  logger
-) {
-  // Detect if a package-lock.json file is in use
-  const packageLockFileName = path.join(
-    path.dirname(packageFile),
-    'package-lock.json'
-  );
-  if (!await api.getFileContent(packageLockFileName)) {
-    return null;
-  }
-  if (npmVersion === '') {
-    throw new Error(
-      'Need to generate package-lock.json but npm is not installed'
+      'npm install error'
     );
   }
-  // TODO: have a more forwards-compatible check
-  if (npmVersion[0] !== '5') {
-    throw new Error(
-      `Need to generate package-lock.json but npm version is "${npmVersion}"`
-    );
-  }
-  // Copy over custom config commitFiles
-  const npmrcContent = await api.getFileContent('.npmrc');
-  // Generate package-lock.json using shell command
-  const newPackageLockContent = await module.exports.generateLockFile(
-    tmpDir,
-    packageContent,
-    npmrcContent,
-    logger
-  );
-  // Return file object
-  return {
-    name: packageLockFileName,
-    contents: newPackageLockContent,
-  };
-}
-
-async function maintainLockFile(inputConfig) {
-  const logger = inputConfig.logger;
-  logger.trace({ config: inputConfig }, `maintainLockFile`);
-  const packageContent = await inputConfig.api.getFileContent(
-    inputConfig.packageFile
-  );
-  const packageLockFileName = path.join(
-    path.dirname(inputConfig.packageFile),
-    'package-lock.json'
-  );
-  logger.debug(`Checking for ${packageLockFileName}`);
-  const existingPackageLock = await inputConfig.api.getFileContent(
-    packageLockFileName
-  );
-  logger.trace(`existingPackageLock:\n${existingPackageLock}`);
-  if (!existingPackageLock) {
-    return null;
-  }
-  logger.debug('Found existing package-lock.json file');
-  const newPackageLock = await module.exports.getLockFile(
-    path.join(inputConfig.tmpDir.name, path.dirname(inputConfig.packageFile)),
-    inputConfig.packageFile,
-    packageContent,
-    inputConfig.api,
-    inputConfig.versions.npm,
-    logger
-  );
-  logger.trace(`newPackageLock:\n${newPackageLock.contents}`);
-  if (
-    eol.lf(existingPackageLock) === eol.lf(newPackageLock.contents.toString())
-  ) {
-    logger.debug('npm lock file does not need updating');
-    return null;
-  }
-  logger.debug('npm lock needs updating');
-  return newPackageLock;
+  return lockFile;
 }
diff --git a/lib/workers/branch/package-files.js b/lib/workers/branch/package-files.js
new file mode 100644
index 0000000000..8567f248a9
--- /dev/null
+++ b/lib/workers/branch/package-files.js
@@ -0,0 +1,36 @@
+const packageJsonHelper = require('./package-json');
+
+module.exports = {
+  getUpdatedPackageFiles,
+};
+
+async function getUpdatedPackageFiles(config) {
+  const updatedPackageFiles = {};
+
+  for (const upgrade of config.upgrades) {
+    if (upgrade.type !== 'lockFileMaintenance') {
+      const existingContent =
+        updatedPackageFiles[upgrade.packageFile] ||
+        (await config.api.getFileContent(
+          upgrade.packageFile,
+          config.parentBranch
+        ));
+      const newContent = packageJsonHelper.setNewValue(
+        existingContent,
+        upgrade.depType,
+        upgrade.depName,
+        upgrade.newVersion,
+        config.logger
+      );
+      if (newContent !== existingContent) {
+        config.logger.debug('Updating packageFile content');
+        updatedPackageFiles[upgrade.packageFile] = newContent;
+      }
+    }
+  }
+
+  return Object.keys(updatedPackageFiles).map(packageFile => ({
+    name: packageFile,
+    contents: updatedPackageFiles[packageFile],
+  }));
+}
diff --git a/lib/workers/branch/parent.js b/lib/workers/branch/parent.js
new file mode 100644
index 0000000000..89f44d09c7
--- /dev/null
+++ b/lib/workers/branch/parent.js
@@ -0,0 +1,64 @@
+module.exports = {
+  checkStale,
+  getParentBranch,
+};
+
+function checkStale(config) {
+  return (
+    config.rebaseStalePrs ||
+    config.repoForceRebase ||
+    (config.automerge === true && config.automergeType === 'branch-push')
+  );
+}
+
+async function getParentBranch(config) {
+  const { api, branchName, logger } = config;
+  // Check if branch exists
+  const branchExists = await api.branchExists(branchName);
+  if (!branchExists) {
+    logger.info(`Branch needs creating`);
+    return undefined;
+  }
+  logger.info(`Branch already exists`);
+
+  // Check for existing PR
+  const pr = await api.getBranchPr(branchName);
+
+  if (
+    config.rebaseStalePrs ||
+    config.repoForceRebase ||
+    (config.automerge && config.automergeType === 'branch-push')
+  ) {
+    const isBranchStale = await api.isBranchStale(branchName);
+    if (isBranchStale) {
+      logger.info(`Branch is stale and needs rebasing`);
+      // We can rebase the branch only if no PR or PR can be rebased
+      if (!pr || pr.canRebase) {
+        return undefined;
+      }
+      // TODO: Warn here so that it appears in PR body
+      logger.info('Cannot rebase branch');
+      return branchName;
+    }
+  }
+
+  // Now check if PR is unmergeable. If so then we also rebase
+  if (pr && pr.isUnmergeable) {
+    logger.debug('PR is unmergeable');
+    if (pr.canRebase) {
+      logger.info(`Branch is not mergeable and needs rebasing`);
+      // TODO: Move this down to api library
+      if (config.isGitLab) {
+        logger.info(`Deleting unmergeable branch in order to recreate/rebase`);
+        await config.api.deleteBranch(branchName);
+      }
+      // Setting parentBranch back to undefined means that we'll use the default branch
+      return undefined;
+    }
+    // Don't do anything different, but warn
+    // TODO: Add warning to PR
+    logger.warn(`Branch is not mergeable but can't be rebased`);
+  }
+  logger.debug(`Branch does not need rebasing`);
+  return branchName;
+}
diff --git a/lib/workers/branch/status-checks.js b/lib/workers/branch/status-checks.js
new file mode 100644
index 0000000000..f031dabda7
--- /dev/null
+++ b/lib/workers/branch/status-checks.js
@@ -0,0 +1,50 @@
+module.exports = {
+  setUnpublishable,
+};
+
+async function setUnpublishable(config) {
+  let unpublishable;
+  for (const upgrade of config.upgrades) {
+    if (typeof upgrade.unpublishable !== 'undefined') {
+      if (typeof unpublishable !== 'undefined') {
+        unpublishable = unpublishable && upgrade.unpublishable;
+      } else {
+        unpublishable = upgrade.unpublishable;
+      }
+    }
+  }
+  if (typeof unpublishable === 'undefined') {
+    unpublishable = true;
+  }
+  const context = 'renovate/unpublish-safe';
+  const existingState = await config.api.getBranchStatusCheck(
+    config.branchName,
+    context
+  );
+  // If status check was enabled and then is disabled, any "pending" status check needs to be set to "success"
+  const removeStatusCheck =
+    existingState === 'pending' && !config.unpublishSafe;
+  if (
+    (config.unpublishSafe || removeStatusCheck) &&
+    typeof unpublishable !== 'undefined'
+  ) {
+    // Set unpublishable status check
+    const state = unpublishable || removeStatusCheck ? 'success' : 'pending';
+    const description = unpublishable
+      ? 'Packages are at least 24 hours old'
+      : 'Packages < 24 hours old can be unpublished';
+    // Check if state needs setting
+    if (existingState === state) {
+      config.logger.debug('Status check is already up-to-date');
+    } else {
+      config.logger.debug(`Updating status check state to ${state}`);
+      await config.api.setBranchStatus(
+        config.branchName,
+        context,
+        description,
+        state,
+        'https://github.com/singapore/renovate/blob/master/docs/status-checks.md#unpublish-safe'
+      );
+    }
+  }
+}
diff --git a/lib/workers/branch/yarn.js b/lib/workers/branch/yarn.js
index a1f998ebd9..b9f3ef310b 100644
--- a/lib/workers/branch/yarn.js
+++ b/lib/workers/branch/yarn.js
@@ -1,39 +1,18 @@
-const eol = require('eol');
 const fs = require('fs-extra');
 const cp = require('child_process');
 const path = require('path');
 
 module.exports = {
   generateLockFile,
-  getLockFile,
-  maintainLockFile,
 };
 
 const yarnVersion = '0.27.5';
 
-async function generateLockFile(
-  tmpDir,
-  newPackageJson,
-  npmrcContent,
-  yarnrcContent,
-  logger
-) {
+async function generateLockFile(tmpDir, logger) {
   logger.debug('Generating new yarn.lock file');
-  let yarnLock;
+  let lockFile = null;
   let result = {};
   try {
-    await fs.outputFile(path.join(tmpDir, 'package.json'), newPackageJson);
-    if (npmrcContent) {
-      await fs.outputFile(path.join(tmpDir, '.npmrc'), npmrcContent);
-    }
-    if (yarnrcContent) {
-      const filteredYarnrc = yarnrcContent.replace(
-        '--install.pure-lockfile true',
-        ''
-      );
-      await fs.outputFile(path.join(tmpDir, '.yarnrc'), filteredYarnrc);
-    }
-    await fs.remove(path.join(tmpDir, 'yarn.lock'));
     logger.debug(`Spawning yarn install to create ${tmpDir}/yarn.lock`);
     // Use an embedded yarn
     const yarnBin = path.join(
@@ -47,85 +26,18 @@ async function generateLockFile(
       shell: true,
       env: { ...process.env, ...{ NODE_ENV: 'dev' } },
     });
-    logger.debug(String(result.stdout));
-    logger.debug(String(result.stderr));
-    yarnLock = fs.readFileSync(path.join(tmpDir, 'yarn.lock'));
+    logger.debug(`yarn stdout:\n${String(result.stdout)}`);
+    logger.debug(`yarn stderr:\n${String(result.stderr)}`);
+    lockFile = fs.readFileSync(path.join(tmpDir, 'yarn.lock'), 'utf8');
   } catch (err) /* istanbul ignore next */ {
     logger.warn(
       {
         err,
-        newPackageJson: JSON.parse(newPackageJson),
-        npmrcContent,
-        yarnrcContent,
         stdout: String(result.stdout),
         stderr: String(result.stderr),
       },
-      'Error generating yarn.lock'
+      'yarn install error'
     );
-    throw Error('Error generating lock file');
   }
-  return yarnLock;
-}
-
-async function getLockFile(tmpDir, packageFile, packageContent, api, logger) {
-  // Detect if a yarn.lock file is in use
-  const yarnLockFileName = path.join(path.dirname(packageFile), 'yarn.lock');
-  if (!await api.getFileContent(yarnLockFileName)) {
-    return null;
-  }
-  // Copy over custom config commitFiles
-  const npmrcContent = await api.getFileContent('.npmrc');
-  const yarnrcContent = await api.getFileContent('.yarnrc');
-  // Generate yarn.lock using shell command
-  const newYarnLockContent = await module.exports.generateLockFile(
-    tmpDir,
-    packageContent,
-    npmrcContent,
-    yarnrcContent,
-    logger
-  );
-  // Return file object
-  return {
-    name: yarnLockFileName,
-    contents: newYarnLockContent,
-  };
-}
-
-async function maintainLockFile(inputConfig) {
-  const logger = inputConfig.logger;
-  logger.trace({ config: inputConfig }, `maintainLockFile`);
-  const packageContent = await inputConfig.api.getFileContent(
-    inputConfig.packageFile
-  );
-  const yarnLockFileName = path.join(
-    path.dirname(inputConfig.packageFile),
-    'yarn.lock'
-  );
-  logger.debug(`Checking for ${yarnLockFileName}`);
-  let existingYarnLock = await inputConfig.api.getFileContent(
-    yarnLockFileName,
-    inputConfig.branchName
-  );
-  if (!existingYarnLock) {
-    existingYarnLock = await inputConfig.api.getFileContent(yarnLockFileName);
-  }
-  logger.trace(`existingYarnLock:\n${existingYarnLock}`);
-  if (!existingYarnLock) {
-    return null;
-  }
-  logger.debug('Found existing yarn.lock file');
-  const newYarnLock = await module.exports.getLockFile(
-    path.join(inputConfig.tmpDir.name, path.dirname(inputConfig.packageFile)),
-    inputConfig.packageFile,
-    packageContent,
-    inputConfig.api,
-    logger
-  );
-  logger.trace(`newYarnLock:\n${newYarnLock.contents}`);
-  if (eol.lf(existingYarnLock) === eol.lf(newYarnLock.contents.toString())) {
-    logger.debug('Yarn lock file does not need updating');
-    return null;
-  }
-  logger.debug('Yarn lock needs updating');
-  return newYarnLock;
+  return lockFile;
 }
diff --git a/lib/workers/package-file/index.js b/lib/workers/package-file/index.js
index ed2f153a31..46916f359d 100644
--- a/lib/workers/package-file/index.js
+++ b/lib/workers/package-file/index.js
@@ -1,5 +1,6 @@
 const configParser = require('../../config');
 const depTypeWorker = require('../dep-type');
+const npmApi = require('../../api/npm');
 
 let logger = require('../../logger');
 
@@ -9,6 +10,9 @@ module.exports = {
 
 async function renovatePackageFile(packageFileConfig) {
   const config = { ...packageFileConfig };
+  if (config.npmrc) {
+    npmApi.setNpmrc(config.npmrc);
+  }
   let upgrades = [];
   logger = config.logger;
   logger.info(`Processing package file`);
@@ -70,7 +74,7 @@ async function renovatePackageFile(packageFileConfig) {
       { config: lockFileMaintenanceConf },
       `lockFileMaintenanceConf`
     );
-    upgrades.push(lockFileMaintenanceConf);
+    upgrades.push(configParser.filterConfig(lockFileMaintenanceConf, 'branch'));
   }
 
   logger.info('Finished processing package file');
diff --git a/lib/workers/pr/index.js b/lib/workers/pr/index.js
index feff6d9189..d3b704bf4d 100644
--- a/lib/workers/pr/index.js
+++ b/lib/workers/pr/index.js
@@ -11,10 +11,12 @@ module.exports = {
 };
 
 // Ensures that PR exists with matching title/body
-async function ensurePr(inputConfig, logger, errors, warnings) {
-  logger.trace({ config: inputConfig }, 'ensurePr');
+async function ensurePr(prConfig) {
+  const config = { ...prConfig };
+  const { logger } = config;
+  logger.debug('ensuring PR');
+  logger.trace({ config }, 'ensurePr');
   // If there is a group, it will use the config of the first upgrade in the array
-  const config = { ...inputConfig };
   const upgrades = config.upgrades;
   config.upgrades = [];
   const branchName = config.branchName;
@@ -102,13 +104,11 @@ async function ensurePr(inputConfig, logger, errors, warnings) {
 
   // Update the config object
   Object.assign(config, upgrades[0]);
-  if (errors && errors.length) {
+  if (config.errors && config.errors.length) {
     config.hasErrors = true;
-    config.errors = errors;
   }
-  if (warnings && warnings.length) {
+  if (config.warnings && config.warnings.length) {
     config.hasWarnings = true;
-    config.warnings = warnings;
   }
 
   const prTitle = handlebars.compile(config.prTitle)(config);
@@ -183,7 +183,8 @@ async function ensurePr(inputConfig, logger, errors, warnings) {
   return null;
 }
 
-async function checkAutoMerge(pr, config, logger) {
+async function checkAutoMerge(pr, config) {
+  const { logger } = config;
   logger.trace({ config }, 'checkAutoMerge');
   logger.debug(`Checking #${pr.number} for automerge`);
   if (config.automerge === true && config.automergeType === 'pr') {
diff --git a/lib/workers/repository/apis.js b/lib/workers/repository/apis.js
index 05787595f1..649d759160 100644
--- a/lib/workers/repository/apis.js
+++ b/lib/workers/repository/apis.js
@@ -40,13 +40,11 @@ async function detectSemanticCommits(config) {
 // Check for .npmrc in repository and pass it to npm api if found
 async function setNpmrc(config) {
   try {
-    let npmrc = null;
     const npmrcContent = await config.api.getFileContent('.npmrc');
     if (npmrcContent) {
       config.logger.debug('Found .npmrc file in repository');
-      npmrc = ini.parse(npmrcContent);
+      npmApi.setNpmrc(npmrcContent);
     }
-    npmApi.setNpmrc(npmrc);
   } catch (err) {
     config.logger.error('Failed to set .npmrc');
   }
@@ -256,6 +254,14 @@ async function resolvePackageFiles(inputConfig) {
       packageFile.packageFile,
       config.baseBranch
     );
+    packageFile.npmrc = await config.api.getFileContent(
+      path.join(path.dirname(packageFile.packageFile), '.npmrc'),
+      config.baseBranch
+    );
+    packageFile.yarnrc = await config.api.getFileContent(
+      path.join(path.dirname(packageFile.packageFile), '.yarnrc'),
+      config.baseBranch
+    );
     if (packageFile.content) {
       // hoist renovate config if exists
       if (packageFile.content.renovate) {
diff --git a/lib/workers/repository/index.js b/lib/workers/repository/index.js
index c1dd05a1ba..23c08f0433 100644
--- a/lib/workers/repository/index.js
+++ b/lib/workers/repository/index.js
@@ -104,7 +104,7 @@ async function renovateRepository(repoConfig, token) {
     let branchList;
     if (config.repoIsOnboarded) {
       for (const branchUpgrade of branchUpgrades) {
-        await branchWorker.processBranchUpgrades(
+        await branchWorker.processBranch(
           branchUpgrade,
           config.errors,
           config.warnings
diff --git a/package.json b/package.json
index e24f993a5e..1dfb8443de 100644
--- a/package.json
+++ b/package.json
@@ -45,7 +45,6 @@
     "changelog": "1.4.0",
     "commander": "2.11.0",
     "conventional-commits-detector": "0.1.1",
-    "eol": "0.9.0",
     "fs-extra": "4.0.1",
     "gh-got": "6.0.0",
     "github-url-from-git": "1.5.0",
diff --git a/test/config/__snapshots__/index.spec.js.snap b/test/config/__snapshots__/index.spec.js.snap
index 710412d130..afa5515716 100644
--- a/test/config/__snapshots__/index.spec.js.snap
+++ b/test/config/__snapshots__/index.spec.js.snap
@@ -5,7 +5,6 @@ Object {
   "branchName": "{{branchPrefix}}lock-file-maintenance",
   "commitMessage": "Update lock file",
   "enabled": true,
-  "groupName": "Lock File Maintenance",
   "prBody": "This {{#if isGitHub}}Pull{{else}}Merge{{/if}} Request updates \`package.json\` lock files to use the latest dependency versions.
 
 {{#if schedule}}
diff --git a/test/workers/branch/__snapshots__/commit.spec.js.snap b/test/workers/branch/__snapshots__/commit.spec.js.snap
new file mode 100644
index 0000000000..02ba7911f2
--- /dev/null
+++ b/test/workers/branch/__snapshots__/commit.spec.js.snap
@@ -0,0 +1,33 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`workers/branch/automerge commitFilesToBranch applies semantic prefix 1`] = `
+Array [
+  Array [
+    "renovate/some-branch",
+    Array [
+      Object {
+        "contents": "some contents",
+        "name": "package.json",
+      },
+    ],
+    "some-prefix some commit message",
+    undefined,
+  ],
+]
+`;
+
+exports[`workers/branch/automerge commitFilesToBranch commits files 1`] = `
+Array [
+  Array [
+    "renovate/some-branch",
+    Array [
+      Object {
+        "contents": "some contents",
+        "name": "package.json",
+      },
+    ],
+    "some commit message",
+    undefined,
+  ],
+]
+`;
diff --git a/test/workers/branch/__snapshots__/index.spec.js.snap b/test/workers/branch/__snapshots__/index.spec.js.snap
deleted file mode 100644
index 61afb6cc4f..0000000000
--- a/test/workers/branch/__snapshots__/index.spec.js.snap
+++ /dev/null
@@ -1,75 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`workers/branch ensureBranch(config) automerges successful branches 1`] = `
-Object {
-  "calls": Array [
-    Array [
-      "renovate/dummy-1.x",
-      "branch-push",
-    ],
-  ],
-  "instances": Array [
-    Object {
-      "branchExists": [Function],
-      "commitFilesToBranch": [Function],
-      "getBranchStatus": [Function],
-      "getBranchStatusCheck": [Function],
-      "getFileContent": [Function],
-      "mergeBranch": [Function],
-      "setBranchStatus": [Function],
-    },
-  ],
-}
-`;
-
-exports[`workers/branch ensureBranch(config) throws error if cannot maintain yarn.lock file 1`] = `"Error maintaining lock files"`;
-
-exports[`workers/branch ensureBranch(config) throws if automerge throws 1`] = `[Error: automerge failed]`;
-
-exports[`workers/branch ensureBranch(config) throws if automerge throws 2`] = `
-Object {
-  "calls": Array [
-    Array [
-      "renovate/dummy-1.x",
-      "branch-push",
-    ],
-  ],
-  "instances": Array [
-    Object {
-      "branchExists": [Function],
-      "commitFilesToBranch": [Function],
-      "getBranchStatus": [Function],
-      "getBranchStatusCheck": [Function],
-      "getFileContent": [Function],
-      "mergeBranch": [Function],
-      "setBranchStatus": [Function],
-    },
-  ],
-}
-`;
-
-exports[`workers/branch processBranchUpgrades(upgrades) returns if legacy closed major PR found 1`] = `
-Array [
-  Array [
-    "renovate/a-2.x",
-    "Upgrade a to v2",
-  ],
-  Array [
-    "renovate/a-2.x",
-    "Upgrade a to version 2.x",
-  ],
-]
-`;
-
-exports[`workers/branch processBranchUpgrades(upgrades) returns if legacy closed minor PR found 1`] = `
-Array [
-  Array [
-    "renovate/a-2.x",
-    "Upgrade a to v2.1.0",
-  ],
-  Array [
-    "renovate/a-2.x",
-    "Upgrade a to version 2.1.0",
-  ],
-]
-`;
diff --git a/test/workers/branch/__snapshots__/lock-files.spec.js.snap b/test/workers/branch/__snapshots__/lock-files.spec.js.snap
new file mode 100644
index 0000000000..5e234a665e
--- /dev/null
+++ b/test/workers/branch/__snapshots__/lock-files.spec.js.snap
@@ -0,0 +1,108 @@
+// Jest Snapshot v1, https://goo.gl/fbAQLP
+
+exports[`workers/branch/lock-files determineLockFileDirs returns all directories if lock file maintenance 1`] = `
+Object {
+  "packageLockFileDirs": Array [
+    "backend",
+  ],
+  "yarnLockFileDirs": Array [
+    ".",
+  ],
+}
+`;
+
+exports[`workers/branch/lock-files determineLockFileDirs returns directories from updated package files 1`] = `
+Object {
+  "packageLockFileDirs": Array [
+    "backend",
+  ],
+  "yarnLockFileDirs": Array [
+    ".",
+  ],
+}
+`;
+
+exports[`workers/branch/lock-files getUpdatedLockFiles adds multiple lock files 1`] = `
+Object {
+  "lockFileError": false,
+  "updatedLockFiles": Array [
+    Object {
+      "contents": "some new lock file contents",
+      "name": "a/package-lock.json",
+    },
+    Object {
+      "contents": "some new lock file contents",
+      "name": "c/yarn.lock",
+    },
+  ],
+}
+`;
+
+exports[`workers/branch/lock-files getUpdatedLockFiles returns no error and empty lockfiles if none updated 1`] = `
+Object {
+  "lockFileError": false,
+  "updatedLockFiles": Array [],
+}
+`;
+
+exports[`workers/branch/lock-files getUpdatedLockFiles returns npm errors 1`] = `
+Object {
+  "lockFileError": true,
+  "updatedLockFiles": Array [],
+}
+`;
+
+exports[`workers/branch/lock-files getUpdatedLockFiles returns yarn errors 1`] = `
+Object {
+  "lockFileError": true,
+  "updatedLockFiles": Array [],
+}
+`;
+
+exports[`workers/branch/lock-files getUpdatedLockFiles sets error if receiving null 1`] = `
+Object {
+  "lockFileError": true,
+  "updatedLockFiles": Array [],
+}
+`;
+
+exports[`workers/branch/lock-files getUpdatedLockFiles tries multiple lock files 1`] = `
+Object {
+  "lockFileError": false,
+  "updatedLockFiles": Array [],
+}
+`;
+
+exports[`workers/branch/lock-files writeExistingFiles writes files and removes files 1`] = `
+Array [
+  Array [
+    "some-tmp-dir/package.json",
+    "{\\"name\\":\\"package 1\\"}",
+  ],
+  Array [
+    "some-tmp-dir/.npmrc",
+    "some npmrc",
+  ],
+  Array [
+    "some-tmp-dir/backend/package.json",
+    "{\\"name\\":\\"package 2\\"}",
+  ],
+  Array [
+    "some-tmp-dir/backend/.yarnrc",
+    "some yarnrc",
+  ],
+]
+`;
+
+exports[`workers/branch/lock-files writeUpdatedPackageFiles writes updated packageFiles 1`] = `
+Array [
+  Array [
+    "some-tmp-dir/package.json",
+    "raw contents",
+  ],
+  Array [
+    "some-tmp-dir/backend/package.json",
+    "more raw contents",
+  ],
+]
+`;
diff --git a/test/workers/branch/__snapshots__/npm.spec.js.snap b/test/workers/branch/__snapshots__/npm.spec.js.snap
deleted file mode 100644
index 2ab30bd604..0000000000
--- a/test/workers/branch/__snapshots__/npm.spec.js.snap
+++ /dev/null
@@ -1,5 +0,0 @@
-// Jest Snapshot v1, https://goo.gl/fbAQLP
-
-exports[`getLockFile throws if no npm 1`] = `[Error: Need to generate package-lock.json but npm is not installed]`;
-
-exports[`getLockFile throws if wrong npm version 1`] = `[Error: Need to generate package-lock.json but npm version is "4.0.0"]`;
diff --git a/test/workers/branch/automerge.spec.js b/test/workers/branch/automerge.spec.js
new file mode 100644
index 0000000000..f957ea1131
--- /dev/null
+++ b/test/workers/branch/automerge.spec.js
@@ -0,0 +1,46 @@
+const { tryBranchAutomerge } = require('../../../lib/workers/branch/automerge');
+const defaultConfig = require('../../../lib/config/defaults').getConfig();
+const logger = require('../../_fixtures/logger');
+
+describe('workers/branch/automerge', () => {
+  describe('tryBranchAutomerge', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        api: { getBranchStatus: jest.fn(), mergeBranch: jest.fn() },
+        logger,
+      };
+    });
+    it('returns false if not configured for automerge', async () => {
+      config.automerge = false;
+      expect(await tryBranchAutomerge(config)).toBe(false);
+    });
+    it('returns false if automergeType is pr', async () => {
+      config.automerge = true;
+      config.automergeType = 'pr';
+      expect(await tryBranchAutomerge(config)).toBe(false);
+    });
+    it('returns false if branch status is not success', async () => {
+      config.automerge = true;
+      config.automergeType = 'branch-push';
+      config.api.getBranchStatus.mockReturnValueOnce('pending');
+      expect(await tryBranchAutomerge(config)).toBe(false);
+    });
+    it('returns false if automerge fails', async () => {
+      config.automerge = true;
+      config.automergeType = 'branch-push';
+      config.api.getBranchStatus.mockReturnValueOnce('success');
+      config.api.mergeBranch.mockImplementationOnce(() => {
+        throw new Error('merge error');
+      });
+      expect(await tryBranchAutomerge(config)).toBe(false);
+    });
+    it('returns true if automerge succeeds', async () => {
+      config.automerge = true;
+      config.automergeType = 'branch-push';
+      config.api.getBranchStatus.mockReturnValueOnce('success');
+      expect(await tryBranchAutomerge(config)).toBe(true);
+    });
+  });
+});
diff --git a/test/workers/branch/check-existing.spec.js b/test/workers/branch/check-existing.spec.js
new file mode 100644
index 0000000000..a4e3c70e68
--- /dev/null
+++ b/test/workers/branch/check-existing.spec.js
@@ -0,0 +1,41 @@
+const {
+  prAlreadyExisted,
+} = require('../../../lib/workers/branch/check-existing');
+const defaultConfig = require('../../../lib/config/defaults').getConfig();
+const logger = require('../../_fixtures/logger');
+
+describe('workers/branch/check-existing', () => {
+  describe('prAlreadyExisted', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        api: { checkForClosedPr: jest.fn() },
+        logger,
+        branchName: 'some-branch',
+        prTitle: 'some-title',
+      };
+    });
+    it('returns false if recreating closed PRs', async () => {
+      config.recreateClosed = true;
+      expect(await prAlreadyExisted(config)).toBe(false);
+      expect(config.api.checkForClosedPr.mock.calls.length).toBe(0);
+    });
+    it('returns false if both checks miss', async () => {
+      config.recreateClosed = false;
+      expect(await prAlreadyExisted(config)).toBe(false);
+      expect(config.api.checkForClosedPr.mock.calls.length).toBe(2);
+    });
+    it('returns true if first check hits', async () => {
+      config.api.checkForClosedPr.mockReturnValueOnce(true);
+      expect(await prAlreadyExisted(config)).toBe(true);
+      expect(config.api.checkForClosedPr.mock.calls.length).toBe(1);
+    });
+    it('returns true if second check hits', async () => {
+      config.api.checkForClosedPr.mockReturnValueOnce(false);
+      config.api.checkForClosedPr.mockReturnValueOnce(true);
+      expect(await prAlreadyExisted(config)).toBe(true);
+      expect(config.api.checkForClosedPr.mock.calls.length).toBe(2);
+    });
+  });
+});
diff --git a/test/workers/branch/commit.spec.js b/test/workers/branch/commit.spec.js
new file mode 100644
index 0000000000..27bc9d5b61
--- /dev/null
+++ b/test/workers/branch/commit.spec.js
@@ -0,0 +1,45 @@
+const { commitFilesToBranch } = require('../../../lib/workers/branch/commit');
+const defaultConfig = require('../../../lib/config/defaults').getConfig();
+const logger = require('../../_fixtures/logger');
+
+describe('workers/branch/automerge', () => {
+  describe('commitFilesToBranch', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        api: { commitFilesToBranch: jest.fn() },
+        logger,
+        branchName: 'renovate/some-branch',
+        commitMessage: 'some commit message',
+        semanticCommits: false,
+        semanticPrefix: 'some-prefix',
+        updatedPackageFiles: [],
+        updatedLockFiles: [],
+      };
+    });
+    it('handles empty files', async () => {
+      await commitFilesToBranch(config);
+      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(0);
+    });
+    it('commits files', async () => {
+      config.updatedPackageFiles.push({
+        name: 'package.json',
+        contents: 'some contents',
+      });
+      await commitFilesToBranch(config);
+      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(1);
+      expect(config.api.commitFilesToBranch.mock.calls).toMatchSnapshot();
+    });
+    it('applies semantic prefix', async () => {
+      config.updatedPackageFiles.push({
+        name: 'package.json',
+        contents: 'some contents',
+      });
+      config.semanticCommits = true;
+      await commitFilesToBranch(config);
+      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(1);
+      expect(config.api.commitFilesToBranch.mock.calls).toMatchSnapshot();
+    });
+  });
+});
diff --git a/test/workers/branch/index.spec.js b/test/workers/branch/index.spec.js
index aef038de9e..e35ae00044 100644
--- a/test/workers/branch/index.spec.js
+++ b/test/workers/branch/index.spec.js
@@ -1,465 +1,112 @@
 const branchWorker = require('../../../lib/workers/branch');
-const prWorker = require('../../../lib/workers/pr');
-const schedule = require('../../../lib/workers/branch/schedule');
-const npm = require('../../../lib/workers/branch/npm');
-const yarn = require('../../../lib/workers/branch/yarn');
 const defaultConfig = require('../../../lib/config/defaults').getConfig();
-const packageJsonHelper = require('../../../lib/workers/branch/package-json');
 
-const logger = require('../../_fixtures/logger');
+const schedule = require('../../../lib/workers/branch/schedule');
+const checkExisting = require('../../../lib/workers/branch/check-existing');
+const parent = require('../../../lib/workers/branch/parent');
+const packageFiles = require('../../../lib/workers/branch/package-files');
+const lockFiles = require('../../../lib/workers/branch/lock-files');
+const commit = require('../../../lib/workers/branch/commit');
+const statusChecks = require('../../../lib/workers/branch/status-checks');
+const automerge = require('../../../lib/workers/branch/automerge');
+const prWorker = require('../../../lib/workers/pr');
 
-jest.mock('../../../lib/workers/branch/yarn');
-jest.mock('../../../lib/workers/branch/package-json');
+jest.mock('../../../lib/workers/branch/schedule');
+jest.mock('../../../lib/workers/branch/check-existing');
+jest.mock('../../../lib/workers/branch/parent');
+jest.mock('../../../lib/workers/branch/package-files');
+jest.mock('../../../lib/workers/branch/lock-files');
+jest.mock('../../../lib/workers/branch/commit');
+jest.mock('../../../lib/workers/branch/status-checks');
+jest.mock('../../../lib/workers/branch/automerge');
+jest.mock('../../../lib/workers/pr');
+
+const logger = require('../../_fixtures/logger');
 
 describe('workers/branch', () => {
-  describe('getParentBranch(branchName, config)', () => {
+  describe('processBranch', () => {
     let config;
-    const branchName = 'foo';
     beforeEach(() => {
-      schedule.isScheduledNow = jest.fn();
       config = {
-        api: {
-          branchExists: jest.fn(() => true),
-          deleteBranch: jest.fn(),
-          getBranchPr: jest.fn(),
-          getBranchStatus: jest.fn(),
-          isBranchStale: jest.fn(() => false),
-        },
+        ...defaultConfig,
+        api: { branchExists: jest.fn() },
+        logger,
+        upgrades: [{}],
       };
+      schedule.isScheduledNow.mockReturnValue(true);
     });
-    it('returns undefined if branch does not exist', async () => {
-      config.api.branchExists.mockReturnValue(false);
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined
-      );
-    });
-    it('returns branchName if no PR', async () => {
-      config.api.getBranchPr.mockReturnValue(null);
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName
-      );
-    });
-    it('returns branchName if does not need rebaseing', async () => {
-      config.api.getBranchPr.mockReturnValue({
-        isUnmergeable: false,
-      });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName
-      );
-    });
-    it('returns branchName if unmergeable and cannot rebase', async () => {
-      config.api.getBranchPr.mockReturnValue({
-        isUnmergeable: true,
-        canRebase: false,
-      });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName
-      );
-    });
-    it('returns undefined if unmergeable and can rebase', async () => {
-      config.api.getBranchPr.mockReturnValue({
-        isUnmergeable: true,
-        canRebase: true,
-      });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined
-      );
-    });
-    it('returns undefined if unmergeable and can rebase (gitlab)', async () => {
-      config.isGitLab = true;
-      config.api.getBranchPr.mockReturnValue({
-        isUnmergeable: true,
-        canRebase: true,
-      });
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined
-      );
-      expect(config.api.deleteBranch.mock.calls.length).toBe(1);
-    });
-    it('returns branchName if automerge branch-push and not stale', async () => {
-      config.automerge = true;
-      config.automergeType = 'branch-push';
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        branchName
-      );
-    });
-    it('returns undefined if automerge branch-push and stale', async () => {
-      config.automerge = true;
-      config.automergeType = 'branch-push';
-      config.api.isBranchStale.mockReturnValueOnce(true);
-      expect(await branchWorker.getParentBranch(branchName, config)).toBe(
-        undefined
-      );
-    });
-    it('returns branch if rebaseStalePrs enabled but cannot rebase', async () => {
-      config.rebaseStalePrs = true;
-      config.api.isBranchStale.mockReturnValueOnce(true);
-      config.api.getBranchPr.mockReturnValue({
-        isUnmergeable: true,
-        canRebase: false,
+    it('skips branch if not scheduled', async () => {
+      schedule.isScheduledNow.mockReturnValueOnce(false);
+      await branchWorker.processBranch(config);
+      expect(checkExisting.prAlreadyExisted.mock.calls).toHaveLength(0);
+    });
+    it('skips branch if closed PR found', async () => {
+      checkExisting.prAlreadyExisted.mockReturnValueOnce(true);
+      await branchWorker.processBranch(config);
+      expect(parent.getParentBranch.mock.calls.length).toBe(0);
+    });
+    it('returns if no branch exists', async () => {
+      packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([]);
+      lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
+        lockFileError: false,
+        updatedLockFiles: [],
       });
-      expect(await branchWorker.getParentBranch(branchName, config)).not.toBe(
-        undefined
-      );
-    });
-  });
-  describe('ensureBranch(config)', () => {
-    let config;
-    beforeEach(() => {
-      packageJsonHelper.setNewValue = jest.fn();
-      branchWorker.getParentBranch = jest.fn();
-      npm.getLockFile = jest.fn();
-      npm.maintainLockFile = jest.fn();
-      yarn.getLockFile = jest.fn();
-      yarn.maintainLockFile = jest.fn();
-      config = { ...defaultConfig };
-      config.api = {};
-      config.api.getFileContent = jest.fn();
-      config.api.branchExists = jest.fn();
-      config.api.commitFilesToBranch = jest.fn();
-      config.api.getFileContent.mockReturnValueOnce('old content');
-      config.api.getBranchStatus = jest.fn();
-      config.api.getBranchStatusCheck = jest.fn();
-      config.api.setBranchStatus = jest.fn();
-      config.tmpDir = { name: 'some-dir' };
-      config.depName = 'dummy';
-      config.currentVersion = '1.0.0';
-      config.newVersion = '1.1.0';
-      config.newVersionMajor = 1;
-      config.versions = {};
-      config.upgrades = [{ ...config }];
-    });
-    it('returns if new content matches old', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('old content');
       config.api.branchExists.mockReturnValueOnce(false);
-      expect(await branchWorker.ensureBranch(config)).toBe(false);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(0);
-      expect(yarn.getLockFile.mock.calls.length).toBe(0);
-    });
-    it('commits one file if no yarn lock or package-lock.json found', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.semanticCommits = true;
-      expect(await branchWorker.ensureBranch(config)).toBe(true);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(1);
-    });
-    it('returns true if automerging pr', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.automerge = true;
-      config.automergeType = 'pr';
-      expect(await branchWorker.ensureBranch(config)).toBe(true);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(1);
-      expect(config.api.setBranchStatus.mock.calls).toHaveLength(0);
-    });
-    it('sets branch status pending', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.unpublishSafe = true;
-      config.upgrades[0].unpublishable = true;
-      config.upgrades.push({ ...config });
-      config.upgrades[1].unpublishable = false;
-      expect(await branchWorker.ensureBranch(config)).toBe(true);
-      expect(config.api.setBranchStatus.mock.calls).toHaveLength(1);
-    });
-    it('skips branch status pending', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.unpublishSafe = true;
-      config.api.getBranchStatusCheck.mockReturnValueOnce('pending');
-      config.upgrades[0].unpublishable = true;
-      config.upgrades.push({ ...config });
-      config.upgrades[1].unpublishable = false;
-      expect(await branchWorker.ensureBranch(config)).toBe(true);
-      expect(config.api.setBranchStatus.mock.calls).toHaveLength(0);
-    });
-    it('skips branch status success if setting disabled', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.upgrades[0].unpublishable = true;
-      config.api.getBranchStatusCheck.mockReturnValueOnce('pending');
-      expect(await branchWorker.ensureBranch(config)).toBe(true);
-      expect(config.api.setBranchStatus.mock.calls).toHaveLength(1);
-    });
-    it('automerges successful branches', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.api.getBranchStatus.mockReturnValueOnce('success');
-      config.api.mergeBranch = jest.fn();
-      config.automerge = true;
-      config.automergeType = 'branch-push';
-      expect(await branchWorker.ensureBranch(config)).toBe(false);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(config.api.getBranchStatus.mock.calls.length).toBe(1);
-      expect(config.api.mergeBranch.mock).toMatchSnapshot();
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(1);
-    });
-    it('skips automerge if status not success', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.api.getBranchStatus.mockReturnValueOnce('pending');
-      config.api.mergeBranch = jest.fn();
-      config.automerge = true;
-      config.automergeType = 'branch-push';
-      expect(await branchWorker.ensureBranch(config)).toBe(true);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(config.api.getBranchStatus.mock.calls.length).toBe(1);
-      expect(config.api.mergeBranch.mock.calls.length).toBe(0);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(1);
-    });
-    it('throws if automerge throws', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      config.api.branchExists.mockReturnValueOnce(true);
-      config.api.getBranchStatus.mockReturnValueOnce('success');
-      config.automerge = true;
-      config.automergeType = 'branch-push';
-      config.api.mergeBranch = jest.fn(() => {
-        throw new Error('automerge failed');
+      await branchWorker.processBranch(config);
+      expect(commit.commitFilesToBranch.mock.calls).toHaveLength(1);
+    });
+    it('returns if branch automerged', async () => {
+      packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+      lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
+        lockFileError: false,
+        updatedLockFiles: [{}],
       });
-      let e;
-      try {
-        await branchWorker.ensureBranch(config);
-      } catch (err) {
-        e = err;
-      }
-      expect(e).toMatchSnapshot();
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(config.api.getBranchStatus.mock.calls.length).toBe(1);
-      expect(config.api.mergeBranch.mock).toMatchSnapshot();
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(1);
-    });
-    it('commits two files if yarn lock found', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      yarn.getLockFile.mockReturnValueOnce('non null response');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      await branchWorker.ensureBranch(config);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(2);
-    });
-    it('commits two files if package lock found', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      npm.getLockFile.mockReturnValueOnce('non null response');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      await branchWorker.ensureBranch(config);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(2);
-    });
-    it('commits three files if yarn lock and package lock found', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      npm.getLockFile.mockReturnValueOnce('non null response');
-      yarn.getLockFile.mockReturnValueOnce('non null response');
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      await branchWorker.ensureBranch(config);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(3);
-    });
-    it('throws an error if no yarn lock generation possible', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      yarn.getLockFile.mockImplementationOnce(() => {
-        throw new Error('yarn not found');
+      config.api.branchExists.mockReturnValueOnce(true);
+      automerge.tryBranchAutomerge.mockReturnValueOnce(true);
+      await branchWorker.processBranch(config);
+      expect(statusChecks.setUnpublishable.mock.calls).toHaveLength(1);
+      expect(automerge.tryBranchAutomerge.mock.calls).toHaveLength(1);
+      expect(prWorker.ensurePr.mock.calls).toHaveLength(0);
+    });
+    it('ensures PR and tries automerge', async () => {
+      packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+      lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
+        lockFileError: false,
+        updatedLockFiles: [{}],
       });
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      let err;
-      try {
-        await branchWorker.ensureBranch(config);
-      } catch (e) {
-        err = e;
-      }
-      expect(err.message).toBe('yarn not found');
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(0);
-      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(0);
-    });
-    it('throws an error if no package lock generation possible', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      npm.getLockFile.mockImplementationOnce(() => {
-        throw new Error('no package lock generated');
+      config.api.branchExists.mockReturnValueOnce(true);
+      automerge.tryBranchAutomerge.mockReturnValueOnce(false);
+      prWorker.ensurePr.mockReturnValueOnce({});
+      await branchWorker.processBranch(config);
+      expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
+      expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(1);
+    });
+    it('swallows branch errors', async () => {
+      packageFiles.getUpdatedPackageFiles.mockImplementationOnce(() => {
+        throw new Error('some error');
       });
-      packageJsonHelper.setNewValue.mockReturnValueOnce('new content');
-      let err;
-      try {
-        await branchWorker.ensureBranch(config);
-      } catch (e) {
-        err = e;
-      }
-      expect(err.message).toBe('no package lock generated');
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(1);
-      expect(yarn.getLockFile.mock.calls.length).toBe(1);
-      expect(npm.getLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(0);
-    });
-    it('maintains lock files if needing updates', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      yarn.maintainLockFile.mockReturnValueOnce('non null response');
-      npm.maintainLockFile.mockReturnValueOnce('non null response');
-      config.upgrades[0].type = 'lockFileMaintenance';
-      config.upgrades[0].hasYarnLock = true;
-      config.upgrades[0].hasPackageLock = true;
-      await branchWorker.ensureBranch(config);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(0);
-      expect(yarn.getLockFile.mock.calls.length).toBe(0);
-      expect(npm.getLockFile.mock.calls.length).toBe(0);
-      expect(yarn.maintainLockFile.mock.calls.length).toBe(1);
-      expect(npm.maintainLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls[0][1].length).toBe(2);
+      await branchWorker.processBranch(config);
     });
-    it('skips maintaining lock files if no updates', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      config.upgrades[0].type = 'lockFileMaintenance';
-      config.upgrades[0].hasYarnLock = true;
-      config.upgrades[0].hasPackageLock = true;
-      await branchWorker.ensureBranch(config);
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(0);
-      expect(yarn.getLockFile.mock.calls.length).toBe(0);
-      expect(npm.getLockFile.mock.calls.length).toBe(0);
-      expect(yarn.maintainLockFile.mock.calls.length).toBe(1);
-      expect(npm.maintainLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(0);
-    });
-    it('throws error if cannot maintain yarn.lock file', async () => {
-      branchWorker.getParentBranch.mockReturnValueOnce('dummy branch');
-      config.upgrades[0].type = 'lockFileMaintenance';
-      config.upgrades[0].hasYarnLock = true;
-      yarn.maintainLockFile.mockImplementationOnce(() => {
-        throw new Error('yarn not found');
+    it('throws and swallows branch errors', async () => {
+      packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+      lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
+        lockFileError: true,
+        updatedLockFiles: [{}],
       });
-      let err;
-      try {
-        await branchWorker.ensureBranch(config);
-      } catch (e) {
-        err = e;
-      }
-      expect(err.message).toMatchSnapshot();
-      expect(branchWorker.getParentBranch.mock.calls.length).toBe(1);
-      expect(packageJsonHelper.setNewValue.mock.calls.length).toBe(0);
-      expect(yarn.getLockFile.mock.calls.length).toBe(0);
-      expect(npm.getLockFile.mock.calls.length).toBe(0);
-      expect(yarn.maintainLockFile.mock.calls.length).toBe(1);
-      expect(config.api.commitFilesToBranch.mock.calls.length).toBe(0);
-    });
-  });
-  describe('processBranchUpgrades(upgrades)', () => {
-    let config;
-    beforeEach(() => {
-      config = { ...defaultConfig };
-      config.api = {
-        checkForClosedPr: jest.fn(),
-      };
-      config.logger = logger;
-      branchWorker.ensureBranch = jest.fn(() => true);
-      prWorker.ensurePr = jest.fn(() => true);
-      config.upgrades = [{ depName: 'a' }];
-    });
-    it('skips branch if not scheduled', async () => {
-      config.schedule = ['some-schedule'];
-      schedule.isScheduledNow.mockReturnValueOnce(false);
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(0);
-    });
-    it('returns immediately if closed PR found', async () => {
-      config.api.checkForClosedPr.mockReturnValue(true);
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(0);
-    });
-    it('returns if legacy closed major PR found', async () => {
-      config.branchName = 'renovate/a-2.x';
-      config.prTitle = 'Upgrade a to v2';
-      config.api.checkForClosedPr.mockReturnValueOnce(false);
-      config.api.checkForClosedPr.mockReturnValueOnce(true);
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(0);
-      expect(config.api.checkForClosedPr.mock.calls).toMatchSnapshot();
+      await branchWorker.processBranch(config);
     });
-    it('returns if legacy closed minor PR found', async () => {
-      config.branchName = 'renovate/a-2.x';
-      config.prTitle = 'Upgrade a to v2.1.0';
-      config.api.checkForClosedPr.mockReturnValueOnce(false);
-      config.api.checkForClosedPr.mockReturnValueOnce(true);
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(0);
-      expect(config.api.checkForClosedPr.mock.calls).toMatchSnapshot();
-    });
-    it('does not return immediately if recreateClosed true', async () => {
-      config.api.checkForClosedPr.mockReturnValue(true);
-      config.recreateClosed = true;
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(1);
-    });
-    it('pins', async () => {
-      config.type = 'pin';
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(1);
-    });
-    it('majors', async () => {
-      config.type = 'major';
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(1);
-    });
-    it('minors', async () => {
-      config.type = 'minor';
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(1);
-    });
-    it('handles semantic commits', async () => {
-      config.type = 'minor';
-      config.semanticCommits = true;
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(1);
-    });
-    it('handles errors', async () => {
-      config.api.checkForClosedPr = jest.fn(() => {
-        throw new Error('oops');
+    it('swallows pr errors', async () => {
+      packageFiles.getUpdatedPackageFiles.mockReturnValueOnce([{}]);
+      lockFiles.getUpdatedLockFiles.mockReturnValueOnce({
+        lockFileError: false,
+        updatedLockFiles: [{}],
       });
-      await branchWorker.processBranchUpgrades(config);
-      expect(branchWorker.ensureBranch.mock.calls.length).toBe(0);
-    });
-    it('handles known errors', async () => {
-      branchWorker.ensureBranch.mockImplementationOnce(() => {
-        throw Error('Error generating lock file');
+      config.api.branchExists.mockReturnValueOnce(true);
+      automerge.tryBranchAutomerge.mockReturnValueOnce(false);
+      prWorker.ensurePr.mockImplementationOnce(() => {
+        throw new Error('some error');
       });
-      await branchWorker.processBranchUpgrades(config);
+      await branchWorker.processBranch(config);
     });
   });
 });
diff --git a/test/workers/branch/lock-files.spec.js b/test/workers/branch/lock-files.spec.js
new file mode 100644
index 0000000000..d4231e658f
--- /dev/null
+++ b/test/workers/branch/lock-files.spec.js
@@ -0,0 +1,320 @@
+const fs = require('fs-extra');
+const lockFiles = require('../../../lib/workers/branch/lock-files');
+const defaultConfig = require('../../../lib/config/defaults').getConfig();
+const logger = require('../../_fixtures/logger');
+const npm = require('../../../lib/workers/branch/npm');
+const yarn = require('../../../lib/workers/branch/yarn');
+
+const {
+  hasPackageLock,
+  hasYarnLock,
+  determineLockFileDirs,
+  writeExistingFiles,
+  writeUpdatedPackageFiles,
+  getUpdatedLockFiles,
+} = lockFiles;
+
+describe('workers/branch/lock-files', () => {
+  describe('hasPackageLock', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        logger,
+      };
+    });
+    it('returns true if found and true', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          hasPackageLock: true,
+        },
+      ];
+      expect(hasPackageLock(config, 'package.json')).toBe(true);
+    });
+    it('returns false if found and false', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          hasPackageLock: true,
+        },
+        {
+          packageFile: 'backend/package.json',
+        },
+      ];
+      expect(hasPackageLock(config, 'backend/package.json')).toBe(false);
+    });
+    it('throws error if not found', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          hasPackageLock: true,
+        },
+        {
+          packageFile: 'backend/package.json',
+        },
+      ];
+      let e;
+      try {
+        hasPackageLock(config, 'frontend/package.json');
+      } catch (err) {
+        e = err;
+      }
+      expect(e).toBeDefined();
+    });
+  });
+  describe('hasYarnLock', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        logger,
+      };
+    });
+    it('returns true if found and true', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          hasYarnLock: true,
+        },
+      ];
+      expect(hasYarnLock(config, 'package.json')).toBe(true);
+    });
+    it('returns false if found and false', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          hasYarnLock: true,
+        },
+        {
+          packageFile: 'backend/package.json',
+        },
+      ];
+      expect(hasYarnLock(config, 'backend/package.json')).toBe(false);
+    });
+    it('throws error if not found', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          hasYarnLock: true,
+        },
+        {
+          packageFile: 'backend/package.json',
+        },
+      ];
+      let e;
+      try {
+        hasYarnLock(config, 'frontend/package.json');
+      } catch (err) {
+        e = err;
+      }
+      expect(e).toBeDefined();
+    });
+  });
+  describe('determineLockFileDirs', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        logger,
+        packageFiles: [
+          {
+            packageFile: 'package.json',
+            hasYarnLock: true,
+          },
+          {
+            packageFile: 'backend/package.json',
+            hasPackageLock: true,
+          },
+        ],
+      };
+    });
+    it('returns all directories if lock file maintenance', () => {
+      config.upgrades = [{ type: 'lockFileMaintenance' }];
+      const res = determineLockFileDirs(config);
+      expect(res).toMatchSnapshot();
+    });
+    it('returns directories from updated package files', () => {
+      config.upgrades = [{}];
+      config.updatedPackageFiles = [
+        {
+          name: 'package.json',
+          contents: 'some contents',
+        },
+        {
+          name: 'backend/package.json',
+          contents: 'some contents',
+        },
+      ];
+      const res = determineLockFileDirs(config);
+      expect(res).toMatchSnapshot();
+    });
+  });
+  describe('writeExistingFiles', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        logger,
+        tmpDir: { name: 'some-tmp-dir' },
+      };
+      fs.outputFile = jest.fn();
+      fs.remove = jest.fn();
+    });
+    it('returns if no packageFiles', async () => {
+      delete config.packageFiles;
+      await writeExistingFiles(config);
+      expect(fs.outputFile.mock.calls).toHaveLength(0);
+    });
+    it('writes files and removes files', async () => {
+      config.packageFiles = [
+        {
+          packageFile: 'package.json',
+          content: { name: 'package 1' },
+          npmrc: 'some npmrc',
+        },
+        {
+          packageFile: 'backend/package.json',
+          content: { name: 'package 2' },
+          yarnrc: 'some yarnrc',
+        },
+      ];
+      await writeExistingFiles(config);
+      expect(fs.outputFile.mock.calls).toMatchSnapshot();
+      expect(fs.outputFile.mock.calls).toHaveLength(4);
+      expect(fs.remove.mock.calls).toHaveLength(4);
+    });
+  });
+  describe('writeUpdatedPackageFiles', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        logger,
+        tmpDir: { name: 'some-tmp-dir' },
+      };
+      fs.outputFile = jest.fn();
+    });
+    it('returns if no updated packageFiles', async () => {
+      delete config.updatedPackageFiles;
+      await writeUpdatedPackageFiles(config);
+      expect(fs.outputFile.mock.calls).toHaveLength(0);
+    });
+    it('writes updated packageFiles', async () => {
+      config.updatedPackageFiles = [
+        {
+          name: 'package.json',
+          contents: 'raw contents',
+        },
+        {
+          name: 'backend/package.json',
+          contents: 'more raw contents',
+        },
+      ];
+      await writeUpdatedPackageFiles(config);
+      expect(fs.outputFile.mock.calls).toMatchSnapshot();
+      expect(fs.outputFile.mock.calls).toHaveLength(2);
+    });
+  });
+  describe('getUpdatedLockFiles', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        api: { getFileContent: jest.fn(() => 'some lock file contents') },
+        logger,
+        tmpDir: { name: 'some-tmp-dir' },
+      };
+      npm.generateLockFile = jest.fn();
+      npm.generateLockFile.mockReturnValue('some lock file contents');
+      yarn.generateLockFile = jest.fn();
+      yarn.generateLockFile.mockReturnValue('some lock file contents');
+      lockFiles.determineLockFileDirs = jest.fn();
+    });
+    it('returns no error and empty lockfiles if none updated', async () => {
+      lockFiles.determineLockFileDirs.mockReturnValueOnce({
+        packageLockFileDirs: [],
+        yarnLockFileDirs: [],
+      });
+      const res = await getUpdatedLockFiles(config);
+      expect(res).toMatchSnapshot();
+      expect(res.lockFileError).toBe(false);
+      expect(res.updatedLockFiles).toHaveLength(0);
+    });
+    it('tries multiple lock files', async () => {
+      lockFiles.determineLockFileDirs.mockReturnValueOnce({
+        packageLockFileDirs: ['a', 'b'],
+        yarnLockFileDirs: ['c', 'd'],
+      });
+      const res = await getUpdatedLockFiles(config);
+      expect(res).toMatchSnapshot();
+      expect(res.lockFileError).toBe(false);
+      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(npm.generateLockFile.mock.calls).toHaveLength(2);
+      expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
+      expect(config.api.getFileContent.mock.calls).toHaveLength(4);
+    });
+    it('sets error if receiving null', async () => {
+      lockFiles.determineLockFileDirs.mockReturnValueOnce({
+        packageLockFileDirs: ['a', 'b'],
+        yarnLockFileDirs: ['c', 'd'],
+      });
+      npm.generateLockFile.mockReturnValueOnce(null);
+      yarn.generateLockFile.mockReturnValueOnce(null);
+      const res = await getUpdatedLockFiles(config);
+      expect(res).toMatchSnapshot();
+      expect(res.lockFileError).toBe(true);
+      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(npm.generateLockFile.mock.calls).toHaveLength(2);
+      expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
+      expect(config.api.getFileContent.mock.calls).toHaveLength(2);
+    });
+    it('adds multiple lock files', async () => {
+      lockFiles.determineLockFileDirs.mockReturnValueOnce({
+        packageLockFileDirs: ['a', 'b'],
+        yarnLockFileDirs: ['c', 'd'],
+      });
+      npm.generateLockFile.mockReturnValueOnce('some new lock file contents');
+      yarn.generateLockFile.mockReturnValueOnce('some new lock file contents');
+      const res = await getUpdatedLockFiles(config);
+      expect(res).toMatchSnapshot();
+      expect(res.lockFileError).toBe(false);
+      expect(res.updatedLockFiles).toHaveLength(2);
+      expect(npm.generateLockFile.mock.calls).toHaveLength(2);
+      expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
+      expect(config.api.getFileContent.mock.calls).toHaveLength(4);
+    });
+    it('returns npm errors', async () => {
+      lockFiles.determineLockFileDirs.mockReturnValueOnce({
+        packageLockFileDirs: ['a', 'b'],
+        yarnLockFileDirs: ['c', 'd'],
+      });
+      npm.generateLockFile.mockImplementationOnce(() => {
+        throw new Error('some error');
+      });
+      const res = await getUpdatedLockFiles(config);
+      expect(res).toMatchSnapshot();
+      expect(res.lockFileError).toBe(true);
+      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(npm.generateLockFile.mock.calls).toHaveLength(1);
+      expect(yarn.generateLockFile.mock.calls).toHaveLength(0);
+      expect(config.api.getFileContent.mock.calls).toHaveLength(0);
+    });
+    it('returns yarn errors', async () => {
+      lockFiles.determineLockFileDirs.mockReturnValueOnce({
+        packageLockFileDirs: [],
+        yarnLockFileDirs: ['c', 'd'],
+      });
+      yarn.generateLockFile.mockImplementationOnce(() => {
+        throw new Error('some error');
+      });
+      const res = await getUpdatedLockFiles(config);
+      expect(res).toMatchSnapshot();
+      expect(res.lockFileError).toBe(true);
+      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(npm.generateLockFile.mock.calls).toHaveLength(0);
+      expect(yarn.generateLockFile.mock.calls).toHaveLength(1);
+      expect(config.api.getFileContent.mock.calls).toHaveLength(0);
+    });
+  });
+});
diff --git a/test/workers/branch/npm.spec.js b/test/workers/branch/npm.spec.js
index 023fd67c35..6467ee0885 100644
--- a/test/workers/branch/npm.spec.js
+++ b/test/workers/branch/npm.spec.js
@@ -7,111 +7,27 @@ jest.mock('child_process');
 const fs = require('fs-extra');
 const cp = require('child_process');
 
-const tmpDir = { name: 'some-dir' };
-
 describe('generateLockFile', () => {
-  fs.outputFile = jest.fn();
-  fs.readFileSync = jest.fn(() => 'package-lock-contents');
-  cp.spawnSync = jest.fn(() => ({
-    stdout: '',
-    stderror: '',
-  }));
   it('generates lock files', async () => {
-    const packageLock = await npmHelper.generateLockFile(
-      tmpDir.name,
-      {},
-      'npmrc-contents',
-      logger
-    );
-    expect(fs.outputFile.mock.calls.length).toEqual(2);
+    cp.spawnSync = jest.fn(() => ({
+      stdout: '',
+      stderror: '',
+    }));
+    fs.readFileSync = jest.fn(() => 'package-lock-contents');
+    const lockFile = await npmHelper.generateLockFile('some-dir', logger);
     expect(fs.readFileSync.mock.calls.length).toEqual(1);
-    expect(packageLock).toEqual('package-lock-contents');
-  });
-});
-describe('getLockFile', () => {
-  let api;
-  beforeEach(() => {
-    api = {
-      getFileContent: jest.fn(),
-    };
-  });
-  it('returns null if no existing package-lock.json', async () => {
-    api.getFileContent.mockReturnValueOnce(false);
-    expect(await npmHelper.getLockFile(tmpDir, 'package.json', '', api)).toBe(
-      null
-    );
-  });
-  it('returns package-lock.json file', async () => {
-    api.getFileContent.mockReturnValueOnce('Existing package-lock.json');
-    api.getFileContent.mockReturnValueOnce(null); // npmrc
-    npmHelper.generateLockFile = jest.fn();
-    npmHelper.generateLockFile.mockReturnValueOnce('New package-lock.json');
-    const packageLockFile = {
-      name: 'package-lock.json',
-      contents: 'New package-lock.json',
-    };
-    expect(
-      await npmHelper.getLockFile(tmpDir, 'package.json', '', api, '5.0.4')
-    ).toMatchObject(packageLockFile);
-  });
-  it('throws if no npm', async () => {
-    api.getFileContent.mockReturnValueOnce('Existing package-lock.json');
-    let e;
-    try {
-      await npmHelper.getLockFile(tmpDir, 'package.json', '', api, '');
-    } catch (err) {
-      e = err;
-    }
-    expect(e).toMatchSnapshot();
-  });
-  it('throws if wrong npm version', async () => {
-    api.getFileContent.mockReturnValueOnce('Existing package-lock.json');
-    let e;
-    try {
-      await npmHelper.getLockFile(tmpDir, 'package.json', '', api, '4.0.0');
-    } catch (err) {
-      e = err;
-    }
-    expect(e).toMatchSnapshot();
-  });
-});
-
-describe('maintainLockFile', () => {
-  let config;
-  beforeEach(() => {
-    config = { logger };
-    config.packageFile = 'package.json';
-    config.api = {
-      getFileContent: jest.fn(),
-    };
-    config.versions = {
-      npm: '5.3.0',
-    };
-    config.tmpDir = tmpDir;
-    config.api.getFileContent.mockReturnValueOnce('oldPackageContent');
-    npmHelper.getLockFile = jest.fn();
-  });
-  it('returns null if no file to maintain', async () => {
-    const packageLock = await npmHelper.maintainLockFile(config);
-    expect(config.api.getFileContent.mock.calls.length).toBe(2);
-    expect(packageLock).toEqual(null);
-  });
-  it('returns null if contents match', async () => {
-    config.api.getFileContent.mockReturnValueOnce('oldPackageLockContent');
-    npmHelper.getLockFile.mockReturnValueOnce({
-      contents: 'oldPackageLockContent',
+    expect(lockFile).toEqual('package-lock-contents');
+  });
+  it('catches errors', async () => {
+    cp.spawnSync = jest.fn(() => ({
+      stdout: '',
+      stderror: 'some-error',
+    }));
+    fs.readFileSync = jest.fn(() => {
+      throw new Error('not found');
     });
-    const packageLock = await npmHelper.maintainLockFile(config);
-    expect(config.api.getFileContent.mock.calls.length).toBe(2);
-    expect(packageLock).toEqual(null);
-  });
-  it('returns new package lock if contents differ', async () => {
-    config.api.getFileContent.mockReturnValueOnce('oldPackageLockContent');
-    npmHelper.getLockFile.mockReturnValueOnce({
-      contents: 'newPackageLockContent',
-    });
-    const packageLock = await npmHelper.maintainLockFile(config);
-    expect(config.api.getFileContent.mock.calls.length).toBe(2);
-    expect(packageLock).toEqual({ contents: 'newPackageLockContent' });
+    const lockFile = await npmHelper.generateLockFile('some-dir', logger);
+    expect(fs.readFileSync.mock.calls.length).toEqual(1);
+    expect(lockFile).toBe(null);
   });
 });
diff --git a/test/workers/branch/package-files.spec.js b/test/workers/branch/package-files.spec.js
new file mode 100644
index 0000000000..520145377a
--- /dev/null
+++ b/test/workers/branch/package-files.spec.js
@@ -0,0 +1,34 @@
+const packageJsonHelper = require('../../../lib/workers/branch/package-json');
+const {
+  getUpdatedPackageFiles,
+} = require('../../../lib/workers/branch/package-files');
+const defaultConfig = require('../../../lib/config/defaults').getConfig();
+const logger = require('../../_fixtures/logger');
+
+describe('workers/branch/package-files', () => {
+  describe('getUpdatedPackageFiles', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        api: { getFileContent: jest.fn() },
+        logger,
+      };
+      packageJsonHelper.setNewValue = jest.fn();
+    });
+    it('returns empty if lock file maintenance', async () => {
+      config.upgrades = [{ type: 'lockFileMaintenance' }];
+      const res = await getUpdatedPackageFiles(config);
+      expect(res).toHaveLength(0);
+    });
+    it('returns updated files', async () => {
+      config.upgrades = [{}, {}];
+      config.api.getFileContent.mockReturnValueOnce('old content 1');
+      config.api.getFileContent.mockReturnValueOnce('old content 2');
+      packageJsonHelper.setNewValue.mockReturnValueOnce('old content 1');
+      packageJsonHelper.setNewValue.mockReturnValueOnce('new content 2');
+      const res = await getUpdatedPackageFiles(config);
+      expect(res).toHaveLength(1);
+    });
+  });
+});
diff --git a/test/workers/branch/parent.spec.js b/test/workers/branch/parent.spec.js
new file mode 100644
index 0000000000..9c69709beb
--- /dev/null
+++ b/test/workers/branch/parent.spec.js
@@ -0,0 +1,95 @@
+const {
+  checkStale,
+  getParentBranch,
+} = require('../../../lib/workers/branch/parent');
+const logger = require('../../_fixtures/logger');
+
+describe('workers/branch/parent', () => {
+  describe('checkStale', () => {
+    it('returns true if rebaseStalePrs', () => {
+      const config = { rebaseStalePrs: true };
+      expect(checkStale(config)).toBe(true);
+    });
+    it('returns true if repoForceRebase', () => {
+      const config = { repoForceRebase: true };
+      expect(checkStale(config)).toBe(true);
+    });
+    it('returns true if automerge branch-push', () => {
+      const config = { automerge: true, automergeType: 'branch-push' };
+      expect(checkStale(config)).toBe(true);
+    });
+  });
+  describe('getParentBranch(config)', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        api: {
+          branchExists: jest.fn(() => true),
+          deleteBranch: jest.fn(),
+          getBranchPr: jest.fn(),
+          getBranchStatus: jest.fn(),
+          isBranchStale: jest.fn(() => false),
+        },
+        branchName: 'renovate/some-branch',
+        logger,
+      };
+    });
+    it('returns undefined if branch does not exist', async () => {
+      config.api.branchExists.mockReturnValue(false);
+      expect(await getParentBranch(config)).toBe(undefined);
+    });
+    it('returns branchName if no PR', async () => {
+      config.api.getBranchPr.mockReturnValue(null);
+      expect(await getParentBranch(config)).toBe(config.branchName);
+    });
+    it('returns branchName if does not need rebasing', async () => {
+      config.api.getBranchPr.mockReturnValue({
+        isUnmergeable: false,
+      });
+      expect(await getParentBranch(config)).toBe(config.branchName);
+    });
+    it('returns branchName if unmergeable and cannot rebase', async () => {
+      config.api.getBranchPr.mockReturnValue({
+        isUnmergeable: true,
+        canRebase: false,
+      });
+      expect(await getParentBranch(config)).toBe(config.branchName);
+    });
+    it('returns undefined if unmergeable and can rebase', async () => {
+      config.api.getBranchPr.mockReturnValue({
+        isUnmergeable: true,
+        canRebase: true,
+      });
+      expect(await getParentBranch(config)).toBe(undefined);
+    });
+    it('returns undefined if unmergeable and can rebase (gitlab)', async () => {
+      config.isGitLab = true;
+      config.api.getBranchPr.mockReturnValue({
+        isUnmergeable: true,
+        canRebase: true,
+      });
+      expect(await getParentBranch(config)).toBe(undefined);
+      expect(config.api.deleteBranch.mock.calls.length).toBe(1);
+    });
+    it('returns branchName if automerge branch-push and not stale', async () => {
+      config.automerge = true;
+      config.automergeType = 'branch-push';
+      expect(await getParentBranch(config)).toBe(config.branchName);
+    });
+    it('returns undefined if automerge branch-push and stale', async () => {
+      config.automerge = true;
+      config.automergeType = 'branch-push';
+      config.api.isBranchStale.mockReturnValueOnce(true);
+      expect(await getParentBranch(config)).toBe(undefined);
+    });
+    it('returns branch if rebaseStalePrs enabled but cannot rebase', async () => {
+      config.rebaseStalePrs = true;
+      config.api.isBranchStale.mockReturnValueOnce(true);
+      config.api.getBranchPr.mockReturnValue({
+        isUnmergeable: true,
+        canRebase: false,
+      });
+      expect(await getParentBranch(config)).not.toBe(undefined);
+    });
+  });
+});
diff --git a/test/workers/branch/status-checks.spec.js b/test/workers/branch/status-checks.spec.js
new file mode 100644
index 0000000000..fc1ea200ac
--- /dev/null
+++ b/test/workers/branch/status-checks.spec.js
@@ -0,0 +1,52 @@
+const {
+  setUnpublishable,
+} = require('../../../lib/workers/branch/status-checks');
+const defaultConfig = require('../../../lib/config/defaults').getConfig();
+const logger = require('../../_fixtures/logger');
+
+describe('workers/branch/status-checks', () => {
+  describe('setUnpublishable', () => {
+    let config;
+    beforeEach(() => {
+      config = {
+        ...defaultConfig,
+        api: { getBranchStatusCheck: jest.fn(), setBranchStatus: jest.fn() },
+        logger,
+        upgrades: [],
+      };
+    });
+    it('defaults to unpublishable', async () => {
+      await setUnpublishable(config);
+      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(config.api.setBranchStatus.mock.calls.length).toBe(0);
+    });
+    it('finds unpublishable true', async () => {
+      config.upgrades = [{ unpublishable: true }];
+      await setUnpublishable(config);
+      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(config.api.setBranchStatus.mock.calls.length).toBe(0);
+    });
+    it('removes status check', async () => {
+      config.upgrades = [{ unpublishable: true }];
+      config.api.getBranchStatusCheck.mockReturnValueOnce('pending');
+      await setUnpublishable(config);
+      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(config.api.setBranchStatus.mock.calls.length).toBe(1);
+    });
+    it('finds unpublishable false and sets status', async () => {
+      config.unpublishSafe = true;
+      config.upgrades = [{ unpublishable: true }, { unpublishable: false }];
+      await setUnpublishable(config);
+      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(config.api.setBranchStatus.mock.calls.length).toBe(1);
+    });
+    it('finds unpublishable false and skips status', async () => {
+      config.unpublishSafe = true;
+      config.upgrades = [{ unpublishable: true }, { unpublishable: false }];
+      config.api.getBranchStatusCheck.mockReturnValueOnce('pending');
+      await setUnpublishable(config);
+      expect(config.api.getBranchStatusCheck.mock.calls.length).toBe(1);
+      expect(config.api.setBranchStatus.mock.calls.length).toBe(0);
+    });
+  });
+});
diff --git a/test/workers/branch/yarn.spec.js b/test/workers/branch/yarn.spec.js
index 4877659df4..9331834208 100644
--- a/test/workers/branch/yarn.spec.js
+++ b/test/workers/branch/yarn.spec.js
@@ -7,90 +7,27 @@ jest.mock('child_process');
 const fs = require('fs-extra');
 const cp = require('child_process');
 
-const tmpDir = { name: 'some-dir' };
-
 describe('generateLockFile', () => {
-  fs.outputFile = jest.fn();
-  fs.readFileSync = jest.fn(() => 'yarn-lock-contents');
-  cp.spawnSync = jest.fn(() => ({
-    stdout: '',
-    stderror: '',
-  }));
   it('generates lock files', async () => {
-    const yarnLock = await yarnHelper.generateLockFile(
-      tmpDir.name,
-      {},
-      'npmrc-contents',
-      'yarnrc-contents',
-      logger
-    );
-    expect(fs.outputFile.mock.calls.length).toEqual(3);
+    cp.spawnSync = jest.fn(() => ({
+      stdout: '',
+      stderror: '',
+    }));
+    fs.readFileSync = jest.fn(() => 'yarn-lock-contents');
+    const yarnLock = await yarnHelper.generateLockFile('some-dir', logger);
     expect(fs.readFileSync.mock.calls.length).toEqual(1);
     expect(yarnLock).toEqual('yarn-lock-contents');
   });
-});
-describe('getLockFile', () => {
-  let api;
-  beforeEach(() => {
-    api = {
-      getFileContent: jest.fn(),
-    };
-  });
-  it('returns null if no existing yarn.lock', async () => {
-    api.getFileContent.mockReturnValueOnce(false);
-    expect(
-      await yarnHelper.getLockFile(tmpDir, 'package.json', '', api, '')
-    ).toBe(null);
-  });
-  it('returns yarn.lock file', async () => {
-    api.getFileContent.mockReturnValueOnce('Existing yarn.lock');
-    api.getFileContent.mockReturnValueOnce(null); // npmrc
-    api.getFileContent.mockReturnValueOnce(null); // yarnrc
-    yarnHelper.generateLockFile = jest.fn();
-    yarnHelper.generateLockFile.mockReturnValueOnce('New yarn.lock');
-    const yarnLockFile = {
-      name: 'yarn.lock',
-      contents: 'New yarn.lock',
-    };
-    expect(
-      await yarnHelper.getLockFile(tmpDir, 'package.json', '', api, '')
-    ).toMatchObject(yarnLockFile);
-  });
-});
-
-describe('maintainLockFile', () => {
-  let config;
-  beforeEach(() => {
-    config = { logger };
-    config.packageFile = 'package.json';
-    config.api = {
-      getFileContent: jest.fn(),
-    };
-    config.tmpDir = tmpDir;
-    config.api.getFileContent.mockReturnValueOnce('oldPackageContent');
-    yarnHelper.getLockFile = jest.fn();
-  });
-  it('returns null if no file to maintain', async () => {
-    const yarnLock = await yarnHelper.maintainLockFile(config);
-    expect(config.api.getFileContent.mock.calls.length).toBe(3);
-    expect(yarnLock).toEqual(null);
-  });
-  it('returns null if contents match', async () => {
-    config.api.getFileContent.mockReturnValueOnce('oldYarnLockContent');
-    yarnHelper.getLockFile.mockReturnValueOnce({
-      contents: 'oldYarnLockContent',
+  it('catches errors', async () => {
+    cp.spawnSync = jest.fn(() => ({
+      stdout: '',
+      stderror: 'some-error',
+    }));
+    fs.readFileSync = jest.fn(() => {
+      throw new Error('not found');
     });
-    const yarnLock = await yarnHelper.maintainLockFile(config);
-    expect(config.api.getFileContent.mock.calls.length).toBe(2);
-    expect(yarnLock).toEqual(null);
-  });
-  it('returns new yarn lock if contents differ', async () => {
-    config.api.getFileContent.mockReturnValueOnce('oldYarnLockContent');
-    yarnHelper.getLockFile.mockReturnValueOnce({
-      contents: 'newYarnLockContent',
-    });
-    const yarnLock = await yarnHelper.maintainLockFile(config);
-    expect(config.api.getFileContent.mock.calls.length).toBe(2);
-    expect(yarnLock).toEqual({ contents: 'newYarnLockContent' });
+    const lockFile = await yarnHelper.generateLockFile('some-dir', logger);
+    expect(fs.readFileSync.mock.calls.length).toEqual(1);
+    expect(lockFile).toBe(null);
   });
 });
diff --git a/test/workers/package-file/index.spec.js b/test/workers/package-file/index.spec.js
index d3a9bdab79..826d8f16ea 100644
--- a/test/workers/package-file/index.spec.js
+++ b/test/workers/package-file/index.spec.js
@@ -13,12 +13,11 @@ describe('packageFileWorker', () => {
     beforeEach(() => {
       config = {
         ...defaultConfig,
-        ...{
-          packageFile: 'package.json',
-          content: {},
-          repoIsOnboarded: true,
-          logger,
-        },
+        packageFile: 'package.json',
+        content: {},
+        repoIsOnboarded: true,
+        npmrc: '# nothing',
+        logger,
       };
       depTypeWorker.renovateDepType.mockReturnValue([]);
     });
diff --git a/test/workers/package/__snapshots__/index.spec.js.snap b/test/workers/package/__snapshots__/index.spec.js.snap
index 9cf9f49539..fcd381fbb5 100644
--- a/test/workers/package/__snapshots__/index.spec.js.snap
+++ b/test/workers/package/__snapshots__/index.spec.js.snap
@@ -5,6 +5,7 @@ Array [
   "description",
   "timezone",
   "schedule",
+  "packageFiles",
   "branchPrefix",
   "semanticCommits",
   "semanticPrefix",
@@ -38,6 +39,7 @@ Array [
   "description",
   "timezone",
   "schedule",
+  "packageFiles",
   "branchPrefix",
   "semanticCommits",
   "semanticPrefix",
@@ -148,6 +150,7 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
     "labels": Array [],
     "lazyGrouping": true,
     "message": "Failed to look up dependency",
+    "packageFiles": Array [],
     "prBody": "This {{#if isGitHub}}Pull{{else}}Merge{{/if}} Request {{#if isRollback}}rolls back{{else}}updates{{/if}} dependency {{#if repositoryUrl}}[{{depName}}]({{repositoryUrl}}){{else}}\`{{depName}}\`{{/if}} from \`v{{currentVersion}}\` to \`v{{newVersion}}\`{{#if isRollback}}. This is necessary and important because \`v{{currentVersion}}\` cannot be found in the npm registry - probably because of it being unpublished.{{/if}}
 {{#if releases.length}}
 
@@ -300,6 +303,7 @@ This {{#if isGitHub}}PR{{else}}MR{{/if}} has been generated by [Renovate Bot](ht
     "labels": Array [],
     "lazyGrouping": true,
     "message": "Failed to look up dependency",
+    "packageFiles": Array [],
     "prBody": "This {{#if isGitHub}}Pull{{else}}Merge{{/if}} Request {{#if isRollback}}rolls back{{else}}updates{{/if}} dependency {{#if repositoryUrl}}[{{depName}}]({{repositoryUrl}}){{else}}\`{{depName}}\`{{/if}} from \`v{{currentVersion}}\` to \`v{{newVersion}}\`{{#if isRollback}}. This is necessary and important because \`v{{currentVersion}}\` cannot be found in the npm registry - probably because of it being unpublished.{{/if}}
 {{#if releases.length}}
 
diff --git a/test/workers/pr/__snapshots__/index.spec.js.snap b/test/workers/pr/__snapshots__/index.spec.js.snap
index 8e53afe7c2..619d8f2a1d 100644
--- a/test/workers/pr/__snapshots__/index.spec.js.snap
+++ b/test/workers/pr/__snapshots__/index.spec.js.snap
@@ -1,6 +1,6 @@
 // Jest Snapshot v1, https://goo.gl/fbAQLP
 
-exports[`workers/pr ensurePr(upgrades, logger) should add assignees and reviewers to new PR 1`] = `
+exports[`workers/pr ensurePr should add assignees and reviewers to new PR 1`] = `
 Array [
   Array [
     undefined,
@@ -12,7 +12,7 @@ Array [
 ]
 `;
 
-exports[`workers/pr ensurePr(upgrades, logger) should add assignees and reviewers to new PR 2`] = `
+exports[`workers/pr ensurePr should add assignees and reviewers to new PR 2`] = `
 Array [
   Array [
     undefined,
@@ -24,7 +24,7 @@ Array [
 ]
 `;
 
-exports[`workers/pr ensurePr(upgrades, logger) should return modified existing PR 1`] = `
+exports[`workers/pr ensurePr should return modified existing PR 1`] = `
 Object {
   "body": "<p>This Pull Request updates dependency <a href=\\"https://github.com/renovateapp/dummy\\">dummy</a> from <code>v1.0.0</code> to <code>v1.1.0</code></p>
 <h3 id=\\"commits\\">Commits</h3>
@@ -42,4 +42,4 @@ Object {
 }
 `;
 
-exports[`workers/pr ensurePr(upgrades, logger) should return unmodified existing PR 1`] = `Array []`;
+exports[`workers/pr ensurePr should return unmodified existing PR 1`] = `Array []`;
diff --git a/test/workers/pr/index.spec.js b/test/workers/pr/index.spec.js
index 0d6ab11427..14b97df3b8 100644
--- a/test/workers/pr/index.spec.js
+++ b/test/workers/pr/index.spec.js
@@ -33,16 +33,19 @@ describe('workers/pr', () => {
     let config;
     let pr;
     beforeEach(() => {
-      config = { ...defaultConfig };
+      config = {
+        ...defaultConfig,
+        api: {
+          mergePr: jest.fn(),
+          getBranchStatus: jest.fn(),
+        },
+        logger,
+      };
       pr = {
         head: {
           ref: 'somebranch',
         },
       };
-      config.api = {
-        mergePr: jest.fn(),
-        getBranchStatus: jest.fn(),
-      };
     });
     it('should not automerge if not configured', async () => {
       await prWorker.checkAutoMerge(pr, config, logger);
@@ -85,14 +88,17 @@ describe('workers/pr', () => {
       expect(config.api.mergePr.mock.calls.length).toBe(0);
     });
   });
-  describe('ensurePr(upgrades, logger)', () => {
+  describe('ensurePr', () => {
     let config;
     let existingPr;
     beforeEach(() => {
-      config = { ...defaultConfig };
-      config.api = {
-        createPr: jest.fn(() => ({ displayNumber: 'New Pull Request' })),
-        getBranchStatus: jest.fn(),
+      config = {
+        ...defaultConfig,
+        api: {
+          createPr: jest.fn(() => ({ displayNumber: 'New Pull Request' })),
+          getBranchStatus: jest.fn(),
+        },
+        logger,
       };
       config.upgrades = [config];
       existingPr = {
@@ -115,38 +121,38 @@ describe('workers/pr', () => {
       config.api.getBranchPr = jest.fn(() => {
         throw new Error('oops');
       });
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('should return null if waiting for success', async () => {
       config.api.getBranchStatus = jest.fn(() => 'failed');
       config.prCreation = 'status-success';
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('should create PR if success', async () => {
       config.api.getBranchStatus = jest.fn(() => 'success');
       config.api.getBranchPr = jest.fn();
       config.prCreation = 'status-success';
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should return null if waiting for not pending', async () => {
       config.api.getBranchStatus = jest.fn(() => 'pending');
       config.prCreation = 'not-pending';
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('should create PR if no longer pending', async () => {
       config.api.getBranchStatus = jest.fn(() => 'failed');
       config.api.getBranchPr = jest.fn();
       config.prCreation = 'not-pending';
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should create new branch if none exists', async () => {
       config.api.getBranchPr = jest.fn();
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(
         config.api.createPr.mock.calls[0][2].indexOf('Errors</h3>')
@@ -159,7 +165,7 @@ describe('workers/pr', () => {
       config.api.getBranchPr = jest.fn();
       config.api.addLabels = jest.fn();
       config.labels = ['foo'];
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(config.api.addLabels.mock.calls.length).toBe(1);
     });
@@ -167,7 +173,7 @@ describe('workers/pr', () => {
       config.api.getBranchPr = jest.fn();
       config.api.addLabels = jest.fn();
       config.labels = [];
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(config.api.addLabels.mock.calls.length).toBe(0);
     });
@@ -177,7 +183,7 @@ describe('workers/pr', () => {
       config.api.addReviewers = jest.fn();
       config.assignees = ['@foo', 'bar'];
       config.reviewers = ['baz', '@boo'];
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(config.api.addAssignees.mock.calls.length).toBe(1);
       expect(config.api.addAssignees.mock.calls).toMatchSnapshot();
@@ -193,7 +199,7 @@ describe('workers/pr', () => {
       config.assignees = ['@foo', 'bar'];
       config.reviewers = ['baz', '@boo'];
       config.logger = logger;
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(config.api.addAssignees.mock.calls.length).toBe(1);
       expect(config.api.addReviewers.mock.calls.length).toBe(1);
@@ -207,14 +213,16 @@ describe('workers/pr', () => {
       config.assignees = ['@foo', 'bar'];
       config.reviewers = ['baz', '@boo'];
       config.logger = logger;
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(config.api.addAssignees.mock.calls.length).toBe(1);
       expect(config.api.addReviewers.mock.calls.length).toBe(1);
     });
     it('should display errors and warnings', async () => {
       config.api.getBranchPr = jest.fn();
-      const pr = await prWorker.ensurePr(config, logger, [{}], [{}]);
+      config.errors = [{}];
+      config.warnings = [{}];
+      const pr = await prWorker.ensurePr(config);
       expect(
         config.api.createPr.mock.calls[0][2].indexOf('Errors</h3>')
       ).not.toEqual(-1);
@@ -230,7 +238,7 @@ describe('workers/pr', () => {
       config.assignees = ['bar'];
       config.reviewers = ['baz'];
       config.automerge = true;
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
       expect(config.api.addAssignees.mock.calls.length).toBe(0);
       expect(config.api.addReviewers.mock.calls.length).toBe(0);
@@ -245,7 +253,7 @@ describe('workers/pr', () => {
       config.api.getBranchPr = jest.fn(() => existingPr);
       config.api.updatePr = jest.fn();
       config.semanticPrefix = '';
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(config.api.updatePr.mock.calls).toMatchSnapshot();
       expect(config.api.updatePr.mock.calls.length).toBe(0);
       expect(pr).toMatchObject(existingPr);
@@ -257,7 +265,7 @@ describe('workers/pr', () => {
       config.isGitHub = true;
       config.api.getBranchPr = jest.fn(() => existingPr);
       config.api.updatePr = jest.fn();
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchSnapshot();
     });
     it('should create PR if branch automerging failed', async () => {
@@ -265,20 +273,20 @@ describe('workers/pr', () => {
       config.automergeType = 'branch-push';
       config.api.getBranchStatus.mockReturnValueOnce('failure');
       config.api.getBranchPr = jest.fn();
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
     it('should return null if branch automerging not failed', async () => {
       config.automerge = true;
       config.automergeType = 'branch-push';
       config.api.getBranchStatus.mockReturnValueOnce('pending');
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toBe(null);
     });
     it('handles duplicate upgrades', async () => {
       config.api.getBranchPr = jest.fn();
       config.upgrades.push(config.upgrades[0]);
-      const pr = await prWorker.ensurePr(config, logger);
+      const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });
     });
   });
diff --git a/test/workers/repository/__snapshots__/apis.spec.js.snap b/test/workers/repository/__snapshots__/apis.spec.js.snap
index 4a7267da3b..140daabb65 100644
--- a/test/workers/repository/__snapshots__/apis.spec.js.snap
+++ b/test/workers/repository/__snapshots__/apis.spec.js.snap
@@ -97,14 +97,18 @@ Array [
     "errors": Array [],
     "hasPackageLock": true,
     "hasYarnLock": true,
+    "npmrc": null,
     "packageFile": "package.json",
     "warnings": Array [],
+    "yarnrc": null,
   },
   Object {
     "content": Object {},
     "hasPackageLock": false,
     "hasYarnLock": false,
+    "npmrc": null,
     "packageFile": "a/package.json",
+    "yarnrc": null,
   },
 ]
 `;
diff --git a/test/workers/repository/apis.spec.js b/test/workers/repository/apis.spec.js
index 32dc8191ad..30998c1eb3 100644
--- a/test/workers/repository/apis.spec.js
+++ b/test/workers/repository/apis.spec.js
@@ -125,7 +125,7 @@ describe('workers/repository/apis', () => {
       expect(res.platform).toEqual('github');
       expect(githubApi.initRepo.mock.calls.length).toBe(1);
       expect(gitlabApi.initRepo.mock.calls.length).toBe(0);
-      expect(npmApi.setNpmrc.mock.calls.length).toBe(1);
+      expect(npmApi.setNpmrc.mock.calls.length).toBe(0);
     });
     it('returns gitlab api', async () => {
       const config = { logger, platform: 'gitlab' };
@@ -133,7 +133,7 @@ describe('workers/repository/apis', () => {
       expect(res.platform).toEqual('gitlab');
       expect(githubApi.initRepo.mock.calls.length).toBe(0);
       expect(gitlabApi.initRepo.mock.calls.length).toBe(1);
-      expect(npmApi.setNpmrc.mock.calls.length).toBe(1);
+      expect(npmApi.setNpmrc.mock.calls.length).toBe(0);
     });
     it('throws if unknown platform', async () => {
       const config = { platform: 'foo' };
@@ -274,7 +274,7 @@ describe('workers/repository/apis', () => {
       config = {
         packageFiles: ['package.json', { packageFile: 'a/package.json' }],
         api: {
-          getFileContent: jest.fn(),
+          getFileContent: jest.fn(() => null),
           getFileJson: jest.fn(),
         },
         logger,
@@ -287,8 +287,12 @@ describe('workers/repository/apis', () => {
     it('includes files with content', async () => {
       config.api.getFileJson.mockReturnValueOnce({ renovate: {} });
       config.api.getFileJson.mockReturnValueOnce({});
-      config.api.getFileContent.mockReturnValueOnce(true);
-      config.api.getFileContent.mockReturnValueOnce(true);
+      config.api.getFileContent.mockReturnValueOnce(null);
+      config.api.getFileContent.mockReturnValueOnce(null);
+      config.api.getFileContent.mockReturnValueOnce('some-content');
+      config.api.getFileContent.mockReturnValueOnce('some-content');
+      config.api.getFileContent.mockReturnValueOnce(null);
+      config.api.getFileContent.mockReturnValueOnce(null);
       const res = await apis.resolvePackageFiles(config);
       expect(res.packageFiles).toHaveLength(2);
       expect(res.packageFiles).toMatchSnapshot();
diff --git a/test/workers/repository/index.spec.js b/test/workers/repository/index.spec.js
index 5a83539750..c452d92fcc 100644
--- a/test/workers/repository/index.spec.js
+++ b/test/workers/repository/index.spec.js
@@ -20,7 +20,7 @@ describe('workers/repository', () => {
       onboarding.ensurePr = jest.fn();
       upgrades.determineRepoUpgrades = jest.fn(() => []);
       upgrades.branchifyUpgrades = jest.fn(() => ({ branchUpgrades: {} }));
-      branchWorker.processBranchUpgrades = jest.fn(() => 'some-branch');
+      branchWorker.processBranch = jest.fn(() => 'some-branch');
       config = {
         lockFileMaintenance: true,
         api: {
@@ -91,7 +91,7 @@ describe('workers/repository', () => {
       });
       await repositoryWorker.renovateRepository(config);
       expect(onboarding.getOnboardingStatus.mock.calls.length).toBe(1);
-      expect(branchWorker.processBranchUpgrades.mock.calls.length).toBe(0);
+      expect(branchWorker.processBranch.mock.calls.length).toBe(0);
       expect(onboarding.ensurePr.mock.calls.length).toBe(1);
       expect(config.logger.error.mock.calls.length).toBe(0);
     });
@@ -115,7 +115,7 @@ describe('workers/repository', () => {
       });
       await repositoryWorker.renovateRepository(config);
       expect(onboarding.getOnboardingStatus.mock.calls.length).toBe(1);
-      expect(branchWorker.processBranchUpgrades.mock.calls.length).toBe(0);
+      expect(branchWorker.processBranch.mock.calls.length).toBe(0);
       expect(onboarding.ensurePr.mock.calls.length).toBe(1);
       expect(config.logger.error.mock.calls.length).toBe(0);
     });
@@ -138,7 +138,7 @@ describe('workers/repository', () => {
       });
       await repositoryWorker.renovateRepository(config);
       expect(onboarding.getOnboardingStatus.mock.calls.length).toBe(1);
-      expect(branchWorker.processBranchUpgrades.mock.calls.length).toBe(0);
+      expect(branchWorker.processBranch.mock.calls.length).toBe(0);
       expect(onboarding.ensurePr.mock.calls.length).toBe(1);
       expect(config.logger.error.mock.calls.length).toBe(0);
     });
@@ -150,7 +150,7 @@ describe('workers/repository', () => {
         upgrades: [{}, {}, {}],
       });
       await repositoryWorker.renovateRepository(config);
-      expect(branchWorker.processBranchUpgrades.mock.calls.length).toBe(3);
+      expect(branchWorker.processBranch.mock.calls.length).toBe(3);
       expect(config.logger.error.mock.calls.length).toBe(0);
     });
     it('swallows errors', async () => {
diff --git a/yarn.lock b/yarn.lock
index 5669f2132f..5a2c3bb46f 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -1147,10 +1147,6 @@ ecdsa-sig-formatter@1.0.9:
     base64url "^2.0.0"
     safe-buffer "^5.0.1"
 
-eol@0.9.0:
-  version "0.9.0"
-  resolved "https://registry.yarnpkg.com/eol/-/eol-0.9.0.tgz#5c33e25b7001bbb69ca6947593d3332e36e04f6a"
-
 errno@^0.1.4:
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/errno/-/errno-0.1.4.tgz#b896e23a9e5e8ba33871fc996abd3635fc9a1c7d"
-- 
GitLab