diff --git a/lib/config/definitions.js b/lib/config/definitions.js
index a6ac4aa16186aca1eafa5ce8644d267309147546..feddb01d631aa553fba96a2a7e8b44a962f26699 100644
--- a/lib/config/definitions.js
+++ b/lib/config/definitions.js
@@ -1468,6 +1468,7 @@ const options = [
       'prEditNotification',
       'branchAutomergeFailure',
       'lockFileErrors',
+      'artifactErrors',
       'deprecationWarningIssues',
       'onboardingClose',
     ],
diff --git a/lib/manager/cargo/artifacts.js b/lib/manager/cargo/artifacts.js
index 6826f3d5408de3bc22b827fc38efc90c8fabc95e..723409b2d1d8f7acaafe0512168213ad185b9d97 100644
--- a/lib/manager/cargo/artifacts.js
+++ b/lib/manager/cargo/artifacts.js
@@ -16,7 +16,7 @@ async function getArtifacts(cargoTomlFileName) {
       'Failed to update Cargo lock file'
     );
     return {
-      lockFileError: {
+      artifactError: {
         lockFile: cargoLockFileName,
         stderr: err.message,
       },
diff --git a/lib/manager/composer/artifacts.js b/lib/manager/composer/artifacts.js
index 54d0bf88bbc801cb7468620ea846291000f7eed4..5c3d893d674055e2cfb030e12622ded02f59805f 100644
--- a/lib/manager/composer/artifacts.js
+++ b/lib/manager/composer/artifacts.js
@@ -172,7 +172,7 @@ async function getArtifacts(
       );
     }
     return {
-      lockFileError: {
+      artifactError: {
         lockFile: lockFileName,
         stderr: err.message,
       },
diff --git a/lib/manager/gomod/artifacts.js b/lib/manager/gomod/artifacts.js
index ad1f0cc4d524f6a5a91dc6395180293d1fb9960f..f21f604dfb15239aa57412084c9edd5063ba65d3 100644
--- a/lib/manager/gomod/artifacts.js
+++ b/lib/manager/gomod/artifacts.js
@@ -102,7 +102,7 @@ async function getArtifacts(
   } catch (err) {
     logger.warn({ err, message: err.message }, 'Failed to update go.sum');
     return {
-      lockFileError: {
+      artifactError: {
         lockFile: sumFileName,
         stderr: err.message,
       },
diff --git a/lib/manager/npm/post-update/index.js b/lib/manager/npm/post-update/index.js
index 4e5579ef40eff6364667ece0ae58c122f54bc155..922aa79c379133bb32d49ffb8ae3987d0b3db652 100644
--- a/lib/manager/npm/post-update/index.js
+++ b/lib/manager/npm/post-update/index.js
@@ -317,14 +317,14 @@ async function writeUpdatedPackageFiles(config) {
 // istanbul ignore next
 async function getAdditionalFiles(config, packageFiles) {
   logger.trace({ config }, 'getAdditionalFiles');
-  const lockFileErrors = [];
-  const updatedLockFiles = [];
+  const artifactErrors = [];
+  const updatedArtifacts = [];
   if (!(packageFiles.npm && packageFiles.npm.length)) {
-    return { lockFileErrors, updatedLockFiles };
+    return { artifactErrors, updatedArtifacts };
   }
   if (!config.updateLockFiles) {
     logger.info('Skipping lock file generation');
-    return { lockFileErrors, updatedLockFiles };
+    return { artifactErrors, updatedArtifacts };
   }
   logger.debug('Getting updated lock files');
   if (
@@ -333,7 +333,7 @@ async function getAdditionalFiles(config, packageFiles) {
     (await platform.branchExists(config.branchName))
   ) {
     logger.debug('Skipping lockFileMaintenance update');
-    return { lockFileErrors, updatedLockFiles };
+    return { artifactErrors, updatedArtifacts };
   }
   const dirs = module.exports.determineLockFileDirs(config, packageFiles);
   logger.debug({ dirs }, 'lock file dirs');
@@ -399,7 +399,7 @@ async function getAdditionalFiles(config, packageFiles) {
           }
         }
       }
-      lockFileErrors.push({
+      artifactErrors.push({
         lockFile,
         stderr: res.stderr,
       });
@@ -410,7 +410,7 @@ async function getAdditionalFiles(config, packageFiles) {
       );
       if (res.lockFile !== existingContent) {
         logger.debug(`${lockFile} needs updating`);
-        updatedLockFiles.push({
+        updatedArtifacts.push({
           name: lockFile,
           contents: res.lockFile.replace(new RegExp(`${token}`, 'g'), ''),
         });
@@ -452,7 +452,7 @@ async function getAdditionalFiles(config, packageFiles) {
           /* eslint-enable no-useless-escape */
         }
       }
-      lockFileErrors.push({
+      artifactErrors.push({
         lockFile,
         stderr: res.stderr,
       });
@@ -463,7 +463,7 @@ async function getAdditionalFiles(config, packageFiles) {
       );
       if (res.lockFile !== existingContent) {
         logger.debug('yarn.lock needs updating');
-        updatedLockFiles.push({
+        updatedArtifacts.push({
           name: lockFileName,
           contents: res.lockFile,
         });
@@ -498,7 +498,7 @@ async function getAdditionalFiles(config, packageFiles) {
           }
         }
       }
-      lockFileErrors.push({
+      artifactErrors.push({
         lockFile,
         stderr: res.stderr,
       });
@@ -509,7 +509,7 @@ async function getAdditionalFiles(config, packageFiles) {
       );
       if (res.lockFile !== existingContent) {
         logger.debug('shrinkwrap.yaml needs updating');
-        updatedLockFiles.push({
+        updatedArtifacts.push({
           name: lockFile,
           contents: res.lockFile,
         });
@@ -576,7 +576,7 @@ async function getAdditionalFiles(config, packageFiles) {
           throw new Error('registry-failure');
         }
       }
-      lockFileErrors.push({
+      artifactErrors.push({
         lockFile,
         stderr: res.stderr,
       });
@@ -596,7 +596,7 @@ async function getAdditionalFiles(config, packageFiles) {
             const newContent = await fs.readFile(lockFilePath, 'utf8');
             if (newContent !== existingContent) {
               logger.debug('File is updated: ' + lockFilePath);
-              updatedLockFiles.push({
+              updatedArtifacts.push({
                 name: filename,
                 contents: newContent,
               });
@@ -616,5 +616,5 @@ async function getAdditionalFiles(config, packageFiles) {
     }
   }
 
-  return { lockFileErrors, updatedLockFiles };
+  return { artifactErrors, updatedArtifacts };
 }
diff --git a/lib/manager/pipenv/artifacts.js b/lib/manager/pipenv/artifacts.js
index abe0f1329d328a49805fb85ef18dea21e58000b4..1dd4a10e36a929e813723bccae4321d93f99b97c 100644
--- a/lib/manager/pipenv/artifacts.js
+++ b/lib/manager/pipenv/artifacts.js
@@ -95,7 +95,7 @@ async function getArtifacts(
   } catch (err) {
     logger.warn({ err, message: err.message }, 'Failed to update Pipfile.lock');
     return {
-      lockFileError: {
+      artifactError: {
         lockFile: lockFileName,
         stderr: err.message,
       },
diff --git a/lib/workers/branch/commit.js b/lib/workers/branch/commit.js
index 3fcd4a330c5f5c91100cecfd337991aff7b4292a..3737c1f6581a9a9689a7f0e5d04a727926efae37 100644
--- a/lib/workers/branch/commit.js
+++ b/lib/workers/branch/commit.js
@@ -6,7 +6,7 @@ module.exports = {
 
 async function commitFilesToBranch(config) {
   const updatedFiles = config.updatedPackageFiles.concat(
-    config.updatedLockFiles
+    config.updatedArtifacts
   );
   if (is.nonEmptyArray(updatedFiles)) {
     logger.debug(`${updatedFiles.length} file(s) to commit`);
diff --git a/lib/workers/branch/get-updated.js b/lib/workers/branch/get-updated.js
index 9f88217102bc5e14607092fd69e514b748fd08e3..58967385785a806c2c404afdd0ecb0dd4ecfe68e 100644
--- a/lib/workers/branch/get-updated.js
+++ b/lib/workers/branch/get-updated.js
@@ -56,8 +56,8 @@ async function getUpdatedPackageFiles(config) {
     name,
     contents: updatedFileContents[name],
   }));
-  const updatedLockFiles = [];
-  const lockFileErrors = [];
+  const updatedArtifacts = [];
+  const artifactErrors = [];
   for (const packageFile of updatedPackageFiles) {
     const manager = packageFileManagers[packageFile.name];
     const updatedDeps = packageFileUpdatedDeps[packageFile.name];
@@ -70,11 +70,11 @@ async function getUpdatedPackageFiles(config) {
         config
       );
       if (res) {
-        const { file, lockFileError } = res;
+        const { file, artifactError } = res;
         if (file) {
-          updatedLockFiles.push(file);
-        } else if (lockFileError) {
-          lockFileErrors.push(lockFileError);
+          updatedArtifacts.push(file);
+        } else if (artifactError) {
+          artifactErrors.push(artifactError);
         }
       }
     }
@@ -82,7 +82,7 @@ async function getUpdatedPackageFiles(config) {
   return {
     parentBranch: config.parentBranch, // Need to overwrite original config
     updatedPackageFiles,
-    updatedLockFiles,
-    lockFileErrors,
+    updatedArtifacts,
+    artifactErrors,
   };
 }
diff --git a/lib/workers/branch/index.js b/lib/workers/branch/index.js
index 206fa5d2c778f14e007626b79ebd3720f87e4c1d..684b34316bbc97d5e1445503078257a06575786b 100644
--- a/lib/workers/branch/index.js
+++ b/lib/workers/branch/index.js
@@ -203,8 +203,8 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
     logger.debug(`Using parentBranch: ${config.parentBranch}`);
     const res = await getUpdatedPackageFiles(config);
     // istanbul ignore if
-    if (res.lockFileErrors && config.lockFileErrors) {
-      res.lockFileErrors = config.lockFileErrors.concat(res.lockFileErrors);
+    if (res.artifactErrors && config.artifactErrors) {
+      res.artifactErrors = config.artifactErrors.concat(res.artifactErrors);
     }
     Object.assign(config, res);
     if (config.updatedPackageFiles && config.updatedPackageFiles.length) {
@@ -215,21 +215,21 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
       logger.debug('No package files need updating');
     }
     const additionalFiles = await getAdditionalFiles(config, packageFiles);
-    config.lockFileErrors = (config.lockFileErrors || []).concat(
-      additionalFiles.lockFileErrors
+    config.artifactErrors = (config.artifactErrors || []).concat(
+      additionalFiles.artifactErrors
     );
-    config.updatedLockFiles = (config.updatedLockFiles || []).concat(
-      additionalFiles.updatedLockFiles
+    config.updatedArtifacts = (config.updatedArtifacts || []).concat(
+      additionalFiles.updatedArtifacts
     );
-    if (config.updatedLockFiles && config.updatedLockFiles.length) {
+    if (config.updatedArtifacts && config.updatedArtifacts.length) {
       logger.debug(
-        { updatedLockFiles: config.updatedLockFiles.map(f => f.name) },
-        `Updated ${config.updatedLockFiles.length} lock files`
+        { updatedArtifacts: config.updatedArtifacts.map(f => f.name) },
+        `Updated ${config.updatedArtifacts.length} lock files`
       );
     } else {
       logger.debug('No updated lock files in branch');
     }
-    if (config.lockFileErrors && config.lockFileErrors.length) {
+    if (config.artifactErrors && config.artifactErrors.length) {
       if (config.releaseTimestamp) {
         logger.debug(`Branch timestamp: ` + config.releaseTimestamp);
         const releaseTimestamp = DateTime.fromISO(config.releaseTimestamp);
@@ -355,28 +355,36 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
     const pr = await prWorker.ensurePr(config);
     // TODO: ensurePr should check for automerge itself
     if (pr) {
-      const topic = ':warning: Lock file problem';
-      if (config.lockFileErrors && config.lockFileErrors.length) {
+      const topic = ':warning: Artifact update problem';
+      if (config.artifactErrors && config.artifactErrors.length) {
         logger.warn(
-          { lockFileErrors: config.lockFileErrors },
-          'lockFileErrors'
+          { artifactErrors: config.artifactErrors },
+          'artifactErrors'
         );
         let content = `${appName} failed to update `;
         content +=
-          config.lockFileErrors.length > 1 ? 'lock files' : 'a lock file';
-        content += '. You probably do not want to merge this PR as-is.';
-        content += `\n\n:recycle: ${appName} will retry this branch, including lockfile, only when one of the following happens:\n\n`;
+          config.artifactErrors.length > 1 ? 'artifacts' : 'an artifact';
+        content +=
+          ' related to this branch. You probably do not want to merge this PR as-is.';
+        content += `\n\n:recycle: ${appName} will retry this branch, including artifacts, only when one of the following happens:\n\n`;
         content +=
           ' - any of the package files in this branch needs updating, or \n';
         content += ' - the branch becomes conflicted, or\n';
+        content +=
+          ' - you check the rebase/retry checkbox if found above, or\n';
         content +=
           ' - you rename this PR\'s title to start with "rebase!" to trigger it manually';
-        content += '\n\nThe lock file failure details are included below:\n\n';
-        config.lockFileErrors.forEach(error => {
-          content += `##### ${error.lockFile}\n\n`;
+        content += '\n\nThe artifact failure details are included below:\n\n';
+        config.artifactErrors.forEach(error => {
+          content += `##### File name: ${error.lockFile}\n\n`;
           content += `\`\`\`\n${error.stderr}\n\`\`\`\n\n`;
         });
-        if (!config.suppressNotifications.includes('lockFileErrors')) {
+        if (
+          !(
+            config.suppressNotifications.includes('artifactErrors') ||
+            config.suppressNotifications.includes('lockFileErrors')
+          )
+        ) {
           // istanbul ignore if
           if (config.dryRun) {
             logger.info(
@@ -385,10 +393,15 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
             );
           } else {
             await platform.ensureComment(pr.number, topic, content);
+            // TODO: remove this soon once they're all cleared out
+            await platform.ensureCommentRemoval(
+              pr.number,
+              ':warning: Lock file problem'
+            );
           }
         }
-        const context = `${appSlug}/lock-files`;
-        const description = 'Lock file update failure';
+        const context = `${appSlug}/artifacts`;
+        const description = 'Artifact file update failure';
         const state = 'failure';
         const existingState = await platform.getBranchStatusCheck(
           config.branchName,
@@ -412,7 +425,7 @@ async function processBranch(branchConfig, prHourlyLimitReached, packageFiles) {
           }
         }
       } else {
-        if (config.updatedLockFiles && config.updatedLockFiles.length) {
+        if (config.updatedArtifacts && config.updatedArtifacts.length) {
           // istanbul ignore if
           if (config.dryRun) {
             logger.info(
diff --git a/lib/workers/pr/index.js b/lib/workers/pr/index.js
index 80d6392d74c6b583490d8b313a43918938a001f6..f7c34de6d95cf5ebf59cdfe6e325cf191a446d05 100644
--- a/lib/workers/pr/index.js
+++ b/lib/workers/pr/index.js
@@ -19,8 +19,8 @@ async function ensurePr(prConfig) {
   }
   config.upgrades = [];
 
-  if (config.lockFileErrors && config.lockFileErrors.length) {
-    logger.debug('Forcing PR because of lock file errors');
+  if (config.artifactErrors && config.artifactErrors.length) {
+    logger.debug('Forcing PR because of artifact errors');
     config.forcePr = true;
   }
 
diff --git a/test/manager/cargo/__snapshots__/artifacts.spec.js.snap b/test/manager/cargo/__snapshots__/artifacts.spec.js.snap
index 9ab65d6d65bc49d07d5e3e3b923efa12cca392c9..f8966d6004b43be27b985b8e494943ab066e0b19 100644
--- a/test/manager/cargo/__snapshots__/artifacts.spec.js.snap
+++ b/test/manager/cargo/__snapshots__/artifacts.spec.js.snap
@@ -2,7 +2,7 @@
 
 exports[`cargo.getArtifacts() catches errors 1`] = `
 Object {
-  "lockFileError": Object {
+  "artifactError": Object {
     "lockFile": undefined,
     "stderr": "Cannot read property 'replace' of undefined",
   },
diff --git a/test/manager/composer/__snapshots__/artifacts.spec.js.snap b/test/manager/composer/__snapshots__/artifacts.spec.js.snap
index 912776a0896b0493f7fc78cd89a5d1ae525f2c2e..d5576599a94b239d1fea4f1e32934f115096e004 100644
--- a/test/manager/composer/__snapshots__/artifacts.spec.js.snap
+++ b/test/manager/composer/__snapshots__/artifacts.spec.js.snap
@@ -2,7 +2,7 @@
 
 exports[`.getArtifacts() catches errors 1`] = `
 Object {
-  "lockFileError": Object {
+  "artifactError": Object {
     "lockFile": "composer.lock",
     "stderr": "not found",
   },
@@ -11,7 +11,7 @@ Object {
 
 exports[`.getArtifacts() catches unmet requirements errors 1`] = `
 Object {
-  "lockFileError": Object {
+  "artifactError": Object {
     "lockFile": "composer.lock",
     "stderr": "fooYour requirements could not be resolved to an installable set of packages.bar",
   },
diff --git a/test/manager/gomod/__snapshots__/artifacts.spec.js.snap b/test/manager/gomod/__snapshots__/artifacts.spec.js.snap
index cf4fc1bc4bebb7659b3be97b468ce54319899a63..15e0db1f79abc2d69d474530dda33be222957119 100644
--- a/test/manager/gomod/__snapshots__/artifacts.spec.js.snap
+++ b/test/manager/gomod/__snapshots__/artifacts.spec.js.snap
@@ -2,7 +2,7 @@
 
 exports[`.getArtifacts() catches errors 1`] = `
 Object {
-  "lockFileError": Object {
+  "artifactError": Object {
     "lockFile": "go.sum",
     "stderr": "This update totally doesnt work",
   },
diff --git a/test/manager/pipenv/__snapshots__/artifacts.spec.js.snap b/test/manager/pipenv/__snapshots__/artifacts.spec.js.snap
index 7c50564902f60324e3d5ec6b34838cb45fa572bc..444dc17b12f9e029128178ee1c1c80a0465e56a3 100644
--- a/test/manager/pipenv/__snapshots__/artifacts.spec.js.snap
+++ b/test/manager/pipenv/__snapshots__/artifacts.spec.js.snap
@@ -2,7 +2,7 @@
 
 exports[`.getArtifacts() catches errors 1`] = `
 Object {
-  "lockFileError": Object {
+  "artifactError": Object {
     "lockFile": "Pipfile.lock",
     "stderr": "not found",
   },
diff --git a/test/workers/branch/__snapshots__/get-updated.spec.js.snap b/test/workers/branch/__snapshots__/get-updated.spec.js.snap
index 19e15b5cb9e871d150dca9d06454b98ddde4fe5c..ef9ad00feaf863f3f00c619bad12a38abd766d2f 100644
--- a/test/workers/branch/__snapshots__/get-updated.spec.js.snap
+++ b/test/workers/branch/__snapshots__/get-updated.spec.js.snap
@@ -2,9 +2,9 @@
 
 exports[`workers/branch/get-updated getUpdatedPackageFiles() handles content change 1`] = `
 Object {
-  "lockFileErrors": Array [],
+  "artifactErrors": Array [],
   "parentBranch": undefined,
-  "updatedLockFiles": Array [],
+  "updatedArtifacts": Array [],
   "updatedPackageFiles": Array [
     Object {
       "contents": "some new content",
@@ -16,23 +16,23 @@ Object {
 
 exports[`workers/branch/get-updated getUpdatedPackageFiles() handles empty 1`] = `
 Object {
-  "lockFileErrors": Array [],
+  "artifactErrors": Array [],
   "parentBranch": undefined,
-  "updatedLockFiles": Array [],
+  "updatedArtifacts": Array [],
   "updatedPackageFiles": Array [],
 }
 `;
 
 exports[`workers/branch/get-updated getUpdatedPackageFiles() handles lock file errors 1`] = `
 Object {
-  "lockFileErrors": Array [
+  "artifactErrors": Array [
     Object {
       "name": "composer.lock",
       "stderr": "some error",
     },
   ],
   "parentBranch": undefined,
-  "updatedLockFiles": Array [],
+  "updatedArtifacts": Array [],
   "updatedPackageFiles": Array [
     Object {
       "contents": "some new content",
@@ -44,9 +44,9 @@ Object {
 
 exports[`workers/branch/get-updated getUpdatedPackageFiles() handles lock files 1`] = `
 Object {
-  "lockFileErrors": Array [],
+  "artifactErrors": Array [],
   "parentBranch": undefined,
-  "updatedLockFiles": Array [
+  "updatedArtifacts": Array [
     Object {
       "contents": "some contents",
       "name": "composer.json",
diff --git a/test/workers/branch/commit.spec.js b/test/workers/branch/commit.spec.js
index 0f9bd621e5d8967a147034f96607fa8a00eee8d9..04ced66b35e4ad7810ae11a6cb89bfb456808047 100644
--- a/test/workers/branch/commit.spec.js
+++ b/test/workers/branch/commit.spec.js
@@ -13,7 +13,7 @@ describe('workers/branch/automerge', () => {
         semanticCommitType: 'a',
         semanticCommitScope: 'b',
         updatedPackageFiles: [],
-        updatedLockFiles: [],
+        updatedArtifacts: [],
       };
       jest.resetAllMocks();
       platform.commitFilesToBranch.mockReturnValueOnce('created');
diff --git a/test/workers/branch/get-updated.spec.js b/test/workers/branch/get-updated.spec.js
index 1f70d2e856e098475ab85c12d2b196a228b20272..f1766028c31bc9d01d726f73eccd366137bae22d 100644
--- a/test/workers/branch/get-updated.spec.js
+++ b/test/workers/branch/get-updated.spec.js
@@ -65,7 +65,7 @@ describe('workers/branch/get-updated', () => {
       });
       composer.updateDependency.mockReturnValue('some new content');
       composer.getArtifacts.mockReturnValue({
-        lockFileError: {
+        artifactError: {
           name: 'composer.lock',
           stderr: 'some error',
         },
diff --git a/test/workers/branch/index.spec.js b/test/workers/branch/index.spec.js
index 147597dabe05aedbb8dceb1d222cce949d09db6e..48015c01ae15207a20b1d827eb0a326d28b93f36 100644
--- a/test/workers/branch/index.spec.js
+++ b/test/workers/branch/index.spec.js
@@ -151,8 +151,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [],
-        updatedLockFiles: [],
+        artifactErrors: [],
+        updatedArtifacts: [],
       });
       platform.branchExists.mockReturnValue(false);
       expect(await branchWorker.processBranch(config, true)).toEqual(
@@ -164,8 +164,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [],
-        updatedLockFiles: [],
+        artifactErrors: [],
+        updatedArtifacts: [],
       });
       platform.branchExists.mockReturnValueOnce(false);
       expect(await branchWorker.processBranch(config)).toEqual('no-work');
@@ -175,8 +175,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [],
-        updatedLockFiles: [{}],
+        artifactErrors: [],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('automerged');
@@ -190,8 +190,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [],
-        updatedLockFiles: [{}],
+        artifactErrors: [],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
@@ -207,8 +207,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [{}],
-        updatedLockFiles: [{}],
+        artifactErrors: [{}],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
@@ -216,7 +216,7 @@ describe('workers/branch', () => {
       prWorker.checkAutoMerge.mockReturnValueOnce(true);
       await branchWorker.processBranch(config);
       expect(platform.ensureComment.mock.calls).toHaveLength(1);
-      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
+      // expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
     });
@@ -225,8 +225,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [{}],
-        updatedLockFiles: [{}],
+        artifactErrors: [{}],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
@@ -235,7 +235,7 @@ describe('workers/branch', () => {
       config.releaseTimestamp = '2018-04-26T05:15:51.877Z';
       await branchWorker.processBranch(config);
       expect(platform.ensureComment.mock.calls).toHaveLength(1);
-      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
+      // expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
     });
@@ -244,8 +244,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [{}],
-        updatedLockFiles: [{}],
+        artifactErrors: [{}],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
@@ -254,7 +254,7 @@ describe('workers/branch', () => {
       config.releaseTimestamp = new Date().toISOString();
       await branchWorker.processBranch(config);
       expect(platform.ensureComment.mock.calls).toHaveLength(1);
-      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
+      // expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
     });
@@ -263,8 +263,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [{}],
-        updatedLockFiles: [{}],
+        artifactErrors: [{}],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(false);
       automerge.tryBranchAutomerge.mockReturnValueOnce('failed');
@@ -284,8 +284,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [{}],
-        updatedLockFiles: [{}],
+        artifactErrors: [{}],
+        updatedArtifacts: [{}],
       });
       config.recreateClosed = true;
       platform.branchExists.mockReturnValueOnce(true);
@@ -294,7 +294,7 @@ describe('workers/branch', () => {
       prWorker.checkAutoMerge.mockReturnValueOnce(true);
       await branchWorker.processBranch(config);
       expect(platform.ensureComment.mock.calls).toHaveLength(1);
-      expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
+      // expect(platform.ensureCommentRemoval.mock.calls).toHaveLength(0);
       expect(prWorker.ensurePr.mock.calls).toHaveLength(1);
       expect(prWorker.checkAutoMerge.mock.calls).toHaveLength(0);
     });
@@ -309,8 +309,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [{}],
-        updatedLockFiles: [{}],
+        artifactErrors: [{}],
+        updatedArtifacts: [{}],
       });
       await branchWorker.processBranch(config);
     });
@@ -319,8 +319,8 @@ describe('workers/branch', () => {
         updatedPackageFiles: [{}],
       });
       npmPostExtract.getAdditionalFiles.mockReturnValueOnce({
-        lockFileErrors: [],
-        updatedLockFiles: [{}],
+        artifactErrors: [],
+        updatedArtifacts: [{}],
       });
       platform.branchExists.mockReturnValueOnce(true);
       automerge.tryBranchAutomerge.mockReturnValueOnce(false);
diff --git a/test/workers/branch/lock-files/__snapshots__/index.spec.js.snap b/test/workers/branch/lock-files/__snapshots__/index.spec.js.snap
index 308642846fe6b26c48968704cc946cfcbfcadd7e..09f1a6b4bd8e6ac424510c35e0e519420aee388e 100644
--- a/test/workers/branch/lock-files/__snapshots__/index.spec.js.snap
+++ b/test/workers/branch/lock-files/__snapshots__/index.spec.js.snap
@@ -2,14 +2,14 @@
 
 exports[`manager/npm/post-update getAdditionalFiles returns no error and empty lockfiles if lock file maintenance exists 1`] = `
 Object {
-  "lockFileErrors": Array [],
-  "updatedLockFiles": Array [],
+  "artifactErrors": Array [],
+  "updatedArtifacts": Array [],
 }
 `;
 
 exports[`manager/npm/post-update getAdditionalFiles returns no error and empty lockfiles if updateLockFiles false 1`] = `
 Object {
-  "lockFileErrors": Array [],
-  "updatedLockFiles": Array [],
+  "artifactErrors": Array [],
+  "updatedArtifacts": Array [],
 }
 `;
diff --git a/test/workers/branch/lock-files/index.spec.js b/test/workers/branch/lock-files/index.spec.js
index c07a9c9ea84f6ef60557c2ea0a9cb200291fc007..972e5e8c62990b7ff05eb91681b3a6f60098906a 100644
--- a/test/workers/branch/lock-files/index.spec.js
+++ b/test/workers/branch/lock-files/index.spec.js
@@ -315,8 +315,8 @@ describe('manager/npm/post-update', () => {
       config.updateLockFiles = false;
       const res = await getAdditionalFiles(config, { npm: [{}] });
       expect(res).toMatchSnapshot();
-      expect(res.lockFileErrors).toHaveLength(0);
-      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(res.artifactErrors).toHaveLength(0);
+      expect(res.updatedArtifacts).toHaveLength(0);
     });
     it('returns no error and empty lockfiles if lock file maintenance exists', async () => {
       config.updateType = 'lockFileMaintenance';
@@ -324,8 +324,8 @@ describe('manager/npm/post-update', () => {
       platform.branchExists.mockReturnValueOnce(true);
       const res = await getAdditionalFiles(config, { npm: [{}] });
       expect(res).toMatchSnapshot();
-      expect(res.lockFileErrors).toHaveLength(0);
-      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(res.artifactErrors).toHaveLength(0);
+      expect(res.updatedArtifacts).toHaveLength(0);
     });
     /*
     it('returns no error and empty lockfiles if none updated', async () => {
@@ -338,8 +338,8 @@ describe('manager/npm/post-update', () => {
       });
       const res = await getAdditionalFiles(config);
       expect(res).toMatchSnapshot();
-      expect(res.lockFileErrors).toHaveLength(0);
-      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(res.artifactErrors).toHaveLength(0);
+      expect(res.updatedArtifacts).toHaveLength(0);
     });
     it('tries multiple lock files', async () => {
       lockFiles.determineLockFileDirs.mockReturnValueOnce({
@@ -351,8 +351,8 @@ describe('manager/npm/post-update', () => {
       });
       const res = await getAdditionalFiles(config);
       expect(res).toMatchSnapshot();
-      expect(res.lockFileErrors).toHaveLength(0);
-      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(res.artifactErrors).toHaveLength(0);
+      expect(res.updatedArtifacts).toHaveLength(0);
       expect(npm.generateLockFile.mock.calls).toHaveLength(3);
       expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
       expect(platform.getFile.mock.calls).toHaveLength(7);
@@ -396,8 +396,8 @@ describe('manager/npm/post-update', () => {
       yarn.generateLockFile.mockReturnValueOnce({ error: true });
       pnpm.generateLockFile.mockReturnValueOnce({ error: true });
       const res = await getAdditionalFiles(config);
-      expect(res.lockFileErrors).toHaveLength(3);
-      expect(res.updatedLockFiles).toHaveLength(0);
+      expect(res.artifactErrors).toHaveLength(3);
+      expect(res.updatedArtifacts).toHaveLength(0);
       expect(npm.generateLockFile.mock.calls).toHaveLength(3);
       expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
       expect(platform.getFile.mock.calls).toHaveLength(4);
@@ -414,8 +414,8 @@ describe('manager/npm/post-update', () => {
       yarn.generateLockFile.mockReturnValueOnce('some new lock file contents');
       pnpm.generateLockFile.mockReturnValueOnce('some new lock file contents');
       const res = await getAdditionalFiles(config);
-      expect(res.lockFileErrors).toHaveLength(0);
-      expect(res.updatedLockFiles).toHaveLength(3);
+      expect(res.artifactErrors).toHaveLength(0);
+      expect(res.updatedArtifacts).toHaveLength(3);
       expect(npm.generateLockFile.mock.calls).toHaveLength(3);
       expect(yarn.generateLockFile.mock.calls).toHaveLength(2);
       expect(platform.getFile.mock.calls).toHaveLength(7);
diff --git a/test/workers/pr/index.spec.js b/test/workers/pr/index.spec.js
index 49cf97a44bf614f870322c8c83324af7de57bff1..dc20f71e7d946ca3d72d2f9b3d681869cf0693c0 100644
--- a/test/workers/pr/index.spec.js
+++ b/test/workers/pr/index.spec.js
@@ -367,11 +367,11 @@ describe('workers/pr', () => {
       expect(platform.createPr.mock.calls[0]).toMatchSnapshot();
       existingPr.body = platform.createPr.mock.calls[0][2];
     });
-    it('should create PR if waiting for not pending but lockFileErrors', async () => {
+    it('should create PR if waiting for not pending but artifactErrors', async () => {
       platform.getBranchStatus.mockReturnValueOnce('pending');
       platform.getBranchLastCommitTime.mockImplementationOnce(() => new Date());
       config.prCreation = 'not-pending';
-      config.lockFileErrors = [{}];
+      config.artifactErrors = [{}];
       config.platform = 'gitlab';
       const pr = await prWorker.ensurePr(config);
       expect(pr).toMatchObject({ displayNumber: 'New Pull Request' });