From 1a25ac6fc386af8d2b4022d547fc6c874229e61e Mon Sep 17 00:00:00 2001
From: RahulGautamSingh <rahultesnik@gmail.com>
Date: Mon, 22 Aug 2022 14:31:48 +0530
Subject: [PATCH] feat: use cached configHash to prevent branch updates
 (#16661)

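Compute a per-branch fingerprint from the branch config (via
safe-stable-stringify) combined with the hashes of the managers involved,
persist it in the repository cache as `branchFingerprint`, and skip the
expensive update work in `processBranch()` when the cached fingerprint is
unchanged. In outline (a simplified sketch of the logic added below, not a
verbatim excerpt):

    // in writeUpdates(), for each branch:
    const branchFingerprint = hasha([stringify(branch), branchManagersFingerprint]);
    branch.skipBranchUpdate = canSkipBranchUpdateCheck(branchCache, branchFingerprint);
    // in processBranch(): package files, artifacts and the commit are only
    // refreshed when !(config.reuseExistingBranch && config.skipBranchUpdate)

`processBranch()` now also returns the resulting `commitSha`, so the caller
only refreshes the cached fingerprint when a commit was actually made.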
---
 lib/util/cache/repository/types.ts            |   1 +
 lib/workers/repository/cache.ts               |   2 +
 lib/workers/repository/process/write.spec.ts  | 161 +++++++++-
 lib/workers/repository/process/write.ts       |  58 +++-
 .../repository/update/branch/index.spec.ts    | 230 +++++++++-----
 lib/workers/repository/update/branch/index.ts | 285 ++++++++++--------
 lib/workers/types.ts                          |   2 +
 7 files changed, 533 insertions(+), 206 deletions(-)

diff --git a/lib/util/cache/repository/types.ts b/lib/util/cache/repository/types.ts
index 79f8cf40b5..ceda2315f9 100644
--- a/lib/util/cache/repository/types.ts
+++ b/lib/util/cache/repository/types.ts
@@ -30,6 +30,7 @@ export interface BranchCache {
   sha: string | null;
   parentSha: string | null;
   upgrades: BranchUpgradeCache[];
+  branchFingerprint?: string;
 }
 
 export interface RepoCacheData {
diff --git a/lib/workers/repository/cache.ts b/lib/workers/repository/cache.ts
index c651af91a7..5211d57668 100644
--- a/lib/workers/repository/cache.ts
+++ b/lib/workers/repository/cache.ts
@@ -71,6 +71,7 @@ async function generateBranchCache(
     const upgrades: BranchUpgradeCache[] = branch.upgrades
       ? branch.upgrades.map(generateBranchUpgradeCache)
       : [];
+    const branchFingerprint = branch.branchFingerprint;
     return {
       branchName,
       sha,
@@ -79,6 +80,7 @@ async function generateBranchCache(
       automerge,
       isModified,
       upgrades,
+      branchFingerprint,
     };
   } catch (error) {
     const err = error.err || error; // external host error nests err
diff --git a/lib/workers/repository/process/write.spec.ts b/lib/workers/repository/process/write.spec.ts
index b94cb734ba..3ea1ee7f50 100644
--- a/lib/workers/repository/process/write.spec.ts
+++ b/lib/workers/repository/process/write.spec.ts
@@ -1,22 +1,30 @@
+import is from '@sindresorhus/is';
+import hasha from 'hasha';
 import {
   RenovateConfig,
   getConfig,
   git,
+  logger,
   mocked,
   partial,
 } from '../../../../test/util';
 import { GlobalConfig } from '../../../config/global';
 import { addMeta } from '../../../logger';
+import { hashMap } from '../../../modules/manager';
+import * as _repoCache from '../../../util/cache/repository';
+import type { BranchCache } from '../../../util/cache/repository/types';
 import { Limit, isLimitReached } from '../../global/limits';
-import { BranchConfig, BranchResult } from '../../types';
+import { BranchConfig, BranchResult, BranchUpgradeConfig } from '../../types';
 import * as _branchWorker from '../update/branch';
 import * as _limits from './limits';
-import { writeUpdates } from './write';
+import { canSkipBranchUpdateCheck, writeUpdates } from './write';
 
 jest.mock('../../../util/git');
+jest.mock('../../../util/cache/repository');
 
 const branchWorker = mocked(_branchWorker);
 const limits = mocked(_limits);
+const repoCache = mocked(_repoCache);
 
 branchWorker.processBranch = jest.fn();
 
@@ -33,13 +41,20 @@ beforeEach(() => {
 describe('workers/repository/process/write', () => {
   describe('writeUpdates()', () => {
     it('stops after automerge', async () => {
-      const branches: BranchConfig[] = [
-        {},
-        {},
-        { automergeType: 'pr-comment', ignoreTests: true },
-        {},
-        {},
-      ] as never;
+      const branches = partial<BranchConfig[]>([
+        { branchName: 'test_branch', manager: 'npm', upgrades: [] },
+        { branchName: 'test_branch', manager: 'npm', upgrades: [] },
+        {
+          branchName: 'test_branch',
+          manager: 'npm',
+          automergeType: 'pr-comment',
+          ignoreTests: true,
+          upgrades: [],
+        },
+        { branchName: 'test_branch', manager: 'npm', upgrades: [] },
+        { branchName: 'test_branch', manager: 'npm', upgrades: [] },
+      ]);
+      repoCache.getCache.mockReturnValue({});
       git.branchExists.mockReturnValue(true);
       branchWorker.processBranch.mockResolvedValueOnce({
         branchExists: true,
@@ -66,9 +81,10 @@ describe('workers/repository/process/write', () => {
     it('increments branch counter', async () => {
       const branchName = 'branchName';
       const branches: BranchConfig[] = [
-        partial<BranchConfig>({ baseBranch: 'main', branchName }),
-        partial<BranchConfig>({ baseBranch: 'dev', branchName }),
+        partial<BranchConfig>({ baseBranch: 'main', branchName, upgrades: [] }),
+        partial<BranchConfig>({ baseBranch: 'dev', branchName, upgrades: [] }),
       ] as never;
+      repoCache.getCache.mockReturnValueOnce({});
       branchWorker.processBranch.mockResolvedValueOnce({
         branchExists: true,
         result: BranchResult.PrCreated,
@@ -90,5 +106,128 @@ describe('workers/repository/process/write', () => {
         branch: branchName,
       });
     });
+
+    it('returns no-work if updates are unchanged', async () => {
+      const branches = partial<BranchConfig[]>([
+        {
+          branchName: 'new/some-branch',
+          manager: 'npm',
+          upgrades: [
+            {
+              manager: 'npm',
+            } as BranchUpgradeConfig,
+          ],
+        },
+      ]);
+      repoCache.getCache.mockReturnValueOnce({
+        branches: [{ branchName: 'new/some-branch' } as BranchCache],
+      });
+      branchWorker.processBranch.mockResolvedValueOnce({
+        branchExists: true,
+        result: BranchResult.NoWork,
+      });
+      git.branchExists.mockReturnValue(true);
+      config.repositoryCache = 'enabled';
+      expect(await writeUpdates(config, branches)).toBe('done');
+    });
+
+    it('updates branch fingerprint when commit is made', async () => {
+      const branches = partial<BranchConfig[]>([
+        {
+          branchName: 'new/some-branch',
+          manager: 'npm',
+          upgrades: [
+            {
+              manager: 'unknown-manager',
+            } as BranchUpgradeConfig,
+          ],
+        },
+      ]);
+      repoCache.getCache.mockReturnValueOnce({
+        branches: [
+          {
+            branchName: 'new/some-branch',
+            branchFingerprint: '222',
+          } as BranchCache,
+        ],
+      });
+      branchWorker.processBranch.mockResolvedValueOnce({
+        branchExists: true,
+        result: BranchResult.Done,
+        commitSha: 'some-value',
+      });
+      git.branchExists.mockReturnValue(true);
+      const branchManagersFingerprint = hasha(
+        [
+          ...new Set(
+            branches[0].upgrades
+              .map((upgrade) => hashMap.get(upgrade.manager) ?? upgrade.manager)
+              .filter(is.string)
+          ),
+        ].sort()
+      );
+      const fingerprint = hasha([
+        JSON.stringify(branches[0]),
+        branchManagersFingerprint,
+      ]);
+      config.repositoryCache = 'enabled';
+      expect(await writeUpdates(config, branches)).toBe('done');
+      expect(branches[0].branchFingerprint).toBe(fingerprint);
+    });
+
+    it('shows debug log when the cache is enabled but branch cache is not found', async () => {
+      const branches = partial<BranchConfig[]>([
+        {
+          branchName: 'new/some-branch',
+          manager: 'npm',
+          upgrades: [
+            {
+              manager: 'npm',
+            } as BranchUpgradeConfig,
+          ],
+        },
+      ]);
+      repoCache.getCache.mockReturnValueOnce({});
+      branchWorker.processBranch.mockResolvedValueOnce({
+        branchExists: true,
+        result: BranchResult.NoWork,
+      });
+      git.branchExists.mockReturnValueOnce(true);
+      config.repositoryCache = 'enabled';
+      await writeUpdates(config, branches);
+      expect(logger.logger.debug).toHaveBeenCalledWith(
+        'No branch cache found for new/some-branch'
+      );
+    });
+  });
+
+  describe('canSkipBranchUpdateCheck()', () => {
+    let branchCache = {} as BranchCache;
+
+    it('returns false if no cache', () => {
+      branchCache = {
+        branchName: 'new/some-branch',
+        sha: '111',
+      } as BranchCache;
+      expect(canSkipBranchUpdateCheck(branchCache, '222')).toBe(false);
+    });
+
+    it('returns false when fingerprints do not match', () => {
+      branchCache = {
+        branchName: 'new/some-branch',
+        sha: '111',
+        branchFingerprint: '211',
+      } as BranchCache;
+      expect(canSkipBranchUpdateCheck(branchCache, '222')).toBe(false);
+    });
+
+    it('returns true when fingerprints match', () => {
+      branchCache = {
+        branchName: 'new/some-branch',
+        sha: '111',
+        branchFingerprint: '222',
+      } as BranchCache;
+      expect(canSkipBranchUpdateCheck(branchCache, '222')).toBe(true);
+    });
   });
 });
diff --git a/lib/workers/repository/process/write.ts b/lib/workers/repository/process/write.ts
index dbd830c10a..c3e0baf286 100644
--- a/lib/workers/repository/process/write.ts
+++ b/lib/workers/repository/process/write.ts
@@ -1,5 +1,11 @@
+import is from '@sindresorhus/is';
+import hasha from 'hasha';
+import stringify from 'safe-stable-stringify';
 import type { RenovateConfig } from '../../../config/types';
 import { addMeta, logger, removeMeta } from '../../../logger';
+import { hashMap } from '../../../modules/manager';
+import { getCache } from '../../../util/cache/repository';
+import type { BranchCache } from '../../../util/cache/repository/types';
 import { branchExists } from '../../../util/git';
 import { Limit, incLimitedValue, setMaxLimit } from '../../global/limits';
 import { BranchConfig, BranchResult } from '../../types';
@@ -8,6 +14,23 @@ import { getBranchesRemaining, getPrsRemaining } from './limits';
 
 export type WriteUpdateResult = 'done' | 'automerged';
 
+export function canSkipBranchUpdateCheck(
+  branchCache: BranchCache,
+  branchFingerprint: string
+): boolean {
+  if (!branchCache.branchFingerprint) {
+    return false;
+  }
+
+  if (branchFingerprint !== branchCache.branchFingerprint) {
+    logger.debug('Branch fingerprint has changed, full check required');
+    return false;
+  }
+
+  logger.debug('Branch fingerprint is unchanged, updates check can be skipped');
+  return true;
+}
+
 export async function writeUpdates(
   config: RenovateConfig,
   allBranches: BranchConfig[]
@@ -21,6 +44,8 @@ export async function writeUpdates(
       .sort()
       .join(', ')}`
   );
+  const cache = getCache();
+  const { branches: cachedBranches = [] } = cache;
   const prsRemaining = await getPrsRemaining(config, branches);
   logger.debug({ prsRemaining }, 'Calculated maximum PRs remaining this run');
   setMaxLimit(Limit.PullRequests, prsRemaining);
@@ -39,11 +64,42 @@ export async function writeUpdates(
       meta['baseBranch'] = baseBranch;
     }
     addMeta(meta);
-    const branchExisted = branchExists(branch.branchName);
+    const branchExisted = branchExists(branchName);
+    let branchCache = {} as BranchCache;
+    if (branchExisted && config.repositoryCache === 'enabled') {
+      branchCache =
+        cachedBranches?.find((br) => br.branchName === branchName) ??
+        ({} as BranchCache);
+
+      if (Object.keys(branchCache).length === 0) {
+        logger.debug(`No branch cache found for ${branch.branchName}`);
+      }
+    }
+    const branchManagersFingerprint = hasha(
+      [
+        ...new Set(
+          branch.upgrades
+            .map((upgrade) => hashMap.get(upgrade.manager) ?? upgrade.manager)
+            .filter(is.string)
+        ),
+      ].sort()
+    );
+    const branchFingerprint = hasha([
+      stringify(branch),
+      branchManagersFingerprint,
+    ]);
+    branch.skipBranchUpdate = canSkipBranchUpdateCheck(
+      branchCache,
+      branchFingerprint
+    );
     const res = await processBranch(branch);
     branch.prBlockedBy = res?.prBlockedBy;
     branch.prNo = res?.prNo;
     branch.result = res?.result;
+    branch.branchFingerprint = res?.commitSha
+      ? branchFingerprint
+      : branchCache.branchFingerprint;
+
     if (
       branch.result === BranchResult.Automerged &&
       branch.automergeType !== 'pr-comment'
diff --git a/lib/workers/repository/update/branch/index.spec.ts b/lib/workers/repository/update/branch/index.spec.ts
index 76950d9724..59fb93e704 100644
--- a/lib/workers/repository/update/branch/index.spec.ts
+++ b/lib/workers/repository/update/branch/index.spec.ts
@@ -18,6 +18,7 @@ import * as _npmPostExtract from '../../../../modules/manager/npm/post-update';
 import type { WriteExistingFilesResult } from '../../../../modules/manager/npm/post-update/types';
 import { hashBody } from '../../../../modules/platform/pr-body';
 import { PrState } from '../../../../types';
+import * as _repoCache from '../../../../util/cache/repository';
 import * as _exec from '../../../../util/exec';
 import type { FileChange, StatusResult } from '../../../../util/git/types';
 import * as _mergeConfidence from '../../../../util/merge-confidence';
@@ -55,6 +56,7 @@ jest.mock('../../../../util/sanitize');
 jest.mock('../../../../util/fs');
 jest.mock('../../../../util/git');
 jest.mock('../../../global/limits');
+jest.mock('../../../../util/cache/repository');
 
 const getUpdated = mocked(_getUpdated);
 const schedule = mocked(_schedule);
@@ -69,6 +71,7 @@ const prWorker = mocked(_prWorker);
 const exec = mocked(_exec);
 const sanitize = mocked(_sanitize);
 const limits = mocked(_limits);
+const repoCache = mocked(_repoCache);
 
 const adminConfig: RepoGlobalConfig = { localDir: '', cacheDir: '' };
 
@@ -84,13 +87,14 @@ function findFileContent(
 }
 
 describe('workers/repository/update/branch/index', () => {
+  let config: BranchConfig;
+
   describe('processBranch', () => {
     const updatedPackageFiles: PackageFilesResult = {
       updatedPackageFiles: [],
       artifactErrors: [],
       updatedArtifacts: [],
     };
-    let config: BranchConfig;
 
     beforeEach(() => {
       git.branchExists.mockReturnValue(false);
@@ -118,6 +122,7 @@ describe('workers/repository/update/branch/index', () => {
       });
       GlobalConfig.set(adminConfig);
       sanitize.sanitize.mockImplementation((input) => input);
+      repoCache.getCache.mockReturnValue({});
     });
 
     afterEach(() => {
@@ -203,6 +208,7 @@ describe('workers/repository/update/branch/index', () => {
       const res = await branchWorker.processBranch(config);
       expect(res).toEqual({
         branchExists: false,
+        commitSha: null,
         prNo: undefined,
         result: 'error',
       });
@@ -221,6 +227,7 @@ describe('workers/repository/update/branch/index', () => {
       const res = await branchWorker.processBranch(config);
       expect(res).toEqual({
         branchExists: false,
+        commitSha: null,
         prNo: undefined,
         result: 'error',
       });
@@ -308,6 +315,7 @@ describe('workers/repository/update/branch/index', () => {
       expect(res).toEqual({
         branchExists: true,
         prNo: undefined,
+        commitSha: null,
         result: 'error',
       });
     });
@@ -367,11 +375,12 @@ describe('workers/repository/update/branch/index', () => {
     it('continues branch if branch edited and but PR found', async () => {
       git.branchExists.mockReturnValue(true);
       git.isBranchModified.mockResolvedValueOnce(true);
-      git.getBranchCommit.mockReturnValueOnce('123test');
+      git.getBranchCommit.mockReturnValue('123test');
       platform.findPr.mockResolvedValueOnce({ sha: '123test' } as any);
       const res = await branchWorker.processBranch(config);
       expect(res).toEqual({
         branchExists: true,
+        commitSha: null,
         prNo: undefined,
         result: 'error',
       });
@@ -425,6 +434,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prBlockedBy: 'RateLimited',
         result: 'pr-limit-reached',
+        commitSha: '123test',
       });
     });
 
@@ -550,6 +560,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prBlockedBy: 'NeedsApproval',
         result: 'needs-pr-approval',
+        commitSha: null,
       });
     });
 
@@ -573,6 +584,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prBlockedBy: 'AwaitingTests',
         result: 'pending',
+        commitSha: null,
       });
     });
 
@@ -596,6 +608,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prBlockedBy: 'BranchAutomerge',
         result: 'done',
+        commitSha: null,
       });
     });
 
@@ -619,6 +632,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prBlockedBy: 'Error',
         result: 'error',
+        commitSha: null,
       });
     });
 
@@ -642,6 +656,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prBlockedBy: 'whoops',
         result: 'error',
+        commitSha: null,
       });
     });
 
@@ -654,21 +669,19 @@ describe('workers/repository/update/branch/index', () => {
         artifactErrors: [],
         updatedArtifacts: [partial<FileChange>({})],
       } as WriteExistingFilesResult);
-
+      const inconfig = {
+        ...config,
+        ignoreTests: true,
+        prCreation: 'not-pending',
+        commitBody: '[skip-ci]',
+        fetchReleaseNotes: true,
+      } as BranchConfig;
       mockedFunction(needsChangelogs).mockReturnValueOnce(true);
-
-      expect(
-        await branchWorker.processBranch({
-          ...config,
-          ignoreTests: true,
-          prCreation: 'not-pending',
-          commitBody: '[skip-ci]',
-          fetchReleaseNotes: true,
-        })
-      ).toEqual({
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
         branchExists: true,
         prNo: undefined,
         result: 'pending',
+        commitSha: '123test',
       });
 
       expect(automerge.tryBranchAutomerge).toHaveBeenCalledTimes(0);
@@ -740,15 +753,15 @@ describe('workers/repository/update/branch/index', () => {
       );
       prAutomerge.checkAutoMerge.mockResolvedValueOnce({ automerged: false });
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
-      await expect(
-        branchWorker.processBranch({
-          ...config,
-          automerge: true,
-          rebaseWhen: 'conflicted',
-        })
-      ).resolves.toEqual({
+      const inconfig = {
+        ...config,
+        automerge: true,
+        rebaseWhen: 'conflicted',
+      };
+      await expect(branchWorker.processBranch(inconfig)).resolves.toEqual({
         branchExists: true,
         result: BranchResult.NotScheduled,
+        commitSha: null,
       });
       expect(logger.debug).toHaveBeenCalledWith(
         'Branch cannot automerge now because automergeSchedule is off schedule - skipping'
@@ -877,6 +890,7 @@ describe('workers/repository/update/branch/index', () => {
       const processBranchResult = await branchWorker.processBranch(config);
       expect(processBranchResult).toEqual({
         branchExists: false,
+        commitSha: null,
         prNo: undefined,
         result: 'error',
       });
@@ -895,6 +909,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prNo: undefined,
         result: 'pr-created',
+        commitSha: '123test',
       });
     });
 
@@ -916,6 +931,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: '123test',
       });
     });
 
@@ -976,17 +992,17 @@ describe('workers/repository/update/branch/index', () => {
       schedule.isScheduledNow.mockReturnValueOnce(false);
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
       GlobalConfig.set({ ...adminConfig, dryRun: 'full' });
-      expect(
-        await branchWorker.processBranch({
-          ...config,
-          updateType: 'lockFileMaintenance',
-          reuseExistingBranch: false,
-          updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
-        })
-      ).toEqual({
+      const inconfig = {
+        ...config,
+        updateType: 'lockFileMaintenance',
+        reuseExistingBranch: false,
+        updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
+      } as BranchConfig;
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
     });
 
@@ -1027,6 +1043,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
       expect(logger.info).toHaveBeenCalledWith(
         'DRY-RUN: Would ensure comment removal in PR #undefined'
@@ -1057,17 +1074,17 @@ describe('workers/repository/update/branch/index', () => {
       git.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
-      expect(
-        await branchWorker.processBranch({
-          ...config,
-          updateType: 'lockFileMaintenance',
-          reuseExistingBranch: false,
-          updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
-        })
-      ).toEqual({
+      const inconfig = {
+        ...config,
+        updateType: 'lockFileMaintenance',
+        reuseExistingBranch: false,
+        updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
+      } as BranchConfig;
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
     });
 
@@ -1093,19 +1110,43 @@ describe('workers/repository/update/branch/index', () => {
       git.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
-      expect(
-        await branchWorker.processBranch({
-          ...config,
-          dependencyDashboardChecks: { 'renovate/some-branch': 'true' },
-          updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
+      const inconfig = {
+        ...config,
+        dependencyDashboardChecks: { 'renovate/some-branch': 'true' },
+        updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
+      } as BranchConfig;
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
+        branchExists: true,
+        prNo: undefined,
+        result: 'no-work',
+      });
+      expect(commit.commitFilesToBranch).not.toHaveBeenCalled();
+    });
+
+    it('skips branch update if updates are unchanged', async () => {
+      git.branchExists.mockReturnValueOnce(true);
+      git.getBranchCommit.mockReturnValue('111');
+      platform.getBranchPr.mockResolvedValueOnce(
+        partial<Pr>({
+          sourceBranch: 'old/some-branch',
+          state: PrState.Open,
         })
-      ).toMatchInlineSnapshot(`
-        {
-          "branchExists": true,
-          "prNo": undefined,
-          "result": "no-work",
-        }
-      `);
+      );
+      const inconfig = {
+        ...config,
+        branchName: 'new/some-branch',
+        branchPrefix: 'new/',
+        branchPrefixOld: 'old/',
+        branchFingerprint: '111',
+        reuseExistingBranch: true,
+        skipBranchUpdate: true,
+      };
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
+        branchExists: true,
+        prNo: undefined,
+        result: 'done',
+        commitSha: null,
+      });
       expect(commit.commitFilesToBranch).not.toHaveBeenCalled();
     });
 
@@ -1134,19 +1175,17 @@ describe('workers/repository/update/branch/index', () => {
       git.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
-      expect(
-        await branchWorker.processBranch({
-          ...config,
-          reuseExistingBranch: false,
-          updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
-        })
-      ).toMatchInlineSnapshot(`
-        {
-          "branchExists": true,
-          "prNo": undefined,
-          "result": "done",
-        }
-      `);
+      const inconfig = {
+        ...config,
+        reuseExistingBranch: false,
+        updatedArtifacts: [{ type: 'deletion', path: 'dummy' }],
+      } as BranchConfig;
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
+        branchExists: true,
+        prNo: undefined,
+        result: 'done',
+        commitSha: null,
+      });
       expect(commit.commitFilesToBranch).toHaveBeenCalled();
     });
 
@@ -1206,8 +1245,7 @@ describe('workers/repository/update/branch/index', () => {
         exposeAllEnv: true,
         localDir: '/localDir',
       });
-
-      const result = await branchWorker.processBranch({
+      const inconfig = {
         ...config,
         postUpgradeTasks: {
           executionMode: 'update',
@@ -1225,12 +1263,13 @@ describe('workers/repository/update/branch/index', () => {
             },
           } as BranchUpgradeConfig,
         ],
-      });
-
+      } as BranchConfig;
+      const result = await branchWorker.processBranch(inconfig);
       expect(result).toEqual({
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
       const errorMessage = expect.stringContaining(
         "Post-upgrade command 'disallowed task' has not been added to the allowed list in allowedPostUpgradeCommand"
@@ -1380,7 +1419,7 @@ describe('workers/repository/update/branch/index', () => {
         exposeAllEnv: true,
         localDir: '/localDir',
       });
-      const result = await branchWorker.processBranch({
+      const inconfig = {
         ...config,
         postUpgradeTasks: {
           executionMode: 'update',
@@ -1398,12 +1437,13 @@ describe('workers/repository/update/branch/index', () => {
             },
           } as BranchUpgradeConfig,
         ],
-      });
-
+      } as BranchConfig;
+      const result = await branchWorker.processBranch(inconfig);
       expect(result).toEqual({
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
       expect(exec.exec).toHaveBeenCalledWith('echo {{{versioning}}}', {
         cwd: '/localDir',
@@ -1528,6 +1568,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
       expect(exec.exec).toHaveBeenNthCalledWith(1, 'echo some-dep-name-1', {
         cwd: '/localDir',
@@ -1672,6 +1713,7 @@ describe('workers/repository/update/branch/index', () => {
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: null,
       });
       expect(exec.exec).toHaveBeenNthCalledWith(1, 'echo hardcoded-string', {
         cwd: '/localDir',
@@ -1717,22 +1759,58 @@ describe('workers/repository/update/branch/index', () => {
           state: PrState.Open,
         })
       );
-      expect(
-        await branchWorker.processBranch({
-          ...config,
-          branchName: 'new/some-branch',
-          branchPrefix: 'new/',
-          branchPrefixOld: 'old/',
-        })
-      ).toEqual({
+      const inconfig = {
+        ...config,
+        branchName: 'new/some-branch',
+        branchPrefix: 'new/',
+        branchPrefixOld: 'old/',
+      };
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
         branchExists: true,
         prNo: undefined,
         result: 'done',
+        commitSha: '123test',
       });
       expect(logger.debug).toHaveBeenCalledWith('Found existing branch PR');
       expect(logger.debug).toHaveBeenCalledWith(
         'No package files need updating'
       );
     });
+
+    it('does nothing when the branchPrefixOld branch and its PR exist but no update is necessary', async () => {
+      getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce({
+        ...updatedPackageFiles,
+      });
+      npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
+        artifactErrors: [],
+        updatedArtifacts: [],
+      });
+      git.branchExists.mockReturnValueOnce(false);
+      git.branchExists.mockReturnValueOnce(true);
+      platform.getBranchPr.mockResolvedValueOnce(
+        partial<Pr>({
+          sourceBranch: 'old/some-branch',
+          state: PrState.Open,
+        })
+      );
+      config.reuseExistingBranch = true;
+      config.skipBranchUpdate = true;
+      const inconfig = {
+        ...config,
+        branchName: 'new/some-branch',
+        branchPrefix: 'new/',
+        branchPrefixOld: 'old/',
+      };
+      expect(await branchWorker.processBranch(inconfig)).toEqual({
+        branchExists: true,
+        prNo: undefined,
+        result: 'done',
+        commitSha: null,
+      });
+      expect(logger.debug).toHaveBeenCalledWith('Found existing branch PR');
+      expect(logger.debug).not.toHaveBeenCalledWith(
+        'No package files need updating'
+      );
+    });
   });
 });
diff --git a/lib/workers/repository/update/branch/index.ts b/lib/workers/repository/update/branch/index.ts
index d2ffadfa52..a72c8e0b4d 100644
--- a/lib/workers/repository/update/branch/index.ts
+++ b/lib/workers/repository/update/branch/index.ts
@@ -44,7 +44,6 @@ import * as template from '../../../../util/template';
 import { Limit, isLimitReached } from '../../../global/limits';
 import { BranchConfig, BranchResult, PrBlockedBy } from '../../../types';
 import { embedChangelog, needsChangelogs } from '../../changelog';
-// import { embedChangelog, needsChangelogs } from '../../changelog';
 import { ensurePr, getPlatformPrOptions, updatePrDebugData } from '../pr';
 import { checkAutoMerge } from '../pr/automerge';
 import { getPrBody } from '../pr/body';
@@ -80,11 +79,13 @@ export interface ProcessBranchResult {
   prBlockedBy?: PrBlockedBy;
   prNo?: number;
   result: BranchResult;
+  commitSha?: string | null;
 }
 
 export async function processBranch(
   branchConfig: BranchConfig
 ): Promise<ProcessBranchResult> {
+  let commitSha: string | null = null;
   let config: BranchConfig = { ...branchConfig };
   logger.trace({ config }, 'processBranch()');
   let branchExists = gitBranchExists(config.branchName);
@@ -287,7 +288,6 @@ export async function processBranch(
         'Branch + PR exists but is not scheduled -- will update if necessary'
       );
     }
-    await checkoutBranch(config.baseBranch!);
     //stability checks
     if (
       config.upgrades.some(
@@ -380,133 +380,143 @@ export async function processBranch(
     }
     // TODO: types (#7154)
     logger.debug(`Using reuseExistingBranch: ${config.reuseExistingBranch!}`);
-    const res = await getUpdatedPackageFiles(config);
-    // istanbul ignore if
-    if (res.artifactErrors && config.artifactErrors) {
-      res.artifactErrors = config.artifactErrors.concat(res.artifactErrors);
-    }
-    config = { ...config, ...res };
-    if (config.updatedPackageFiles?.length) {
-      logger.debug(
-        `Updated ${config.updatedPackageFiles.length} package files`
+    if (!(config.reuseExistingBranch && config.skipBranchUpdate)) {
+      await checkoutBranch(config.baseBranch!);
+      const res = await getUpdatedPackageFiles(config);
+      // istanbul ignore if
+      if (res.artifactErrors && config.artifactErrors) {
+        res.artifactErrors = config.artifactErrors.concat(res.artifactErrors);
+      }
+      config = { ...config, ...res };
+      if (config.updatedPackageFiles?.length) {
+        logger.debug(
+          `Updated ${config.updatedPackageFiles.length} package files`
+        );
+      } else {
+        logger.debug('No package files need updating');
+      }
+      const additionalFiles = await getAdditionalFiles(
+        config,
+        branchConfig.packageFiles!
       );
-    } else {
-      logger.debug('No package files need updating');
-    }
-    const additionalFiles = await getAdditionalFiles(
-      config,
-      branchConfig.packageFiles!
-    );
-    config.artifactErrors = (config.artifactErrors ?? []).concat(
-      additionalFiles.artifactErrors
-    );
-    config.updatedArtifacts = (config.updatedArtifacts ?? []).concat(
-      additionalFiles.updatedArtifacts
-    );
-    if (config.updatedArtifacts?.length) {
-      logger.debug(
-        {
-          updatedArtifacts: config.updatedArtifacts.map((f) =>
-            f.type === 'deletion' ? `${f.path} (delete)` : f.path
-          ),
-        },
-        `Updated ${config.updatedArtifacts.length} lock files`
+      config.artifactErrors = (config.artifactErrors ?? []).concat(
+        additionalFiles.artifactErrors
+      );
+      config.updatedArtifacts = (config.updatedArtifacts ?? []).concat(
+        additionalFiles.updatedArtifacts
+      );
+      if (config.updatedArtifacts?.length) {
+        logger.debug(
+          {
+            updatedArtifacts: config.updatedArtifacts.map((f) =>
+              f.type === 'deletion' ? `${f.path} (delete)` : f.path
+            ),
+          },
+          `Updated ${config.updatedArtifacts.length} lock files`
+        );
+      } else {
+        logger.debug('No updated lock files in branch');
+      }
+      const postUpgradeCommandResults = await executePostUpgradeCommands(
+        config
       );
-    } else {
-      logger.debug('No updated lock files in branch');
-    }
-    const postUpgradeCommandResults = await executePostUpgradeCommands(config);
 
-    if (postUpgradeCommandResults !== null) {
-      const { updatedArtifacts, artifactErrors } = postUpgradeCommandResults;
-      config.updatedArtifacts = updatedArtifacts;
-      config.artifactErrors = artifactErrors;
-    }
+      if (postUpgradeCommandResults !== null) {
+        const { updatedArtifacts, artifactErrors } = postUpgradeCommandResults;
+        config.updatedArtifacts = updatedArtifacts;
+        config.artifactErrors = artifactErrors;
+      }
 
-    removeMeta(['dep']);
+      removeMeta(['dep']);
 
-    if (config.artifactErrors?.length) {
-      if (config.releaseTimestamp) {
-        logger.debug(`Branch timestamp: ` + config.releaseTimestamp);
-        const releaseTimestamp = DateTime.fromISO(config.releaseTimestamp);
-        if (releaseTimestamp.plus({ hours: 2 }) < DateTime.local()) {
-          logger.debug(
-            'PR is older than 2 hours, raise PR with lock file errors'
-          );
-        } else if (branchExists) {
-          logger.debug(
-            'PR is less than 2 hours old but branchExists so updating anyway'
-          );
+      if (config.artifactErrors?.length) {
+        if (config.releaseTimestamp) {
+          logger.debug(`Branch timestamp: ` + config.releaseTimestamp);
+          const releaseTimestamp = DateTime.fromISO(config.releaseTimestamp);
+          if (releaseTimestamp.plus({ hours: 2 }) < DateTime.local()) {
+            logger.debug(
+              'PR is older than 2 hours, raise PR with lock file errors'
+            );
+          } else if (branchExists) {
+            logger.debug(
+              'PR is less than 2 hours old but branchExists so updating anyway'
+            );
+          } else {
+            logger.debug(
+              'PR is less than 2 hours old - raise error instead of PR'
+            );
+            throw new Error(MANAGER_LOCKFILE_ERROR);
+          }
         } else {
-          logger.debug(
-            'PR is less than 2 hours old - raise error instead of PR'
+          logger.debug('PR has no releaseTimestamp');
+        }
+      } else if (config.updatedArtifacts?.length && branchPr) {
+        // If there are artifacts, no errors, and an existing PR then ensure any artifacts error comment is removed
+        // istanbul ignore if
+        if (GlobalConfig.get('dryRun')) {
+          logger.info(
+            `DRY-RUN: Would ensure comment removal in PR #${branchPr.number}`
           );
-          throw new Error(MANAGER_LOCKFILE_ERROR);
+        } else {
+          // Remove artifacts error comment only if this run has successfully updated artifacts
+          await ensureCommentRemoval({
+            type: 'by-topic',
+            number: branchPr.number,
+            topic: artifactErrorTopic,
+          });
         }
-      } else {
-        logger.debug('PR has no releaseTimestamp');
       }
-    } else if (config.updatedArtifacts?.length && branchPr) {
-      // If there are artifacts, no errors, and an existing PR then ensure any artifacts error comment is removed
-      // istanbul ignore if
-      if (GlobalConfig.get('dryRun')) {
-        logger.info(
-          `DRY-RUN: Would ensure comment removal in PR #${branchPr.number}`
-        );
-      } else {
-        // Remove artifacts error comment only if this run has successfully updated artifacts
-        await ensureCommentRemoval({
-          type: 'by-topic',
-          number: branchPr.number,
-          topic: artifactErrorTopic,
-        });
-      }
-    }
-    const forcedManually = userRebaseRequested || !branchExists;
+      const forcedManually = userRebaseRequested || !branchExists;
 
-    config.isConflicted ??=
-      branchExists &&
-      (await isBranchConflicted(config.baseBranch!, config.branchName));
-    config.forceCommit = forcedManually || config.isConflicted;
+      config.isConflicted ??=
+        branchExists &&
+        (await isBranchConflicted(config.baseBranch!, config.branchName));
+      config.forceCommit = forcedManually || config.isConflicted;
 
-    config.stopUpdating = branchPr?.labels?.includes(config.stopUpdatingLabel!);
+      config.stopUpdating = branchPr?.labels?.includes(
+        config.stopUpdatingLabel!
+      );
 
-    const prRebaseChecked = !!branchPr?.bodyStruct?.rebaseRequested;
+      const prRebaseChecked = !!branchPr?.bodyStruct?.rebaseRequested;
 
-    if (branchExists && dependencyDashboardCheck && config.stopUpdating) {
-      if (!prRebaseChecked) {
-        logger.info(
-          'Branch updating is skipped because stopUpdatingLabel is present in config'
-        );
-        return {
-          branchExists: true,
-          prNo: branchPr?.number,
-          result: BranchResult.NoWork,
-        };
+      if (branchExists && dependencyDashboardCheck && config.stopUpdating) {
+        if (!prRebaseChecked) {
+          logger.info(
+            'Branch updating is skipped because stopUpdatingLabel is present in config'
+          );
+          return {
+            branchExists: true,
+            prNo: branchPr?.number,
+            result: BranchResult.NoWork,
+          };
+        }
       }
-    }
 
-    // compile commit message with body, which maybe needs changelogs
-    if (config.commitBody) {
-      if (config.fetchReleaseNotes && needsChangelogs(config, ['commitBody'])) {
-        // we only need first upgrade, the others are only needed on PR update
-        // we add it to first, so PR fetch can skip fetching for that update
-        await embedChangelog(config.upgrades[0]);
-      }
-      // changelog is on first upgrade
-      config.commitMessage = `${config.commitMessage!}\n\n${template.compile(
-        config.commitBody,
-        {
-          ...config,
-          logJSON: config.upgrades[0].logJSON,
-          releases: config.upgrades[0].releases,
+      // compile commit message with body, which maybe needs changelogs
+      if (config.commitBody) {
+        if (
+          config.fetchReleaseNotes &&
+          needsChangelogs(config, ['commitBody'])
+        ) {
+          // we only need first upgrade, the others are only needed on PR update
+          // we add it to first, so PR fetch can skip fetching for that update
+          await embedChangelog(config.upgrades[0]);
         }
-      )}`;
+        // changelog is on first upgrade
+        config.commitMessage = `${config.commitMessage!}\n\n${template.compile(
+          config.commitBody,
+          {
+            ...config,
+            logJSON: config.upgrades[0].logJSON,
+            releases: config.upgrades[0].releases,
+          }
+        )}`;
 
-      logger.trace(`commitMessage: ` + JSON.stringify(config.commitMessage));
-    }
+        logger.trace(`commitMessage: ` + JSON.stringify(config.commitMessage));
+      }
 
-    const commitSha = await commitFilesToBranch(config);
+      commitSha = await commitFilesToBranch(config);
+    }
     // istanbul ignore if
     if (branchPr && platform.refreshPr) {
       await platform.refreshPr(branchPr.number);
@@ -540,6 +550,7 @@ export async function processBranch(
         branchExists: true,
         prNo: branchPr?.number,
         result: BranchResult.Pending,
+        commitSha,
       };
     }
 
@@ -560,7 +571,11 @@ export async function processBranch(
         logger.debug(
           'Branch cannot automerge now because automergeSchedule is off schedule - skipping'
         );
-        return { branchExists, result: BranchResult.NotScheduled };
+        return {
+          branchExists,
+          result: BranchResult.NotScheduled,
+          commitSha,
+        };
       }
       if (
         mergeStatus === 'stale' &&
@@ -638,6 +653,7 @@ export async function processBranch(
         branchExists: true,
         prNo: branchPr?.number,
         result: BranchResult.Error,
+        commitSha,
       };
     } else if (
       err.messagee &&
@@ -657,7 +673,12 @@ export async function processBranch(
       logger.warn({ err }, `Error updating branch`);
     }
     // Don't throw here - we don't want to stop the other renovations
-    return { branchExists, prNo: branchPr?.number, result: BranchResult.Error };
+    return {
+      branchExists,
+      prNo: branchPr?.number,
+      result: BranchResult.Error,
+      commitSha,
+    };
   }
   try {
     logger.debug('Ensuring PR');
@@ -676,6 +697,7 @@ export async function processBranch(
           branchExists,
           prBlockedBy,
           result: BranchResult.PrLimitReached,
+          commitSha,
         };
       }
       // TODO: ensurePr should check for automerge itself (#9719)
@@ -684,23 +706,40 @@ export async function processBranch(
           branchExists,
           prBlockedBy,
           result: BranchResult.NeedsPrApproval,
+          commitSha,
         };
       }
       if (prBlockedBy === 'AwaitingTests') {
-        return { branchExists, prBlockedBy, result: BranchResult.Pending };
+        return {
+          branchExists,
+          prBlockedBy,
+          result: BranchResult.Pending,
+          commitSha,
+        };
       }
       if (prBlockedBy === 'BranchAutomerge') {
         return {
           branchExists,
           prBlockedBy,
           result: BranchResult.Done,
+          commitSha,
         };
       }
       if (prBlockedBy === 'Error') {
-        return { branchExists, prBlockedBy, result: BranchResult.Error };
+        return {
+          branchExists,
+          prBlockedBy,
+          result: BranchResult.Error,
+          commitSha,
+        };
       }
       logger.warn({ prBlockedBy }, 'Unknown PrBlockedBy result');
-      return { branchExists, prBlockedBy, result: BranchResult.Error };
+      return {
+        branchExists,
+        prBlockedBy,
+        result: BranchResult.Error,
+        commitSha,
+      };
     }
     if (ensurePrResult.type === 'with-pr') {
       const { pr } = ensurePrResult;
@@ -754,7 +793,11 @@ export async function processBranch(
         logger.debug('PR is configured for automerge');
         const prAutomergeResult = await checkAutoMerge(pr, config);
         if (prAutomergeResult?.automerged) {
-          return { branchExists, result: BranchResult.Automerged };
+          return {
+            branchExists,
+            result: BranchResult.Automerged,
+            commitSha,
+          };
         }
       } else {
         logger.debug('PR is not configured for automerge');
@@ -776,7 +819,13 @@ export async function processBranch(
       branchExists: true,
       prNo: branchPr?.number,
       result: BranchResult.PrCreated,
+      commitSha,
     };
   }
-  return { branchExists, prNo: branchPr?.number, result: BranchResult.Done };
+  return {
+    branchExists,
+    prNo: branchPr?.number,
+    result: BranchResult.Done,
+    commitSha,
+  };
 }
diff --git a/lib/workers/types.ts b/lib/workers/types.ts
index 32c036afef..1da19fe992 100644
--- a/lib/workers/types.ts
+++ b/lib/workers/types.ts
@@ -126,6 +126,8 @@ export interface BranchConfig
   prNo?: number;
   stopUpdating?: boolean;
   isConflicted?: boolean;
+  branchFingerprint?: string;
+  skipBranchUpdate?: boolean;
 }
 
 export interface WorkerExtractConfig extends ExtractConfig {
-- 
GitLab