diff --git a/.prettierrc b/.prettierrc
index 6644fd7246173f57f93015ebca30578df3976c1b..76e9f310494387c99a086fcc48b445f03f61c65c 100644
--- a/.prettierrc
+++ b/.prettierrc
@@ -1,2 +1,2 @@
 singleQuote: true
-trailingComma: "es5"
+trailingComma: "all"
diff --git a/docs/development/zod.md b/docs/development/zod.md
index 30ebc56b4973495e2e49f1b652b9997164a92d6d..265e8c93f664d3fc6ac6c821048137e491042135 100644
--- a/docs/development/zod.md
+++ b/docs/development/zod.md
@@ -134,7 +134,7 @@ const ApiResults = z.array(
   z.object({
     id: z.number(),
     value: z.string(),
-  })
+  }),
 );
 type ApiResults = z.infer<typeof ApiResults>;
 
@@ -155,8 +155,8 @@ const ApiResults = Json.pipe(
     z.object({
       id: z.number(),
       value: z.string(),
-    })
-  )
+    }),
+  ),
 );
 
 const results = ApiResults.parse(input);
@@ -269,10 +269,10 @@ const Versions = z
         version: z.string(),
       })
       .nullable()
-      .catch(null)
+      .catch(null),
   )
   .transform((releases) =>
-    releases.filter((x): x is { version: string } => x !== null)
+    releases.filter((x): x is { version: string } => x !== null),
   );
 ```
 
@@ -284,7 +284,7 @@ Instead, you should use the `LooseArray` and `LooseRecord` helpers from `schema-
 const Versions = LooseArray(
   z.object({
     version: z.string(),
-  })
+  }),
 );
 ```
 
@@ -339,7 +339,7 @@ const Users = z.object({
 
 const { body: users } = await http.getJson(
   'https://dummyjson.com/users',
-  LooseArray(User)
+  LooseArray(User),
 );
 ```
 
diff --git a/jest.config.ts b/jest.config.ts
index 8dfa2efa0a96357c1aa60730d2dc733119d14cda..983301386bf64c37561f823c42c86f5e5542e5ec 100644
--- a/jest.config.ts
+++ b/jest.config.ts
@@ -153,7 +153,7 @@ function normalizePattern(pattern: string, suffix: '.ts' | '.spec.ts'): string {
  * Otherwise, `fallback` value is used to determine some defaults.
  */
 function configureShardingOrFallbackTo(
-  fallback: JestShardedSubconfig
+  fallback: JestShardedSubconfig,
 ): JestShardedSubconfig {
   const shardKey = process.env.TEST_SHARD;
   if (!shardKey) {
@@ -163,7 +163,7 @@ function configureShardingOrFallbackTo(
   if (!testShards[shardKey]) {
     const keys = Object.keys(testShards).join(', ');
     throw new Error(
-      `Unknown value for TEST_SHARD: ${shardKey} (possible values: ${keys})`
+      `Unknown value for TEST_SHARD: ${shardKey} (possible values: ${keys})`,
     );
   }
 
@@ -301,7 +301,7 @@ function getMatchingShards(files: string[]): string[] {
       const patterns = matchPaths.map((path) =>
         path.endsWith('.spec.ts')
           ? path.replace(/\.spec\.ts$/, '{.ts,.spec.ts}')
-          : `${path}/**/*`
+          : `${path}/**/*`,
       );
 
       if (patterns.some((pattern) => minimatch(file, pattern))) {
@@ -326,7 +326,7 @@ function scheduleItems<T>(items: T[], availableInstances: number): T[][] {
       : items.length % numInstances;
 
   const partitionSizes = Array.from({ length: numInstances }, (_, idx) =>
-    idx < lighterInstancesIdx ? maxPerInstance : maxPerInstance - 1
+    idx < lighterInstancesIdx ? maxPerInstance : maxPerInstance - 1,
   );
 
   const result: T[][] = Array.from({ length: numInstances }, () => []);
diff --git a/lib/config-validator.ts b/lib/config-validator.ts
index eb2ca7a97795dc6a590a3e9497c2371da2227afc..2436b8a8ab81820221485de31c71c9be3b933d1d 100644
--- a/lib/config-validator.ts
+++ b/lib/config-validator.ts
@@ -20,7 +20,7 @@ async function validate(
   desc: string,
   config: RenovateConfig,
   strict: boolean,
-  isPreset = false
+  isPreset = false,
 ): Promise<void> {
   const { isMigrated, migratedConfig } = migrateConfig(config);
   if (isMigrated) {
@@ -29,7 +29,7 @@ async function validate(
         oldConfig: config,
         newConfig: migratedConfig,
       },
-      'Config migration necessary'
+      'Config migration necessary',
     );
     if (strict) {
       returnVal = 1;
@@ -40,14 +40,14 @@ async function validate(
   if (res.errors.length) {
     logger.error(
       { file: desc, errors: res.errors },
-      'Found errors in configuration'
+      'Found errors in configuration',
     );
     returnVal = 1;
   }
   if (res.warnings.length) {
     logger.warn(
       { file: desc, warnings: res.warnings },
-      'Found errors in configuration'
+      'Found errors in configuration',
     );
     returnVal = 1;
   }
@@ -87,7 +87,7 @@ type PackageJson = {
     }
   } else {
     for (const file of configFileNames.filter(
-      (name) => name !== 'package.json'
+      (name) => name !== 'package.json',
     )) {
       try {
         if (!(await pathExists(file))) {
@@ -108,7 +108,7 @@ type PackageJson = {
     }
     try {
       const pkgJson = JSON.parse(
-        await readFile('package.json', 'utf8')
+        await readFile('package.json', 'utf8'),
       ) as PackageJson;
       if (pkgJson.renovate) {
         logger.info(`Validating package.json > renovate`);
@@ -121,7 +121,7 @@ type PackageJson = {
             'package.json > renovate-config',
             presetConfig,
             strict,
-            true
+            true,
           );
         }
       }
diff --git a/lib/config/decrypt.spec.ts b/lib/config/decrypt.spec.ts
index e6577739056657ea84e67242ca76b888186d4c86..30f3a83204b65b29d20b31e16246bc7c0e47fb51 100644
--- a/lib/config/decrypt.spec.ts
+++ b/lib/config/decrypt.spec.ts
@@ -41,7 +41,7 @@ describe('config/decrypt', () => {
       config.encrypted = { a: 1 };
       GlobalConfig.set({ privateKey, privateKeyOld: 'invalid-key' });
       await expect(decryptConfig(config, repository)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
 
@@ -60,7 +60,7 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.npmToken).toBeUndefined();
       expect(res.npmrc).toBe(
-        '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n'
+        '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n',
       );
     });
 
@@ -98,11 +98,11 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.packageFiles[0].devDependencies.encrypted).toBeUndefined();
       expect(res.packageFiles[0].devDependencies.branchPrefix).toBe(
-        'abcdef-ghijklm-nopqf-stuvwxyz'
+        'abcdef-ghijklm-nopqf-stuvwxyz',
       );
       expect(res.packageFiles[0].devDependencies.npmToken).toBeUndefined();
       expect(res.packageFiles[0].devDependencies.npmrc).toBe(
-        '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n'
+        '//registry.npmjs.org/:_authToken=abcdef-ghijklm-nopqf-stuvwxyz\n',
       );
     });
 
@@ -113,7 +113,7 @@ describe('config/decrypt', () => {
           'long-but-wrong-wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
       };
       await expect(decryptConfig(config, repository)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
       config.encrypted = {
         // Missing value
@@ -121,7 +121,7 @@ describe('config/decrypt', () => {
           'wcFMAw+4H7SgaqGOAQ//ZNPgHJ4RQBdfFoDX8Ywe9UxqMlc8k6VasCszQ2JULh/BpEdKdgRUGNaKaeZ+oBKYDBmDwAD5V5FEMlsg+KO2gykp/p2BAwvKGtYK0MtxLh4h9yJbN7TrVnGO3/cC+Inp8exQt0gD6f1Qo/9yQ9NE4/BIbaSs2b2DgeIK7Ed8N675AuSo73UOa6o7t+9pKeAAK5TQwgSvolihbUs8zjnScrLZD+nhvL3y5gpAqK9y//a+bTu6xPA1jdLjsswoCUq/lfVeVsB2GWV2h6eex/0fRKgN7xxNgdMn0a7msrvumhTawP8mPisPY2AAsHRIgQ9vdU5HbOPdGoIwI9n9rMdIRn9Dy7/gcX9Ic+RP2WwS/KnPHLu/CveY4W5bYqYoikWtJs9HsBCyWFiHIRrJF+FnXwtKdoptRfxTfJIkBoLrV6fDIyKo79iL+xxzgrzWs77KEJUJfexZBEGBCnrV2o7mo3SU197S0qx7HNvqrmeCj8CLxq8opXC71TNa+XE6BQUVyhMFxtW9LNxZUHRiNzrTSikArT4hzjyr3f9cb0kZVcs6XJQsm1EskU3WXo7ETD7nsukS9GfbwMn7tfYidB/yHSHl09ih871BcgByDmEKKdmamcNilW2bmTAqB5JmtaYT5/H8jRQWo/VGrEqlmiA4KmwSv7SZPlDnaDFrmzmMZZDSRgHe5KWl283XLmSeE8J0NPqwFH3PeOv4fIbOjJrnbnFBwSAsgsMe2K4OyFDh2COfrho7s8EP1Kl5lBkYJ+VRreGRerdSu24',
       };
       await expect(decryptConfig(config, repository)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
       config.encrypted = {
         // Missing org scope
@@ -129,7 +129,7 @@ describe('config/decrypt', () => {
           'wcFMAw+4H7SgaqGOAQ//W38A3PmaZnE9XTCHGDQFD52Kz78UYnaiYeAT13cEqYWTwEvQ57B7D7I6i4jCLe7KwkUCS90kyoqd7twD75W/sO70MyIveKnMlqqnpkagQkFgmzMaXXNHaJXEkjzsflTELZu6UsUs/kZYmab7r14YLl9HbH/pqN9exil/9s3ym9URCPOyw/l04KWntdMAy0D+c5M4mE+obv6fz6nDb8tkdeT5Rt2uU+qw3gH1OsB2yu+zTWpI/xTGwDt5nB5txnNTsVrQ/ZK85MSktacGVcYuU9hsEDmSrShmtqlg6Myq+Hjb7cYAp2g4n13C/I3gGGaczl0PZaHD7ALMjI7p6O1q+Ix7vMxipiKMVjS3omJoqBCz3FKc6DVhyX4tfhxgLxFo0DpixNwGbBRbMBO8qZfUk7bicAl/oCRc2Ijmay5DDYuvtkw3G3Ou+sZTe6DNpWUFy6VA4ai7hhcLvcAuiYmLdwPISRR/X4ePa8ZrmSVPyVOvbmmwLhcDYSDlC9Mw4++7ELomlve5kvjVSHvPv9BPVb5sJF7gX4vOT4FrcKalQRPmhNCZrE8tY2lvlrXwV2EEhya8EYv4QTd3JUYEYW5FXiJrORK5KDTnISw+U02nFZjFlnoz9+R6h+aIT1crS3/+YjCHE/EIKvSftOnieYb02Gk7M9nqU19EYL9ApYw4+IjSRgFM3DShIrvuDwDkAwUfaq8mKtr9Vjg/r+yox//GKS3u3r4I3+dfCljA3OwskTPfbSD+huBk4mylIvaL5v8Fngxo979wiLw',
       };
       await expect(decryptConfig(config, repository)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
       config.encrypted = {
         // Impossible to parse
@@ -137,13 +137,13 @@ describe('config/decrypt', () => {
           'wcFMAw+4H7SgaqGOAQ//Wa/gHgQdH7tj3LQdW6rWKjzmkYVKZW9EbexJExu4WLaMgEKodlRMilcqCKfQZpjzoiC31J8Ly/x6Soury+lQnLVbtIQ4KWa/uCIz4lXCpPpGNgN2jPfOmdwWBMOcXIT+BgAMxRu3rAmvTtunrkACJ3J92eYNwJhTzp2Azn9LpT7kHnZ64z2SPhbdUgMMhCBwBG5BPArPzF5fdaqa8uUSbKhY0GMiqPXq6Zeq+EBNoPc/RJp2urpYTknO+nRb39avKjihd9MCZ/1d3QYymbRj7SZC3LJhenVF0hil3Uk8TBASnGQiDmBcIXQFhJ0cxavXqKjx+AEALq+kTdwGu5vuE2+2B820/o3lAXR9OnJHr8GodJ2ZBpzOaPrQe5zvxL0gLEeUUPatSOwuLhdo/6+bRCl2wNz23jIjDEFFTmsLqfEHcdVYVTH2QqvLjnUYcCRRuM32vS4rCMOEe0l6p0CV2rk22UZDIPcxqXjKucxse2Sow8ATWiPoIw7zWj7XBLqUKHFnMpPV2dCIKFKBsOKYgLjF4BvKzZJyhmVEPgMcKQLYqeT/2uWDR77NSWH0Cyiwk9M3KbOIMmV3pWh9PiXk6CvumECELbJHYH0Mc+P//BnbDq2Ie9dHdmKhFgRyHU7gWvkPhic9BX36xyldPcnhTgr1XWRoVe0ETGLDPCcqrQ/SUQGrLiujSOgxGu2K/6LDJhi4IKz1/nf7FUSj5eTIDqQiSPP5pXDjlH7oYxXXrHI/aYOCZ5sBx7mOzlEcENIrYblCHO/CYMTWdCJ4Wrftqk7K/A=',
       };
       await expect(decryptConfig(config, repository)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
       config.encrypted = {
         token: 'too-short',
       };
       await expect(decryptConfig(config, repository)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
 
@@ -157,7 +157,7 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.token).toBe('123');
       await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
 
@@ -174,7 +174,7 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.token).toBe('123');
       await expect(decryptConfig(config, 'wrong/org')).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
 
@@ -188,7 +188,7 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.token).toBe('123');
       await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
 
@@ -205,7 +205,7 @@ describe('config/decrypt', () => {
       expect(res.encrypted).toBeUndefined();
       expect(res.token).toBe('123');
       await expect(decryptConfig(config, 'abc/defg')).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
   });
diff --git a/lib/config/decrypt.ts b/lib/config/decrypt.ts
index 533d255802ce11926d55ba2692a453ac1d3e3914..300a379b6611a2a1a707ef3400bd00f5c73ba90e 100644
--- a/lib/config/decrypt.ts
+++ b/lib/config/decrypt.ts
@@ -12,7 +12,7 @@ import type { RenovateConfig } from './types';
 
 export async function tryDecryptPgp(
   privateKey: string,
-  encryptedStr: string
+  encryptedStr: string,
 ): Promise<string | null> {
   if (encryptedStr.length < 500) {
     // optimization during transition of public key -> pgp
@@ -49,7 +49,7 @@ export async function tryDecryptPgp(
 
 export function tryDecryptPublicKeyDefault(
   privateKey: string,
-  encryptedStr: string
+  encryptedStr: string,
 ): string | null {
   let decryptedStr: string | null = null;
   try {
@@ -65,7 +65,7 @@ export function tryDecryptPublicKeyDefault(
 
 export function tryDecryptPublicKeyPKCS1(
   privateKey: string,
-  encryptedStr: string
+  encryptedStr: string,
 ): string | null {
   let decryptedStr: string | null = null;
   try {
@@ -75,7 +75,7 @@ export function tryDecryptPublicKeyPKCS1(
           key: privateKey,
           padding: crypto.constants.RSA_PKCS1_PADDING,
         },
-        Buffer.from(encryptedStr, 'base64')
+        Buffer.from(encryptedStr, 'base64'),
       )
       .toString();
   } catch (err) {
@@ -87,7 +87,7 @@ export function tryDecryptPublicKeyPKCS1(
 export async function tryDecrypt(
   privateKey: string,
   encryptedStr: string,
-  repository: string
+  repository: string,
 ): Promise<string | null> {
   let decryptedStr: string | null = null;
   if (privateKey?.startsWith('-----BEGIN PGP PRIVATE KEY BLOCK-----')) {
@@ -112,36 +112,36 @@ export async function tryDecrypt(
               .map((o) => ensureTrailingSlash(o));
             if (is.nonEmptyString(repo)) {
               const scopedRepos = orgPrefixes.map((orgPrefix) =>
-                `${orgPrefix}${repo}`.toUpperCase()
+                `${orgPrefix}${repo}`.toUpperCase(),
               );
               if (scopedRepos.some((r) => r === repository.toUpperCase())) {
                 decryptedStr = value;
               } else {
                 logger.debug(
                   { scopedRepos },
-                  'Secret is scoped to a different repository'
+                  'Secret is scoped to a different repository',
                 );
                 const error = new Error('config-validation');
                 error.validationError = `Encrypted secret is scoped to a different repository: "${scopedRepos.join(
-                  ','
+                  ',',
                 )}".`;
                 throw error;
               }
             } else {
               if (
                 orgPrefixes.some((orgPrefix) =>
-                  repository.toUpperCase().startsWith(orgPrefix)
+                  repository.toUpperCase().startsWith(orgPrefix),
                 )
               ) {
                 decryptedStr = value;
               } else {
                 logger.debug(
                   { orgPrefixes },
-                  'Secret is scoped to a different org'
+                  'Secret is scoped to a different org',
                 );
                 const error = new Error('config-validation');
                 error.validationError = `Encrypted secret is scoped to a different org: "${orgPrefixes.join(
-                  ','
+                  ',',
                 )}".`;
                 throw error;
               }
@@ -171,7 +171,7 @@ export async function tryDecrypt(
 
 export async function decryptConfig(
   config: RenovateConfig,
-  repository: string
+  repository: string,
 ): Promise<RenovateConfig> {
   logger.trace({ config }, 'decryptConfig()');
   const decryptedConfig = { ...config };
@@ -199,7 +199,7 @@ export async function decryptConfig(
             addSecretForSanitizing(token);
             logger.debug(
               { decryptedToken: maskToken(token) },
-              'Migrating npmToken to npmrc'
+              'Migrating npmToken to npmrc',
             );
             if (is.string(decryptedConfig.npmrc)) {
               /* eslint-disable no-template-curly-in-string */
@@ -207,13 +207,13 @@ export async function decryptConfig(
                 logger.debug('Replacing ${NPM_TOKEN} with decrypted token');
                 decryptedConfig.npmrc = decryptedConfig.npmrc.replace(
                   regEx(/\${NPM_TOKEN}/g),
-                  token
+                  token,
                 );
               } else {
                 logger.debug('Appending _authToken= to end of existing npmrc');
                 decryptedConfig.npmrc = decryptedConfig.npmrc.replace(
                   regEx(/\n?$/),
-                  `\n_authToken=${token}\n`
+                  `\n_authToken=${token}\n`,
                 );
               }
               /* eslint-enable no-template-curly-in-string */
@@ -235,7 +235,7 @@ export async function decryptConfig(
       for (const item of val) {
         if (is.object(item) && !is.array(item)) {
           (decryptedConfig[key] as RenovateConfig[]).push(
-            await decryptConfig(item as RenovateConfig, repository)
+            await decryptConfig(item as RenovateConfig, repository),
           );
         } else {
           (decryptedConfig[key] as unknown[]).push(item);
@@ -244,7 +244,7 @@ export async function decryptConfig(
     } else if (is.object(val) && key !== 'content') {
       decryptedConfig[key] = await decryptConfig(
         val as RenovateConfig,
-        repository
+        repository,
       );
     }
   }
diff --git a/lib/config/defaults.spec.ts b/lib/config/defaults.spec.ts
index 1fd77afdd3e5d6e5601f439eb5648b72bc8363e5..20fc263638078198b4fe32dc9be7ac148b5344db 100644
--- a/lib/config/defaults.spec.ts
+++ b/lib/config/defaults.spec.ts
@@ -39,7 +39,7 @@ describe('config/defaults', () => {
         const val = getDefault(option);
 
         expect(val).toBeNull();
-      }
+      },
     );
   });
 });
diff --git a/lib/config/global.ts b/lib/config/global.ts
index 470c7f347ec216bffb3c10309c5d08cf55e9d7b8..8b400fd680390465483ab6e95e873210cf937aa0 100644
--- a/lib/config/global.ts
+++ b/lib/config/global.ts
@@ -35,15 +35,15 @@ export class GlobalConfig {
 
   static get(): RepoGlobalConfig;
   static get<Key extends keyof RepoGlobalConfig>(
-    key: Key
+    key: Key,
   ): RepoGlobalConfig[Key];
   static get<Key extends keyof RepoGlobalConfig>(
     key: Key,
-    defaultValue: Required<RepoGlobalConfig>[Key]
+    defaultValue: Required<RepoGlobalConfig>[Key],
   ): Required<RepoGlobalConfig>[Key];
   static get<Key extends keyof RepoGlobalConfig>(
     key?: Key,
-    defaultValue?: RepoGlobalConfig[Key]
+    defaultValue?: RepoGlobalConfig[Key],
   ): RepoGlobalConfig | RepoGlobalConfig[Key] {
     return key ? GlobalConfig.config[key] ?? defaultValue : GlobalConfig.config;
   }
diff --git a/lib/config/index.ts b/lib/config/index.ts
index c5f8935b46772e208ceb8c949e6d1b6766ca56d8..869a8250e5a9da907701d5e6496253b4f75e747e 100644
--- a/lib/config/index.ts
+++ b/lib/config/index.ts
@@ -13,7 +13,7 @@ export { mergeChildConfig };
 
 export function getManagerConfig(
   config: RenovateConfig,
-  manager: string
+  manager: string,
 ): ManagerConfig {
   let managerConfig: ManagerConfig = {
     ...config,
@@ -33,7 +33,7 @@ export function getManagerConfig(
 
 export function filterConfig(
   inputConfig: AllConfig,
-  targetStage: RenovateConfigStage
+  targetStage: RenovateConfigStage,
 ): AllConfig {
   logger.trace({ config: inputConfig }, `filterConfig('${targetStage}')`);
   const outputConfig: RenovateConfig = { ...inputConfig };
diff --git a/lib/config/massage.ts b/lib/config/massage.ts
index 094237c11bfdb27ad8d3574396062e791d6d553b..c989fecf0723f92ce2cea61d3efbf14a2ba0800a 100644
--- a/lib/config/massage.ts
+++ b/lib/config/massage.ts
@@ -29,7 +29,7 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
       val.forEach((item) => {
         if (is.object(item)) {
           (massagedConfig[key] as RenovateConfig[]).push(
-            massageConfig(item as RenovateConfig)
+            massageConfig(item as RenovateConfig),
           );
         } else {
           (massagedConfig[key] as unknown[]).push(item);
@@ -53,7 +53,7 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
       newRules.push(rule);
       for (const [key, val] of Object.entries(rule) as [
         UpdateType,
-        PackageRule
+        PackageRule,
       ][]) {
         if (updateTypes.includes(key)) {
           let newRule = clone(rule);
@@ -77,7 +77,7 @@ export function massageConfig(config: RenovateConfig): RenovateConfig {
     newRules = newRules.filter((rule) => {
       if (
         Object.keys(rule).every(
-          (key) => key.startsWith('match') || key.startsWith('exclude')
+          (key) => key.startsWith('match') || key.startsWith('exclude'),
         )
       ) {
         // Exclude rules which contain only match or exclude options
diff --git a/lib/config/migrate-validate.spec.ts b/lib/config/migrate-validate.spec.ts
index bd3ba37d5fd9ee7223e45c3b79f3ed45a9d9133c..c0d87d4afa91658f19fe32f1761b76360feb9faf 100644
--- a/lib/config/migrate-validate.spec.ts
+++ b/lib/config/migrate-validate.spec.ts
@@ -39,7 +39,7 @@ describe('config/migrate-validate', () => {
       const input: RenovateConfig = {};
       const res = await migrateAndValidate(
         { ...config, repoIsOnboarded: true },
-        input
+        input,
       );
       expect(res.warnings).toBeUndefined();
       expect(res).toMatchSnapshot();
diff --git a/lib/config/migrate-validate.ts b/lib/config/migrate-validate.ts
index 91865925c8df88971abc2b937e610ce54247041d..81d46499852a4733f42e898f23bd9b5bcae89d00 100644
--- a/lib/config/migrate-validate.ts
+++ b/lib/config/migrate-validate.ts
@@ -8,7 +8,7 @@ import * as configValidation from './validation';
 
 export async function migrateAndValidate(
   config: RenovateConfig,
-  input: RenovateConfig
+  input: RenovateConfig,
 ): Promise<RenovateConfig> {
   logger.debug('migrateAndValidate()');
   try {
@@ -16,7 +16,7 @@ export async function migrateAndValidate(
     if (isMigrated) {
       logger.debug(
         { oldConfig: input, newConfig: migratedConfig },
-        'Config migration necessary'
+        'Config migration necessary',
       );
     } else {
       logger.debug('No config migration necessary');
diff --git a/lib/config/migration.spec.ts b/lib/config/migration.spec.ts
index 42c809ba193176448b02436a4bb92cb7c3517512..86889d5d6011ffa0571ce78340f0d997e5a4fbaa 100644
--- a/lib/config/migration.spec.ts
+++ b/lib/config/migration.spec.ts
@@ -316,7 +316,7 @@ describe('config/migration', () => {
       // TODO: fix types #22198
       expect(
         (migratedConfig.lockFileMaintenance as RenovateConfig)
-          ?.packageRules?.[0].respectLatest
+          ?.packageRules?.[0].respectLatest,
       ).toBeFalse();
     });
 
diff --git a/lib/config/migration.ts b/lib/config/migration.ts
index 659f0b3651eb1fee6ffe3201f60d28e7db3b8d54..84822fc8633ff29acac06c14596ac7d6306cd666 100644
--- a/lib/config/migration.ts
+++ b/lib/config/migration.ts
@@ -36,22 +36,22 @@ export function migrateConfig(config: RenovateConfig): MigratedConfig {
       if (is.string(val) && val.includes('{{baseDir}}')) {
         migratedConfig[key] = val.replace(
           regEx(/{{baseDir}}/g),
-          '{{packageFileDir}}'
+          '{{packageFileDir}}',
         );
       } else if (is.string(val) && val.includes('{{lookupName}}')) {
         migratedConfig[key] = val.replace(
           regEx(/{{lookupName}}/g),
-          '{{packageName}}'
+          '{{packageName}}',
         );
       } else if (is.string(val) && val.includes('{{depNameShort}}')) {
         migratedConfig[key] = val.replace(
           regEx(/{{depNameShort}}/g),
-          '{{depName}}'
+          '{{depName}}',
         );
       } else if (is.string(val) && val.startsWith('{{semanticPrefix}}')) {
         migratedConfig[key] = val.replace(
           '{{semanticPrefix}}',
-          '{{#if semanticCommitType}}{{semanticCommitType}}{{#if semanticCommitScope}}({{semanticCommitScope}}){{/if}}: {{/if}}'
+          '{{#if semanticCommitType}}{{semanticCommitType}}{{#if semanticCommitScope}}({{semanticCommitScope}}){{/if}}: {{/if}}',
         );
       } else if (optionTypes[key] === 'object' && is.boolean(val)) {
         migratedConfig[key] = { enabled: val };
@@ -99,7 +99,7 @@ export function migrateConfig(config: RenovateConfig): MigratedConfig {
         for (const [from, to] of Object.entries(migratedTemplates)) {
           migratedConfig[key] = (migratedConfig[key] as string).replace(
             regEx(from, 'g'),
-            to
+            to,
           );
         }
       }
@@ -140,7 +140,7 @@ export function migrateConfig(config: RenovateConfig): MigratedConfig {
             // TODO: fix types #22198
             const combinedRule = mergeChildConfig(
               packageRule,
-              subrule as PackageRule
+              subrule as PackageRule,
             );
             delete combinedRule.packageRules;
             migratedConfig.packageRules.push(combinedRule);
@@ -156,14 +156,14 @@ export function migrateConfig(config: RenovateConfig): MigratedConfig {
           migratedConfig.matchManagers.push('gradle');
         }
         migratedConfig.matchManagers = migratedConfig.matchManagers.filter(
-          (manager) => manager !== 'gradle-lite'
+          (manager) => manager !== 'gradle-lite',
         );
       }
     }
     if (is.nonEmptyObject(migratedConfig['gradle-lite'])) {
       migratedConfig.gradle = mergeChildConfig(
         migratedConfig.gradle ?? {},
-        migratedConfig['gradle-lite']
+        migratedConfig['gradle-lite'],
       );
     }
     delete migratedConfig['gradle-lite'];
diff --git a/lib/config/migrations/base/abstract-migration.spec.ts b/lib/config/migrations/base/abstract-migration.spec.ts
index dca398847924330d44888146df63cf7659b6521f..f5f1caa7d9bd1b0b29e30360677633f98fef9203 100644
--- a/lib/config/migrations/base/abstract-migration.spec.ts
+++ b/lib/config/migrations/base/abstract-migration.spec.ts
@@ -13,7 +13,7 @@ describe('config/migrations/base/abstract-migration', () => {
       {
         fooBar: true,
       },
-      {}
+      {},
     );
 
     expect(() => customMigration.run()).toThrow();
@@ -31,7 +31,7 @@ describe('config/migrations/base/abstract-migration', () => {
       {
         fooBar: true,
       },
-      {}
+      {},
     );
 
     expect(() => customMigration.run()).toThrow();
diff --git a/lib/config/migrations/base/abstract-migration.ts b/lib/config/migrations/base/abstract-migration.ts
index 46f784e6620264d38a0a9c990be961fa0d99e9d7..390a0eaad8afb8604bae8cf58ae05916e094f7de 100644
--- a/lib/config/migrations/base/abstract-migration.ts
+++ b/lib/config/migrations/base/abstract-migration.ts
@@ -16,7 +16,7 @@ export abstract class AbstractMigration implements Migration {
   abstract run(value: unknown, key: string): void;
 
   protected get<Key extends keyof RenovateConfig>(
-    key: Key
+    key: Key,
   ): RenovateConfig[Key] {
     return this.migratedConfig[key] ?? this.originalConfig[key];
   }
@@ -27,7 +27,7 @@ export abstract class AbstractMigration implements Migration {
 
   protected setSafely<Key extends keyof RenovateConfig>(
     key: Key,
-    value: RenovateConfig[Key]
+    value: RenovateConfig[Key],
   ): void {
     if (
       is.nullOrUndefined(this.originalConfig[key]) &&
@@ -39,7 +39,7 @@ export abstract class AbstractMigration implements Migration {
 
   protected setHard<Key extends keyof RenovateConfig>(
     key: Key,
-    value: RenovateConfig[Key]
+    value: RenovateConfig[Key],
   ): void {
     this.migratedConfig[key] = value;
   }
diff --git a/lib/config/migrations/base/remove-property-migration.ts b/lib/config/migrations/base/remove-property-migration.ts
index 409296455e2c141e44c91bd5e20c1085f469c3b2..1c4d74cde2ddc01b1c195ab6148006f7ef051178 100644
--- a/lib/config/migrations/base/remove-property-migration.ts
+++ b/lib/config/migrations/base/remove-property-migration.ts
@@ -7,7 +7,7 @@ export class RemovePropertyMigration extends AbstractMigration {
   constructor(
     propertyName: string,
     originalConfig: RenovateConfig,
-    migratedConfig: RenovateConfig
+    migratedConfig: RenovateConfig,
   ) {
     super(originalConfig, migratedConfig);
     this.propertyName = propertyName;
diff --git a/lib/config/migrations/base/rename-property-migration.ts b/lib/config/migrations/base/rename-property-migration.ts
index 088cf1f49acc06bb3ff6440c3d13ddcf76a8b961..948adbd8275b81af153dccbcacafa2c06a21d250 100644
--- a/lib/config/migrations/base/rename-property-migration.ts
+++ b/lib/config/migrations/base/rename-property-migration.ts
@@ -11,7 +11,7 @@ export class RenamePropertyMigration extends AbstractMigration {
     deprecatedPropertyName: string,
     newPropertyName: string,
     originalConfig: RenovateConfig,
-    migratedConfig: RenovateConfig
+    migratedConfig: RenovateConfig,
   ) {
     super(originalConfig, migratedConfig);
     this.propertyName = deprecatedPropertyName;
diff --git a/lib/config/migrations/custom/automerge-major-migration.spec.ts b/lib/config/migrations/custom/automerge-major-migration.spec.ts
index 9dd5ebd21c87d04eae9d2dfd6976f4d137c3b556..1e8f1db629971543377686e3be82a41dac8d348d 100644
--- a/lib/config/migrations/custom/automerge-major-migration.spec.ts
+++ b/lib/config/migrations/custom/automerge-major-migration.spec.ts
@@ -10,7 +10,7 @@ describe('config/migrations/custom/automerge-major-migration', () => {
         major: {
           automerge: true,
         },
-      }
+      },
     );
   });
 
@@ -27,7 +27,7 @@ describe('config/migrations/custom/automerge-major-migration', () => {
           automerge: true,
           matchFileNames: ['test'],
         },
-      }
+      },
     );
   });
 
@@ -41,7 +41,7 @@ describe('config/migrations/custom/automerge-major-migration', () => {
         major: {
           automerge: true,
         },
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/automerge-migration.spec.ts b/lib/config/migrations/custom/automerge-migration.spec.ts
index 19f1dac618728dedc40a7faedc2ce244f8766e95..e5b5a7d5ce9edeef5ba194293eb30df207e71631 100644
--- a/lib/config/migrations/custom/automerge-migration.spec.ts
+++ b/lib/config/migrations/custom/automerge-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/automerge-migration', () => {
       } as any,
       {
         automerge: false,
-      }
+      },
     );
   });
 
@@ -27,7 +27,7 @@ describe('config/migrations/custom/automerge-migration', () => {
         major: {
           automerge: false,
         },
-      }
+      },
     );
   });
 
@@ -43,7 +43,7 @@ describe('config/migrations/custom/automerge-migration', () => {
         major: {
           automerge: false,
         },
-      }
+      },
     );
   });
 
@@ -54,7 +54,7 @@ describe('config/migrations/custom/automerge-migration', () => {
       } as any,
       {
         automerge: true,
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/automerge-minor-migration.spec.ts b/lib/config/migrations/custom/automerge-minor-migration.spec.ts
index 5ce417773f84c673cfe0528e00ea6aa7c624b64e..6df4266a032c093374d59f9a3ceaeaa6aee80c8d 100644
--- a/lib/config/migrations/custom/automerge-minor-migration.spec.ts
+++ b/lib/config/migrations/custom/automerge-minor-migration.spec.ts
@@ -10,7 +10,7 @@ describe('config/migrations/custom/automerge-minor-migration', () => {
         minor: {
           automerge: true,
         },
-      }
+      },
     );
   });
 
@@ -27,7 +27,7 @@ describe('config/migrations/custom/automerge-minor-migration', () => {
           automerge: true,
           matchFileNames: ['test'],
         },
-      }
+      },
     );
   });
 
@@ -41,7 +41,7 @@ describe('config/migrations/custom/automerge-minor-migration', () => {
         minor: {
           automerge: true,
         },
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/automerge-patch-migration.spec.ts b/lib/config/migrations/custom/automerge-patch-migration.spec.ts
index 124f599fe921b3a30f39ce4abc1257cb704de11e..dc89491f609e8d9edf1aa0a234b69eea9f1a06a9 100644
--- a/lib/config/migrations/custom/automerge-patch-migration.spec.ts
+++ b/lib/config/migrations/custom/automerge-patch-migration.spec.ts
@@ -10,7 +10,7 @@ describe('config/migrations/custom/automerge-patch-migration', () => {
         patch: {
           automerge: true,
         },
-      }
+      },
     );
   });
 
@@ -27,7 +27,7 @@ describe('config/migrations/custom/automerge-patch-migration', () => {
           automerge: true,
           matchFileNames: ['test'],
         },
-      }
+      },
     );
   });
 
@@ -41,7 +41,7 @@ describe('config/migrations/custom/automerge-patch-migration', () => {
         patch: {
           automerge: true,
         },
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/automerge-type-migration.spec.ts b/lib/config/migrations/custom/automerge-type-migration.spec.ts
index 14c96dd73e409803280e0358f414ca3e710b39a3..039c4c9e82b9a14093fab185df99b40853048a4b 100644
--- a/lib/config/migrations/custom/automerge-type-migration.spec.ts
+++ b/lib/config/migrations/custom/automerge-type-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/automerge-type-migration', () => {
       },
       {
         automergeType: 'branch',
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/automerge-type-migration', () => {
       {
         automergeType: 'test',
       },
-      false
+      false,
     );
   });
 
@@ -32,7 +32,7 @@ describe('config/migrations/custom/automerge-type-migration', () => {
       {
         automergeType: true,
       },
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/azure-gitlab-automerge-migration.spec.ts b/lib/config/migrations/custom/azure-gitlab-automerge-migration.spec.ts
index bc581e0848606a97c8b27962d8e934cb0f31ec60..d51be2cac801e9069c0317d81dc63c8037fa1929 100644
--- a/lib/config/migrations/custom/azure-gitlab-automerge-migration.spec.ts
+++ b/lib/config/migrations/custom/azure-gitlab-automerge-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/azure-gitlab-automerge-migration', () => {
       },
       {
         platformAutomerge: true,
-      }
+      },
     );
   });
 
@@ -17,7 +17,7 @@ describe('config/migrations/custom/azure-gitlab-automerge-migration', () => {
       {
         gitLabAutomerge: undefined,
       },
-      {}
+      {},
     );
   });
 
@@ -29,7 +29,7 @@ describe('config/migrations/custom/azure-gitlab-automerge-migration', () => {
       },
       {
         platformAutomerge: true,
-      }
+      },
     );
   });
 
@@ -40,7 +40,7 @@ describe('config/migrations/custom/azure-gitlab-automerge-migration', () => {
       },
       {
         platformAutomerge: true,
-      }
+      },
     );
   });
 
@@ -49,7 +49,7 @@ describe('config/migrations/custom/azure-gitlab-automerge-migration', () => {
       {
         azureAutoComplete: undefined,
       },
-      {}
+      {},
     );
   });
 
@@ -61,7 +61,7 @@ describe('config/migrations/custom/azure-gitlab-automerge-migration', () => {
       },
       {
         platformAutomerge: true,
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/base-branch-migration.spec.ts b/lib/config/migrations/custom/base-branch-migration.spec.ts
index a80299279c7fcfe20c7b1e3e25e8bcc86b9e30ee..72f25879d1d8994d1be5576bfc9793b875ff68bc 100644
--- a/lib/config/migrations/custom/base-branch-migration.spec.ts
+++ b/lib/config/migrations/custom/base-branch-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/base-branch-migration', () => {
       },
       {
         baseBranches: ['test'],
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/base-branch-migration', () => {
       } as any,
       {
         baseBranches: ['test'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/binary-source-migration.spec.ts b/lib/config/migrations/custom/binary-source-migration.spec.ts
index 8e8ca4dd2b3f6b28be10f3122d9fc2fadc5c4cd2..dc047f079638a49baec1f9716f06801982421d55 100644
--- a/lib/config/migrations/custom/binary-source-migration.spec.ts
+++ b/lib/config/migrations/custom/binary-source-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/binary-source-migration', () => {
       },
       {
         binarySource: 'global',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/branch-name-migration.spec.ts b/lib/config/migrations/custom/branch-name-migration.spec.ts
index 9ff5dfcbee7e70e3ca57fb4170fb9961ee815993..b45c9be8755d333fb2f71d4c888dc016069c8b38 100644
--- a/lib/config/migrations/custom/branch-name-migration.spec.ts
+++ b/lib/config/migrations/custom/branch-name-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/branch-name-migration', () => {
       },
       {
         branchName: 'test {{additionalBranchPrefix}} test',
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/branch-name-migration', () => {
       {
         branchName: 'test',
       },
-      false
+      false,
     );
   });
 
@@ -32,7 +32,7 @@ describe('config/migrations/custom/branch-name-migration', () => {
       {
         branchName: true,
       } as any,
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/branch-name-migration.ts b/lib/config/migrations/custom/branch-name-migration.ts
index e56c2385a4d8e9f96ee11987d29a710dc3a879ae..6470689151006b33b2d79b28d1633635cb3e6dfa 100644
--- a/lib/config/migrations/custom/branch-name-migration.ts
+++ b/lib/config/migrations/custom/branch-name-migration.ts
@@ -7,7 +7,7 @@ export class BranchNameMigration extends AbstractMigration {
   override run(value: unknown): void {
     if (is.string(value) && value.includes('{{managerBranchPrefix}}')) {
       this.rewrite(
-        value.replace('{{managerBranchPrefix}}', '{{additionalBranchPrefix}}')
+        value.replace('{{managerBranchPrefix}}', '{{additionalBranchPrefix}}'),
       );
     }
   }
diff --git a/lib/config/migrations/custom/branch-prefix-migration.spec.ts b/lib/config/migrations/custom/branch-prefix-migration.spec.ts
index cd4816fbd17dabc4cef627eb7f0fbc40e24083de..bc2a6f51a93091f96599d65745e236d1dd1cd0a3 100644
--- a/lib/config/migrations/custom/branch-prefix-migration.spec.ts
+++ b/lib/config/migrations/custom/branch-prefix-migration.spec.ts
@@ -9,7 +9,7 @@ describe('config/migrations/custom/branch-prefix-migration', () => {
       {
         additionalBranchPrefix: '{{parentDir}}-',
         branchPrefix: 'renovate/',
-      }
+      },
     );
   });
 
@@ -21,7 +21,7 @@ describe('config/migrations/custom/branch-prefix-migration', () => {
       {
         branchPrefix: 'test',
       },
-      false
+      false,
     );
   });
 
@@ -33,7 +33,7 @@ describe('config/migrations/custom/branch-prefix-migration', () => {
       {
         branchPrefix: true,
       } as any,
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/compatibility-migration.spec.ts b/lib/config/migrations/custom/compatibility-migration.spec.ts
index 8e44860120a0fdfb8c517ede4a846031b18c2233..6cdfc623262865c8ebfe02980f6f5fb1f21ceaca 100644
--- a/lib/config/migrations/custom/compatibility-migration.spec.ts
+++ b/lib/config/migrations/custom/compatibility-migration.spec.ts
@@ -12,7 +12,7 @@ describe('config/migrations/custom/compatibility-migration', () => {
         constraints: {
           test: 'test',
         },
-      }
+      },
     );
   });
 
@@ -21,7 +21,7 @@ describe('config/migrations/custom/compatibility-migration', () => {
       {
         compatibility: 'test',
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/custom/composer-ignore-platform-reqs-migration.spec.ts b/lib/config/migrations/custom/composer-ignore-platform-reqs-migration.spec.ts
index 799efd9da88eb003b8b1f445df49c9f89cefdbf3..038ac110752d68843954832a64687347f1716cde 100644
--- a/lib/config/migrations/custom/composer-ignore-platform-reqs-migration.spec.ts
+++ b/lib/config/migrations/custom/composer-ignore-platform-reqs-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/composer-ignore-platform-reqs-migration', ()
       },
       {
         composerIgnorePlatformReqs: [],
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/composer-ignore-platform-reqs-migration', ()
       },
       {
         composerIgnorePlatformReqs: null,
-      }
+      },
     );
   });
 
@@ -31,7 +31,7 @@ describe('config/migrations/custom/composer-ignore-platform-reqs-migration', ()
       {
         composerIgnorePlatformReqs: [],
       },
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/custom-managers-migration.spec.ts b/lib/config/migrations/custom/custom-managers-migration.spec.ts
index 792975860915a4c33b0c43944063cfc5ea730fe0..bb492fa23775c768ac039a82efd5e8d96bb0f923 100644
--- a/lib/config/migrations/custom/custom-managers-migration.spec.ts
+++ b/lib/config/migrations/custom/custom-managers-migration.spec.ts
@@ -39,7 +39,7 @@ describe('config/migrations/custom/custom-managers-migration', () => {
             versioningTemplate: 'gradle',
           },
         ],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/datasource-migration.spec.ts b/lib/config/migrations/custom/datasource-migration.spec.ts
index 53540fddf463cc7a26180b8865395e125482fa0a..916cd54a9631e9544703e1d5c2a2bdd1df0ddc07 100644
--- a/lib/config/migrations/custom/datasource-migration.spec.ts
+++ b/lib/config/migrations/custom/datasource-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/datasource-migration', () => {
       },
       {
         datasource: 'java-version',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/datasource-migration', () => {
       },
       {
         datasource: 'dotnet-version',
-      }
+      },
     );
   });
 
@@ -30,7 +30,7 @@ describe('config/migrations/custom/datasource-migration', () => {
       },
       {
         datasource: 'node-version',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/dep-types-migration.spec.ts b/lib/config/migrations/custom/dep-types-migration.spec.ts
index 4c21e9d053f0154b62f92aeb001207cc39197ad4..4d8a89476d4b4080e2e79235e4f07a9e6afe6298 100644
--- a/lib/config/migrations/custom/dep-types-migration.spec.ts
+++ b/lib/config/migrations/custom/dep-types-migration.spec.ts
@@ -84,7 +84,7 @@ describe('config/migrations/custom/dep-types-migration', () => {
             respectLatest: false,
           },
         ],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/dry-run-migration.spec.ts b/lib/config/migrations/custom/dry-run-migration.spec.ts
index f8256f5b1cc2cf54457254c73027f55c870d6bf8..549b4a6afe89afc7601d205c6f883205e03054eb 100644
--- a/lib/config/migrations/custom/dry-run-migration.spec.ts
+++ b/lib/config/migrations/custom/dry-run-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/dry-run-migration', () => {
       },
       {
         dryRun: 'full',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/dry-run-migration', () => {
       },
       {
         dryRun: null,
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/enabled-managers-migration.spec.ts b/lib/config/migrations/custom/enabled-managers-migration.spec.ts
index b9cdf711a78946027a77b94ec29c55126b239e4b..392018ad33c304cea9a64f3546465a8879be3114 100644
--- a/lib/config/migrations/custom/enabled-managers-migration.spec.ts
+++ b/lib/config/migrations/custom/enabled-managers-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/enabled-managers-migration', () => {
       },
       {
         enabledManagers: ['test1', 'npm', 'test2'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/enabled-managers-migration.ts b/lib/config/migrations/custom/enabled-managers-migration.ts
index ddc0b2d4b176e6bab8a48411f4e1656f13f8c7ec..e5ee0d853900ef8c1d103303ffe1407e2c8d14be 100644
--- a/lib/config/migrations/custom/enabled-managers-migration.ts
+++ b/lib/config/migrations/custom/enabled-managers-migration.ts
@@ -7,7 +7,7 @@ export class EnabledManagersMigration extends AbstractMigration {
   override run(value: unknown): void {
     if (is.array(value)) {
       const newValue = value.map((manager) =>
-        manager === 'yarn' ? 'npm' : manager
+        manager === 'yarn' ? 'npm' : manager,
       );
       this.rewrite(newValue);
     }
diff --git a/lib/config/migrations/custom/extends-migration.spec.ts b/lib/config/migrations/custom/extends-migration.spec.ts
index 6829ac6993585b609580193070760ea054958aef..bd521cba030532d19c16a2eea72eade430124972 100644
--- a/lib/config/migrations/custom/extends-migration.spec.ts
+++ b/lib/config/migrations/custom/extends-migration.spec.ts
@@ -9,7 +9,7 @@ describe('config/migrations/custom/extends-migration', () => {
       } as any,
       {
         extends: ['config:js-app'],
-      }
+      },
     );
 
     expect(ExtendsMigration).toMigrate(
@@ -18,7 +18,7 @@ describe('config/migrations/custom/extends-migration', () => {
       } as any,
       {
         extends: ['foo'],
-      }
+      },
     );
   });
 
@@ -29,7 +29,7 @@ describe('config/migrations/custom/extends-migration', () => {
       },
       {
         extends: ['foo', 'config:js-app', 'bar'],
-      }
+      },
     );
   });
 
@@ -40,7 +40,7 @@ describe('config/migrations/custom/extends-migration', () => {
       } as any,
       {
         extends: [],
-      }
+      },
     );
   });
 
@@ -51,7 +51,7 @@ describe('config/migrations/custom/extends-migration', () => {
       },
       {
         extends: [],
-      }
+      },
     );
   });
 
@@ -68,7 +68,7 @@ describe('config/migrations/custom/extends-migration', () => {
       },
       {
         extends: ['local>org/renovate-config'],
-      }
+      },
     );
     GlobalConfig.reset();
   });
@@ -80,7 +80,7 @@ describe('config/migrations/custom/extends-migration', () => {
       },
       {
         extends: ['mergeConfidence:all-badges'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/fetch-release-notes-migration.spec.ts b/lib/config/migrations/custom/fetch-release-notes-migration.spec.ts
index fc15f7d4885e06c4d0316de1238b58f09e6befcf..2427aee13e054e7b22d8e016c190bbf5436fef35 100644
--- a/lib/config/migrations/custom/fetch-release-notes-migration.spec.ts
+++ b/lib/config/migrations/custom/fetch-release-notes-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/fetch-release-notes-migration', () => {
       },
       {
         fetchChangeLogs: 'off',
-      }
+      },
     );
     expect(FetchReleaseNotesMigration).toMigrate(
       {
@@ -16,7 +16,7 @@ describe('config/migrations/custom/fetch-release-notes-migration', () => {
       },
       {
         fetchChangeLogs: 'pr',
-      }
+      },
     );
     expect(FetchReleaseNotesMigration).toMigrate(
       {
@@ -24,7 +24,7 @@ describe('config/migrations/custom/fetch-release-notes-migration', () => {
       },
       {
         fetchChangeLogs: 'pr',
-      }
+      },
     );
     expect(FetchReleaseNotesMigration).toMigrate(
       {
@@ -32,7 +32,7 @@ describe('config/migrations/custom/fetch-release-notes-migration', () => {
       },
       {
         fetchChangeLogs: 'off',
-      }
+      },
     );
     expect(FetchReleaseNotesMigration).toMigrate(
       {
@@ -40,7 +40,7 @@ describe('config/migrations/custom/fetch-release-notes-migration', () => {
       },
       {
         fetchChangeLogs: 'branch',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/fetch-release-notes-migration.ts b/lib/config/migrations/custom/fetch-release-notes-migration.ts
index dda051f8fa0605af53b0bb67391b2b75b3d9c542..657290cf770a875f80e955aedf266e7241b1b524 100644
--- a/lib/config/migrations/custom/fetch-release-notes-migration.ts
+++ b/lib/config/migrations/custom/fetch-release-notes-migration.ts
@@ -8,7 +8,7 @@ export class FetchReleaseNotesMigration extends RenamePropertyMigration {
       'fetchReleaseNotes',
       'fetchChangeLogs',
       originalConfig,
-      migratedConfig
+      migratedConfig,
     );
   }
 
diff --git a/lib/config/migrations/custom/go-mod-tidy-migration.spec.ts b/lib/config/migrations/custom/go-mod-tidy-migration.spec.ts
index 747a7b5fe04ae6c36c6d039c625a93026722a514..5a761299bd3b89eab36b03dd75ccf0e695dba2b2 100644
--- a/lib/config/migrations/custom/go-mod-tidy-migration.spec.ts
+++ b/lib/config/migrations/custom/go-mod-tidy-migration.spec.ts
@@ -9,7 +9,7 @@ describe('config/migrations/custom/go-mod-tidy-migration', () => {
       },
       {
         postUpdateOptions: ['test', 'gomodTidy'],
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/go-mod-tidy-migration', () => {
       },
       {
         postUpdateOptions: ['gomodTidy'],
-      }
+      },
     );
   });
 
@@ -29,7 +29,7 @@ describe('config/migrations/custom/go-mod-tidy-migration', () => {
       {
         gomodTidy: false,
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/custom/host-rules-migration.spec.ts b/lib/config/migrations/custom/host-rules-migration.spec.ts
index 8a14eb0d743a7130f7a3fbdf25f1cf915b08df28..fdc77fa14086cd1a5f3486745b4b314f8fd9e5e0 100644
--- a/lib/config/migrations/custom/host-rules-migration.spec.ts
+++ b/lib/config/migrations/custom/host-rules-migration.spec.ts
@@ -59,7 +59,7 @@ describe('config/migrations/custom/host-rules-migration', () => {
           { matchHost: 'https://domain.com/', token: '123test' },
           { matchHost: 'some.domain.com', token: '123test' },
         ],
-      }
+      },
     );
   });
 
@@ -75,14 +75,14 @@ describe('config/migrations/custom/host-rules-migration', () => {
             },
           ],
         } as any,
-        {}
+        {},
       ).run([
         {
           matchHost: 'https://some-diff.domain.com',
           baseUrl: 'https://some.domain.com',
           token: '123test',
         },
-      ])
+      ]),
     ).toThrow(CONFIG_VALIDATION);
   });
 });
diff --git a/lib/config/migrations/custom/host-rules-migration.ts b/lib/config/migrations/custom/host-rules-migration.ts
index bcf064fe13ffc161cf6a7b458cea72c72483474f..b9d1d5af86b49f0a808a8eb201fd21cb35d03435 100644
--- a/lib/config/migrations/custom/host-rules-migration.ts
+++ b/lib/config/migrations/custom/host-rules-migration.ts
@@ -85,7 +85,7 @@ function validateHostRule(rule: LegacyHostRule & HostRule): void {
     } else {
       logger.warn(
         { hosts },
-        'Duplicate host values found, please only use `matchHost` to specify the host'
+        'Duplicate host values found, please only use `matchHost` to specify the host',
       );
     }
   }
@@ -100,7 +100,7 @@ function massageUrl(url: string): string {
 }
 
 function removeUndefinedFields(
-  obj: Record<string, any>
+  obj: Record<string, any>,
 ): Record<string, string> {
   const result: Record<string, string> = {};
   for (const key of Object.keys(obj)) {
diff --git a/lib/config/migrations/custom/ignore-node-modules-migration.spec.ts b/lib/config/migrations/custom/ignore-node-modules-migration.spec.ts
index d90978b56f177eb797d0de5458dfc1527c13b94a..7b2513dc853687399dc8a965b0ae7262444ca049 100644
--- a/lib/config/migrations/custom/ignore-node-modules-migration.spec.ts
+++ b/lib/config/migrations/custom/ignore-node-modules-migration.spec.ts
@@ -6,7 +6,7 @@ describe('config/migrations/custom/ignore-node-modules-migration', () => {
       {
         ignoreNodeModules: true,
       },
-      { ignorePaths: ['node_modules/'] }
+      { ignorePaths: ['node_modules/'] },
     );
   });
 });
diff --git a/lib/config/migrations/custom/ignore-npmrc-file-migration.spec.ts b/lib/config/migrations/custom/ignore-npmrc-file-migration.spec.ts
index 9d32f6619703582974761144c87416f582f3a202..4c46ec04303d0f36216742f7635472c6701ada3f 100644
--- a/lib/config/migrations/custom/ignore-npmrc-file-migration.spec.ts
+++ b/lib/config/migrations/custom/ignore-npmrc-file-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/ignore-npmrc-file-migration', () => {
       },
       {
         npmrc: '',
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/ignore-npmrc-file-migration', () => {
       },
       {
         npmrc: '',
-      }
+      },
     );
   });
 
@@ -32,7 +32,7 @@ describe('config/migrations/custom/ignore-npmrc-file-migration', () => {
       } as any,
       {
         npmrc: '',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/include-forks-migration.spec.ts b/lib/config/migrations/custom/include-forks-migration.spec.ts
index 03de829b977e62262209686fe2e5df83dbce2069..10ef5fa08c3bf0c87be6a8b64adc920c31175ca1 100644
--- a/lib/config/migrations/custom/include-forks-migration.spec.ts
+++ b/lib/config/migrations/custom/include-forks-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/include-forks-migration', () => {
       },
       {
         forkProcessing: 'enabled',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/include-forks-migration', () => {
       },
       {
         forkProcessing: 'disabled',
-      }
+      },
     );
   });
 
@@ -28,7 +28,7 @@ describe('config/migrations/custom/include-forks-migration', () => {
       {
         includeForks: 'test',
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/custom/match-datasources-migration.spec.ts b/lib/config/migrations/custom/match-datasources-migration.spec.ts
index 8efaf46950d39b66b8819c8ecfeb1a268d31b477..6b57cdd9bb4179885427f0f00bb0eb8e97b95e2a 100644
--- a/lib/config/migrations/custom/match-datasources-migration.spec.ts
+++ b/lib/config/migrations/custom/match-datasources-migration.spec.ts
@@ -13,7 +13,7 @@ describe('config/migrations/custom/match-datasources-migration', () => {
           'npm',
           'node-version',
         ],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/match-strings-migration.spec.ts b/lib/config/migrations/custom/match-strings-migration.spec.ts
index 6091d091ffe625807297233fb05fc81cdd770386..353d910d26f77b9de004dbc8aa52e28445188d20 100644
--- a/lib/config/migrations/custom/match-strings-migration.spec.ts
+++ b/lib/config/migrations/custom/match-strings-migration.spec.ts
@@ -14,7 +14,7 @@ describe('config/migrations/custom/match-strings-migration', () => {
       },
       {
         matchStrings: ['(?<packageName>', '(?<packageName>(?<packageName>'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/match-strings-migration.ts b/lib/config/migrations/custom/match-strings-migration.ts
index 269d564038de1386d79fad55c9f820fe8905df26..795cfc325a3d5e95d2b3e7c72be56c960b4dac1b 100644
--- a/lib/config/migrations/custom/match-strings-migration.ts
+++ b/lib/config/migrations/custom/match-strings-migration.ts
@@ -10,7 +10,7 @@ export class MatchStringsMigration extends AbstractMigration {
       const newValue = value
         .filter(is.nonEmptyString)
         .map((matchString) =>
-          matchString.replace(regEx(/\(\?<lookupName>/g), '(?<packageName>')
+          matchString.replace(regEx(/\(\?<lookupName>/g), '(?<packageName>'),
         );
 
       this.rewrite(newValue);
diff --git a/lib/config/migrations/custom/node-migration.spec.ts b/lib/config/migrations/custom/node-migration.spec.ts
index c7eaa60f6f406d80a5951a855bb814ef4f7cfc80..f9a7ec48fadb80bb6a6a8df3297c4b01754add50 100644
--- a/lib/config/migrations/custom/node-migration.spec.ts
+++ b/lib/config/migrations/custom/node-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/node-migration', () => {
       },
       {
         travis: { enabled: true },
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/node-migration', () => {
       {
         node: { automerge: false },
         travis: { enabled: true },
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/package-files-migration.spec.ts b/lib/config/migrations/custom/package-files-migration.spec.ts
index 51ce8f0902a8497aa75e01f4db9f1fd17b45e403..3ed4fa3568f3a8f58b60d9eecb64e34243603a25 100644
--- a/lib/config/migrations/custom/package-files-migration.spec.ts
+++ b/lib/config/migrations/custom/package-files-migration.spec.ts
@@ -14,7 +14,7 @@ describe('config/migrations/custom/package-files-migration', () => {
       {
         includePaths: ['package.json'],
         packageRules: [{ paths: ['package.json'], packageRules: [] }],
-      }
+      },
     );
   });
 
@@ -25,7 +25,7 @@ describe('config/migrations/custom/package-files-migration', () => {
       },
       {
         includePaths: ['package.json', 'Chart.yaml'],
-      }
+      },
     );
   });
 
@@ -49,7 +49,7 @@ describe('config/migrations/custom/package-files-migration', () => {
             packageRules: [{ labels: ['breaking'] }],
           },
         ],
-      }
+      },
     );
   });
 
@@ -60,7 +60,7 @@ describe('config/migrations/custom/package-files-migration', () => {
       },
       {
         includePaths: ['package.json'],
-      }
+      },
     );
   });
 
@@ -92,7 +92,7 @@ describe('config/migrations/custom/package-files-migration', () => {
             ],
           },
         ],
-      }
+      },
     );
   });
 
@@ -106,7 +106,7 @@ describe('config/migrations/custom/package-files-migration', () => {
       {
         includePaths: ['package.json'],
         packageRules: [{ labels: ['linter'] }],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/package-name-migration.spec.ts b/lib/config/migrations/custom/package-name-migration.spec.ts
index 3b2b1a890a7bf62e41b125879895bc4b779f0595..f98e08a325c52b3ef049926f6e598dbef06652b9 100644
--- a/lib/config/migrations/custom/package-name-migration.spec.ts
+++ b/lib/config/migrations/custom/package-name-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/package-name-migration', () => {
       },
       {
         packageNames: ['test'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/package-pattern-migration.spec.ts b/lib/config/migrations/custom/package-pattern-migration.spec.ts
index 0ed3eb12228b5e8d07e4c1327cc92bdc71ad4ae6..b16224bd3e9a47204db20dc60cd76dc4fbd2ef85 100644
--- a/lib/config/migrations/custom/package-pattern-migration.spec.ts
+++ b/lib/config/migrations/custom/package-pattern-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/package-pattern-migration', () => {
       },
       {
         packagePatterns: ['test'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/package-rules-migration.spec.ts b/lib/config/migrations/custom/package-rules-migration.spec.ts
index 3e6ee625006361c880b9ed0500bc01091bbb1dd2..592e361005aba7404b995f45e420a61568e287f3 100644
--- a/lib/config/migrations/custom/package-rules-migration.spec.ts
+++ b/lib/config/migrations/custom/package-rules-migration.spec.ts
@@ -27,7 +27,7 @@ describe('config/migrations/custom/package-rules-migration', () => {
 
     const mappedProperties = Object.keys(migratedPackageRules![0]);
     const expectedMappedProperties = Object.keys(
-      originalConfig.packageRules![0]
+      originalConfig.packageRules![0],
     ).map((key) => renameMap[key as keyof typeof renameMap] ?? key);
 
     expect(mappedProperties).toEqual(expectedMappedProperties);
@@ -54,7 +54,7 @@ describe('config/migrations/custom/package-rules-migration', () => {
             },
           },
         ],
-      }
+      },
     );
   });
 
@@ -83,7 +83,7 @@ describe('config/migrations/custom/package-rules-migration', () => {
             addLabels: ['java'],
           },
         ],
-      }
+      },
     );
   });
 
@@ -104,7 +104,7 @@ describe('config/migrations/custom/package-rules-migration', () => {
             addLabels: ['py'],
           },
         ],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/packages-migration.spec.ts b/lib/config/migrations/custom/packages-migration.spec.ts
index 07a8151c3d6c96e6b2c2ce3b23446cf7313b8e85..255f9561c777c45aa0f13d0ec6de0bfd623a8000 100644
--- a/lib/config/migrations/custom/packages-migration.spec.ts
+++ b/lib/config/migrations/custom/packages-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/packages-migration', () => {
       },
       {
         packageRules: [{ matchPackagePatterns: ['*'] }],
-      }
+      },
     );
   });
 
@@ -23,7 +23,7 @@ describe('config/migrations/custom/packages-migration', () => {
           { matchPackageNames: [] },
           { matchPackagePatterns: ['*'] },
         ],
-      }
+      },
     );
   });
 
@@ -35,7 +35,7 @@ describe('config/migrations/custom/packages-migration', () => {
       },
       {
         packageRules: [{ matchPackageNames: [] }],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/path-rules-migration.spec.ts b/lib/config/migrations/custom/path-rules-migration.spec.ts
index e7ab02691957b9c95c7537188b98638abbcc0b17..5e4829d59af5d909419593bf39082d53d6974078 100644
--- a/lib/config/migrations/custom/path-rules-migration.spec.ts
+++ b/lib/config/migrations/custom/path-rules-migration.spec.ts
@@ -18,7 +18,7 @@ describe('config/migrations/custom/path-rules-migration', () => {
             extends: ['foo'],
           },
         ],
-      }
+      },
     );
   });
 
@@ -40,7 +40,7 @@ describe('config/migrations/custom/path-rules-migration', () => {
             extends: ['foo'],
           },
         ],
-      }
+      },
     );
   });
 
@@ -49,7 +49,7 @@ describe('config/migrations/custom/path-rules-migration', () => {
       {
         pathRules: 'test',
       },
-      {}
+      {},
     );
   });
 
@@ -80,7 +80,7 @@ describe('config/migrations/custom/path-rules-migration', () => {
             extends: ['foo'],
           },
         ],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/path-rules-migration.ts b/lib/config/migrations/custom/path-rules-migration.ts
index a49b149c99f9a248b3147c75afbeb775949d100e..da322add3c1eeb7282a541fd4e73deded0e6d9f7 100644
--- a/lib/config/migrations/custom/path-rules-migration.ts
+++ b/lib/config/migrations/custom/path-rules-migration.ts
@@ -12,7 +12,7 @@ export class PathRulesMigration extends AbstractMigration {
     if (is.array<PackageRule>(value)) {
       this.setHard(
         'packageRules',
-        is.array(packageRules) ? packageRules.concat(value) : value
+        is.array(packageRules) ? packageRules.concat(value) : value,
       );
     }
   }
diff --git a/lib/config/migrations/custom/pin-versions-migration.spec.ts b/lib/config/migrations/custom/pin-versions-migration.spec.ts
index c3f339a853c1a16a8aacc5c4591af3921dfcf9f9..4983eb8c738d73d702a08d097ff86b0b61656661 100644
--- a/lib/config/migrations/custom/pin-versions-migration.spec.ts
+++ b/lib/config/migrations/custom/pin-versions-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/pin-versions-migration', () => {
       },
       {
         rangeStrategy: 'pin',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/pin-versions-migration', () => {
       },
       {
         rangeStrategy: 'replace',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/post-update-options-migration.spec.ts b/lib/config/migrations/custom/post-update-options-migration.spec.ts
index 87f59b813e225ff2c7d947553cec5e013a5f6f92..43510386064bf1625db67236726204268c1b408a 100644
--- a/lib/config/migrations/custom/post-update-options-migration.spec.ts
+++ b/lib/config/migrations/custom/post-update-options-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/post-update-options-migration', () => {
       },
       {
         postUpdateOptions: ['gomodTidy'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/raise-deprecation-warnings-migration.spec.ts b/lib/config/migrations/custom/raise-deprecation-warnings-migration.spec.ts
index 61f0194202562dc230722a5664c22fc795048189..62921740777e1fd8839ceab24e0eb46d4919dec3 100644
--- a/lib/config/migrations/custom/raise-deprecation-warnings-migration.spec.ts
+++ b/lib/config/migrations/custom/raise-deprecation-warnings-migration.spec.ts
@@ -6,7 +6,7 @@ describe('config/migrations/custom/raise-deprecation-warnings-migration', () =>
       { raiseDeprecationWarnings: false },
       {
         suppressNotifications: ['deprecationWarningIssues'],
-      }
+      },
     );
   });
 
@@ -16,7 +16,7 @@ describe('config/migrations/custom/raise-deprecation-warnings-migration', () =>
         raiseDeprecationWarnings: false,
         suppressNotifications: ['test'],
       },
-      { suppressNotifications: ['test', 'deprecationWarningIssues'] }
+      { suppressNotifications: ['test', 'deprecationWarningIssues'] },
     );
   });
 
@@ -25,7 +25,7 @@ describe('config/migrations/custom/raise-deprecation-warnings-migration', () =>
       {
         raiseDeprecationWarnings: true,
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/custom/raise-deprecation-warnings-migration.ts b/lib/config/migrations/custom/raise-deprecation-warnings-migration.ts
index bc4c4c766f5acf907652883cb50279e652ca0a33..fa04aeedb201684bf4acc08a3076a6652ec3afb3 100644
--- a/lib/config/migrations/custom/raise-deprecation-warnings-migration.ts
+++ b/lib/config/migrations/custom/raise-deprecation-warnings-migration.ts
@@ -12,7 +12,7 @@ export class RaiseDeprecationWarningsMigration extends AbstractMigration {
         'suppressNotifications',
         Array.isArray(suppressNotifications)
           ? suppressNotifications.concat(['deprecationWarningIssues'])
-          : ['deprecationWarningIssues']
+          : ['deprecationWarningIssues'],
       );
     }
   }
diff --git a/lib/config/migrations/custom/rebase-conflicted-prs-migration.spec.ts b/lib/config/migrations/custom/rebase-conflicted-prs-migration.spec.ts
index a237f6feaf58583412d5bc86e32aa33dd319a139..dc4e278b6c9dd5da8a765cd032881421a243c8ec 100644
--- a/lib/config/migrations/custom/rebase-conflicted-prs-migration.spec.ts
+++ b/lib/config/migrations/custom/rebase-conflicted-prs-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/rebase-conflicted-prs-migration', () => {
       },
       {
         rebaseWhen: 'never',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/rebase-stale-prs-migration.spec.ts b/lib/config/migrations/custom/rebase-stale-prs-migration.spec.ts
index ab2c0772d9e798bd4205f0f546bfb132b81c9706..0bc58f5aa1325423dd118e86ed59e809b93ea1ce 100644
--- a/lib/config/migrations/custom/rebase-stale-prs-migration.spec.ts
+++ b/lib/config/migrations/custom/rebase-stale-prs-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/rebase-stale-prs-migration', () => {
       },
       {
         rebaseWhen: 'behind-base-branch',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/rebase-stale-prs-migration', () => {
       },
       {
         rebaseWhen: 'conflicted',
-      }
+      },
     );
   });
 
@@ -30,7 +30,7 @@ describe('config/migrations/custom/rebase-stale-prs-migration', () => {
       },
       {
         rebaseWhen: 'auto',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/rebase-stale-prs-migration.ts b/lib/config/migrations/custom/rebase-stale-prs-migration.ts
index 6df27b8bf16fb88f4e814ef7c23c1a243c410e12..2d233f6094c9208514340f175958976970f5556a 100644
--- a/lib/config/migrations/custom/rebase-stale-prs-migration.ts
+++ b/lib/config/migrations/custom/rebase-stale-prs-migration.ts
@@ -12,7 +12,7 @@ export class RebaseStalePrsMigration extends AbstractMigration {
       if (is.boolean(value)) {
         this.setSafely(
           'rebaseWhen',
-          value ? 'behind-base-branch' : 'conflicted'
+          value ? 'behind-base-branch' : 'conflicted',
         );
       }
 
diff --git a/lib/config/migrations/custom/recreate-closed-migration.spec.ts b/lib/config/migrations/custom/recreate-closed-migration.spec.ts
index 38628ca6c44f1f1890778fc71719178ead29a0ec..c872c7ca41c29f1da6725dfe90474a76554e5a22 100644
--- a/lib/config/migrations/custom/recreate-closed-migration.spec.ts
+++ b/lib/config/migrations/custom/recreate-closed-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/recreate-closed-migration', () => {
       },
       {
         recreateWhen: 'always',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/recreate-closed-migration', () => {
       },
       {
         recreateWhen: 'auto',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/renovate-fork-migration.spec.ts b/lib/config/migrations/custom/renovate-fork-migration.spec.ts
index daa11f1885918c05e9e131c26a87cc810d51716d..3f372f34b6f56feb107e9d5de2d07fd21205affb 100644
--- a/lib/config/migrations/custom/renovate-fork-migration.spec.ts
+++ b/lib/config/migrations/custom/renovate-fork-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/renovate-fork-migration', () => {
       },
       {
         forkProcessing: 'enabled',
-      }
+      },
     );
   });
 
@@ -19,7 +19,7 @@ describe('config/migrations/custom/renovate-fork-migration', () => {
       },
       {
         forkProcessing: 'disabled',
-      }
+      },
     );
   });
 
@@ -28,7 +28,7 @@ describe('config/migrations/custom/renovate-fork-migration', () => {
       {
         renovateFork: 'test',
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/custom/require-config-migration.spec.ts b/lib/config/migrations/custom/require-config-migration.spec.ts
index 03fbe4002b518cfe62d8bb0dd44dcfde3af05888..2e6ee6290726794bb8ec677c705e348bfdccbeb1 100644
--- a/lib/config/migrations/custom/require-config-migration.spec.ts
+++ b/lib/config/migrations/custom/require-config-migration.spec.ts
@@ -9,7 +9,7 @@ describe('config/migrations/custom/require-config-migration', () => {
       },
       {
         requireConfig: 'required',
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/require-config-migration', () => {
       },
       {
         requireConfig: 'optional',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/required-status-checks-migration.spec.ts b/lib/config/migrations/custom/required-status-checks-migration.spec.ts
index e664433ab3c5a202ceccbd8bea30ad6af7c550fc..a738f0521da33934bf41d6790447d861194139cd 100644
--- a/lib/config/migrations/custom/required-status-checks-migration.spec.ts
+++ b/lib/config/migrations/custom/required-status-checks-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/required-status-checks-migration', () => {
       },
       {
         ignoreTests: true,
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/schedule-migration.spec.ts b/lib/config/migrations/custom/schedule-migration.spec.ts
index 5177a5283b571934f5db3d97530a09623a7ed86c..46c4ef6bcf9ae4e817c111221a70a13deba2e195 100644
--- a/lib/config/migrations/custom/schedule-migration.spec.ts
+++ b/lib/config/migrations/custom/schedule-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/schedule-migration', () => {
       } as any,
       {
         schedule: 'on friday',
-      } as any
+      } as any,
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/schedule-migration', () => {
       {
         schedule: 'every weekday',
       } as any,
-      false
+      false,
     );
   });
 
@@ -32,7 +32,7 @@ describe('config/migrations/custom/schedule-migration', () => {
       {
         schedule: 'after 5:00pm on wednesday and thursday',
       } as any,
-      false
+      false,
     );
   });
 
@@ -44,7 +44,7 @@ describe('config/migrations/custom/schedule-migration', () => {
       {
         schedule: 'after 1:00pm and before 5:00pm',
       } as any,
-      false
+      false,
     );
   });
 
@@ -56,7 +56,7 @@ describe('config/migrations/custom/schedule-migration', () => {
       {
         schedule: 'after and before 5:00',
       } as any,
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/schedule-migration.ts b/lib/config/migrations/custom/schedule-migration.ts
index 6ccbb06067b8a5ec966a7957ac274cec581e1d84..3b5a92ff4bb9017ae47b0b846c42bac0c5d35744 100644
--- a/lib/config/migrations/custom/schedule-migration.ts
+++ b/lib/config/migrations/custom/schedule-migration.ts
@@ -26,7 +26,7 @@ export class ScheduleMigration extends AbstractMigration {
         ) {
           const parsedSchedule = later.parse.text(
             // We need to massage short hours first before we can parse it
-            schedules[i].replace(regEx(/( \d?\d)((a|p)m)/g), '$1:00$2') // TODO #12071
+            schedules[i].replace(regEx(/( \d?\d)((a|p)m)/g), '$1:00$2'), // TODO #12071
           ).schedules[0];
           // Only migrate if the after time is greater than before, e.g. "after 10pm and before 5am"
           if (!parsedSchedule?.t_a || !parsedSchedule.t_b) {
@@ -38,20 +38,20 @@ export class ScheduleMigration extends AbstractMigration {
             schedules[i] = toSplit
               .replace(
                 regEx(
-                  /^(.*?)(after|before) (.*?) and (after|before) (.*?)( |$)(.*)/
+                  /^(.*?)(after|before) (.*?) and (after|before) (.*?)( |$)(.*)/,
                 ), // TODO #12071
-                '$1$2 $3 $7'
+                '$1$2 $3 $7',
               )
               .trim();
             schedules.push(
               toSplit
                 .replace(
                   regEx(
-                    /^(.*?)(after|before) (.*?) and (after|before) (.*?)( |$)(.*)/
+                    /^(.*?)(after|before) (.*?) and (after|before) (.*?)( |$)(.*)/,
                   ), // TODO #12071
-                  '$1$4 $5 $7'
+                  '$1$4 $5 $7',
                 )
-                .trim()
+                .trim(),
             );
           }
         }
@@ -60,13 +60,13 @@ export class ScheduleMigration extends AbstractMigration {
         if (schedules[i].includes('on the last day of the month')) {
           schedules[i] = schedules[i].replace(
             'on the last day of the month',
-            'on the first day of the month'
+            'on the first day of the month',
           );
         }
         if (schedules[i].includes('on every weekday')) {
           schedules[i] = schedules[i].replace(
             'on every weekday',
-            'every weekday'
+            'every weekday',
           );
         }
         if (schedules[i].endsWith(' every day')) {
@@ -74,12 +74,12 @@ export class ScheduleMigration extends AbstractMigration {
         }
         if (
           regEx(/every (mon|tues|wednes|thurs|fri|satur|sun)day$/).test(
-            schedules[i]
+            schedules[i],
           ) // TODO #12071
         ) {
           schedules[i] = schedules[i].replace(
             regEx(/every ([a-z]*day)$/), // TODO #12071
-            'on $1'
+            'on $1',
           );
         }
         if (schedules[i].endsWith('days')) {
diff --git a/lib/config/migrations/custom/semantic-commits-migration.spec.ts b/lib/config/migrations/custom/semantic-commits-migration.spec.ts
index 89de9d07f483bafba604b2d19ac8ed5ecdb6583f..76d74f3694a3e7cac0e537094019a793f4fec7f1 100644
--- a/lib/config/migrations/custom/semantic-commits-migration.spec.ts
+++ b/lib/config/migrations/custom/semantic-commits-migration.spec.ts
@@ -6,7 +6,7 @@ describe('config/migrations/custom/semantic-commits-migration', () => {
       {
         semanticCommits: true,
       } as any,
-      { semanticCommits: 'enabled' }
+      { semanticCommits: 'enabled' },
     );
   });
 
@@ -15,7 +15,7 @@ describe('config/migrations/custom/semantic-commits-migration', () => {
       {
         semanticCommits: false,
       } as any,
-      { semanticCommits: 'disabled' }
+      { semanticCommits: 'disabled' },
     );
   });
 
@@ -24,7 +24,7 @@ describe('config/migrations/custom/semantic-commits-migration', () => {
       {
         semanticCommits: null,
       } as any,
-      { semanticCommits: 'auto' }
+      { semanticCommits: 'auto' },
     );
   });
 
@@ -33,7 +33,7 @@ describe('config/migrations/custom/semantic-commits-migration', () => {
       {
         semanticCommits: 'test',
       } as any,
-      { semanticCommits: 'auto' }
+      { semanticCommits: 'auto' },
     );
   });
 
@@ -43,7 +43,7 @@ describe('config/migrations/custom/semantic-commits-migration', () => {
         semanticCommits: 'enabled',
       } as any,
       { semanticCommits: 'enabled' },
-      false
+      false,
     );
   });
 
@@ -53,7 +53,7 @@ describe('config/migrations/custom/semantic-commits-migration', () => {
         semanticCommits: 'disabled',
       } as any,
       { semanticCommits: 'disabled' },
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/semantic-prefix-migration.spec.ts b/lib/config/migrations/custom/semantic-prefix-migration.spec.ts
index c02c779eed4aed0724e625100e757e0665e989ab..67ca56c92da307d27d07258b9379748a725a8b44 100644
--- a/lib/config/migrations/custom/semantic-prefix-migration.spec.ts
+++ b/lib/config/migrations/custom/semantic-prefix-migration.spec.ts
@@ -6,7 +6,7 @@ describe('config/migrations/custom/semantic-prefix-migration', () => {
       {
         semanticPrefix: 'fix(deps): ',
       } as any,
-      { semanticCommitType: 'fix', semanticCommitScope: 'deps' }
+      { semanticCommitType: 'fix', semanticCommitScope: 'deps' },
     );
   });
 
@@ -15,7 +15,7 @@ describe('config/migrations/custom/semantic-prefix-migration', () => {
       {
         semanticPrefix: true,
       } as any,
-      {}
+      {},
     );
   });
 
@@ -24,7 +24,7 @@ describe('config/migrations/custom/semantic-prefix-migration', () => {
       {
         semanticPrefix: 'fix: ',
       } as any,
-      { semanticCommitType: 'fix', semanticCommitScope: null }
+      { semanticCommitType: 'fix', semanticCommitScope: null },
     );
   });
 
@@ -33,7 +33,7 @@ describe('config/migrations/custom/semantic-prefix-migration', () => {
       {
         semanticPrefix: 'test',
       } as any,
-      { semanticCommitType: 'test', semanticCommitScope: null }
+      { semanticCommitType: 'test', semanticCommitScope: null },
     );
   });
 });
diff --git a/lib/config/migrations/custom/separate-major-release-migration.spec.ts b/lib/config/migrations/custom/separate-major-release-migration.spec.ts
index e43742bec3423f130cbad16bc2e2a3fd7443f3c0..9c610b35b57d93e3991bcbb6c96e917e6f28d83d 100644
--- a/lib/config/migrations/custom/separate-major-release-migration.spec.ts
+++ b/lib/config/migrations/custom/separate-major-release-migration.spec.ts
@@ -9,7 +9,7 @@ describe('config/migrations/custom/separate-major-release-migration', () => {
       {
         separateMajorMinor: true,
         separateMajorReleases: true,
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/separate-multiple-major-migration.spec.ts b/lib/config/migrations/custom/separate-multiple-major-migration.spec.ts
index ff1ce38fe5171fa24a00fb613cb3a6024a28e744..e206b496fdd07bb0c72a8bd2885bf5894e09e5c6 100644
--- a/lib/config/migrations/custom/separate-multiple-major-migration.spec.ts
+++ b/lib/config/migrations/custom/separate-multiple-major-migration.spec.ts
@@ -9,7 +9,7 @@ describe('config/migrations/custom/separate-multiple-major-migration', () => {
       },
       {
         separateMajorReleases: true,
-      }
+      },
     );
   });
 
@@ -21,7 +21,7 @@ describe('config/migrations/custom/separate-multiple-major-migration', () => {
       {
         separateMultipleMajor: true,
       },
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/stability-days-migration.spec.ts b/lib/config/migrations/custom/stability-days-migration.spec.ts
index b13719bb3901ea1ff78d6a9551f4ff87eac98b29..bc1e14380efcb04057a385f618b074f9bce470ff 100644
--- a/lib/config/migrations/custom/stability-days-migration.spec.ts
+++ b/lib/config/migrations/custom/stability-days-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/stability-days-migration', () => {
       },
       {
         minimumReleaseAge: null,
-      }
+      },
     );
     expect(StabilityDaysMigration).toMigrate(
       {
@@ -16,7 +16,7 @@ describe('config/migrations/custom/stability-days-migration', () => {
       },
       {
         minimumReleaseAge: '2 days',
-      }
+      },
     );
     expect(StabilityDaysMigration).toMigrate(
       {
@@ -24,7 +24,7 @@ describe('config/migrations/custom/stability-days-migration', () => {
       },
       {
         minimumReleaseAge: '1 day',
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/suppress-notifications-migration.spec.ts b/lib/config/migrations/custom/suppress-notifications-migration.spec.ts
index bf0838ecde0d6bce216265bb62f5aca3d72711e8..4137b1051a56b025261107bd33d485c1ebde4218 100644
--- a/lib/config/migrations/custom/suppress-notifications-migration.spec.ts
+++ b/lib/config/migrations/custom/suppress-notifications-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/suppress-notifications-migration', () => {
       },
       {
         suppressNotifications: ['test'],
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/suppress-notifications-migration', () => {
       {
         suppressNotifications: ['test'],
       },
-      false
+      false,
     );
   });
 
@@ -32,7 +32,7 @@ describe('config/migrations/custom/suppress-notifications-migration', () => {
       {
         suppressNotifications: [],
       },
-      false
+      false,
     );
   });
 });
diff --git a/lib/config/migrations/custom/trust-level-migration.spec.ts b/lib/config/migrations/custom/trust-level-migration.spec.ts
index 4ecb187dddc85c79bc31c08fc3af5b6547b41608..ee9e9f6bdac0212bed20edc07f4b7f6decf31b34 100644
--- a/lib/config/migrations/custom/trust-level-migration.spec.ts
+++ b/lib/config/migrations/custom/trust-level-migration.spec.ts
@@ -10,7 +10,7 @@ describe('config/migrations/custom/trust-level-migration', () => {
         allowCustomCrateRegistries: true,
         allowScripts: true,
         exposeAllEnv: true,
-      }
+      },
     );
   });
 
@@ -26,7 +26,7 @@ describe('config/migrations/custom/trust-level-migration', () => {
         allowCustomCrateRegistries: false,
         allowScripts: false,
         exposeAllEnv: false,
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/unpublish-safe-migration.spec.ts b/lib/config/migrations/custom/unpublish-safe-migration.spec.ts
index 5728c0d73652ec1706256e46eca8482f640d2d72..150b21752a200f6c0b5eef33d2632a337af67497 100644
--- a/lib/config/migrations/custom/unpublish-safe-migration.spec.ts
+++ b/lib/config/migrations/custom/unpublish-safe-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       },
       {
         extends: ['npm:unpublishSafe'],
-      }
+      },
     );
   });
 
@@ -20,7 +20,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       } as any,
       {
         extends: ['test', 'npm:unpublishSafe'],
-      }
+      },
     );
   });
 
@@ -32,7 +32,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       } as any,
       {
         extends: ['npm:unpublishSafe'],
-      }
+      },
     );
   });
 
@@ -44,7 +44,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       } as any,
       {
         extends: ['foo', 'npm:unpublishSafe', 'bar'],
-      }
+      },
     );
 
     expect(UnpublishSafeMigration).toMigrate(
@@ -54,7 +54,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       } as any,
       {
         extends: ['foo', 'npm:unpublishSafe', 'bar'],
-      }
+      },
     );
 
     expect(UnpublishSafeMigration).toMigrate(
@@ -64,7 +64,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       } as any,
       {
         extends: ['foo', 'npm:unpublishSafe', 'bar'],
-      }
+      },
     );
   });
 
@@ -76,7 +76,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       } as any,
       {
         extends: ['foo', 'bar'],
-      }
+      },
     );
   });
 
@@ -88,7 +88,7 @@ describe('config/migrations/custom/unpublish-safe-migration', () => {
       },
       {
         extends: ['npm:unpublishSafe'],
-      }
+      },
     );
   });
 });
diff --git a/lib/config/migrations/custom/unpublish-safe-migration.ts b/lib/config/migrations/custom/unpublish-safe-migration.ts
index cfdf179f8c4b2256e3e9c3c6650eb554d4fadeef..66820f4903556d470432ed392bc19fb5541b9fe8 100644
--- a/lib/config/migrations/custom/unpublish-safe-migration.ts
+++ b/lib/config/migrations/custom/unpublish-safe-migration.ts
@@ -32,7 +32,7 @@ export class UnpublishSafeMigration extends AbstractMigration {
           }
 
           return item;
-        })
+        }),
       );
     }
   }
diff --git a/lib/config/migrations/custom/upgrade-in-range-migration.spec.ts b/lib/config/migrations/custom/upgrade-in-range-migration.spec.ts
index af27aaeaf0939b2ba6ccb58c9dda4fa8e281c4bd..4512da9c4cb19c00481a3f5a7d6d165b648bdfbd 100644
--- a/lib/config/migrations/custom/upgrade-in-range-migration.spec.ts
+++ b/lib/config/migrations/custom/upgrade-in-range-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/upgrade-in-range-migration', () => {
       },
       {
         rangeStrategy: 'bump',
-      }
+      },
     );
   });
 
@@ -17,7 +17,7 @@ describe('config/migrations/custom/upgrade-in-range-migration', () => {
       {
         upgradeInRange: false,
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/custom/version-strategy-migration.spec.ts b/lib/config/migrations/custom/version-strategy-migration.spec.ts
index 80e0beff7f676c427a511767bb5736c7dc0d379c..f6dc43db5577708bff412d0168a4e1bf6e5b0ade 100644
--- a/lib/config/migrations/custom/version-strategy-migration.spec.ts
+++ b/lib/config/migrations/custom/version-strategy-migration.spec.ts
@@ -8,7 +8,7 @@ describe('config/migrations/custom/version-strategy-migration', () => {
       },
       {
         rangeStrategy: 'widen',
-      }
+      },
     );
   });
 
@@ -17,7 +17,7 @@ describe('config/migrations/custom/version-strategy-migration', () => {
       {
         versionStrategy: 'test',
       },
-      {}
+      {},
     );
   });
 });
diff --git a/lib/config/migrations/migrations-service.spec.ts b/lib/config/migrations/migrations-service.spec.ts
index e038cc3eb4b75264ca35f77d76697a4b8bee356f..be95f2b890b8aeec923531c36ad650cf93f50a20 100644
--- a/lib/config/migrations/migrations-service.spec.ts
+++ b/lib/config/migrations/migrations-service.spec.ts
@@ -14,7 +14,7 @@ describe('config/migrations/migrations-service', () => {
 
       const migratedConfig = MigrationsService.run(originalConfig);
       expect(
-        MigrationsService.isMigrated(originalConfig, migratedConfig)
+        MigrationsService.isMigrated(originalConfig, migratedConfig),
       ).toBeTrue();
       expect(migratedConfig).toEqual({});
     }
@@ -31,7 +31,7 @@ describe('config/migrations/migrations-service', () => {
 
       const migratedConfig = MigrationsService.run(originalConfig);
       expect(
-        MigrationsService.isMigrated(originalConfig, migratedConfig)
+        MigrationsService.isMigrated(originalConfig, migratedConfig),
       ).toBeTrue();
       expect(migratedConfig).toEqual({
         [newPropertyName]: 'test',
@@ -48,11 +48,11 @@ describe('config/migrations/migrations-service', () => {
     const migratedConfig = MigrationsService.run(originalConfig);
 
     const mappedProperties = Object.keys(originalConfig).map((property) =>
-      MigrationsService.renamedProperties.get(property)
+      MigrationsService.renamedProperties.get(property),
     );
 
     expect(
-      MigrationsService.isMigrated(originalConfig, migratedConfig)
+      MigrationsService.isMigrated(originalConfig, migratedConfig),
     ).toBeTrue();
     expect(mappedProperties).toEqual(Object.keys(migratedConfig));
   });
@@ -74,7 +74,7 @@ describe('config/migrations/migrations-service', () => {
     class CustomMigrationsService extends MigrationsService {
       public static override getMigrations(
         original: RenovateConfig,
-        migrated: RenovateConfig
+        migrated: RenovateConfig,
       ): ReadonlyArray<Migration> {
         return [new CustomMigration(original, migrated)];
       }
@@ -107,7 +107,7 @@ describe('config/migrations/migrations-service', () => {
       .filter((name) => !name.includes('spec.ts'));
 
     expect(MigrationsService.customMigrations).toHaveLength(
-      allDefinedMigrationClasses.length
+      allDefinedMigrationClasses.length,
     );
   });
 });
diff --git a/lib/config/migrations/migrations-service.ts b/lib/config/migrations/migrations-service.ts
index 7162d697bbb37a5e2a896eea49add3c37c4a558e..a14639e97dcdd0b01398bfb7b210125d22f308fd 100644
--- a/lib/config/migrations/migrations-service.ts
+++ b/lib/config/migrations/migrations-service.ts
@@ -177,14 +177,14 @@ export class MigrationsService {
 
   static isMigrated(
     originalConfig: RenovateConfig,
-    migratedConfig: RenovateConfig
+    migratedConfig: RenovateConfig,
   ): boolean {
     return !dequal(originalConfig, migratedConfig);
   }
 
   public static getMigrations(
     originalConfig: RenovateConfig,
-    migratedConfig: RenovateConfig
+    migratedConfig: RenovateConfig,
   ): ReadonlyArray<Migration> {
     const migrations: Migration[] = [];
 
@@ -193,8 +193,8 @@ export class MigrationsService {
         new RemovePropertyMigration(
           propertyName,
           originalConfig,
-          migratedConfig
-        )
+          migratedConfig,
+        ),
       );
     }
 
@@ -207,8 +207,8 @@ export class MigrationsService {
           oldPropertyName,
           newPropertyName,
           originalConfig,
-          migratedConfig
-        )
+          migratedConfig,
+        ),
       );
     }
 
@@ -221,7 +221,7 @@ export class MigrationsService {
 
   private static getMigration(
     migrations: ReadonlyArray<Migration>,
-    key: string
+    key: string,
   ): Migration | undefined {
     return migrations.find((migration) => {
       if (is.regExp(migration.propertyName)) {
diff --git a/lib/config/migrations/types.ts b/lib/config/migrations/types.ts
index bd00fe956248b1f82879786d05c92c4e7990a5cf..69f0857bb984472f31deb27339b46b3a41bf8b97 100644
--- a/lib/config/migrations/types.ts
+++ b/lib/config/migrations/types.ts
@@ -2,7 +2,7 @@ import type { RenovateConfig } from './../types';
 export interface MigrationConstructor {
   new (
     originalConfig: RenovateConfig,
-    migratedConfig: RenovateConfig
+    migratedConfig: RenovateConfig,
   ): Migration;
 }
 
diff --git a/lib/config/presets/gitea/index.spec.ts b/lib/config/presets/gitea/index.spec.ts
index a84adaff4a153936d0e9635ac38eb15762cc4b7b..3da1bb0a7c9b786ab4a0944026e0ab81b3375c51 100644
--- a/lib/config/presets/gitea/index.spec.ts
+++ b/lib/config/presets/gitea/index.spec.ts
@@ -33,7 +33,7 @@ describe('config/presets/gitea/index', () => {
         'some/repo',
         'some-filename.json',
         giteaApiHost,
-        null
+        null,
       );
       expect(res).toEqual({ from: 'api' });
     });
@@ -50,7 +50,7 @@ describe('config/presets/gitea/index', () => {
         'some/repo',
         'some-filename.json5',
         giteaApiHost,
-        null
+        null,
       );
       expect(res).toEqual({ from: 'api' });
     });
@@ -75,7 +75,7 @@ describe('config/presets/gitea/index', () => {
         .reply(200, { content: toBase64('invalid') });
 
       await expect(gitea.getPreset({ repo: 'some/repo' })).rejects.toThrow(
-        PRESET_INVALID_JSON
+        PRESET_INVALID_JSON,
       );
     });
 
@@ -88,7 +88,7 @@ describe('config/presets/gitea/index', () => {
         });
 
       await expect(gitea.getPreset({ repo: 'some/repo' })).rejects.toThrow(
-        PRESET_INVALID_JSON
+        PRESET_INVALID_JSON,
       );
     });
 
@@ -124,7 +124,7 @@ describe('config/presets/gitea/index', () => {
         .get(`${basePath}/somefile.json`)
         .reply(200, {
           content: Buffer.from(
-            '{"somename":{"somesubname":{"foo":"bar"}}}'
+            '{"somename":{"somesubname":{"foo":"bar"}}}',
           ).toString('base64'),
         });
 
@@ -175,7 +175,7 @@ describe('config/presets/gitea/index', () => {
         gitea.getPreset({
           repo: 'some/repo',
           presetName: 'somefile/somename/somesubname',
-        })
+        }),
       ).rejects.toThrow(PRESET_NOT_FOUND);
     });
   });
@@ -189,7 +189,7 @@ describe('config/presets/gitea/index', () => {
           content: toBase64('{"from":"api"}'),
         });
       expect(
-        await gitea.getPresetFromEndpoint('some/repo', 'default', undefined)
+        await gitea.getPresetFromEndpoint('some/repo', 'default', undefined),
       ).toEqual({ from: 'api' });
     });
 
@@ -206,9 +206,9 @@ describe('config/presets/gitea/index', () => {
             'some/repo',
             'default',
             undefined,
-            'https://api.gitea.example.org'
+            'https://api.gitea.example.org',
           )
-          .catch(() => ({ from: 'api' }))
+          .catch(() => ({ from: 'api' })),
       ).toEqual({ from: 'api' });
     });
 
@@ -225,8 +225,8 @@ describe('config/presets/gitea/index', () => {
           'default',
           undefined,
           giteaApiHost,
-          'someTag'
-        )
+          'someTag',
+        ),
       ).toEqual({ from: 'api' });
     });
 
@@ -244,9 +244,9 @@ describe('config/presets/gitea/index', () => {
             'default',
             undefined,
             'https://api.gitea.example.org',
-            'someTag'
+            'someTag',
           )
-          .catch(() => ({ from: 'api' }))
+          .catch(() => ({ from: 'api' })),
       ).toEqual({ from: 'api' });
     });
   });
diff --git a/lib/config/presets/gitea/index.ts b/lib/config/presets/gitea/index.ts
index adb71c599337d4c8286bae809d5312882bc6b8a2..65edb1174381d78f83768cf4d159315dc09b0f42 100644
--- a/lib/config/presets/gitea/index.ts
+++ b/lib/config/presets/gitea/index.ts
@@ -12,7 +12,7 @@ export async function fetchJSONFile(
   repo: string,
   fileName: string,
   endpoint: string,
-  tag?: string | null
+  tag?: string | null,
 ): Promise<Preset> {
   let res: RepoContents;
   try {
@@ -37,7 +37,7 @@ export function getPresetFromEndpoint(
   filePreset: string,
   presetPath?: string,
   endpoint = Endpoint,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset | undefined> {
   return fetchPreset({
     repo,
diff --git a/lib/config/presets/github/index.spec.ts b/lib/config/presets/github/index.spec.ts
index b9c06e3aa658f21a8caafe4c7f74ab59f9eb743e..b9ce54f571444f89ad3a58fdb36d1a011d08da9b 100644
--- a/lib/config/presets/github/index.spec.ts
+++ b/lib/config/presets/github/index.spec.ts
@@ -31,7 +31,7 @@ describe('config/presets/github/index', () => {
         'some/repo',
         'some-filename.json',
         githubApiHost,
-        undefined
+        undefined,
       );
       expect(res).toEqual({ from: 'api' });
     });
@@ -56,7 +56,7 @@ describe('config/presets/github/index', () => {
         .reply(200, { content: toBase64('invalid') });
 
       await expect(github.getPreset({ repo: 'some/repo' })).rejects.toThrow(
-        PRESET_INVALID_JSON
+        PRESET_INVALID_JSON,
       );
     });
 
@@ -69,7 +69,7 @@ describe('config/presets/github/index', () => {
         });
 
       await expect(github.getPreset({ repo: 'some/repo' })).rejects.toThrow(
-        PRESET_INVALID_JSON
+        PRESET_INVALID_JSON,
       );
     });
 
@@ -133,7 +133,7 @@ describe('config/presets/github/index', () => {
         .get(`${basePath}/somefile.json`)
         .reply(200, {
           content: Buffer.from(
-            '{"somename":{"somesubname":{"foo":"bar"}}}'
+            '{"somename":{"somesubname":{"foo":"bar"}}}',
           ).toString('base64'),
         });
 
@@ -184,7 +184,7 @@ describe('config/presets/github/index', () => {
         github.getPreset({
           repo: 'some/repo',
           presetName: 'somefile/somename/somesubname',
-        })
+        }),
       ).rejects.toThrow(PRESET_NOT_FOUND);
     });
   });
@@ -198,7 +198,7 @@ describe('config/presets/github/index', () => {
           content: toBase64('{"from":"api"}'),
         });
       expect(
-        await github.getPresetFromEndpoint('some/repo', 'default', undefined)
+        await github.getPresetFromEndpoint('some/repo', 'default', undefined),
       ).toEqual({ from: 'api' });
     });
 
@@ -216,9 +216,9 @@ describe('config/presets/github/index', () => {
             'default',
             undefined,
             'https://api.github.example.org',
-            undefined
+            undefined,
           )
-          .catch(() => ({ from: 'api' }))
+          .catch(() => ({ from: 'api' })),
       ).toEqual({ from: 'api' });
     });
 
@@ -235,8 +235,8 @@ describe('config/presets/github/index', () => {
           'default',
           undefined,
           githubApiHost,
-          'someTag'
-        )
+          'someTag',
+        ),
       ).toEqual({ from: 'api' });
     });
 
@@ -254,9 +254,9 @@ describe('config/presets/github/index', () => {
             'default',
             undefined,
             'https://api.github.example.org',
-            'someTag'
+            'someTag',
           )
-          .catch(() => ({ from: 'api' }))
+          .catch(() => ({ from: 'api' })),
       ).toEqual({ from: 'api' });
     });
   });
diff --git a/lib/config/presets/github/index.ts b/lib/config/presets/github/index.ts
index 4d1584b0d8e6e972ad4d9d604821ced0a6985156..4d2a82c43cd9b9db31ae17e1968c0898d24e7d02 100644
--- a/lib/config/presets/github/index.ts
+++ b/lib/config/presets/github/index.ts
@@ -14,7 +14,7 @@ export async function fetchJSONFile(
   repo: string,
   fileName: string,
   endpoint: string,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset> {
   let ref = '';
   if (is.nonEmptyString(tag)) {
@@ -42,7 +42,7 @@ export function getPresetFromEndpoint(
   filePreset: string,
   presetPath?: string,
   endpoint = Endpoint,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset | undefined> {
   return fetchPreset({
     repo,
diff --git a/lib/config/presets/gitlab/index.spec.ts b/lib/config/presets/gitlab/index.spec.ts
index 38395133e69c9ec37068a490d760eff4a8e58a83..bccecb87ab3a32a23628eebcb765e15e0ebc5130 100644
--- a/lib/config/presets/gitlab/index.spec.ts
+++ b/lib/config/presets/gitlab/index.spec.ts
@@ -15,7 +15,7 @@ describe('config/presets/gitlab/index', () => {
         gitlab.getPreset({
           repo: 'some/repo',
           presetName: 'non-default',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -25,7 +25,7 @@ describe('config/presets/gitlab/index', () => {
         gitlab.getPreset({
           repo: 'some/repo',
           presetName: 'non-default',
-        })
+        }),
       ).rejects.toThrow(PRESET_DEP_NOT_FOUND);
     });
 
@@ -40,7 +40,7 @@ describe('config/presets/gitlab/index', () => {
         .get(`${basePath}/files/renovate.json/raw?ref=master`)
         .reply(404);
       await expect(gitlab.getPreset({ repo: 'some/repo' })).rejects.toThrow(
-        PRESET_DEP_NOT_FOUND
+        PRESET_DEP_NOT_FOUND,
       );
     });
 
@@ -140,8 +140,8 @@ describe('config/presets/gitlab/index', () => {
         await gitlab.getPresetFromEndpoint(
           'some/repo',
           'some/preset/file',
-          undefined
-        )
+          undefined,
+        ),
       ).toEqual({});
     });
 
@@ -159,8 +159,8 @@ describe('config/presets/gitlab/index', () => {
           'some/repo',
           'some/preset/file',
           undefined,
-          'https://gitlab.example.org/api/v4'
-        )
+          'https://gitlab.example.org/api/v4',
+        ),
       ).rejects.toThrow(PRESET_DEP_NOT_FOUND);
     });
 
@@ -175,8 +175,8 @@ describe('config/presets/gitlab/index', () => {
           'some/preset/file',
           undefined,
           'https://gitlab.com/api/v4',
-          'someTag'
-        )
+          'someTag',
+        ),
       ).toEqual({});
     });
 
@@ -191,8 +191,8 @@ describe('config/presets/gitlab/index', () => {
           'some/preset/file',
           undefined,
           'https://gitlab.example.org/api/v4',
-          'someTag'
-        )
+          'someTag',
+        ),
       ).toEqual({});
     });
   });
diff --git a/lib/config/presets/gitlab/index.ts b/lib/config/presets/gitlab/index.ts
index 6c3c336bb3a01e723a8e33bd82d9beb045c8b088..fb55c98e0a5a66e9d789254de8133ff890127eca 100644
--- a/lib/config/presets/gitlab/index.ts
+++ b/lib/config/presets/gitlab/index.ts
@@ -12,10 +12,10 @@ export const Endpoint = 'https://gitlab.com/api/v4/';
 
 async function getDefaultBranchName(
   urlEncodedPkgName: string,
-  endpoint: string
+  endpoint: string,
 ): Promise<string> {
   const res = await gitlabApi.getJson<GitlabProject>(
-    `${endpoint}projects/${urlEncodedPkgName}`
+    `${endpoint}projects/${urlEncodedPkgName}`,
   );
   return res.body.default_branch ?? 'master'; // should never happen, but we keep this to ensure the current behavior
 }
@@ -24,7 +24,7 @@ export async function fetchJSONFile(
   repo: string,
   fileName: string,
   endpoint: string,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset> {
   let url = endpoint;
   let ref = '';
@@ -37,7 +37,7 @@ export async function fetchJSONFile(
     } else {
       const defaultBranchName = await getDefaultBranchName(
         urlEncodedRepo,
-        endpoint
+        endpoint,
       );
       ref = `?ref=${defaultBranchName}`;
     }
@@ -60,7 +60,7 @@ export function getPresetFromEndpoint(
   presetName: string,
   presetPath?: string,
   endpoint = Endpoint,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset | undefined> {
   return fetchPreset({
     repo,
diff --git a/lib/config/presets/index.spec.ts b/lib/config/presets/index.spec.ts
index 78ee0e05f0c28883ee24eefdb6034ca87a93feac..fc4a55c4c25a10e661598b2cb32c08a01952aed0 100644
--- a/lib/config/presets/index.spec.ts
+++ b/lib/config/presets/index.spec.ts
@@ -72,7 +72,7 @@ describe('config/presets/index', () => {
       expect(e).toBeDefined();
       expect(e!.validationSource).toBeUndefined();
       expect(e!.validationError).toBe(
-        "Cannot find preset's package (notfound)"
+        "Cannot find preset's package (notfound)",
       );
       expect(e!.validationMessage).toBeUndefined();
     });
@@ -89,7 +89,7 @@ describe('config/presets/index', () => {
       expect(e).toBeDefined();
       expect(e!.validationSource).toBeUndefined();
       expect(e!.validationError).toBe(
-        'Preset name not found within published preset config (wrongpreset:invalid-preset)'
+        'Preset name not found within published preset config (wrongpreset:invalid-preset)',
       );
       expect(e!.validationMessage).toBeUndefined();
     });
@@ -121,7 +121,7 @@ describe('config/presets/index', () => {
       expect(e).toBeDefined();
       expect(e!.validationSource).toBeUndefined();
       expect(e!.validationError).toBe(
-        'Sub-presets cannot be combined with a custom path (github>user/repo//path:subpreset)'
+        'Sub-presets cannot be combined with a custom path (github>user/repo//path:subpreset)',
       );
       expect(e!.validationMessage).toBeUndefined();
     });
@@ -154,7 +154,7 @@ describe('config/presets/index', () => {
       expect(e).toBeDefined();
       expect(e!.validationSource).toBeUndefined();
       expect(e!.validationError).toBe(
-        'Preset package is missing a renovate-config entry (noconfig:recommended)'
+        'Preset package is missing a renovate-config entry (noconfig:recommended)',
       );
       expect(e!.validationMessage).toBeUndefined();
     });
@@ -171,7 +171,7 @@ describe('config/presets/index', () => {
       expect(e).toBeDefined();
       expect(e!.validationSource).toBeUndefined();
       expect(e!.validationError).toBe(
-        'Preset caused unexpected error (throw:base)'
+        'Preset caused unexpected error (throw:base)',
       );
       expect(e!.validationMessage).toBeUndefined();
     });
@@ -201,7 +201,7 @@ describe('config/presets/index', () => {
       expect(e).toBeDefined();
       expect(e!.validationSource).toBeUndefined();
       expect(e!.validationError).toBe(
-        'Preset name not found within published preset config (wrongpreset:invalid-preset)'
+        'Preset name not found within published preset config (wrongpreset:invalid-preset)',
       );
       expect(e!.validationMessage).toBeUndefined();
     });
@@ -437,7 +437,7 @@ describe('config/presets/index', () => {
 
     it('parses github subfiles with preset name', () => {
       expect(
-        presets.parsePreset('github>some/repo:somefile/somepreset')
+        presets.parsePreset('github>some/repo:somefile/somepreset'),
       ).toEqual({
         repo: 'some/repo',
         params: undefined,
@@ -471,7 +471,7 @@ describe('config/presets/index', () => {
 
     it('parses github subfiles with preset name with .json extension', () => {
       expect(
-        presets.parsePreset('github>some/repo:somefile.json/somepreset')
+        presets.parsePreset('github>some/repo:somefile.json/somepreset'),
       ).toEqual({
         repo: 'some/repo',
         params: undefined,
@@ -484,7 +484,7 @@ describe('config/presets/index', () => {
 
     it('parses github subfiles with preset name with .json5 extension', () => {
       expect(
-        presets.parsePreset('github>some/repo:somefile.json5/somepreset')
+        presets.parsePreset('github>some/repo:somefile.json5/somepreset'),
       ).toEqual({
         repo: 'some/repo',
         params: undefined,
@@ -498,8 +498,8 @@ describe('config/presets/index', () => {
     it('parses github subfiles with preset and sub-preset name', () => {
       expect(
         presets.parsePreset(
-          'github>some/repo:somefile/somepreset/somesubpreset'
-        )
+          'github>some/repo:somefile/somepreset/somesubpreset',
+        ),
       ).toEqual({
         repo: 'some/repo',
         params: undefined,
@@ -511,7 +511,7 @@ describe('config/presets/index', () => {
 
     it('parses github subdirectories', () => {
       expect(
-        presets.parsePreset('github>some/repo//somepath/somesubpath/somefile')
+        presets.parsePreset('github>some/repo//somepath/somesubpath/somefile'),
       ).toEqual({
         repo: 'some/repo',
         params: undefined,
@@ -573,7 +573,7 @@ describe('config/presets/index', () => {
 
     it('parses local with subdirectory', () => {
       expect(
-        presets.parsePreset('local>some-group/some-repo//some-dir/some-file')
+        presets.parsePreset('local>some-group/some-repo//some-dir/some-file'),
       ).toEqual({
         repo: 'some-group/some-repo',
         params: undefined,
@@ -585,7 +585,7 @@ describe('config/presets/index', () => {
 
     it('parses local with spaces and subdirectory', () => {
       expect(
-        presets.parsePreset('local>A2B CD/A2B_Renovate//some-dir/some-file')
+        presets.parsePreset('local>A2B CD/A2B_Renovate//some-dir/some-file'),
       ).toEqual({
         repo: 'A2B CD/A2B_Renovate',
         params: undefined,
@@ -598,8 +598,8 @@ describe('config/presets/index', () => {
     it('parses local with sub preset and tag', () => {
       expect(
         presets.parsePreset(
-          'local>some-group/some-repo:some-file/subpreset#1.2.3'
-        )
+          'local>some-group/some-repo:some-file/subpreset#1.2.3',
+        ),
       ).toEqual({
         repo: 'some-group/some-repo',
         params: undefined,
@@ -613,8 +613,8 @@ describe('config/presets/index', () => {
     it('parses local with subdirectory and tag', () => {
       expect(
         presets.parsePreset(
-          'local>some-group/some-repo//some-dir/some-file#1.2.3'
-        )
+          'local>some-group/some-repo//some-dir/some-file#1.2.3',
+        ),
       ).toEqual({
         repo: 'some-group/some-repo',
         params: undefined,
@@ -628,8 +628,8 @@ describe('config/presets/index', () => {
     it('parses local with subdirectory and branch/tag with a slash', () => {
       expect(
         presets.parsePreset(
-          'local>PROJECT/repository//path/to/preset#feature/branch'
-        )
+          'local>PROJECT/repository//path/to/preset#feature/branch',
+        ),
       ).toEqual({
         repo: 'PROJECT/repository',
         params: undefined,
@@ -643,8 +643,8 @@ describe('config/presets/index', () => {
     it('parses local with sub preset and branch/tag with a slash', () => {
       expect(
         presets.parsePreset(
-          'local>PROJECT/repository:preset/subpreset#feature/branch'
-        )
+          'local>PROJECT/repository:preset/subpreset#feature/branch',
+        ),
       ).toEqual({
         repo: 'PROJECT/repository',
         params: undefined,
@@ -729,8 +729,8 @@ describe('config/presets/index', () => {
     it('returns scope with repo and params and default', () => {
       expect(
         presets.parsePreset(
-          '@somescope/somepackagename(param1, param2, param3)'
-        )
+          '@somescope/somepackagename(param1, param2, param3)',
+        ),
       ).toEqual({
         repo: '@somescope/somepackagename',
         params: ['param1', 'param2', 'param3'],
@@ -762,7 +762,7 @@ describe('config/presets/index', () => {
 
     it('returns scope with repo and presetName', () => {
       expect(
-        presets.parsePreset('@somescope/somepackagename:somePresetName')
+        presets.parsePreset('@somescope/somepackagename:somePresetName'),
       ).toEqual({
         repo: '@somescope/somepackagename',
         params: undefined,
@@ -775,8 +775,8 @@ describe('config/presets/index', () => {
     it('returns scope with repo and presetName and params', () => {
       expect(
         presets.parsePreset(
-          '@somescope/somepackagename:somePresetName(param1, param2)'
-        )
+          '@somescope/somepackagename:somePresetName(param1, param2)',
+        ),
       ).toEqual({
         repo: '@somescope/somepackagename',
         params: ['param1', 'param2'],
@@ -815,7 +815,7 @@ describe('config/presets/index', () => {
           presetName: 'webapp',
           presetPath: undefined,
           presetSource: 'npm',
-        }
+        },
       );
     });
 
@@ -900,7 +900,7 @@ describe('config/presets/index', () => {
     it('gets parameterised configs', async () => {
       const res = await presets.getPreset(
         ':group(packages:eslint, eslint)',
-        {}
+        {},
       );
       expect(res).toEqual({
         description: ['Group `eslint` packages into same branch/PR.'],
diff --git a/lib/config/presets/index.ts b/lib/config/presets/index.ts
index f648eb56d4f601f2a6048f87146238613e6bbb14..ed7b0c7704ebc6c384190f0129e79b627649dc07 100644
--- a/lib/config/presets/index.ts
+++ b/lib/config/presets/index.ts
@@ -39,27 +39,27 @@ const presetSources: Record<string, PresetApi> = {
 };
 
 const nonScopedPresetWithSubdirRegex = regEx(
-  /^(?<repo>~?[\w\-. /]+?)\/\/(?:(?<presetPath>[\w\-./]+)\/)?(?<presetName>[\w\-.]+)(?:#(?<tag>[\w\-./]+?))?$/
+  /^(?<repo>~?[\w\-. /]+?)\/\/(?:(?<presetPath>[\w\-./]+)\/)?(?<presetName>[\w\-.]+)(?:#(?<tag>[\w\-./]+?))?$/,
 );
 const gitPresetRegex = regEx(
-  /^(?<repo>~?[\w\-. /]+)(?::(?<presetName>[\w\-.+/]+))?(?:#(?<tag>[\w\-./]+?))?$/
+  /^(?<repo>~?[\w\-. /]+)(?::(?<presetName>[\w\-.+/]+))?(?:#(?<tag>[\w\-./]+?))?$/,
 );
 
 export function replaceArgs(
   obj: string,
-  argMapping: Record<string, any>
+  argMapping: Record<string, any>,
 ): string;
 export function replaceArgs(
   obj: string[],
-  argMapping: Record<string, any>
+  argMapping: Record<string, any>,
 ): string[];
 export function replaceArgs(
   obj: Record<string, any>,
-  argMapping: Record<string, any>
+  argMapping: Record<string, any>,
 ): Record<string, any>;
 export function replaceArgs(
   obj: Record<string, any>[],
-  argMapping: Record<string, any>
+  argMapping: Record<string, any>,
 ): Record<string, any>[];
 
 /**
@@ -70,7 +70,7 @@ export function replaceArgs(
 export function replaceArgs(obj: any, argMapping: Record<string, any>): any;
 export function replaceArgs(
   obj: string | string[] | Record<string, any> | Record<string, any>[],
-  argMapping: Record<string, any>
+  argMapping: Record<string, any>,
 ): any {
   if (is.string(obj)) {
     let returnStr = obj;
@@ -208,7 +208,7 @@ export function parsePreset(input: string): ParsedPreset {
 
 export async function getPreset(
   preset: string,
-  baseConfig?: RenovateConfig
+  baseConfig?: RenovateConfig,
 ): Promise<RenovateConfig> {
   logger.trace(`getPreset(${preset})`);
   // Check if the preset has been removed or replaced
@@ -274,7 +274,7 @@ export async function resolveConfigPresets(
   inputConfig: AllConfig,
   baseConfig?: RenovateConfig,
   _ignorePresets?: string[],
-  existingPresets: string[] = []
+  existingPresets: string[] = [],
 ): Promise<AllConfig> {
   let ignorePresets = clone(_ignorePresets);
   if (!ignorePresets || ignorePresets.length === 0) {
@@ -282,7 +282,7 @@ export async function resolveConfigPresets(
   }
   logger.trace(
     { config: inputConfig, existingPresets },
-    'resolveConfigPresets'
+    'resolveConfigPresets',
   );
   let config: AllConfig = {};
   // First, merge all the preset configs from left to right
@@ -294,13 +294,13 @@ export async function resolveConfigPresets(
           preset,
           baseConfig,
           inputConfig,
-          existingPresets
+          existingPresets,
         );
         const presetConfig = await resolveConfigPresets(
           fetchedPreset,
           baseConfig ?? inputConfig,
           ignorePresets,
-          existingPresets.concat([preset])
+          existingPresets.concat([preset]),
         );
         // istanbul ignore if
         if (inputConfig?.ignoreDeps?.length === 0) {
@@ -328,8 +328,8 @@ export async function resolveConfigPresets(
               element as RenovateConfig,
               baseConfig,
               ignorePresets,
-              existingPresets
-            )
+              existingPresets,
+            ),
           );
         } else {
           (config[key] as unknown[]).push(element);
@@ -342,7 +342,7 @@ export async function resolveConfigPresets(
         val as RenovateConfig,
         baseConfig,
         ignorePresets,
-        existingPresets
+        existingPresets,
       );
     }
   }
@@ -355,7 +355,7 @@ async function fetchPreset(
   preset: string,
   baseConfig: RenovateConfig | undefined,
   inputConfig: AllConfig,
-  existingPresets: string[]
+  existingPresets: string[],
 ): Promise<AllConfig> {
   try {
     return await getPreset(preset, baseConfig ?? inputConfig);
@@ -392,7 +392,7 @@ async function fetchPreset(
     }
     logger.info(
       { validationError: error.validationError },
-      'Throwing preset error'
+      'Throwing preset error',
     );
     throw error;
   }
@@ -401,19 +401,19 @@ async function fetchPreset(
 function shouldResolvePreset(
   preset: string,
   existingPresets: string[],
-  ignorePresets: string[]
+  ignorePresets: string[],
 ): boolean {
   // istanbul ignore if
   if (existingPresets.includes(preset)) {
     logger.debug(
-      `Already seen preset ${preset} in [${existingPresets.join(', ')}]`
+      `Already seen preset ${preset} in [${existingPresets.join(', ')}]`,
     );
     return false;
   }
   if (ignorePresets.includes(preset)) {
     // istanbul ignore next
     logger.debug(
-      `Ignoring preset ${preset} in [${existingPresets.join(', ')}]`
+      `Ignoring preset ${preset} in [${existingPresets.join(', ')}]`,
     );
     return false;
   }
diff --git a/lib/config/presets/internal/auto-generate-replacements.ts b/lib/config/presets/internal/auto-generate-replacements.ts
index ece2db24bf0316355d200fd366af04b5a4e3ed61..d1d7aa90cddea135b6c4b2d542596dea7e3c836c 100644
--- a/lib/config/presets/internal/auto-generate-replacements.ts
+++ b/lib/config/presets/internal/auto-generate-replacements.ts
@@ -18,7 +18,7 @@ export interface PresetTemplate {
 }
 
 function generatePackageRules(
-  replacementRules: ReplacementRule[]
+  replacementRules: ReplacementRule[],
 ): PackageRule[] {
   const rules: PackageRule[] = [];
   for (const replacementRule of replacementRules) {
diff --git a/lib/config/presets/internal/index.spec.ts b/lib/config/presets/internal/index.spec.ts
index 68c595ce28a01cddc40200625447e8ef5a9aa48b..88c27487f7c39228af69eedcfebce428e25a1ecd 100644
--- a/lib/config/presets/internal/index.spec.ts
+++ b/lib/config/presets/internal/index.spec.ts
@@ -17,7 +17,7 @@ describe('config/presets/internal/index', () => {
     const preset = 'foo:bar';
     const presetConfig = { extends: [preset] };
     await expect(resolveConfigPresets(presetConfig)).rejects.toThrow(
-      CONFIG_VALIDATION
+      CONFIG_VALIDATION,
     );
   });
 
@@ -28,7 +28,7 @@ describe('config/presets/internal/index', () => {
         it(`${preset} validates`, async () => {
           try {
             const config = await resolveConfigPresets(
-              massageConfig(presetConfig)
+              massageConfig(presetConfig),
             );
             const res = await validateConfig(config, true);
             expect(res.errors).toHaveLength(0);
diff --git a/lib/config/presets/internal/regex-managers.spec.ts b/lib/config/presets/internal/regex-managers.spec.ts
index 07259740701d972eadcc4fa170b3cb82133ec24a..4f72f5702ffc1a81fbeadeb24a8ca4c0d91ef012 100644
--- a/lib/config/presets/internal/regex-managers.spec.ts
+++ b/lib/config/presets/internal/regex-managers.spec.ts
@@ -32,7 +32,7 @@ describe('config/presets/internal/regex-managers', () => {
       const res = await extractPackageFile(
         fileContent,
         'Dockerfile',
-        customManager!
+        customManager!,
       );
 
       expect(res?.deps).toMatchObject([
@@ -131,7 +131,7 @@ describe('config/presets/internal/regex-managers', () => {
       const res = await extractPackageFile(
         fileContent,
         'github-workflow.yaml',
-        customManager!
+        customManager!,
       );
 
       expect(res?.deps).toMatchObject([
@@ -221,7 +221,7 @@ describe('config/presets/internal/regex-managers', () => {
       const res = await extractPackageFile(
         fileContent,
         'gitlab-ci.yml',
-        customManager!
+        customManager!,
       );
 
       expect(res?.deps).toMatchObject([
@@ -292,7 +292,7 @@ describe('config/presets/internal/regex-managers', () => {
       const res = await extractPackageFile(
         fileContent,
         'Chart.yaml',
-        customManager!
+        customManager!,
       );
 
       expect(res?.deps).toMatchObject([
@@ -351,7 +351,7 @@ describe('config/presets/internal/regex-managers', () => {
       const res = await extractPackageFile(
         fileContent,
         'pom.xml',
-        customManager!
+        customManager!,
       );
 
       expect(res?.deps).toMatchObject([
diff --git a/lib/config/presets/local/common.spec.ts b/lib/config/presets/local/common.spec.ts
index 6bf9872b10c3b0a1540d3335a5f4da36503e74f9..aa34eccf25b3b4a6a117ec3a5b029807b4a1923b 100644
--- a/lib/config/presets/local/common.spec.ts
+++ b/lib/config/presets/local/common.spec.ts
@@ -9,17 +9,17 @@ describe('config/presets/local/common', () => {
       platform.getRawFile.mockResolvedValueOnce(null);
 
       await expect(fetchJSONFile('some/repo', 'default.json')).rejects.toThrow(
-        PRESET_DEP_NOT_FOUND
+        PRESET_DEP_NOT_FOUND,
       );
     });
 
     it('throws for ExternalHostError', async () => {
       platform.getRawFile.mockRejectedValueOnce(
-        new ExternalHostError(new Error())
+        new ExternalHostError(new Error()),
       );
 
       await expect(fetchJSONFile('some/repo', 'default.json')).rejects.toThrow(
-        ExternalHostError
+        ExternalHostError,
       );
     });
 
@@ -27,7 +27,7 @@ describe('config/presets/local/common', () => {
       platform.getRawFile.mockRejectedValueOnce(new Error());
 
       await expect(fetchJSONFile('some/repo', 'default.json')).rejects.toThrow(
-        PRESET_DEP_NOT_FOUND
+        PRESET_DEP_NOT_FOUND,
       );
     });
   });
@@ -40,8 +40,8 @@ describe('config/presets/local/common', () => {
           'some/repo',
           'default.json',
           undefined,
-          'dummy'
-        )
+          'dummy',
+        ),
       ).toEqual({});
     });
   });
diff --git a/lib/config/presets/local/common.ts b/lib/config/presets/local/common.ts
index 00e11f8517f553be8380e35634cd04ad44b1183b..6e2253de1ffcfd84e2d445ef2b3acf7bafd830bd 100644
--- a/lib/config/presets/local/common.ts
+++ b/lib/config/presets/local/common.ts
@@ -8,7 +8,7 @@ export async function fetchJSONFile(
   repo: string,
   fileName: string,
   _endpoint?: string,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset> {
   let raw: string | null;
   try {
@@ -19,7 +19,7 @@ export async function fetchJSONFile(
     }
 
     logger.debug(
-      `Preset file ${fileName} not found in ${repo}: ${err.message}}`
+      `Preset file ${fileName} not found in ${repo}: ${err.message}}`,
     );
 
     throw new Error(PRESET_DEP_NOT_FOUND);
@@ -37,7 +37,7 @@ export function getPresetFromEndpoint(
   filePreset: string,
   presetPath: string | undefined,
   endpoint: string,
-  tag?: string | undefined
+  tag?: string | undefined,
 ): Promise<Preset | undefined> {
   return fetchPreset({
     repo,
diff --git a/lib/config/presets/local/index.spec.ts b/lib/config/presets/local/index.spec.ts
index da874ccc290b23860830d2a652569f4a35a73a0e..63596dcb2cbf13f23ce0a1a33aaa03cec5af4b3c 100644
--- a/lib/config/presets/local/index.spec.ts
+++ b/lib/config/presets/local/index.spec.ts
@@ -65,7 +65,7 @@ describe('config/presets/local/index', () => {
       expect(platform.getRawFile).toHaveBeenCalledWith(
         'default.json',
         'some/repo',
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -84,7 +84,7 @@ describe('config/presets/local/index', () => {
       expect(platform.getRawFile).toHaveBeenCalledWith(
         'default.json',
         'some/repo',
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -103,7 +103,7 @@ describe('config/presets/local/index', () => {
       expect(platform.getRawFile).toHaveBeenCalledWith(
         'default.json',
         'some/repo',
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -122,7 +122,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         undefined,
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -142,7 +142,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         'https://api.gitea.example.com',
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -161,7 +161,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         undefined,
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -182,7 +182,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         'https://api.github.example.com',
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -202,7 +202,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         undefined,
-        'someTag'
+        'someTag',
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -224,7 +224,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         'https://api.github.example.com',
-        'someTag'
+        'someTag',
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -244,7 +244,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         undefined,
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -265,7 +265,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         'https://gitlab.example.com/api/v4',
-        undefined
+        undefined,
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -284,7 +284,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         undefined,
-        'someTag'
+        'someTag',
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
@@ -306,7 +306,7 @@ describe('config/presets/local/index', () => {
         'default',
         undefined,
         'https://gitlab.example.com/api/v4',
-        'someTag'
+        'someTag',
       );
       expect(content).toEqual({ resolved: 'preset' });
     });
diff --git a/lib/config/presets/local/index.ts b/lib/config/presets/local/index.ts
index ace06f7cc36a3dd52720ada0be07b974ef844ee5..96b1d4d4690211d91283dacf4aac369965a6c26c 100644
--- a/lib/config/presets/local/index.ts
+++ b/lib/config/presets/local/index.ts
@@ -12,7 +12,7 @@ interface Resolver {
     filePreset: string,
     presetPath?: string,
     endpoint?: string,
-    tag?: string
+    tag?: string,
   ): Promise<Preset | undefined>;
 }
 
@@ -40,7 +40,7 @@ export function getPreset({
   const resolver = resolvers[platform];
   if (!resolver) {
     throw new Error(
-      `The platform you're using (${platform}) does not support local presets.`
+      `The platform you're using (${platform}) does not support local presets.`,
     );
   }
   const endpoint = GlobalConfig.get('endpoint');
@@ -50,6 +50,6 @@ export function getPreset({
     presetPath,
     // TODO: fix type #22198
     endpoint!,
-    tag
+    tag,
   );
 }
diff --git a/lib/config/presets/npm/index.spec.ts b/lib/config/presets/npm/index.spec.ts
index ef5b455d501c53dfea2cb1bcd4fe34a5c1195dda..5e9fb095354015ef9804f4e9ec6652d3056dc554 100644
--- a/lib/config/presets/npm/index.spec.ts
+++ b/lib/config/presets/npm/index.spec.ts
@@ -14,7 +14,7 @@ describe('config/presets/npm/index', () => {
   it('should throw if no package', async () => {
     httpMock.scope('https://registry.npmjs.org').get('/nopackage').reply(404);
     await expect(
-      npm.getPreset({ repo: 'nopackage', presetName: 'default' })
+      npm.getPreset({ repo: 'nopackage', presetName: 'default' }),
     ).rejects.toThrow(/dep not found/);
   });
 
@@ -47,7 +47,7 @@ describe('config/presets/npm/index', () => {
       .get('/norenovateconfig')
       .reply(200, presetPackage);
     await expect(
-      npm.getPreset({ repo: 'norenovateconfig', presetName: 'default' })
+      npm.getPreset({ repo: 'norenovateconfig', presetName: 'default' }),
     ).rejects.toThrow(/preset renovate-config not found/);
   });
 
@@ -84,7 +84,7 @@ describe('config/presets/npm/index', () => {
       npm.getPreset({
         repo: 'presetnamenotfound',
         presetName: 'missing',
-      })
+      }),
     ).rejects.toThrow(/preset not found/);
   });
 
diff --git a/lib/config/presets/npm/index.ts b/lib/config/presets/npm/index.ts
index 46bdc16847e212a6bb79e307046b424f0e73af91..f6ea15db2ca7f37b58434cfb9580734e5046845e 100644
--- a/lib/config/presets/npm/index.ts
+++ b/lib/config/presets/npm/index.ts
@@ -28,7 +28,7 @@ export async function getPreset({
     const registryUrl = resolveRegistryUrl(pkg);
     logger.once.warn(
       { registryUrl, pkg },
-      'Using npm packages for Renovate presets is now deprecated. Please migrate to repository-based presets instead.'
+      'Using npm packages for Renovate presets is now deprecated. Please migrate to repository-based presets instead.',
     );
     const packageUrl = resolvePackageUrl(registryUrl, pkg);
     const body = (await http.getJson<NpmResponse>(packageUrl)).body;
@@ -45,7 +45,7 @@ export async function getPreset({
     const presetNames = Object.keys(dep['renovate-config']);
     logger.debug(
       { presetNames, presetName },
-      'Preset not found within renovate-config'
+      'Preset not found within renovate-config',
     );
     throw new Error(PRESET_NOT_FOUND);
   }
diff --git a/lib/config/presets/types.ts b/lib/config/presets/types.ts
index 943739dafa33883fc4732442d4f1362e8c5775ee..5b5faf498ef026d94a828b1c16c018f33cb43ce9 100644
--- a/lib/config/presets/types.ts
+++ b/lib/config/presets/types.ts
@@ -12,7 +12,7 @@ export type PresetConfig = {
 
 export interface PresetApi {
   getPreset(
-    config: PresetConfig
+    config: PresetConfig,
   ): Promise<Preset | null | undefined> | Preset | null | undefined;
 }
 
@@ -29,7 +29,7 @@ export type PresetFetcher = (
   repo: string,
   fileName: string,
   endpoint: string,
-  tag?: string | undefined
+  tag?: string | undefined,
 ) => Promise<Preset | null | undefined>;
 
 export type FetchPresetConfig = {
diff --git a/lib/config/presets/util.spec.ts b/lib/config/presets/util.spec.ts
index 1f522fa6be7049dc2027a5a092695b39db9bfbc0..d6d9f328beeee716114f8ff40e92563c66d99563 100644
--- a/lib/config/presets/util.spec.ts
+++ b/lib/config/presets/util.spec.ts
@@ -29,12 +29,12 @@ describe('config/presets/util', () => {
 
     fetch.mockResolvedValueOnce({ sub: { preset: { foo: true } } });
     expect(
-      await fetchPreset({ ...config, filePreset: 'some/sub', fetch })
+      await fetchPreset({ ...config, filePreset: 'some/sub', fetch }),
     ).toEqual({ preset: { foo: true } });
 
     fetch.mockResolvedValueOnce({ sub: { preset: { foo: true } } });
     expect(
-      await fetchPreset({ ...config, filePreset: 'some/sub/preset', fetch })
+      await fetchPreset({ ...config, filePreset: 'some/sub/preset', fetch }),
     ).toEqual({ foo: true });
   });
 
@@ -46,25 +46,25 @@ describe('config/presets/util', () => {
   it(PRESET_DEP_NOT_FOUND, async () => {
     fetch.mockResolvedValueOnce(null);
     await expect(fetchPreset({ ...config, fetch })).rejects.toThrow(
-      PRESET_DEP_NOT_FOUND
+      PRESET_DEP_NOT_FOUND,
     );
 
     fetch.mockRejectedValueOnce(new Error(PRESET_DEP_NOT_FOUND));
     fetch.mockRejectedValueOnce(new Error(PRESET_DEP_NOT_FOUND));
     await expect(fetchPreset({ ...config, fetch })).rejects.toThrow(
-      PRESET_DEP_NOT_FOUND
+      PRESET_DEP_NOT_FOUND,
     );
   });
 
   it(PRESET_NOT_FOUND, async () => {
     fetch.mockResolvedValueOnce({});
     await expect(
-      fetchPreset({ ...config, filePreset: 'some/sub/preset', fetch })
+      fetchPreset({ ...config, filePreset: 'some/sub/preset', fetch }),
     ).rejects.toThrow(PRESET_NOT_FOUND);
 
     fetch.mockResolvedValueOnce({ sub: {} });
     await expect(
-      fetchPreset({ ...config, filePreset: 'some/sub/preset', fetch })
+      fetchPreset({ ...config, filePreset: 'some/sub/preset', fetch }),
     ).rejects.toThrow(PRESET_NOT_FOUND);
   });
 });
diff --git a/lib/config/presets/util.ts b/lib/config/presets/util.ts
index 7b2f166886ff5a7cf0ede1d7b77d4ee26a0235f9..2ab6e34e3abf0dbc28165906a6eeb35c05967a36 100644
--- a/lib/config/presets/util.ts
+++ b/lib/config/presets/util.ts
@@ -33,7 +33,7 @@ export async function fetchPreset({
         repo,
         buildFilePath('default.json'),
         endpoint,
-        tag
+        tag,
       );
     } catch (err) {
       if (err.message !== PRESET_DEP_NOT_FOUND) {
@@ -43,7 +43,7 @@ export async function fetchPreset({
         repo,
         buildFilePath('renovate.json'),
         endpoint,
-        tag
+        tag,
       );
       logger.warn(
         {
@@ -53,17 +53,17 @@ export async function fetchPreset({
           endpoint,
           tag,
         },
-        'Fallback to renovate.json file as a preset is deprecated, please use a default.json file instead.'
+        'Fallback to renovate.json file as a preset is deprecated, please use a default.json file instead.',
       );
     }
   } else {
     jsonContent = await fetch(
       repo,
       buildFilePath(
-        regEx(/\.json5?$/).test(fileName) ? fileName : `${fileName}.json`
+        regEx(/\.json5?$/).test(fileName) ? fileName : `${fileName}.json`,
       ),
       endpoint,
-      tag
+      tag,
     );
   }
 
diff --git a/lib/config/schema.ts b/lib/config/schema.ts
index 3541b34c9595a7ec1203dea1c25905320265662d..b3d99d2949cd9a09ae930d04dac97f1ac018f0ed 100644
--- a/lib/config/schema.ts
+++ b/lib/config/schema.ts
@@ -6,5 +6,5 @@ export const DecryptedObject = Json.pipe(
     o: z.string().optional(),
     r: z.string().optional(),
     v: z.string().optional(),
-  })
+  }),
 );
diff --git a/lib/config/secrets.spec.ts b/lib/config/secrets.spec.ts
index dbd295c087f3a2506823b72ada9a45dc84901a2a..4c5d2e1f885b39ed73de656e502befe75147b748 100644
--- a/lib/config/secrets.spec.ts
+++ b/lib/config/secrets.spec.ts
@@ -17,19 +17,19 @@ describe('config/secrets', () => {
 
     it('throws if secrets is not an object', () => {
       expect(() => validateConfigSecrets({ secrets: 'hello' } as any)).toThrow(
-        CONFIG_SECRETS_INVALID
+        CONFIG_SECRETS_INVALID,
       );
     });
 
     it('throws for invalid secret names', () => {
       expect(() =>
-        validateConfigSecrets({ secrets: { '123': 'abc' } })
+        validateConfigSecrets({ secrets: { '123': 'abc' } }),
       ).toThrow(CONFIG_SECRETS_INVALID);
     });
 
     it('throws for non-string secret', () => {
       expect(() =>
-        validateConfigSecrets({ secrets: { abc: 123 } } as any)
+        validateConfigSecrets({ secrets: { abc: 123 } } as any),
       ).toThrow(CONFIG_SECRETS_INVALID);
     });
 
@@ -39,7 +39,7 @@ describe('config/secrets', () => {
           repositories: [
             { repository: 'abc/def', secrets: { abc: 123 } },
           ] as any,
-        })
+        }),
       ).toThrow(CONFIG_SECRETS_INVALID);
     });
   });
@@ -152,7 +152,7 @@ describe('config/secrets', () => {
         allowedManagers: ['{{ secrets.SECRET_MANAGER }}'],
       };
       expect(() => applySecretsToConfig(config, {}, false)).toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
 
@@ -176,7 +176,7 @@ describe('config/secrets', () => {
       };
       // TODO fix me? #22198
       expect(() => applySecretsToConfig(config, null as never, false)).toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
     });
   });
diff --git a/lib/config/secrets.ts b/lib/config/secrets.ts
index 874afe29a70f61e3452412db5e0e49ca90f78c9f..7b40537db7ed090db35a1ea6b9e1119c0b508bd6 100644
--- a/lib/config/secrets.ts
+++ b/lib/config/secrets.ts
@@ -25,13 +25,13 @@ function validateSecrets(secrets_: unknown): void {
       }
       if (!is.string(secretValue)) {
         validationErrors.push(
-          `Secret values must be strings. Found type ${typeof secretValue} for secret ${secretName}`
+          `Secret values must be strings. Found type ${typeof secretValue} for secret ${secretName}`,
         );
       }
     }
   } else {
     validationErrors.push(
-      `Config secrets must be a plain object. Found: ${typeof secrets_}`
+      `Config secrets must be a plain object. Found: ${typeof secrets_}`,
     );
   }
   if (validationErrors.length) {
@@ -54,7 +54,7 @@ export function validateConfigSecrets(config: AllConfig): void {
 function replaceSecretsInString(
   key: string,
   value: string,
-  secrets: Record<string, string>
+  secrets: Record<string, string>,
 ): string {
   // do nothing if no secret template found
   if (!secretTemplateRegex.test(value)) {
@@ -77,7 +77,7 @@ function replaceSecretsInString(
     error.validationSource = 'config';
     error.validationError = 'Unknown secret name';
     error.validationMessage = `The following secret name was not found in config: ${String(
-      secretName
+      secretName,
     )}`;
     throw error;
   });
@@ -86,7 +86,7 @@ function replaceSecretsInString(
 function replaceSecretsInObject(
   config_: RenovateConfig,
   secrets: Record<string, string>,
-  deleteSecrets: boolean
+  deleteSecrets: boolean,
 ): RenovateConfig {
   const config = { ...config_ };
   if (deleteSecrets) {
@@ -105,7 +105,7 @@ function replaceSecretsInObject(
           value[arrayIndex] = replaceSecretsInObject(
             arrayItem,
             secrets,
-            deleteSecrets
+            deleteSecrets,
           );
         } else if (is.string(arrayItem)) {
           value[arrayIndex] = replaceSecretsInString(key, arrayItem, secrets);
@@ -119,7 +119,7 @@ function replaceSecretsInObject(
 export function applySecretsToConfig(
   config: RenovateConfig,
   secrets = config.secrets,
-  deleteSecrets = true
+  deleteSecrets = true,
 ): RenovateConfig {
   // Add all secrets to be sanitized
   if (is.plainObject(secrets)) {
diff --git a/lib/config/types.ts b/lib/config/types.ts
index f9732cba2f9875c675ac85ecb56d8d47c7fcbacf..7de52aecf0750e8f7f83ce69bd54091087921a87 100644
--- a/lib/config/types.ts
+++ b/lib/config/types.ts
@@ -176,7 +176,7 @@ export interface PostUpgradeTasks {
 }
 
 export type UpdateConfig<
-  T extends RenovateSharedConfig = RenovateSharedConfig
+  T extends RenovateSharedConfig = RenovateSharedConfig,
 > = Partial<Record<UpdateType, T | null>>;
 
 export type RenovateRepository =
@@ -397,7 +397,7 @@ export interface RenovateOptionBase {
 }
 
 export interface RenovateArrayOption<
-  T extends string | number | Record<string, unknown> = Record<string, unknown>
+  T extends string | number | Record<string, unknown> = Record<string, unknown>,
 > extends RenovateOptionBase {
   default?: T[] | null;
   mergeable?: boolean;
diff --git a/lib/config/utils.ts b/lib/config/utils.ts
index 4d5161e78ef958f7bbd95372f85725f917049e00..ab3b710ab93d81118e4a620100130716ada0ad5b 100644
--- a/lib/config/utils.ts
+++ b/lib/config/utils.ts
@@ -6,7 +6,7 @@ import type { RenovateConfig } from './types';
 
 export function mergeChildConfig<
   T extends Record<string, any>,
-  TChild extends Record<string, any> | undefined
+  TChild extends Record<string, any> | undefined,
 >(parent: T, child: TChild): T & TChild {
   logger.trace({ parent, child }, `mergeChildConfig`);
   if (!child) {
@@ -20,7 +20,7 @@ export function mergeChildConfig<
   if (config?.isVulnerabilityAlert) {
     config.vulnerabilitySeverity = getHighestVulnerabilitySeverity(
       parent,
-      child
+      child,
     );
   }
 
@@ -39,17 +39,17 @@ export function mergeChildConfig<
         };
       } else if (option.type === 'array') {
         config[option.name] = (parentConfig[option.name] as unknown[]).concat(
-          config[option.name]
+          config[option.name],
         );
       } else {
         config[option.name] = mergeChildConfig(
           parentConfig[option.name] as RenovateConfig,
-          childConfig[option.name] as RenovateConfig
+          childConfig[option.name] as RenovateConfig,
         );
       }
       logger.trace(
         { result: config[option.name] },
-        `Merged config.${option.name}`
+        `Merged config.${option.name}`,
       );
     }
   }
diff --git a/lib/config/validation-helpers/managers.ts b/lib/config/validation-helpers/managers.ts
index aecad19df3c48302e03ff7cc6aa0787ae22319b3..fbb2e0e2e98c815bdd1341a84b755e2214d5c1dc 100644
--- a/lib/config/validation-helpers/managers.ts
+++ b/lib/config/validation-helpers/managers.ts
@@ -13,12 +13,12 @@ export function check({
   if (Array.isArray(resolvedRule.matchManagers)) {
     if (
       resolvedRule.matchManagers.find(
-        (confManager) => !allManagersList.includes(confManager)
+        (confManager) => !allManagersList.includes(confManager),
       )
     ) {
       managersErrMessage = `${currentPath}:
         You have included an unsupported manager in a package rule. Your list: ${String(
-          resolvedRule.matchManagers
+          resolvedRule.matchManagers,
         )}.
         Supported managers are: (${allManagersList.join(', ')}).`;
     }
diff --git a/lib/config/validation.spec.ts b/lib/config/validation.spec.ts
index 59c3676f981164127ca0726e516d0a910c825eaa..31b295cf4d7e24d9d655b95896c4b65927f82809 100644
--- a/lib/config/validation.spec.ts
+++ b/lib/config/validation.spec.ts
@@ -13,7 +13,7 @@ describe('config/validation', () => {
 
     it('handles encrypted within array types', () => {
       expect(configValidation.getParentName('hostRules[0].encrypted')).toBe(
-        'hostRules'
+        'hostRules',
       );
     });
   });
@@ -190,7 +190,7 @@ describe('config/validation', () => {
         major: null,
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(3);
@@ -207,7 +207,7 @@ describe('config/validation', () => {
         ],
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -224,7 +224,7 @@ describe('config/validation', () => {
         ],
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config as any
+        config as any,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(2);
@@ -243,7 +243,7 @@ describe('config/validation', () => {
       ],
     ])('validates enabled managers for %s', async (_case, config) => {
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -260,12 +260,12 @@ describe('config/validation', () => {
       'errors if included not supported enabled managers for %s',
       async (_case, config) => {
         const { warnings, errors } = await configValidation.validateConfig(
-          config
+          config,
         );
         expect(warnings).toHaveLength(0);
         expect(errors).toHaveLength(1);
         expect(errors).toMatchSnapshot();
-      }
+      },
     );
 
     it('errors for all types', async () => {
@@ -300,7 +300,7 @@ describe('config/validation', () => {
         major: null,
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(1);
       expect(errors).toMatchSnapshot();
@@ -328,7 +328,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(4);
       expect(errors).toMatchSnapshot();
@@ -347,7 +347,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toMatchSnapshot();
@@ -364,7 +364,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(2);
@@ -385,7 +385,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -403,7 +403,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as any,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -430,7 +430,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as any,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -458,7 +458,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as any,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -494,7 +494,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as RenovateConfig,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(2);
@@ -524,7 +524,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as any,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -545,7 +545,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -565,7 +565,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(4);
@@ -588,7 +588,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -610,7 +610,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as any,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -630,7 +630,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toMatchSnapshot();
@@ -643,7 +643,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -655,7 +655,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -667,7 +667,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config as never, // TODO: #15963
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -679,7 +679,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -693,7 +693,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -708,7 +708,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toMatchObject([
@@ -728,7 +728,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toMatchObject([
@@ -760,7 +760,7 @@ describe('config/validation', () => {
         ],
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(errors).toHaveLength(1);
       expect(warnings).toHaveLength(1);
@@ -780,7 +780,7 @@ describe('config/validation', () => {
         },
       } as never;
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(errors).toHaveLength(1);
       expect(warnings).toHaveLength(0);
@@ -792,7 +792,7 @@ describe('config/validation', () => {
         hostType: 'npm',
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(errors).toHaveLength(0);
       expect(warnings).toHaveLength(1);
@@ -805,7 +805,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -819,7 +819,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(1);
       expect(warnings).toMatchSnapshot();
@@ -837,7 +837,7 @@ describe('config/validation', () => {
       } as any;
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(1);
@@ -856,7 +856,7 @@ describe('config/validation', () => {
       };
       const { warnings, errors } = await configValidation.validateConfig(
         config,
-        true
+        true,
       );
       expect(errors).toHaveLength(0);
       expect(warnings).toHaveLength(1);
@@ -870,7 +870,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toHaveLength(0);
@@ -884,7 +884,7 @@ describe('config/validation', () => {
         },
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toMatchObject([
@@ -901,7 +901,7 @@ describe('config/validation', () => {
         schedule: ['30 5 * * *'],
       };
       const { warnings, errors } = await configValidation.validateConfig(
-        config
+        config,
       );
       expect(warnings).toHaveLength(0);
       expect(errors).toMatchObject([
diff --git a/lib/config/validation.ts b/lib/config/validation.ts
index 56217ec95400a358fe1fe792836bf24c7050dd2d..dfc4420d95b1570c4bd95956966a9a0bb6a0453c 100644
--- a/lib/config/validation.ts
+++ b/lib/config/validation.ts
@@ -71,7 +71,7 @@ function validatePlainObject(val: Record<string, unknown>): true | string {
 
 function getUnsupportedEnabledManagers(enabledManagers: string[]): string[] {
   return enabledManagers.filter(
-    (manager) => !allManagersList.includes(manager)
+    (manager) => !allManagersList.includes(manager),
   );
 }
 
@@ -97,7 +97,7 @@ export function getParentName(parentPath: string | undefined): string {
 export async function validateConfig(
   config: RenovateConfig,
   isPreset?: boolean,
-  parentPath?: string
+  parentPath?: string,
 ): Promise<ValidationResult> {
   if (!optionTypes) {
     optionTypes = {};
@@ -134,13 +134,13 @@ export async function validateConfig(
     }
     if (key === 'enabledManagers' && val) {
       const unsupportedManagers = getUnsupportedEnabledManagers(
-        val as string[]
+        val as string[],
       );
       if (is.nonEmptyArray(unsupportedManagers)) {
         errors.push({
           topic: 'Configuration Error',
           message: `The following managers configured in enabledManagers are not supported: "${unsupportedManagers.join(
-            ', '
+            ', ',
           )}"`,
         });
       }
@@ -248,7 +248,7 @@ export async function validateConfig(
             errors.push({
               topic: 'Configuration Error',
               message: `Configuration option \`${currentPath}\` should be boolean. Found: ${JSON.stringify(
-                val
+                val,
               )} (${typeof val})`,
             });
           }
@@ -257,7 +257,7 @@ export async function validateConfig(
             errors.push({
               topic: 'Configuration Error',
               message: `Configuration option \`${currentPath}\` should be an integer. Found: ${JSON.stringify(
-                val
+                val,
               )} (${typeof val})`,
             });
           }
@@ -268,7 +268,7 @@ export async function validateConfig(
                 const subValidation = await validateConfig(
                   subval as RenovateConfig,
                   isPreset,
-                  `${currentPath}[${subIndex}]`
+                  `${currentPath}[${subIndex}]`,
                 );
                 warnings = warnings.concat(subValidation.warnings);
                 errors = errors.concat(subValidation.errors);
@@ -340,19 +340,19 @@ export async function validateConfig(
                     packageRules: [
                       await resolveConfigPresets(
                         packageRule as RenovateConfig,
-                        config
+                        config,
                       ),
                     ],
                   }).migratedConfig.packageRules![0];
                   errors.push(
-                    ...managerValidator.check({ resolvedRule, currentPath })
+                    ...managerValidator.check({ resolvedRule, currentPath }),
                   );
                   const selectorLength = Object.keys(resolvedRule).filter(
-                    (ruleKey) => selectors.includes(ruleKey)
+                    (ruleKey) => selectors.includes(ruleKey),
                   ).length;
                   if (!selectorLength) {
                     const message = `${currentPath}[${subIndex}]: Each packageRule must contain at least one match* or exclude* selector. Rule: ${JSON.stringify(
-                      packageRule
+                      packageRule,
                     )}`;
                     errors.push({
                       topic: 'Configuration Error',
@@ -361,7 +361,7 @@ export async function validateConfig(
                   }
                   if (selectorLength === Object.keys(resolvedRule).length) {
                     const message = `${currentPath}[${subIndex}]: Each packageRule must contain at least one non-match* or non-exclude* field. Rule: ${JSON.stringify(
-                      packageRule
+                      packageRule,
                     )}`;
                     warnings.push({
                       topic: 'Configuration Error',
@@ -388,7 +388,7 @@ export async function validateConfig(
                     for (const option of preLookupOptions) {
                       if (resolvedRule[option] !== undefined) {
                         const message = `${currentPath}[${subIndex}]: packageRules cannot combine both matchUpdateTypes and ${option}. Rule: ${JSON.stringify(
-                          packageRule
+                          packageRule,
                         )}`;
                         errors.push({
                           topic: 'Configuration Error',
@@ -425,16 +425,16 @@ export async function validateConfig(
               for (const customManager of val as CustomManager[]) {
                 if (
                   Object.keys(customManager).some(
-                    (k) => !allowedKeys.includes(k)
+                    (k) => !allowedKeys.includes(k),
                   )
                 ) {
                   const disallowedKeys = Object.keys(customManager).filter(
-                    (k) => !allowedKeys.includes(k)
+                    (k) => !allowedKeys.includes(k),
                   );
                   errors.push({
                     topic: 'Configuration Error',
                     message: `Custom Manager contains disallowed fields: ${disallowedKeys.join(
-                      ', '
+                      ', ',
                     )}`,
                   });
                 } else if (
@@ -447,7 +447,7 @@ export async function validateConfig(
                         validateRegexManagerFields(
                           customManager,
                           currentPath,
-                          errors
+                          errors,
                         );
                         break;
                     }
@@ -581,7 +581,7 @@ export async function validateConfig(
                   continue;
                 }
                 for (const [subKey, subValue] of Object.entries(
-                  customDatasourceValue
+                  customDatasourceValue,
                 )) {
                   if (!allowedKeys.includes(subKey)) {
                     errors.push({
@@ -627,7 +627,7 @@ export async function validateConfig(
                 const subValidation = await validateConfig(
                   val,
                   isPreset,
-                  currentPath
+                  currentPath,
                 );
                 warnings = warnings.concat(subValidation.warnings);
                 errors = errors.concat(subValidation.errors);
@@ -661,7 +661,7 @@ export async function validateConfig(
 function validateRegexManagerFields(
   customManager: Partial<RegexManagerConfig>,
   currentPath: string,
-  errors: ValidationMessage[]
+  errors: ValidationMessage[],
 ): void {
   if (is.nonEmptyArray(customManager.matchStrings)) {
     for (const matchString of customManager.matchStrings) {
@@ -687,7 +687,7 @@ function validateRegexManagerFields(
     if (
       !customManager[templateField] &&
       !customManager.matchStrings?.some((matchString) =>
-        matchString.includes(`(?<${field}>`)
+        matchString.includes(`(?<${field}>`),
       )
     ) {
       errors.push({
diff --git a/lib/constants/platform.spec.ts b/lib/constants/platform.spec.ts
index 54b9e718df242aba3b23745e6b07a4c0cb1daa8f..8b8aee044aaf6c2156231fcccdbf238dcefb11e2 100644
--- a/lib/constants/platform.spec.ts
+++ b/lib/constants/platform.spec.ts
@@ -19,23 +19,23 @@ import {
 describe('constants/platform', () => {
   it('should be part of the GITEA_API_USING_HOST_TYPES', () => {
     expect(
-      GITEA_API_USING_HOST_TYPES.includes(GiteaTagsDatasource.id)
+      GITEA_API_USING_HOST_TYPES.includes(GiteaTagsDatasource.id),
     ).toBeTrue();
     expect(GITEA_API_USING_HOST_TYPES.includes('gitea')).toBeTrue();
   });
 
   it('should be part of the GITLAB_API_USING_HOST_TYPES', () => {
     expect(
-      GITLAB_API_USING_HOST_TYPES.includes(GitlabTagsDatasource.id)
+      GITLAB_API_USING_HOST_TYPES.includes(GitlabTagsDatasource.id),
     ).toBeTrue();
     expect(
-      GITLAB_API_USING_HOST_TYPES.includes(GitlabReleasesDatasource.id)
+      GITLAB_API_USING_HOST_TYPES.includes(GitlabReleasesDatasource.id),
     ).toBeTrue();
     expect(
-      GITLAB_API_USING_HOST_TYPES.includes(GitlabPackagesDatasource.id)
+      GITLAB_API_USING_HOST_TYPES.includes(GitlabPackagesDatasource.id),
     ).toBeTrue();
     expect(
-      GITLAB_API_USING_HOST_TYPES.includes(GITLAB_CHANGELOG_ID)
+      GITLAB_API_USING_HOST_TYPES.includes(GITLAB_CHANGELOG_ID),
     ).toBeTrue();
     expect(GITLAB_API_USING_HOST_TYPES.includes('gitlab')).toBeTrue();
   });
@@ -46,17 +46,17 @@ describe('constants/platform', () => {
 
   it('should be part of the GITHUB_API_USING_HOST_TYPES ', () => {
     expect(
-      GITHUB_API_USING_HOST_TYPES.includes(GithubTagsDatasource.id)
+      GITHUB_API_USING_HOST_TYPES.includes(GithubTagsDatasource.id),
     ).toBeTrue();
     expect(
-      GITHUB_API_USING_HOST_TYPES.includes(GithubReleasesDatasource.id)
+      GITHUB_API_USING_HOST_TYPES.includes(GithubReleasesDatasource.id),
     ).toBeTrue();
     expect(GITHUB_API_USING_HOST_TYPES.includes(PodDatasource.id)).toBeTrue();
     expect(
-      GITHUB_API_USING_HOST_TYPES.includes(HermitDatasource.id)
+      GITHUB_API_USING_HOST_TYPES.includes(HermitDatasource.id),
     ).toBeTrue();
     expect(
-      GITHUB_API_USING_HOST_TYPES.includes(GITHUB_CHANGELOG_ID)
+      GITHUB_API_USING_HOST_TYPES.includes(GITHUB_CHANGELOG_ID),
     ).toBeTrue();
     expect(GITHUB_API_USING_HOST_TYPES.includes('github')).toBeTrue();
   });
@@ -67,7 +67,7 @@ describe('constants/platform', () => {
 
   it('should be part of the BITBUCKET_API_USING_HOST_TYPES ', () => {
     expect(
-      BITBUCKET_API_USING_HOST_TYPES.includes(BitbucketTagsDatasource.id)
+      BITBUCKET_API_USING_HOST_TYPES.includes(BitbucketTagsDatasource.id),
     ).toBeTrue();
     expect(BITBUCKET_API_USING_HOST_TYPES.includes('bitbucket')).toBeTrue();
   });
diff --git a/lib/instrumentation/index.spec.ts b/lib/instrumentation/index.spec.ts
index c098aae373dba6ffae83dcc0a5fcd29fa020b79f..ddb347e0155c6bb873e0218f6be8c80627267970 100644
--- a/lib/instrumentation/index.spec.ts
+++ b/lib/instrumentation/index.spec.ts
@@ -104,7 +104,7 @@ describe('instrumentation/index', () => {
       expect(() =>
         instrument('test', () => {
           throw error;
-        })
+        }),
       ).toThrow(error);
     });
 
@@ -124,7 +124,7 @@ describe('instrumentation/index', () => {
         instrument('test', async () => {
           await Promise.resolve();
           throw error;
-        })
+        }),
       ).rejects.toThrow(error);
     });
   });
diff --git a/lib/instrumentation/index.ts b/lib/instrumentation/index.ts
index edc590c753c547087e11c7dd6a31e18be743914e..4ae908847b84a76bb0d871df9447787429d87745 100644
--- a/lib/instrumentation/index.ts
+++ b/lib/instrumentation/index.ts
@@ -52,7 +52,7 @@ export function init(): void {
   // add processors
   if (isTraceDebuggingEnabled()) {
     traceProvider.addSpanProcessor(
-      new SimpleSpanProcessor(new ConsoleSpanExporter())
+      new SimpleSpanProcessor(new ConsoleSpanExporter()),
     );
   }
 
@@ -72,7 +72,7 @@ export function init(): void {
       applyCustomAttributesOnSpan: /* istanbul ignore next */ (
         span,
         request,
-        response
+        response,
       ) => {
         // ignore 404 errors when the branch protection of Github could not be found. This is expected if no rules are configured
         if (
@@ -124,18 +124,18 @@ function getTracer(): Tracer {
 
 export function instrument<F extends (span: Span) => ReturnType<F>>(
   name: string,
-  fn: F
+  fn: F,
 ): ReturnType<F>;
 export function instrument<F extends (span: Span) => ReturnType<F>>(
   name: string,
   fn: F,
-  options: SpanOptions
+  options: SpanOptions,
 ): ReturnType<F>;
 export function instrument<F extends (span: Span) => ReturnType<F>>(
   name: string,
   fn: F,
   options: SpanOptions = {},
-  context: Context = api.context.active()
+  context: Context = api.context.active(),
 ): ReturnType<F> {
   return getTracer().startActiveSpan(name, options, context, (span: Span) => {
     try {
diff --git a/lib/logger/cmd-serializer.ts b/lib/logger/cmd-serializer.ts
index 5142e5c6ca8c69123d78c956b1c8e2d216a17526..6b3492d3ba0e44296a4b568b3d025cf0ae7e9732 100644
--- a/lib/logger/cmd-serializer.ts
+++ b/lib/logger/cmd-serializer.ts
@@ -1,6 +1,6 @@
 // istanbul ignore next
 export default function cmdSerializer(
-  cmd: string | string[]
+  cmd: string | string[],
 ): string | string[] {
   if (typeof cmd === 'string') {
     return cmd.replace(/https:\/\/[^@]*@/g, 'https://**redacted**@'); // TODO #12874
diff --git a/lib/logger/config-serializer.ts b/lib/logger/config-serializer.ts
index cc54936e583c4031d58c5c8847d4897139bb3b74..cb44198a0f9babb2c97e6eb4a7b1ddf60301322b 100644
--- a/lib/logger/config-serializer.ts
+++ b/lib/logger/config-serializer.ts
@@ -2,7 +2,7 @@ import traverse from 'traverse';
 import type { RenovateConfig } from '../config/types';
 
 export default function configSerializer(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): RenovateConfig {
   const templateFields = ['prBody'];
   const contentFields = [
diff --git a/lib/logger/err-serializer.ts b/lib/logger/err-serializer.ts
index d33cc614606e3ac9883a7ba440d4e4971983c04b..f6381bb6322c576c406607088c137441787a3138 100644
--- a/lib/logger/err-serializer.ts
+++ b/lib/logger/err-serializer.ts
@@ -13,7 +13,7 @@ export default function errSerializer(err: Error): any {
     if (is.string(val)) {
       response[field] = val.replace(
         /https:\/\/[^@]*?@/g, // TODO #12874
-        'https://**redacted**@'
+        'https://**redacted**@',
       );
     }
   }
diff --git a/lib/logger/index.spec.ts b/lib/logger/index.spec.ts
index c8422d469e2be6d814074a22f4c13f63db0d0385..3fa2fbd10aa2e17c8cbaaad2a19f10816d1f23ff 100644
--- a/lib/logger/index.spec.ts
+++ b/lib/logger/index.spec.ts
@@ -79,7 +79,7 @@ describe('logger/index', () => {
       addStream({
         name: 'logfile',
         level: 'error',
-      })
+      }),
     ).toThrow("Missing 'stream' or 'path' for bunyan stream");
   });
 
@@ -90,7 +90,7 @@ describe('logger/index', () => {
         path: 'file.log',
         level: 'error',
         type: 'rotating-file',
-      })
+      }),
     ).toThrow("Rotating files aren't supported");
   });
 
@@ -103,7 +103,7 @@ describe('logger/index', () => {
           chunk = x;
           return true;
         },
-      })
+      }),
     );
 
     addStream({
@@ -126,7 +126,7 @@ describe('logger/index', () => {
           logged = JSON.parse(x);
           return true;
         },
-      })
+      }),
     );
 
     addStream({
@@ -154,7 +154,7 @@ describe('logger/index', () => {
           logged = JSON.parse(x);
           return true;
         },
-      })
+      }),
     );
 
     addStream({
diff --git a/lib/logger/index.ts b/lib/logger/index.ts
index 43add78c9fdb0de81bc09c22dfe7df67c9c6de19..7e90ab4d841cea3b38f121ff5756cd8952e73055 100644
--- a/lib/logger/index.ts
+++ b/lib/logger/index.ts
@@ -131,7 +131,7 @@ export function removeMeta(fields: string[]): void {
 }
 
 export /* istanbul ignore next */ function addStream(
-  stream: bunyan.Stream
+  stream: bunyan.Stream,
 ): void {
   bunyanLogger.addStream(withSanitizer(stream));
 }
diff --git a/lib/logger/pretty-stdout.spec.ts b/lib/logger/pretty-stdout.spec.ts
index 2c402c17c9bcafd5247de527e5f229b4d612607d..2f19c16dd4e0ece0e15e9df8468aafc6513ce86f 100644
--- a/lib/logger/pretty-stdout.spec.ts
+++ b/lib/logger/pretty-stdout.spec.ts
@@ -5,8 +5,8 @@ import type { BunyanRecord } from './types';
 jest.mock('chalk', () =>
   ['bgRed', 'blue', 'gray', 'green', 'magenta', 'red'].reduce(
     (r, c) => Object.defineProperty(r, c, { value: (s: string) => s }),
-    {}
-  )
+    {},
+  ),
 );
 
 describe('logger/pretty-stdout', () => {
@@ -32,7 +32,7 @@ describe('logger/pretty-stdout', () => {
         repository: 'a/b',
       };
       expect(prettyStdout.getMeta(rec as any)).toEqual(
-        chalk.gray(' (repository=a/b)')
+        chalk.gray(' (repository=a/b)'),
       );
     });
 
@@ -44,7 +44,7 @@ describe('logger/pretty-stdout', () => {
         module: 'test',
       };
       expect(prettyStdout.getMeta(rec as any)).toEqual(
-        chalk.gray(' (repository=a/b, branch=c) [test]')
+        chalk.gray(' (repository=a/b, branch=c) [test]'),
       );
     });
   });
@@ -75,7 +75,7 @@ describe('logger/pretty-stdout', () => {
         },
       };
       expect(prettyStdout.getDetails(rec as any)).toBe(
-        `       "config": {"a": "b", "d": ["e", "f"]}\n`
+        `       "config": {"a": "b", "d": ["e", "f"]}\n`,
       );
     });
   });
@@ -104,7 +104,7 @@ describe('logger/pretty-stdout', () => {
           `TRACE: test message`,
           `       "config": {"a": "b", "d": ["e", "f"]}`,
           ``,
-        ].join('\n')
+        ].join('\n'),
       );
     });
   });
diff --git a/lib/logger/types.ts b/lib/logger/types.ts
index df0a1824356b463981b388ff1a8d24b7073ad90c..26fe16a73a814d67b8de56003eb3a5c9288c1b08 100644
--- a/lib/logger/types.ts
+++ b/lib/logger/types.ts
@@ -37,6 +37,6 @@ export type BunyanStream = (NodeJS.WritableStream | Stream) & {
   write: (
     chunk: BunyanRecord,
     enc: BufferEncoding,
-    cb: (err?: Error | null) => void
+    cb: (err?: Error | null) => void,
   ) => void;
 };
diff --git a/lib/logger/utils.spec.ts b/lib/logger/utils.spec.ts
index 788ed5e0ee5a956e49c4e7ccd34944f42cf5d1d4..51b3ac222616bf9f0107b2e1a01bee0d970121a7 100644
--- a/lib/logger/utils.spec.ts
+++ b/lib/logger/utils.spec.ts
@@ -54,7 +54,7 @@ describe('logger/utils', () => {
   describe('prepareError', () => {
     function getError<T extends z.ZodType>(
       schema: T,
-      input: unknown
+      input: unknown,
     ): z.ZodError | null {
       try {
         schema.parse(input);
@@ -68,7 +68,7 @@ describe('logger/utils', () => {
 
     function prepareIssues<T extends z.ZodType>(
       schema: T,
-      input: unknown
+      input: unknown,
     ): unknown {
       const error = getError(schema, input);
       return error ? prepareZodIssues(error.format()) : null;
@@ -76,15 +76,15 @@ describe('logger/utils', () => {
 
     it('prepareZodIssues', () => {
       expect(prepareIssues(z.string(), 42)).toBe(
-        'Expected string, received number'
+        'Expected string, received number',
       );
 
       expect(prepareIssues(z.string().array(), 42)).toBe(
-        'Expected array, received number'
+        'Expected array, received number',
       );
 
       expect(
-        prepareIssues(z.string().array(), ['foo', 'bar', 42, 42, 42, 42, 42])
+        prepareIssues(z.string().array(), ['foo', 'bar', 42, 42, 42, 42, 42]),
       ).toEqual({
         '2': 'Expected string, received number',
         '3': 'Expected string, received number',
@@ -101,7 +101,7 @@ describe('logger/utils', () => {
           key3: 42,
           key4: 42,
           key5: 42,
-        })
+        }),
       ).toEqual({
         key1: 'Expected string, received number',
         key2: 'Expected string, received number',
@@ -116,8 +116,8 @@ describe('logger/utils', () => {
               bar: z.string(),
             }),
           }),
-          { foo: { bar: [], baz: 42 } }
-        )
+          { foo: { bar: [], baz: 42 } },
+        ),
       ).toEqual({
         foo: {
           bar: 'Expected string, received array',
@@ -130,8 +130,8 @@ describe('logger/utils', () => {
             z.object({ type: z.literal('foo') }),
             z.object({ type: z.literal('bar') }),
           ]),
-          { type: 'baz' }
-        )
+          { type: 'baz' },
+        ),
       ).toEqual({
         type: "Invalid discriminator value. Expected 'foo' | 'bar'",
       });
@@ -142,8 +142,8 @@ describe('logger/utils', () => {
             z.object({ type: z.literal('foo') }),
             z.object({ type: z.literal('bar') }),
           ]),
-          {}
-        )
+          {},
+        ),
       ).toEqual({
         type: "Invalid discriminator value. Expected 'foo' | 'bar'",
       });
@@ -154,8 +154,8 @@ describe('logger/utils', () => {
             z.object({ type: z.literal('foo') }),
             z.object({ type: z.literal('bar') }),
           ]),
-          42
-        )
+          42,
+        ),
       ).toBe('Expected object, received number');
     });
 
@@ -168,7 +168,7 @@ describe('logger/utils', () => {
             }),
           }),
         }),
-        { foo: { bar: { baz: 42 } } }
+        { foo: { bar: { baz: 42 } } },
       );
 
       expect(prepareError(err!)).toEqual({
diff --git a/lib/logger/utils.ts b/lib/logger/utils.ts
index 46ac17e604be073d969f09d960ec99b525621ac2..dc49caa7f957ea4b9d02ee17f729da95455d1dc6 100644
--- a/lib/logger/utils.ts
+++ b/lib/logger/utils.ts
@@ -171,7 +171,7 @@ function isNested(value: unknown): value is NestedValue {
 
 export function sanitizeValue(
   value: unknown,
-  seen = new WeakMap<NestedValue, unknown>()
+  seen = new WeakMap<NestedValue, unknown>(),
 ): any {
   if (is.string(value)) {
     return sanitize(sanitizeUrls(value));
@@ -247,7 +247,7 @@ export function withSanitizer(streamConfig: bunyan.Stream): bunyan.Stream {
     const write = (
       chunk: BunyanRecord,
       enc: BufferEncoding,
-      cb: (err?: Error | null) => void
+      cb: (err?: Error | null) => void,
     ): void => {
       const raw = sanitizeValue(chunk);
       const result =
diff --git a/lib/modules/datasource/api.ts b/lib/modules/datasource/api.ts
index d0593b2c339b8a68139f7b78e82817d977fecf17..4ba206675060c80b33f0a40c3b74df3a630926b9 100644
--- a/lib/modules/datasource/api.ts
+++ b/lib/modules/datasource/api.ts
@@ -92,7 +92,7 @@ api.set(GiteaReleasesDatasource.id, new GiteaReleasesDatasource());
 api.set(GiteaTagsDatasource.id, new GiteaTagsDatasource());
 api.set(
   GithubReleaseAttachmentsDatasource.id,
-  new GithubReleaseAttachmentsDatasource()
+  new GithubReleaseAttachmentsDatasource(),
 );
 api.set(GithubReleasesDatasource.id, new GithubReleasesDatasource());
 api.set(GithubRunnersDatasource.id, new GithubRunnersDatasource());
diff --git a/lib/modules/datasource/artifactory/index.spec.ts b/lib/modules/datasource/artifactory/index.spec.ts
index d84c402ef524034743c1a4ee643a00e8a6345346..02206081ed9e31cfd419039f88816561511a2263 100644
--- a/lib/modules/datasource/artifactory/index.spec.ts
+++ b/lib/modules/datasource/artifactory/index.spec.ts
@@ -58,7 +58,7 @@ describe('modules/datasource/artifactory/index', () => {
     it('parses real data (merge strategy with 2 registries)', async () => {
       const secondRegistryUrl: string = joinUrlParts(
         testRegistryUrl,
-        'production'
+        'production',
       );
       httpMock
         .scope(testRegistryUrl)
@@ -85,7 +85,7 @@ describe('modules/datasource/artifactory/index', () => {
       expect(logger.warn).toHaveBeenCalledTimes(1);
       expect(logger.warn).toHaveBeenCalledWith(
         { packageName: 'project' },
-        'artifactory datasource requires custom registryUrl. Skipping datasource'
+        'artifactory datasource requires custom registryUrl. Skipping datasource',
       );
       expect(res).toBeNull();
     });
@@ -100,7 +100,7 @@ describe('modules/datasource/artifactory/index', () => {
           ...testConfig,
           datasource,
           packageName: testLookupName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -111,7 +111,7 @@ describe('modules/datasource/artifactory/index', () => {
           ...testConfig,
           datasource,
           packageName: testLookupName,
-        })
+        }),
       ).toBeNull();
       expect(logger.warn).toHaveBeenCalledTimes(1);
       expect(logger.warn).toHaveBeenCalledWith(
@@ -119,7 +119,7 @@ describe('modules/datasource/artifactory/index', () => {
           packageName: 'project',
           registryUrl: 'https://jfrog.company.com/artifactory',
         },
-        'artifactory: `Not Found` error'
+        'artifactory: `Not Found` error',
       );
     });
 
@@ -130,7 +130,7 @@ describe('modules/datasource/artifactory/index', () => {
           ...testConfig,
           datasource,
           packageName: testLookupName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
diff --git a/lib/modules/datasource/artifactory/index.ts b/lib/modules/datasource/artifactory/index.ts
index 64779cac681fc9077b2d724c38c644111d5206f5..2668182e800fa6e0598404be5e098b3a3e1f4363 100644
--- a/lib/modules/datasource/artifactory/index.ts
+++ b/lib/modules/datasource/artifactory/index.ts
@@ -34,7 +34,7 @@ export class ArtifactoryDatasource extends Datasource {
     if (!registryUrl) {
       logger.warn(
         { packageName },
-        'artifactory datasource requires custom registryUrl. Skipping datasource'
+        'artifactory datasource requires custom registryUrl. Skipping datasource',
       );
       return null;
     }
@@ -58,7 +58,7 @@ export class ArtifactoryDatasource extends Datasource {
       nodes
         .filter(
           // filter out hyperlink to navigate to parent folder
-          (node) => node.innerHTML !== '../' && node.innerHTML !== '..'
+          (node) => node.innerHTML !== '../' && node.innerHTML !== '..',
         )
         .forEach(
           // extract version and published time for each node
@@ -69,7 +69,7 @@ export class ArtifactoryDatasource extends Datasource {
                 : node.innerHTML;
 
             const published = ArtifactoryDatasource.parseReleaseTimestamp(
-              node.nextSibling?.text
+              node.nextSibling?.text,
             );
 
             const thisRelease: Release = {
@@ -78,18 +78,18 @@ export class ArtifactoryDatasource extends Datasource {
             };
 
             result.releases.push(thisRelease);
-          }
+          },
         );
 
       if (result.releases.length) {
         logger.trace(
           { registryUrl, packageName, versions: result.releases.length },
-          'artifactory: Found versions'
+          'artifactory: Found versions',
         );
       } else {
         logger.trace(
           { registryUrl, packageName },
-          'artifactory: No versions found'
+          'artifactory: No versions found',
         );
       }
     } catch (err) {
@@ -98,7 +98,7 @@ export class ArtifactoryDatasource extends Datasource {
         if (err.response?.statusCode === 404) {
           logger.warn(
             { registryUrl, packageName },
-            'artifactory: `Not Found` error'
+            'artifactory: `Not Found` error',
           );
           return null;
         }
diff --git a/lib/modules/datasource/aws-machine-image/index.spec.ts b/lib/modules/datasource/aws-machine-image/index.spec.ts
index 06fb9419ca505e81c7194d4fcbb2fd475d9f2644..c0b3ad7b8569de4070d853ba409038f7f2846bd1 100644
--- a/lib/modules/datasource/aws-machine-image/index.spec.ts
+++ b/lib/modules/datasource/aws-machine-image/index.spec.ts
@@ -142,7 +142,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
       mockDescribeImagesCommand(mock3Images);
       const ec2DataSource = new AwsMachineImageDataSource();
       const res = await ec2DataSource.getSortedAwsMachineImages(
-        '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["3images"]}]'
+        '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["3images"]}]',
       );
       expect(res).toStrictEqual([image1, image2, image3]);
       expect(ec2Mock.calls()).toHaveLength(1);
@@ -186,7 +186,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
       mockDescribeImagesCommand(mock1Image);
       const ec2DataSource = new AwsMachineImageDataSource();
       const res = await ec2DataSource.getSortedAwsMachineImages(
-        '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["1image"]}]'
+        '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["1image"]}]',
       );
       expect(res).toStrictEqual([image3]);
       expect(ec2Mock.calls()).toHaveLength(1);
@@ -230,7 +230,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
       mockDescribeImagesCommand(mockEmpty);
       const ec2DataSource = new AwsMachineImageDataSource();
       const res = await ec2DataSource.getSortedAwsMachineImages(
-        '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["noiamge"]}]'
+        '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["noiamge"]}]',
       );
       expect(res).toStrictEqual([]);
       expect(ec2Mock.calls()).toHaveLength(1);
@@ -310,7 +310,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
           packageName:
             '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["with matching newValue, with 3 matching image to return the matching image"]}]',
         },
-        image1.ImageId
+        image1.ImageId,
       );
       expect(res).toStrictEqual(image1.Name);
     });
@@ -323,7 +323,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
           packageName:
             '[{"Name":"owner-id","Values":["602401143452"]},{"Name":"name","Values":["with not matching newValue, with 3 matching images to return the matching image"]}]',
         },
-        'will never match'
+        'will never match',
       );
       expect(res).toBeNull();
     });
@@ -403,7 +403,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
 
     it('loads filters without aws config', () => {
       const res = ec2DataSource.loadConfig(
-        '[{"Name":"testname","Values":["testvalue"]}]'
+        '[{"Name":"testname","Values":["testvalue"]}]',
       );
       expect(res).toEqual([
         [
@@ -418,7 +418,7 @@ describe('modules/datasource/aws-machine-image/index', () => {
 
     it('loads filters with multiple aws configs', () => {
       const res = ec2DataSource.loadConfig(
-        '[{"Name":"testname","Values":["testvalue"]},{"region":"us-west-2"},{"profile":"test-profile"},{"region":"eu-central-1"}]'
+        '[{"Name":"testname","Values":["testvalue"]},{"region":"us-west-2"},{"profile":"test-profile"},{"region":"eu-central-1"}]',
       );
       expect(res).toEqual([
         [
diff --git a/lib/modules/datasource/aws-machine-image/index.ts b/lib/modules/datasource/aws-machine-image/index.ts
index 4413632bf93f3ae6ca166a1120b24834288ec3dc..8c15677505b0aaa750a9afcf330e566f0fe4ee92 100644
--- a/lib/modules/datasource/aws-machine-image/index.ts
+++ b/lib/modules/datasource/aws-machine-image/index.ts
@@ -82,7 +82,7 @@ export class AwsMachineImageDataSource extends Datasource {
       `getSortedAwsMachineImages:${serializedAmiFilter}`,
   })
   async getSortedAwsMachineImages(
-    serializedAmiFilter: string
+    serializedAmiFilter: string,
   ): Promise<Image[]> {
     const [amiFilter, clientConfig] = this.loadConfig(serializedAmiFilter);
     const amiFilterCmd = this.getAmiFilterCommand(amiFilter);
@@ -108,7 +108,7 @@ export class AwsMachineImageDataSource extends Datasource {
   })
   override async getDigest(
     { packageName: serializedAmiFilter }: GetReleasesConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     const images = await this.getSortedAwsMachineImages(serializedAmiFilter);
     if (images.length < 1) {
@@ -117,7 +117,7 @@ export class AwsMachineImageDataSource extends Datasource {
 
     if (newValue) {
       const newValueMatchingImages = images.filter(
-        (image) => image.ImageId === newValue
+        (image) => image.ImageId === newValue,
       );
       if (newValueMatchingImages.length === 1) {
         return (
diff --git a/lib/modules/datasource/aws-rds/index.spec.ts b/lib/modules/datasource/aws-rds/index.spec.ts
index 7d46e5493c6eb4240fbf0f9be62cb8a920ca02fc..c6602fc4de6caab240568d3bea9a9bf7444fc5ab 100644
--- a/lib/modules/datasource/aws-rds/index.spec.ts
+++ b/lib/modules/datasource/aws-rds/index.spec.ts
@@ -88,7 +88,7 @@ const version3: DBEngineVersion = {
 };
 
 function mockDescribeVersionsCommand(
-  result: DescribeDBEngineVersionsCommandOutput
+  result: DescribeDBEngineVersionsCommandOutput,
 ): void {
   rdsMock.on(DescribeDBEngineVersionsCommand).resolves(result);
 }
diff --git a/lib/modules/datasource/azure-bicep-resource/index.spec.ts b/lib/modules/datasource/azure-bicep-resource/index.spec.ts
index 47f3edc6678fd36dbb644ad8dc3f171464a554e1..7b724443f29d86fb366b4cfaf2ccff15a2648858 100644
--- a/lib/modules/datasource/azure-bicep-resource/index.spec.ts
+++ b/lib/modules/datasource/azure-bicep-resource/index.spec.ts
@@ -17,7 +17,7 @@ describe('modules/datasource/azure-bicep-resource/index', () => {
             "Resources": {},
             "Functions": {}
           }
-        `
+        `,
       );
 
     const azureBicepResourceDatasource = new AzureBicepResourceDatasource();
@@ -54,7 +54,7 @@ describe('modules/datasource/azure-bicep-resource/index', () => {
               }
             }
           }
-        `
+        `,
       );
 
     const azureBicepResourceDatasource = new AzureBicepResourceDatasource();
@@ -98,7 +98,7 @@ describe('modules/datasource/azure-bicep-resource/index', () => {
             },
             "Functions": {}
           }
-        `
+        `,
       );
 
     const azureBicepResourceDatasource = new AzureBicepResourceDatasource();
diff --git a/lib/modules/datasource/azure-bicep-resource/index.ts b/lib/modules/datasource/azure-bicep-resource/index.ts
index 169008690ac87f5819717a7a8e74f1858c24e07e..32693401312c25ba648ff2b3e83b08f299763f74 100644
--- a/lib/modules/datasource/azure-bicep-resource/index.ts
+++ b/lib/modules/datasource/azure-bicep-resource/index.ts
@@ -37,7 +37,7 @@ export class AzureBicepResourceDatasource extends Datasource {
     key: ({ packageName }: GetReleasesConfig) => `getReleases-${packageName}`,
   })
   async getReleases(
-    getReleasesConfig: GetReleasesConfig
+    getReleasesConfig: GetReleasesConfig,
   ): Promise<ReleaseResult | null> {
     const resourceVersionIndex = await this.getResourceVersionIndex();
     const packageName = getReleasesConfig.packageName.toLowerCase();
@@ -62,7 +62,7 @@ export class AzureBicepResourceDatasource extends Datasource {
   async getResourceVersionIndex(): Promise<BicepResourceVersionIndex> {
     const { body } = await this.http.getJson(
       BICEP_TYPES_INDEX_URL,
-      BicepResourceVersionIndex
+      BicepResourceVersionIndex,
     );
     return body;
   }
diff --git a/lib/modules/datasource/azure-bicep-resource/schema.ts b/lib/modules/datasource/azure-bicep-resource/schema.ts
index 692f8f196184d09a1e41030bb4f4312335e2b2db..9c0b94ed1b2726bc024de19054e9b6f7a80e7574 100644
--- a/lib/modules/datasource/azure-bicep-resource/schema.ts
+++ b/lib/modules/datasource/azure-bicep-resource/schema.ts
@@ -7,7 +7,7 @@ export const BicepResourceVersionIndex = z
       z.object({
         RelativePath: z.string(),
         Index: z.number(),
-      })
+      }),
     ),
     Functions: z.record(
       z.string(),
@@ -17,9 +17,9 @@ export const BicepResourceVersionIndex = z
           z.object({
             RelativePath: z.string(),
             Index: z.number(),
-          })
-        )
-      )
+          }),
+        ),
+      ),
     ),
   })
   .transform(({ Resources, Functions }) => {
diff --git a/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts b/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts
index a221b425b316824657fb80b8321e18bc3f074b5f..94d15fcec1f583fee466bc38a6a48e5520bd4d05 100644
--- a/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts
+++ b/lib/modules/datasource/azure-pipelines-tasks/index.spec.ts
@@ -20,7 +20,7 @@ describe('modules/datasource/azure-pipelines-tasks/index', () => {
       await getPkgReleases({
         datasource: AzurePipelinesTasksDatasource.id,
         packageName: 'unknown',
-      })
+      }),
     ).toBeNull();
   });
 
@@ -33,7 +33,7 @@ describe('modules/datasource/azure-pipelines-tasks/index', () => {
       await getPkgReleases({
         datasource: AzurePipelinesTasksDatasource.id,
         packageName: 'AutomatedAnalysis',
-      })
+      }),
     ).toEqual({ releases: [{ version: '0.171.0' }, { version: '0.198.0' }] });
   });
 
@@ -48,7 +48,7 @@ describe('modules/datasource/azure-pipelines-tasks/index', () => {
       await getPkgReleases({
         datasource: AzurePipelinesTasksDatasource.id,
         packageName: 'AutomatedAnalysis-Marketplace',
-      })
+      }),
     ).toEqual({ releases: [{ version: '0.171.0' }, { version: '0.198.0' }] });
   });
 
@@ -61,7 +61,7 @@ describe('modules/datasource/azure-pipelines-tasks/index', () => {
       await getPkgReleases({
         datasource: AzurePipelinesTasksDatasource.id,
         packageName: 'automatedanalysis',
-      })
+      }),
     ).toEqual({ releases: [{ version: '0.171.0' }, { version: '0.198.0' }] });
   });
 });
diff --git a/lib/modules/datasource/bazel/index.spec.ts b/lib/modules/datasource/bazel/index.spec.ts
index c9ba6ebb67437f826263bf6af8deb4b41ce27d05..f594d09fae254677fb1c88c9e8e9042c9b3b1523 100644
--- a/lib/modules/datasource/bazel/index.spec.ts
+++ b/lib/modules/datasource/bazel/index.spec.ts
@@ -14,7 +14,7 @@ describe('modules/datasource/bazel/index', () => {
     it('throws for error', async () => {
       httpMock.scope(defaultRegistryUrl).get(path).replyWithError('error');
       await expect(getPkgReleases({ datasource, packageName })).rejects.toThrow(
-        EXTERNAL_HOST_ERROR
+        EXTERNAL_HOST_ERROR,
       );
     });
 
@@ -39,7 +39,7 @@ describe('modules/datasource/bazel/index', () => {
     it('throws for 5xx', async () => {
       httpMock.scope(defaultRegistryUrl).get(path).reply(502);
       await expect(getPkgReleases({ datasource, packageName })).rejects.toThrow(
-        EXTERNAL_HOST_ERROR
+        EXTERNAL_HOST_ERROR,
       );
     });
 
diff --git a/lib/modules/datasource/bazel/index.ts b/lib/modules/datasource/bazel/index.ts
index b6c12307b4ea669be6b25913a9179c5d936ad916..07030643beb3bd8f0283682db2e3a614f8ba6ec6 100644
--- a/lib/modules/datasource/bazel/index.ts
+++ b/lib/modules/datasource/bazel/index.ts
@@ -43,7 +43,7 @@ export class BazelDatasource extends Datasource {
     try {
       const { body: metadata } = await this.http.getJson(
         url,
-        BazelModuleMetadata
+        BazelModuleMetadata,
       );
       result.releases = metadata.versions
         .map((v) => new BzlmodVersion(v))
diff --git a/lib/modules/datasource/bitbucket-tags/index.spec.ts b/lib/modules/datasource/bitbucket-tags/index.spec.ts
index 02f7022a9acc7c1bdf0ee52815c1e1e2b514a72b..4a8780d689e67f4b2affeaee0e07df3fed70607b 100644
--- a/lib/modules/datasource/bitbucket-tags/index.spec.ts
+++ b/lib/modules/datasource/bitbucket-tags/index.spec.ts
@@ -118,7 +118,7 @@ describe('modules/datasource/bitbucket-tags/index', () => {
           datasource,
           packageName: 'some/dep2',
         },
-        'v1.0.0'
+        'v1.0.0',
       );
       expect(res).toMatchSnapshot();
       expect(res).toBeString();
@@ -138,7 +138,7 @@ describe('modules/datasource/bitbucket-tags/index', () => {
           datasource,
           packageName: 'some/dep2',
         },
-        'v1.0.0'
+        'v1.0.0',
       );
       expect(res).toBeNull();
     });
diff --git a/lib/modules/datasource/bitbucket-tags/index.ts b/lib/modules/datasource/bitbucket-tags/index.ts
index c78b7d85b74179f5f452995364496a28668540a8..c41a68add215817bdcdd80d91daf02b4c89e3d14 100644
--- a/lib/modules/datasource/bitbucket-tags/index.ts
+++ b/lib/modules/datasource/bitbucket-tags/index.ts
@@ -31,10 +31,10 @@ export class BitbucketTagsDatasource extends Datasource {
   static getCacheKey(
     registryUrl: string | undefined,
     repo: string,
-    type: string
+    type: string,
   ): string {
     return `${BitbucketTagsDatasource.getRegistryURL(
-      registryUrl
+      registryUrl,
     )}:${repo}:${type}`;
   }
 
@@ -83,7 +83,7 @@ export class BitbucketTagsDatasource extends Datasource {
   async getTagCommit(
     _registryUrl: string | undefined,
     repo: string,
-    tag: string
+    tag: string,
   ): Promise<string | null> {
     const url = `/2.0/repositories/${repo}/refs/tags/${tag}`;
 
@@ -102,7 +102,7 @@ export class BitbucketTagsDatasource extends Datasource {
   async getMainBranch(repo: string): Promise<string> {
     return (
       await this.bitbucketHttp.getJson<RepoInfoBody>(
-        `/2.0/repositories/${repo}`
+        `/2.0/repositories/${repo}`,
       )
     ).body.mainbranch.name;
   }
@@ -116,7 +116,7 @@ export class BitbucketTagsDatasource extends Datasource {
   })
   override async getDigest(
     { packageName: repo, registryUrl }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     if (newValue?.length) {
       return this.getTagCommit(registryUrl, repo, newValue);
diff --git a/lib/modules/datasource/cdnjs/index.spec.ts b/lib/modules/datasource/cdnjs/index.spec.ts
index c5d0f85eaa177e444527629001212e5138904acd..506eb7b03107c9fd44a51764ec28ffefe03aa600 100644
--- a/lib/modules/datasource/cdnjs/index.spec.ts
+++ b/lib/modules/datasource/cdnjs/index.spec.ts
@@ -17,7 +17,7 @@ describe('modules/datasource/cdnjs/index', () => {
         getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -27,7 +27,7 @@ describe('modules/datasource/cdnjs/index', () => {
         getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -37,7 +37,7 @@ describe('modules/datasource/cdnjs/index', () => {
         await getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -50,7 +50,7 @@ describe('modules/datasource/cdnjs/index', () => {
         await getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'doesnotexist/doesnotexist',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -60,7 +60,7 @@ describe('modules/datasource/cdnjs/index', () => {
         getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -70,7 +70,7 @@ describe('modules/datasource/cdnjs/index', () => {
         getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -80,7 +80,7 @@ describe('modules/datasource/cdnjs/index', () => {
         getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -90,7 +90,7 @@ describe('modules/datasource/cdnjs/index', () => {
         getPkgReleases({
           datasource: CdnJsDatasource.id,
           packageName: 'foo/bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
diff --git a/lib/modules/datasource/cdnjs/index.ts b/lib/modules/datasource/cdnjs/index.ts
index d6ef8f0eda3cea692968f651a52c07e6ab69c686..e7618185887b7629c8f7bd77660989020092be00 100644
--- a/lib/modules/datasource/cdnjs/index.ts
+++ b/lib/modules/datasource/cdnjs/index.ts
@@ -23,7 +23,7 @@ const Assets = z.array(
     version: z.string(),
     files: z.string().array(),
     sri: z.record(z.string()).optional(),
-  })
+  }),
 );
 
 export class CdnJsDatasource extends Datasource {
@@ -62,7 +62,7 @@ export class CdnJsDatasource extends Datasource {
                 }
 
                 return res;
-              })
+              }),
           ),
         });
 
diff --git a/lib/modules/datasource/clojure/index.spec.ts b/lib/modules/datasource/clojure/index.spec.ts
index 6e6fef32ebf9e28435c919cd7bb7c01a7ba6e991..0697be378576fdb1372c016b36c83be92e270f10 100644
--- a/lib/modules/datasource/clojure/index.spec.ts
+++ b/lib/modules/datasource/clojure/index.spec.ts
@@ -56,7 +56,7 @@ function mockGenericPackage(opts: MockOpts = {}) {
             version: '1.0.3-SNAPSHOT',
             meta: Fixtures.get(
               'metadata-snapshot-version.xml',
-              upath.join('..', 'maven')
+              upath.join('..', 'maven'),
             ),
             jarStatus: 200,
           },
@@ -64,7 +64,7 @@ function mockGenericPackage(opts: MockOpts = {}) {
             version: '1.0.4-SNAPSHOT',
             meta: Fixtures.get(
               'metadata-snapshot-version-invalid.xml',
-              upath.join('..', 'maven')
+              upath.join('..', 'maven'),
             ),
           },
           {
@@ -129,14 +129,14 @@ function mockGenericPackage(opts: MockOpts = {}) {
               snapshot.version
             }/${artifact}-${snapshot.version.replace(
               '-SNAPSHOT',
-              ''
-            )}-20200101.${major}${minor}${patch}-${parseInt(patch, 10)}.pom`
+              '',
+            )}-20200101.${major}${minor}${patch}-${parseInt(patch, 10)}.pom`,
           )
           .reply(snapshot.jarStatus, '', { 'Last-Modified': timestamp });
       } else {
         scope
           .head(
-            `/${packagePath}/${snapshot.version}/${artifact}-${snapshot.version}.pom`
+            `/${packagePath}/${snapshot.version}/${artifact}-${snapshot.version}.pom`,
           )
           .reply(404, '');
       }
@@ -186,7 +186,7 @@ describe('modules/datasource/clojure/index', () => {
     const { releases } = (await get(
       'org.example:package',
       baseUrl,
-      baseUrlCustom
+      baseUrlCustom,
     ))!;
 
     expect(releases).toMatchObject([
@@ -223,7 +223,7 @@ describe('modules/datasource/clojure/index', () => {
       'https://unauthorized_repo/',
       'https://empty_repo',
       'https://unknown_error',
-      baseUrl
+      baseUrl,
     );
 
     expect(res).toMatchSnapshot();
@@ -236,7 +236,7 @@ describe('modules/datasource/clojure/index', () => {
     const { releases } = (await get(
       'org.example:package',
       'ftp://protocol_error_repo',
-      base
+      base,
     ))!;
 
     expect(releases).toMatchSnapshot();
@@ -249,13 +249,13 @@ describe('modules/datasource/clojure/index', () => {
       .get('/org/example/package/maven-metadata.xml')
       .reply(
         200,
-        Fixtures.get('metadata-invalid.xml', upath.join('..', 'maven'))
+        Fixtures.get('metadata-invalid.xml', upath.join('..', 'maven')),
       );
 
     const res = await get(
       'org.example:package',
       'https://invalid_metadata_repo',
-      baseUrl
+      baseUrl,
     );
 
     expect(res).toMatchSnapshot();
@@ -271,7 +271,7 @@ describe('modules/datasource/clojure/index', () => {
     const res = await get(
       'org.example:package',
       'https://invalid_metadata_repo',
-      baseUrl
+      baseUrl,
     );
 
     expect(res).toMatchSnapshot();
@@ -291,7 +291,7 @@ describe('modules/datasource/clojure/index', () => {
     const res = await get(
       'org.example:package',
 
-      '${project.baseUri}../../repository/'
+      '${project.baseUri}../../repository/',
     );
     expect(res).toBeNull();
   });
diff --git a/lib/modules/datasource/common.spec.ts b/lib/modules/datasource/common.spec.ts
index fcf249fa14f0a4e8353da4245438960bfb1fb499..67e7ebccdec610489e4df3d4015d0707aa162f8e 100644
--- a/lib/modules/datasource/common.spec.ts
+++ b/lib/modules/datasource/common.spec.ts
@@ -22,7 +22,7 @@ describe('modules/datasource/common', () => {
 
     it('supports custom datasource', () => {
       expect(getDatasourceFor('custom.foobar')).toEqual(
-        getDatasourceFor(CustomDatasource.id)
+        getDatasourceFor(CustomDatasource.id),
       );
     });
 
@@ -42,7 +42,7 @@ describe('modules/datasource/common', () => {
       expect(getDefaultVersioning('foobar')).toBe(defaultVersioning.id);
       expect(logger.logger.warn).toHaveBeenCalledWith(
         { datasourceName: 'foobar' },
-        'Missing datasource!'
+        'Missing datasource!',
       );
     });
 
@@ -185,7 +185,7 @@ describe('modules/datasource/common', () => {
       });
       expect(logger.logger.warn).toHaveBeenCalledWith(
         { datasourceName: 'foobar' },
-        'Missing datasource!'
+        'Missing datasource!',
       );
     });
   });
@@ -249,7 +249,7 @@ describe('modules/datasource/common', () => {
     it('filters out non-matching', () => {
       const versionCompatibility = '^(?<version>[^-]+)$';
       expect(
-        applyVersionCompatibility(input, versionCompatibility, undefined)
+        applyVersionCompatibility(input, versionCompatibility, undefined),
       ).toMatchObject({
         releases: [{ version: '1.0.0' }, { version: '2.0.0' }],
       });
@@ -258,7 +258,7 @@ describe('modules/datasource/common', () => {
     it('filters out incompatible', () => {
       const versionCompatibility = '^(?<version>[^-]+)(?<compatibility>.*)?$';
       expect(
-        applyVersionCompatibility(input, versionCompatibility, '-alpine')
+        applyVersionCompatibility(input, versionCompatibility, '-alpine'),
       ).toMatchObject({
         releases: [{ version: '2.0.0' }],
       });
diff --git a/lib/modules/datasource/common.ts b/lib/modules/datasource/common.ts
index 335effc116945e45feadf7e32b127920b0c6908d..9cb24fd158ca7f101a026b9b4810195533ed91df 100644
--- a/lib/modules/datasource/common.ts
+++ b/lib/modules/datasource/common.ts
@@ -20,7 +20,7 @@ export function getDatasourceFor(datasource: string): DatasourceApi | null {
 }
 
 export function getDefaultVersioning(
-  datasourceName: string | undefined
+  datasourceName: string | undefined,
 ): string {
   if (!datasourceName) {
     return defaultVersioning.id;
@@ -41,14 +41,14 @@ export function getDefaultVersioning(
 }
 
 export function isGetPkgReleasesConfig(
-  input: unknown
+  input: unknown,
 ): input is GetPkgReleasesConfig {
   return (
     is.nonEmptyStringAndNotWhitespace(
-      (input as GetPkgReleasesConfig).datasource
+      (input as GetPkgReleasesConfig).datasource,
     ) &&
     is.nonEmptyStringAndNotWhitespace(
-      (input as GetPkgReleasesConfig).packageName
+      (input as GetPkgReleasesConfig).packageName,
     )
   );
 }
@@ -56,7 +56,7 @@ export function isGetPkgReleasesConfig(
 export function applyVersionCompatibility(
   releaseResult: ReleaseResult,
   versionCompatibility: string | undefined,
-  currentCompatibility: string | undefined
+  currentCompatibility: string | undefined,
 ): ReleaseResult {
   if (!versionCompatibility) {
     return releaseResult;
@@ -80,7 +80,7 @@ export function applyVersionCompatibility(
 
 export function applyExtractVersion(
   releaseResult: ReleaseResult,
-  extractVersion: string | undefined
+  extractVersion: string | undefined,
 ): ReleaseResult {
   if (!extractVersion) {
     return releaseResult;
@@ -101,28 +101,28 @@ export function applyExtractVersion(
 }
 
 export function filterValidVersions<
-  Config extends Pick<GetPkgReleasesConfig, 'versioning' | 'datasource'>
+  Config extends Pick<GetPkgReleasesConfig, 'versioning' | 'datasource'>,
 >(releaseResult: ReleaseResult, config: Config): ReleaseResult {
   const versioningName =
     config.versioning ?? getDefaultVersioning(config.datasource);
   const versioning = allVersioning.get(versioningName);
 
   releaseResult.releases = filterMap(releaseResult.releases, (release) =>
-    versioning.isVersion(release.version) ? release : null
+    versioning.isVersion(release.version) ? release : null,
   );
 
   return releaseResult;
 }
 
 export function sortAndRemoveDuplicates<
-  Config extends Pick<GetPkgReleasesConfig, 'versioning' | 'datasource'>
+  Config extends Pick<GetPkgReleasesConfig, 'versioning' | 'datasource'>,
 >(releaseResult: ReleaseResult, config: Config): ReleaseResult {
   const versioningName =
     config.versioning ?? getDefaultVersioning(config.datasource);
   const versioning = allVersioning.get(versioningName);
 
   releaseResult.releases = releaseResult.releases.sort((a, b) =>
-    versioning.sortVersions(a.version, b.version)
+    versioning.sortVersions(a.version, b.version),
   );
 
   // Once releases are sorted, deduplication is straightforward and efficient
@@ -146,7 +146,7 @@ export function applyConstraintsFiltering<
     | 'datasource'
     | 'constraints'
     | 'packageName'
-  >
+  >,
 >(releaseResult: ReleaseResult, config: Config): ReleaseResult {
   if (config?.constraintsFiltering !== 'strict') {
     for (const release of releaseResult.releases) {
@@ -187,7 +187,7 @@ export function applyConstraintsFiltering<
         (releaseConstraint) =>
           !releaseConstraint ||
           (versioning.subset?.(configConstraint, releaseConstraint) ??
-            versioning.matches(configConstraint, releaseConstraint))
+            versioning.matches(configConstraint, releaseConstraint)),
       );
 
       if (!satisfiesConstraints) {
@@ -204,7 +204,7 @@ export function applyConstraintsFiltering<
     const packageName = config.packageName;
     const releases = filteredReleases.join(', ');
     logger.debug(
-      `Filtered ${count} releases for ${packageName} due to constraintsFiltering=strict: ${releases}`
+      `Filtered ${count} releases for ${packageName} due to constraintsFiltering=strict: ${releases}`,
     );
   }
 
diff --git a/lib/modules/datasource/conan/common.ts b/lib/modules/datasource/conan/common.ts
index 0321fd43e7535597c3bd41d7a9597cc15990f110..e0dfa211ddd5009be11b37d544408e936cc0985e 100644
--- a/lib/modules/datasource/conan/common.ts
+++ b/lib/modules/datasource/conan/common.ts
@@ -6,7 +6,7 @@ export const defaultRegistryUrl = 'https://center.conan.io/';
 export const datasource = 'conan';
 
 export const conanDatasourceRegex = regEx(
-  /(?<name>[a-z\-_0-9]+)\/(?<version>[^@/\n]+)(?<userChannel>@\S+\/\S+)/gim
+  /(?<name>[a-z\-_0-9]+)\/(?<version>[^@/\n]+)(?<userChannel>@\S+\/\S+)/gim,
 );
 
 export function getConanPackage(packageName: string): ConanPackage {
diff --git a/lib/modules/datasource/conan/index.spec.ts b/lib/modules/datasource/conan/index.spec.ts
index 85d64dd1c12bde5506b90b821ac9131f11106670..d913514de8feb7602bad494f5508a7881033df5a 100644
--- a/lib/modules/datasource/conan/index.spec.ts
+++ b/lib/modules/datasource/conan/index.spec.ts
@@ -49,7 +49,7 @@ describe('modules/datasource/conan/index', () => {
       digestConfig.packageName = `poco/${version}@_/_`;
       digestConfig.currentDigest = '4fc13d60fd91ba44fefe808ad719a5af';
       expect(await getDigest(digestConfig, version)).toBe(
-        '3a9b47caee2e2c1d3fb7d97788339aa8'
+        '3a9b47caee2e2c1d3fb7d97788339aa8',
       );
     });
 
@@ -75,7 +75,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'fakepackage/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -88,7 +88,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'fakepackage/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -102,7 +102,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'poco/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -115,7 +115,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'fakepackage/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -128,7 +128,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'poco/1.2@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://not.conan.io',
         releases: [
@@ -160,7 +160,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'FooBar/1.0.0@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://not.conan.io',
         releases: [
@@ -181,7 +181,7 @@ describe('modules/datasource/conan/index', () => {
       httpMock
         .scope('https://api.github.com')
         .get(
-          '/repos/conan-io/conan-center-index/contents/recipes/poco/config.yml'
+          '/repos/conan-io/conan-center-index/contents/recipes/poco/config.yml',
         )
         .reply(200, pocoYamlGitHubContent);
       expect(
@@ -189,7 +189,7 @@ describe('modules/datasource/conan/index', () => {
           ...config,
           registryUrls: [defaultRegistryUrl],
           packageName: 'poco/1.2@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://center.conan.io',
         releases: [
@@ -222,7 +222,7 @@ describe('modules/datasource/conan/index', () => {
       httpMock
         .scope('https://api.github.com')
         .get(
-          '/repos/conan-io/conan-center-index/contents/recipes/poco/config.yml'
+          '/repos/conan-io/conan-center-index/contents/recipes/poco/config.yml',
         )
         .reply(200, '');
       expect(
@@ -230,7 +230,7 @@ describe('modules/datasource/conan/index', () => {
           ...config,
           registryUrls: [defaultRegistryUrl],
           packageName: 'poco/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -240,7 +240,7 @@ describe('modules/datasource/conan/index', () => {
           ...config,
           registryUrls: [defaultRegistryUrl],
           packageName: 'poco/1.2@foo/bar',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -254,7 +254,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'poco/1.2@un/matched',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -268,7 +268,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'bad/1.2@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://not.conan.io',
         releases: [
@@ -290,7 +290,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'poco/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -305,7 +305,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'poco/1.2@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -318,7 +318,7 @@ describe('modules/datasource/conan/index', () => {
           { results: ['arti/1.0.0@_/_', 'arti/1.1.1@_/_'] },
           {
             'x-jfrog-version': 'latest',
-          }
+          },
         );
       httpMock
         .scope('https://fake.artifactory.com/artifactory/api/conan/test-repo/')
@@ -329,10 +329,10 @@ describe('modules/datasource/conan/index', () => {
         });
       httpMock
         .scope(
-          'https://fake.artifactory.com/artifactory/api/storage/test-repo/'
+          'https://fake.artifactory.com/artifactory/api/storage/test-repo/',
         )
         .get(
-          '/_/arti/1.1.1/_/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/export/conanfile.py?properties=conan.package.url'
+          '/_/arti/1.1.1/_/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/export/conanfile.py?properties=conan.package.url',
         )
         .reply(200, {
           properties: {
@@ -348,7 +348,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'arti/1.1@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl:
           'https://fake.artifactory.com/artifactory/api/conan/test-repo',
@@ -372,7 +372,7 @@ describe('modules/datasource/conan/index', () => {
           { results: ['arti/1.0.0@_/_', 'arti/1.1.1@_/_'] },
           {
             'x-jfrog-version': 'latest',
-          }
+          },
         );
       config.registryUrls = ['https://fake.artifactory.com'];
       config.packageName = 'arti';
@@ -380,7 +380,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'arti/1.1@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://fake.artifactory.com',
         releases: [
@@ -403,7 +403,7 @@ describe('modules/datasource/conan/index', () => {
           {
             results: ['arti/invalid_version@_/_'],
           },
-          { 'x-jfrog-version': 'latest' }
+          { 'x-jfrog-version': 'latest' },
         );
       config.registryUrls = [
         'https://fake.artifactory.com/artifactory/api/conan/test-repo',
@@ -413,7 +413,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'arti/1.1@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl:
           'https://fake.artifactory.com/artifactory/api/conan/test-repo',
@@ -434,7 +434,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'arti/1.1@_/_',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -447,7 +447,7 @@ describe('modules/datasource/conan/index', () => {
           { results: ['arti/1.0.0@_/_', 'arti/1.1.1@_/_'] },
           {
             'x-jfrog-version': 'latest',
-          }
+          },
         );
       httpMock
         .scope('https://fake.artifactory.com/artifactory/api/conan/test-repo/')
@@ -458,10 +458,10 @@ describe('modules/datasource/conan/index', () => {
         });
       httpMock
         .scope(
-          'https://fake.artifactory.com/artifactory/api/storage/test-repo/'
+          'https://fake.artifactory.com/artifactory/api/storage/test-repo/',
         )
         .get(
-          '/_/arti/1.1.1/_/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/export/conanfile.py?properties=conan.package.url'
+          '/_/arti/1.1.1/_/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/export/conanfile.py?properties=conan.package.url',
         )
         .reply(200);
 
@@ -473,7 +473,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'arti/1.1@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl:
           'https://fake.artifactory.com/artifactory/api/conan/test-repo',
@@ -497,7 +497,7 @@ describe('modules/datasource/conan/index', () => {
           { results: ['arti/1.0.0@_/_', 'arti/1.1.1@_/_'] },
           {
             'x-jfrog-version': 'latest',
-          }
+          },
         );
       httpMock
         .scope('https://fake.artifactory.com/artifactory/api/conan/test-repo/')
@@ -512,7 +512,7 @@ describe('modules/datasource/conan/index', () => {
         await getPkgReleases({
           ...config,
           packageName: 'arti/1.1@_/_',
-        })
+        }),
       ).toEqual({
         registryUrl:
           'https://fake.artifactory.com/artifactory/api/conan/test-repo',
diff --git a/lib/modules/datasource/conan/index.ts b/lib/modules/datasource/conan/index.ts
index 809df6bba33810cb23d33b0ffa4fa3af86dbb04e..1a1fd2e9826105b8889ee7d096cdd4100eb194c1 100644
--- a/lib/modules/datasource/conan/index.ts
+++ b/lib/modules/datasource/conan/index.ts
@@ -45,12 +45,12 @@ export class ConanDatasource extends Datasource {
 
   async getConanCenterReleases(
     conanName: string,
-    userAndChannel: string
+    userAndChannel: string,
   ): Promise<ReleaseResult | null> {
     if (userAndChannel && userAndChannel !== '@_/_') {
       logger.debug(
         { conanName, userAndChannel },
-        'User/channel not supported for Conan Center lookups'
+        'User/channel not supported for Conan Center lookups',
       );
       return null;
     }
@@ -76,7 +76,7 @@ export class ConanDatasource extends Datasource {
   })
   override async getDigest(
     { registryUrl, packageName }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     if (is.undefined(newValue) || is.undefined(registryUrl)) {
       return null;
@@ -89,10 +89,10 @@ export class ConanDatasource extends Datasource {
       conanPackage.conanName,
       newValue,
       conanPackage.userAndChannel,
-      '/revisions'
+      '/revisions',
     );
     const revisionRep = await this.http.getJson<ConanRevisionsJSON>(
-      revisionLookUp
+      revisionLookUp,
     );
     const revisions = revisionRep?.body.revisions;
     return revisions?.[0].revision ?? null;
@@ -116,20 +116,20 @@ export class ConanDatasource extends Datasource {
     ) {
       return this.getConanCenterReleases(
         conanPackage.conanName,
-        userAndChannel
+        userAndChannel,
       );
     }
 
     logger.trace(
       { packageName, registryUrl },
-      'Looking up conan api dependency'
+      'Looking up conan api dependency',
     );
 
     if (registryUrl) {
       const url = ensureTrailingSlash(registryUrl);
       const lookupUrl = joinUrlParts(
         url,
-        `v2/conans/search?q=${conanPackage.conanName}`
+        `v2/conans/search?q=${conanPackage.conanName}`,
       );
 
       try {
@@ -173,25 +173,25 @@ export class ConanDatasource extends Datasource {
                 return dep;
               }
               logger.debug(
-                `Conan package ${packageName} has latest version ${latestVersion}`
+                `Conan package ${packageName} has latest version ${latestVersion}`,
               );
 
               const latestRevisionUrl = joinUrlParts(
                 url,
-                `v2/conans/${conanPackage.conanName}/${latestVersion}/${conanPackage.userAndChannel}/latest`
+                `v2/conans/${conanPackage.conanName}/${latestVersion}/${conanPackage.userAndChannel}/latest`,
               );
               const revResp = await this.http.getJson<ConanRevisionJSON>(
-                latestRevisionUrl
+                latestRevisionUrl,
               );
               const packageRev = revResp.body.revision;
 
               const [user, channel] = conanPackage.userAndChannel.split('/');
               const packageUrl = joinUrlParts(
                 `${groups.host}/artifactory/api/storage/${groups.repo}`,
-                `${user}/${conanPackage.conanName}/${latestVersion}/${channel}/${packageRev}/export/conanfile.py?properties=conan.package.url`
+                `${user}/${conanPackage.conanName}/${latestVersion}/${channel}/${packageRev}/export/conanfile.py?properties=conan.package.url`,
               );
               const packageUrlResp = await this.http.getJson<ConanProperties>(
-                packageUrl
+                packageUrl,
               );
 
               if (
diff --git a/lib/modules/datasource/conda/index.spec.ts b/lib/modules/datasource/conda/index.spec.ts
index e77c16d2ae26aa629d145f94051a37515b2b20db..1800894ac1e162624ec4bbb98795588b77a3ec20 100644
--- a/lib/modules/datasource/conda/index.spec.ts
+++ b/lib/modules/datasource/conda/index.spec.ts
@@ -16,7 +16,7 @@ describe('modules/datasource/conda/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -26,7 +26,7 @@ describe('modules/datasource/conda/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -39,7 +39,7 @@ describe('modules/datasource/conda/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -49,7 +49,7 @@ describe('modules/datasource/conda/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
diff --git a/lib/modules/datasource/cpan/index.spec.ts b/lib/modules/datasource/cpan/index.spec.ts
index 291da75a49e7e6a58eee00a8b1f2b288999a531a..c4727a1281d9900127bcca601772e6cd8d11028d 100644
--- a/lib/modules/datasource/cpan/index.spec.ts
+++ b/lib/modules/datasource/cpan/index.spec.ts
@@ -14,14 +14,14 @@ describe('modules/datasource/cpan/index', () => {
         .post(
           '/v1/file/_search',
           (body) =>
-            body.query.filtered.filter.and[0].term['module.name'] === 'FooBar'
+            body.query.filtered.filter.and[0].term['module.name'] === 'FooBar',
         )
         .reply(200, Fixtures.get('empty.json'));
       expect(
         await getPkgReleases({
           datasource: CpanDatasource.id,
           packageName: 'FooBar',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -31,7 +31,7 @@ describe('modules/datasource/cpan/index', () => {
         await getPkgReleases({
           datasource: CpanDatasource.id,
           packageName: 'Plack',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -41,7 +41,7 @@ describe('modules/datasource/cpan/index', () => {
         getPkgReleases({
           datasource: CpanDatasource.id,
           packageName: 'Plack',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -51,7 +51,7 @@ describe('modules/datasource/cpan/index', () => {
         await getPkgReleases({
           datasource: CpanDatasource.id,
           packageName: 'Plack',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -61,7 +61,7 @@ describe('modules/datasource/cpan/index', () => {
         .post(
           '/v1/file/_search',
           (body) =>
-            body.query.filtered.filter.and[0].term['module.name'] === 'Plack'
+            body.query.filtered.filter.and[0].term['module.name'] === 'Plack',
         )
         .reply(200, Fixtures.get('Plack.json'));
       const res = await getPkgReleases({
diff --git a/lib/modules/datasource/cpan/index.ts b/lib/modules/datasource/cpan/index.ts
index d020eebb5bdb72c4ea15d626abda473ba6601b61..ee8a808da54bf94750a46f381137233fb44d31cb 100644
--- a/lib/modules/datasource/cpan/index.ts
+++ b/lib/modules/datasource/cpan/index.ts
@@ -61,7 +61,7 @@ export class CpanDatasource extends Datasource {
       };
       const res = await this.http.postJson<MetaCpanApiFileSearchResult>(
         searchUrl,
-        { body }
+        { body },
       );
       hits = res.body?.hits?.hits?.map(({ _source }) => _source);
     } catch (err) {
@@ -80,7 +80,7 @@ export class CpanDatasource extends Datasource {
           maturity,
         } = hit;
         const version = module.find(
-          ({ name }) => name === packageName
+          ({ name }) => name === packageName,
         )?.version;
         if (version) {
           // https://metacpan.org/pod/CPAN::DistnameInfo#maturity
diff --git a/lib/modules/datasource/crate/index.spec.ts b/lib/modules/datasource/crate/index.spec.ts
index a144cf69c24125b153f0363f84200263b7e05e92..7882cdb87c96d3076931ea5621669fab748c32ee 100644
--- a/lib/modules/datasource/crate/index.spec.ts
+++ b/lib/modules/datasource/crate/index.spec.ts
@@ -36,7 +36,7 @@ function setupGitMocks(delayMs?: number): { mockClone: jest.Mock<any, any> } {
         const path = `${clonePath}/my/pk/mypkg`;
         fs.mkdirSync(dirname(path), { recursive: true });
         fs.writeFileSync(path, Fixtures.get('mypkg'), { encoding: 'utf8' });
-      }
+      },
     );
 
   simpleGit.mockReturnValue({
@@ -51,7 +51,7 @@ function setupErrorGitMock(): { mockClone: jest.Mock<any, any> } {
     .fn()
     .mockName('clone')
     .mockImplementation((_registryUrl: string, _clonePath: string, _opts) =>
-      Promise.reject(new Error('mocked error'))
+      Promise.reject(new Error('mocked error')),
     );
 
   simpleGit.mockReturnValue({
@@ -128,7 +128,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'non_existent_crate',
           registryUrls: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -138,7 +138,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'non_existent_crate',
           registryUrls: ['3'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -150,7 +150,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'non_existent_crate',
           registryUrls: ['https://crates.io'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -165,7 +165,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'non_existent_crate',
           registryUrls: ['https://crates.io'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -177,7 +177,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'non_existent_crate',
           registryUrls: ['https://crates.io'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -188,7 +188,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'some_crate',
           registryUrls: ['https://crates.io'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -199,7 +199,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'some_crate',
           registryUrls: ['https://crates.io'],
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -210,7 +210,7 @@ describe('modules/datasource/crate/index', () => {
           datasource,
           packageName: 'some_crate',
           registryUrls: ['https://crates.io'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -383,7 +383,7 @@ describe('modules/datasource/crate/index', () => {
       };
       const crateDatasource = new CrateDatasource();
       await expect(
-        crateDatasource.fetchCrateRecordsPayload(info, 'benedict')
+        crateDatasource.fetchCrateRecordsPayload(info, 'benedict'),
       ).toReject();
     });
   });
diff --git a/lib/modules/datasource/crate/index.ts b/lib/modules/datasource/crate/index.ts
index 928dc893aea2d0f3ccc61ed7cf58def6f1becbed..5e6d07b2529c8c95be81f1fe1f5be5908abddddc 100644
--- a/lib/modules/datasource/crate/index.ts
+++ b/lib/modules/datasource/crate/index.ts
@@ -50,7 +50,7 @@ export class CrateDatasource extends Datasource {
     // istanbul ignore if
     if (!registryUrl) {
       logger.warn(
-        'crate datasource: No registryUrl specified, cannot perform getReleases'
+        'crate datasource: No registryUrl specified, cannot perform getReleases',
       );
       return null;
     }
@@ -66,12 +66,12 @@ export class CrateDatasource extends Datasource {
 
     const dependencyUrl = CrateDatasource.getDependencyUrl(
       registryInfo,
-      packageName
+      packageName,
     );
 
     const payload = await this.fetchCrateRecordsPayload(
       registryInfo,
-      packageName
+      packageName,
     );
     const lines = payload
       .split(newlineRegex) // break into lines
@@ -122,7 +122,7 @@ export class CrateDatasource extends Datasource {
   })
   public async getCrateMetadata(
     info: RegistryInfo,
-    packageName: string
+    packageName: string,
   ): Promise<CrateMetadata | null> {
     if (info.flavor !== 'crates.io') {
       return null;
@@ -135,7 +135,7 @@ export class CrateDatasource extends Datasource {
 
     logger.debug(
       { crateUrl, packageName, registryUrl: info.rawUrl },
-      'downloading crate metadata'
+      'downloading crate metadata',
     );
 
     try {
@@ -145,7 +145,7 @@ export class CrateDatasource extends Datasource {
     } catch (err) {
       logger.warn(
         { err, packageName, registryUrl: info.rawUrl },
-        'failed to download crate metadata'
+        'failed to download crate metadata',
       );
     }
 
@@ -154,12 +154,12 @@ export class CrateDatasource extends Datasource {
 
   public async fetchCrateRecordsPayload(
     info: RegistryInfo,
-    packageName: string
+    packageName: string,
   ): Promise<string> {
     if (info.clonePath) {
       const path = upath.join(
         info.clonePath,
-        ...CrateDatasource.getIndexSuffix(packageName)
+        ...CrateDatasource.getIndexSuffix(packageName),
       );
       return readCacheFile(path, 'utf8');
     }
@@ -171,7 +171,7 @@ export class CrateDatasource extends Datasource {
 
     if (info.flavor === 'crates.io' || info.isSparse) {
       const packageSuffix = CrateDatasource.getIndexSuffix(
-        packageName.toLowerCase()
+        packageName.toLowerCase(),
       );
       const crateUrl = joinUrlParts(baseUrl, ...packageSuffix);
       try {
@@ -189,7 +189,7 @@ export class CrateDatasource extends Datasource {
    */
   private static getDependencyUrl(
     info: RegistryInfo,
-    packageName: string
+    packageName: string,
   ): string {
     switch (info.flavor) {
       case 'crates.io':
@@ -273,7 +273,7 @@ export class CrateDatasource extends Datasource {
       !GlobalConfig.get('allowCustomCrateRegistries')
     ) {
       logger.warn(
-        'crate datasource: allowCustomCrateRegistries=true is required for registries other than crates.io, bailing out'
+        'crate datasource: allowCustomCrateRegistries=true is required for registries other than crates.io, bailing out',
       );
       return null;
     }
@@ -293,11 +293,11 @@ export class CrateDatasource extends Datasource {
       } else {
         clonePath = upath.join(
           privateCacheDir(),
-          CrateDatasource.cacheDirFromUrl(url)
+          CrateDatasource.cacheDirFromUrl(url),
         );
         logger.info(
           { clonePath, registryFetchUrl },
-          `Cloning private cargo registry`
+          `Cloning private cargo registry`,
         );
 
         const git = Git({ ...simpleGitConfig(), maxConcurrentProcesses: 1 });
@@ -307,7 +307,7 @@ export class CrateDatasource extends Datasource {
 
         memCache.set(
           cacheKey,
-          clonePromise.then(() => clonePath).catch(() => null)
+          clonePromise.then(() => clonePath).catch(() => null),
         );
 
         try {
@@ -315,7 +315,7 @@ export class CrateDatasource extends Datasource {
         } catch (err) {
           logger.warn(
             { err, packageName, registryFetchUrl },
-            'failed cloning git registry'
+            'failed cloning git registry',
           );
           memCache.set(cacheKeyForError, err);
 
@@ -327,7 +327,7 @@ export class CrateDatasource extends Datasource {
         const err = memCache.get(cacheKeyForError);
         logger.warn(
           { err, packageName, registryFetchUrl },
-          'Previous git clone failed, bailing out.'
+          'Previous git clone failed, bailing out.',
         );
 
         return null;
@@ -340,7 +340,7 @@ export class CrateDatasource extends Datasource {
   }
 
   private static areReleasesCacheable(
-    registryUrl: string | undefined
+    registryUrl: string | undefined,
   ): boolean {
     // We only cache public releases, we don't want to cache private
     // cloned data between runs.
diff --git a/lib/modules/datasource/custom/index.ts b/lib/modules/datasource/custom/index.ts
index cb1739fd827e246953300c7f0582b403637f2d8f..33438f922055ce0188684e2e0446e3795846afaf 100644
--- a/lib/modules/datasource/custom/index.ts
+++ b/lib/modules/datasource/custom/index.ts
@@ -17,23 +17,23 @@ export class CustomDatasource extends Datasource {
   }
 
   async getReleases(
-    getReleasesConfig: GetReleasesConfig
+    getReleasesConfig: GetReleasesConfig,
   ): Promise<ReleaseResult | null> {
     const customDatasourceName = getReleasesConfig.datasource?.replace(
       'custom.',
-      ''
+      '',
     );
 
     if (!is.nonEmptyString(customDatasourceName)) {
       logger.debug(
-        `No datasource has been supplied while looking up ${getReleasesConfig.packageName}`
+        `No datasource has been supplied while looking up ${getReleasesConfig.packageName}`,
       );
       return null;
     }
 
     const config = massageCustomDatasourceConfig(
       customDatasourceName,
-      getReleasesConfig
+      getReleasesConfig,
     );
     if (is.nullOrUndefined(config)) {
       return null;
diff --git a/lib/modules/datasource/custom/schema.ts b/lib/modules/datasource/custom/schema.ts
index e5c26e0be0a5f3ebcf7d66a129417942a6c0f917..b618f98012cf130dcf875960dfeedb1cace5a741 100644
--- a/lib/modules/datasource/custom/schema.ts
+++ b/lib/modules/datasource/custom/schema.ts
@@ -9,7 +9,7 @@ export const ReleaseResultZodSchema = z.object({
       sourceUrl: z.string().optional(),
       sourceDirectory: z.string().optional(),
       changelogUrl: z.string().optional(),
-    })
+    }),
   ),
   sourceUrl: z.string().optional(),
   sourceDirectory: z.string().optional(),
diff --git a/lib/modules/datasource/custom/utils.ts b/lib/modules/datasource/custom/utils.ts
index b3173d3ad2a6aa97987fc54d5174ce328aaabe4a..15d95a021687d935b24c00348f398b09c6b189a7 100644
--- a/lib/modules/datasource/custom/utils.ts
+++ b/lib/modules/datasource/custom/utils.ts
@@ -10,12 +10,12 @@ export function massageCustomDatasourceConfig(
     customDatasources,
     packageName,
     registryUrl: defaultRegistryUrl,
-  }: GetReleasesConfig
+  }: GetReleasesConfig,
 ): Required<CustomDatasourceConfig> | null {
   const customDatasource = customDatasources?.[customDatasourceName];
   if (is.nullOrUndefined(customDatasource)) {
     logger.debug(
-      `No custom datasource config provided while ${packageName} has been requested`
+      `No custom datasource config provided while ${packageName} has been requested`,
     );
     return null;
   }
@@ -25,7 +25,7 @@ export function massageCustomDatasourceConfig(
     defaultRegistryUrl ?? customDatasource.defaultRegistryUrlTemplate;
   if (is.nullOrUndefined(registryUrlTemplate)) {
     logger.debug(
-      'No registry url provided by extraction nor datasource configuration'
+      'No registry url provided by extraction nor datasource configuration',
     );
     return null;
   }
diff --git a/lib/modules/datasource/dart-version/index.spec.ts b/lib/modules/datasource/dart-version/index.spec.ts
index 4d612e7e566f823dd00e3cd9f80d1aae2e5ca9aa..33a4c9243f8263d47f1097baab711605ac309362 100644
--- a/lib/modules/datasource/dart-version/index.spec.ts
+++ b/lib/modules/datasource/dart-version/index.spec.ts
@@ -19,7 +19,7 @@ describe('modules/datasource/dart-version/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -29,7 +29,7 @@ describe('modules/datasource/dart-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -38,7 +38,7 @@ describe('modules/datasource/dart-version/index', () => {
       for (const channel of channels) {
         scope
           .get(
-            `/storage/v1/b/dart-archive/o?delimiter=%2F&prefix=channels%2F${channel}%2Frelease%2F&alt=json`
+            `/storage/v1/b/dart-archive/o?delimiter=%2F&prefix=channels%2F${channel}%2Frelease%2F&alt=json`,
           )
           .reply(200, { prefixes: [] });
       }
@@ -46,7 +46,7 @@ describe('modules/datasource/dart-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -55,7 +55,7 @@ describe('modules/datasource/dart-version/index', () => {
         httpMock
           .scope(baseUrl)
           .get(
-            `/storage/v1/b/dart-archive/o?delimiter=%2F&prefix=channels%2F${channel}%2Frelease%2F&alt=json`
+            `/storage/v1/b/dart-archive/o?delimiter=%2F&prefix=channels%2F${channel}%2Frelease%2F&alt=json`,
           )
           .reply(200, Fixtures.get(`${channel}.json`));
       }
diff --git a/lib/modules/datasource/dart-version/index.ts b/lib/modules/datasource/dart-version/index.ts
index b350f562b2a26ee662d2fee5260da033790f9e4b..a3aa8c45687a51d8dd1240af1646638dbb6a9f73 100644
--- a/lib/modules/datasource/dart-version/index.ts
+++ b/lib/modules/datasource/dart-version/index.ts
@@ -38,7 +38,7 @@ export class DartVersionDatasource extends Datasource {
       for (const channel of this.channels) {
         const resp = (
           await this.http.getJson<DartResponse>(
-            `${registryUrl}/storage/v1/b/dart-archive/o?delimiter=%2F&prefix=channels%2F${channel}%2Frelease%2F&alt=json`
+            `${registryUrl}/storage/v1/b/dart-archive/o?delimiter=%2F&prefix=channels%2F${channel}%2Frelease%2F&alt=json`,
           )
         ).body;
         const releases = this.getReleasesFromResponse(channel, resp.prefixes);
@@ -53,7 +53,7 @@ export class DartVersionDatasource extends Datasource {
 
   private getReleasesFromResponse(
     channel: string,
-    prefixes: string[]
+    prefixes: string[],
   ): Release[] {
     return prefixes
       .map((prefix) => this.getVersionFromPrefix(prefix))
diff --git a/lib/modules/datasource/dart/index.spec.ts b/lib/modules/datasource/dart/index.spec.ts
index 06bad8e6f21dc4f463c9902e0e6ef97ecb442dba..7012de123511402554e06bfeff1142cf224a8940 100644
--- a/lib/modules/datasource/dart/index.spec.ts
+++ b/lib/modules/datasource/dart/index.spec.ts
@@ -16,7 +16,7 @@ describe('modules/datasource/dart/index', () => {
         await getPkgReleases({
           datasource: DartDatasource.id,
           packageName: 'non_sense',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -33,7 +33,7 @@ describe('modules/datasource/dart/index', () => {
         await getPkgReleases({
           datasource: DartDatasource.id,
           packageName: 'shared_preferences',
-        })
+        }),
       ).toBeNull();
 
       const withoutLatest = {
@@ -48,7 +48,7 @@ describe('modules/datasource/dart/index', () => {
         await getPkgReleases({
           datasource: DartDatasource.id,
           packageName: 'shared_preferences',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -58,7 +58,7 @@ describe('modules/datasource/dart/index', () => {
         await getPkgReleases({
           datasource: DartDatasource.id,
           packageName: 'shared_preferences',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -68,7 +68,7 @@ describe('modules/datasource/dart/index', () => {
         getPkgReleases({
           datasource: DartDatasource.id,
           packageName: 'shared_preferences',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -78,7 +78,7 @@ describe('modules/datasource/dart/index', () => {
         await getPkgReleases({
           datasource: DartDatasource.id,
           packageName: 'shared_preferences',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/datasource.spec.ts b/lib/modules/datasource/datasource.spec.ts
index 91b7b0d9d711622d54f3283fed8be979413ca84f..2b603dca6edab1f3a31299f078a04f5103f63357 100644
--- a/lib/modules/datasource/datasource.spec.ts
+++ b/lib/modules/datasource/datasource.spec.ts
@@ -12,7 +12,7 @@ class TestDatasource extends Datasource {
   }
 
   async getReleases(
-    getReleasesConfig: GetReleasesConfig
+    getReleasesConfig: GetReleasesConfig,
   ): Promise<ReleaseResult | null> {
     try {
       await this.http.get(exampleUrl);
@@ -30,7 +30,7 @@ describe('modules/datasource/datasource', () => {
     httpMock.scope(exampleUrl).get('/').reply(429);
 
     await expect(
-      testDatasource.getReleases(partial<GetReleasesConfig>())
+      testDatasource.getReleases(partial<GetReleasesConfig>()),
     ).rejects.toThrow(EXTERNAL_HOST_ERROR);
   });
 
@@ -40,7 +40,7 @@ describe('modules/datasource/datasource', () => {
     httpMock.scope(exampleUrl).get('/').reply(504);
 
     await expect(
-      testDatasource.getReleases(partial<GetReleasesConfig>())
+      testDatasource.getReleases(partial<GetReleasesConfig>()),
     ).rejects.toThrow(EXTERNAL_HOST_ERROR);
   });
 });
diff --git a/lib/modules/datasource/datasource.ts b/lib/modules/datasource/datasource.ts
index 343ca34ddcbe59d898e030f2b86dd60189dc8dfb..2d88417201d2444b4781161408d72097f9cf139d 100644
--- a/lib/modules/datasource/datasource.ts
+++ b/lib/modules/datasource/datasource.ts
@@ -28,7 +28,7 @@ export abstract class Datasource implements DatasourceApi {
   protected http: Http;
 
   abstract getReleases(
-    getReleasesConfig: GetReleasesConfig
+    getReleasesConfig: GetReleasesConfig,
   ): Promise<ReleaseResult | null>;
 
   getDigest?(config: DigestConfig, newValue?: string): Promise<string | null>;
diff --git a/lib/modules/datasource/deno/index.spec.ts b/lib/modules/datasource/deno/index.spec.ts
index 32958a9806aae6c94149dee59420068a8fbf3aa2..a720572b7cf5669ec8ca776ed373b17f015dd040 100644
--- a/lib/modules/datasource/deno/index.spec.ts
+++ b/lib/modules/datasource/deno/index.spec.ts
@@ -67,7 +67,7 @@ describe('modules/datasource/deno/index', () => {
         expect.objectContaining({
           err: expect.any(ZodError),
         }),
-        `Deno: failed to get version details for 0.161.0`
+        `Deno: failed to get version details for 0.161.0`,
       );
     });
 
@@ -81,7 +81,7 @@ describe('modules/datasource/deno/index', () => {
         deno.getReleases({
           packageName: 'https://deno.land/std',
           registryUrl: deno.defaultRegistryUrls[0],
-        })
+        }),
       ).rejects.toThrow();
     });
 
@@ -109,7 +109,7 @@ describe('modules/datasource/deno/index', () => {
         deno.getReleases({
           packageName: 'https://deno.land/std',
           registryUrl: deno.defaultRegistryUrls[0],
-        })
+        }),
       ).rejects.toThrow();
     });
 
@@ -117,7 +117,7 @@ describe('modules/datasource/deno/index', () => {
       expect(
         await deno.getReleases({
           packageName: 'https://myexample.com/std',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/deno/index.ts b/lib/modules/datasource/deno/index.ts
index 3ff7139d5f7b5cdc99a71696eef4308647eb925f..425cf975c065f769ec83699d3a866d2772ad0562 100644
--- a/lib/modules/datasource/deno/index.ts
+++ b/lib/modules/datasource/deno/index.ts
@@ -39,12 +39,12 @@ export class DenoDatasource extends Datasource {
     const massagedRegistryUrl = registryUrl!;
 
     const extractResult = regEx(
-      /^(https:\/\/deno.land\/)(?<rawPackageName>[^@\s]+)/
+      /^(https:\/\/deno.land\/)(?<rawPackageName>[^@\s]+)/,
     ).exec(packageName);
     const rawPackageName = extractResult?.groups?.rawPackageName;
     if (is.nullOrUndefined(rawPackageName)) {
       logger.debug(
-        `Could not extract rawPackageName from packageName: "${packageName}"`
+        `Could not extract rawPackageName from packageName: "${packageName}"`,
       );
       return null;
     }
@@ -56,7 +56,7 @@ export class DenoDatasource extends Datasource {
     const moduleAPIURL = joinUrlParts(
       massagedRegistryUrl,
       'v2/modules',
-      massagedPackageName
+      massagedPackageName,
     );
 
     return await this.getReleaseResult(moduleAPIURL);
@@ -70,7 +70,7 @@ export class DenoDatasource extends Datasource {
     const releasesCache: Record<string, Release> =
       (await packageCache.get(
         `datasource-${DenoDatasource.id}-details`,
-        moduleAPIURL
+        moduleAPIURL,
       )) ?? {};
     let cacheModified = false;
 
@@ -95,10 +95,10 @@ export class DenoDatasource extends Datasource {
           DenoAPIModuleVersionResponse.catch(({ error: err }) => {
             logger.warn(
               { err },
-              `Deno: failed to get version details for ${version}`
+              `Deno: failed to get version details for ${version}`,
             );
             return { version };
-          })
+          }),
         );
 
         releasesCache[release.version] = release;
@@ -106,7 +106,7 @@ export class DenoDatasource extends Datasource {
 
         return release;
       },
-      { concurrency: 5 }
+      { concurrency: 5 },
     );
 
     if (cacheModified) {
@@ -115,7 +115,7 @@ export class DenoDatasource extends Datasource {
         `datasource-${DenoDatasource.id}-details`,
         moduleAPIURL,
         releasesCache,
-        10080
+        10080,
       );
     }
 
diff --git a/lib/modules/datasource/deno/schema.ts b/lib/modules/datasource/deno/schema.ts
index 047bcea03c4af08d022974657d138ea7887a98a5..2587874425d9d8f92145e392bfc65062e557349f 100644
--- a/lib/modules/datasource/deno/schema.ts
+++ b/lib/modules/datasource/deno/schema.ts
@@ -42,5 +42,5 @@ export const DenoAPIModuleVersionResponse = z
         sourceUrl = getGithubSourceUrl(repository);
       }
       return { version, gitRef, releaseTimestamp, sourceUrl };
-    }
+    },
   );
diff --git a/lib/modules/datasource/docker/common.spec.ts b/lib/modules/datasource/docker/common.spec.ts
index c30292f98b083b0815da0e907063a30fe01d5e7c..5fbc6da3eab038a9ac0680f0b7d991b9ba685d2e 100644
--- a/lib/modules/datasource/docker/common.spec.ts
+++ b/lib/modules/datasource/docker/common.spec.ts
@@ -24,7 +24,7 @@ describe('modules/datasource/docker/common', () => {
     it('handles local registries', () => {
       const res = getRegistryRepository(
         'registry:5000/org/package',
-        'https://index.docker.io'
+        'https://index.docker.io',
       );
       expect(res).toStrictEqual({
         dockerRepository: 'org/package',
@@ -35,7 +35,7 @@ describe('modules/datasource/docker/common', () => {
     it('supports registryUrls', () => {
       const res = getRegistryRepository(
         'my.local.registry/prefix/image',
-        'https://my.local.registry/prefix'
+        'https://my.local.registry/prefix',
       );
       expect(res).toStrictEqual({
         dockerRepository: 'prefix/image',
@@ -46,7 +46,7 @@ describe('modules/datasource/docker/common', () => {
     it('supports http registryUrls', () => {
       const res = getRegistryRepository(
         'my.local.registry/prefix/image',
-        'http://my.local.registry/prefix'
+        'http://my.local.registry/prefix',
       );
       expect(res).toStrictEqual({
         dockerRepository: 'prefix/image',
@@ -57,7 +57,7 @@ describe('modules/datasource/docker/common', () => {
     it('supports schemeless registryUrls', () => {
       const res = getRegistryRepository(
         'my.local.registry/prefix/image',
-        'my.local.registry/prefix'
+        'my.local.registry/prefix',
       );
       expect(res).toStrictEqual({
         dockerRepository: 'prefix/image',
@@ -69,7 +69,7 @@ describe('modules/datasource/docker/common', () => {
       hostRules.find.mockReturnValueOnce({ insecureRegistry: true });
       const res = getRegistryRepository(
         'prefix/image',
-        'my.local.registry/prefix'
+        'my.local.registry/prefix',
       );
       expect(res).toStrictEqual({
         dockerRepository: 'prefix/prefix/image',
@@ -127,8 +127,8 @@ describe('modules/datasource/docker/common', () => {
           http,
           'https://my.local.registry',
           'repo',
-          'https://my.local.registry/v2/repo/tags/list?n=1000'
-        )
+          'https://my.local.registry/v2/repo/tags/list?n=1000',
+        ),
       ).rejects.toThrow(PAGE_NOT_FOUND_ERROR);
     });
 
@@ -146,7 +146,7 @@ describe('modules/datasource/docker/common', () => {
       const headers = await getAuthHeaders(
         http,
         'https://my.local.registry',
-        'https://my.local.registry/prefix'
+        'https://my.local.registry/prefix',
       );
 
       // do not inline, otherwise we get false positive from codeql
@@ -170,7 +170,7 @@ describe('modules/datasource/docker/common', () => {
       const headers = await getAuthHeaders(
         http,
         'https://my.local.registry',
-        'https://my.local.registry/prefix'
+        'https://my.local.registry/prefix',
       );
 
       // do not inline, otherwise we get false positive from codeql
@@ -197,7 +197,7 @@ describe('modules/datasource/docker/common', () => {
       const headers = await getAuthHeaders(
         http,
         'https://my.local.registry',
-        'https://my.local.registry/prefix'
+        'https://my.local.registry/prefix',
       );
 
       expect(headers).toBeNull();
@@ -212,7 +212,7 @@ describe('modules/datasource/docker/common', () => {
             'Bearer realm="https://my.local.registry/oauth2/token",service="my.local.registry",scope="repository:my/node:whatever"',
         })
         .get(
-          '/oauth2/token?service=my.local.registry&scope=repository:my/node:whatever'
+          '/oauth2/token?service=my.local.registry&scope=repository:my/node:whatever',
         )
         .reply(200, { token: 'some-token' });
 
@@ -220,7 +220,7 @@ describe('modules/datasource/docker/common', () => {
         http,
         'https://my.local.registry',
         'my/node/prefix',
-        'https://my.local.registry/v2/my/node/resource'
+        'https://my.local.registry/v2/my/node/resource',
       );
 
       // do not inline, otherwise we get false positive from codeql
@@ -241,8 +241,8 @@ describe('modules/datasource/docker/common', () => {
       findHelmSourceUrl(
         partial<OciHelmConfig>({
           home: 'https://github.com/bitnami/charts/tree/main/bitnami/harbor',
-        })
-      )
+        }),
+      ),
     ).toBe('https://github.com/bitnami/charts/tree/main/bitnami/harbor');
 
     expect(findHelmSourceUrl(partial<OciHelmConfig>({}))).toBeNull();
@@ -253,16 +253,16 @@ describe('modules/datasource/docker/common', () => {
           sources: [
             'https://github.com/bitnami/charts/tree/main/bitnami/harbor',
           ],
-        })
-      )
+        }),
+      ),
     ).toBe('https://github.com/bitnami/charts/tree/main/bitnami/harbor');
 
     expect(
       findHelmSourceUrl(
         partial<OciHelmConfig>({
           sources: ['https://some.test'],
-        })
-      )
+        }),
+      ),
     ).toBe('https://some.test');
   });
 });
diff --git a/lib/modules/datasource/docker/common.ts b/lib/modules/datasource/docker/common.ts
index e312ab7dab1d811e570f66475405eed048e82b14..a697ad869dafa63165fdaa31aba399367f425921 100644
--- a/lib/modules/datasource/docker/common.ts
+++ b/lib/modules/datasource/docker/common.ts
@@ -50,7 +50,7 @@ export async function getAuthHeaders(
   http: Http,
   registryHost: string,
   dockerRepository: string,
-  apiCheckUrl = `${registryHost}/v2/`
+  apiCheckUrl = `${registryHost}/v2/`,
 ): Promise<OutgoingHttpHeaders | null> {
   try {
     const options = {
@@ -78,13 +78,13 @@ export async function getAuthHeaders(
     ) {
       logger.warn(
         { apiCheckUrl, res: apiCheckResponse },
-        'Invalid registry response'
+        'Invalid registry response',
       );
       return null;
     }
 
     const authenticateHeader = parse(
-      apiCheckResponse.headers['www-authenticate']
+      apiCheckResponse.headers['www-authenticate'],
     );
 
     const opts: HostRule & HttpOptions = hostRules.find({
@@ -94,7 +94,7 @@ export async function getAuthHeaders(
     if (ecrRegex.test(registryHost)) {
       logger.trace(
         { registryHost, dockerRepository },
-        `Using ecr auth for Docker registry`
+        `Using ecr auth for Docker registry`,
       );
       const [, region] = coerceArray(ecrRegex.exec(registryHost));
       const auth = await getECRAuthToken(region, opts);
@@ -109,7 +109,7 @@ export async function getAuthHeaders(
     ) {
       logger.trace(
         { registryHost, dockerRepository },
-        `Using google auth for Docker registry`
+        `Using google auth for Docker registry`,
       );
       const auth = await getGoogleAuthToken();
       if (auth) {
@@ -117,23 +117,23 @@ export async function getAuthHeaders(
       } else {
         logger.once.debug(
           { registryHost, dockerRepository },
-          'Could not get Google access token, using no auth'
+          'Could not get Google access token, using no auth',
         );
       }
     } else if (opts.username && opts.password) {
       logger.trace(
         { registryHost, dockerRepository },
-        `Using basic auth for Docker registry`
+        `Using basic auth for Docker registry`,
       );
       const auth = Buffer.from(`${opts.username}:${opts.password}`).toString(
-        'base64'
+        'base64',
       );
       opts.headers = { authorization: `Basic ${auth}` };
     } else if (opts.token) {
       const authType = opts.authType ?? 'Bearer';
       logger.trace(
         { registryHost, dockerRepository },
-        `Using ${authType} token for Docker registry`
+        `Using ${authType} token for Docker registry`,
       );
       opts.headers = { authorization: `${authType} ${opts.token}` };
     }
@@ -154,7 +154,7 @@ export async function getAuthHeaders(
     ) {
       logger.trace(
         { registryHost, dockerRepository, authenticateHeader },
-        `Invalid realm, testing direct auth`
+        `Invalid realm, testing direct auth`,
       );
       return opts.headers ?? null;
     }
@@ -170,7 +170,7 @@ export async function getAuthHeaders(
     } else {
       authUrl.searchParams.append(
         'scope',
-        `repository:${dockerRepository}:pull`
+        `repository:${dockerRepository}:pull`,
       );
     }
 
@@ -180,13 +180,13 @@ export async function getAuthHeaders(
 
     logger.trace(
       { registryHost, dockerRepository, authUrl: authUrl.href },
-      `Obtaining docker registry token`
+      `Obtaining docker registry token`,
     );
     opts.noAuth = true;
     const authResponse = (
       await http.getJson<{ token?: string; access_token?: string }>(
         authUrl.href,
-        opts
+        opts,
       )
     ).body;
 
@@ -209,7 +209,7 @@ export async function getAuthHeaders(
     if (err.statusCode === 401) {
       logger.debug(
         { registryHost, dockerRepository },
-        'Unauthorized docker lookup'
+        'Unauthorized docker lookup',
       );
       logger.debug({ err });
       return null;
@@ -217,7 +217,7 @@ export async function getAuthHeaders(
     if (err.statusCode === 403) {
       logger.debug(
         { registryHost, dockerRepository },
-        'Not allowed to access docker registry'
+        'Not allowed to access docker registry',
       );
       logger.debug({ err });
       return null;
@@ -240,7 +240,7 @@ export async function getAuthHeaders(
     }
     logger.warn(
       { registryHost, dockerRepository, err },
-      'Error obtaining docker token'
+      'Error obtaining docker token',
     );
     return null;
   }
@@ -248,11 +248,11 @@ export async function getAuthHeaders(
 
 export function getRegistryRepository(
   packageName: string,
-  registryUrl: string
+  registryUrl: string,
 ): RegistryRepository {
   if (registryUrl !== DOCKER_HUB) {
     const registryEndingWithSlash = ensureTrailingSlash(
-      registryUrl.replace(regEx(/^https?:\/\//), '')
+      registryUrl.replace(regEx(/^https?:\/\//), ''),
     );
     if (packageName.startsWith(registryEndingWithSlash)) {
       let registryHost = trimTrailingSlash(registryUrl);
@@ -292,7 +292,7 @@ export function getRegistryRepository(
 
   registryHost = registryHost.replace(
     'https://docker.io',
-    'https://index.docker.io'
+    'https://index.docker.io',
   );
 
   const opts = hostRules.find({
@@ -312,7 +312,7 @@ export function getRegistryRepository(
 }
 
 export function extractDigestFromResponseBody(
-  manifestResponse: HttpResponse
+  manifestResponse: HttpResponse,
 ): string {
   return 'sha256:' + toSha256(manifestResponse.body);
 }
diff --git a/lib/modules/datasource/docker/ecr.ts b/lib/modules/datasource/docker/ecr.ts
index ad141385409f0c9737e6bdb77c75cf6a4a5f186f..005c7405767b156cf6942e7b71e9a75122f480af 100644
--- a/lib/modules/datasource/docker/ecr.ts
+++ b/lib/modules/datasource/docker/ecr.ts
@@ -11,7 +11,7 @@ export const ecrPublicRegex = regEx(/public\.ecr\.aws/);
 
 export async function getECRAuthToken(
   region: string,
-  opts: HostRule
+  opts: HostRule,
 ): Promise<string | null> {
   const config: ECRClientConfig = { region };
   if (opts.username && opts.password) {
@@ -32,7 +32,7 @@ export async function getECRAuthToken(
       return authorizationToken;
     }
     logger.warn(
-      'Could not extract authorizationToken from ECR getAuthorizationToken response'
+      'Could not extract authorizationToken from ECR getAuthorizationToken response',
     );
   } catch (err) {
     logger.trace({ err }, 'err');
@@ -48,7 +48,7 @@ export function isECRMaxResultsError(err: HttpError): boolean {
     resp.headers?.['docker-distribution-api-version'] &&
     // https://docs.aws.amazon.com/AmazonECR/latest/APIReference/API_DescribeRepositories.html#ECR-DescribeRepositories-request-maxResults
     resp.body?.['errors']?.[0]?.message?.includes(
-      'Member must have value less than or equal to 1000'
+      'Member must have value less than or equal to 1000',
     )
   );
 }
diff --git a/lib/modules/datasource/docker/google.ts b/lib/modules/datasource/docker/google.ts
index cec035f75865cfa07fa5909dbf816d190a8a2239..6e75dd69655c1f55ad1dd1f0f325dca8f664c3f3 100644
--- a/lib/modules/datasource/docker/google.ts
+++ b/lib/modules/datasource/docker/google.ts
@@ -1,5 +1,5 @@
 import { regEx } from '../../../util/regex';
 
 export const googleRegex = regEx(
-  /(((eu|us|asia)\.)?gcr\.io|[a-z0-9-]+-docker\.pkg\.dev)/
+  /(((eu|us|asia)\.)?gcr\.io|[a-z0-9-]+-docker\.pkg\.dev)/,
 );
diff --git a/lib/modules/datasource/docker/index.spec.ts b/lib/modules/datasource/docker/index.spec.ts
index bc80ae2838c6977d8e15a578cf815b3de76676c0..926d4a7c6715dfc37366a5157651f64fb82bbd67 100644
--- a/lib/modules/datasource/docker/index.spec.ts
+++ b/lib/modules/datasource/docker/index.spec.ts
@@ -30,7 +30,7 @@ const garUrl = 'https://europe-docker.pkg.dev/v2';
 const dockerHubUrl = 'https://hub.docker.com/v2/repositories';
 
 function mockEcrAuthResolve(
-  res: Partial<GetAuthorizationTokenCommandOutput> = {}
+  res: Partial<GetAuthorizationTokenCommandOutput> = {},
 ) {
   ecrMock.on(GetAuthorizationTokenCommand).resolvesOnce(res);
 }
@@ -63,7 +63,7 @@ describe('modules/datasource/docker/index', () => {
         .reply(401);
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-new-value'
+        'some-new-value',
       );
       expect(res).toBeNull();
     });
@@ -79,7 +79,7 @@ describe('modules/datasource/docker/index', () => {
         .replyWithError('error');
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-new-value'
+        'some-new-value',
       );
       expect(res).toBeNull();
     });
@@ -93,7 +93,7 @@ describe('modules/datasource/docker/index', () => {
         .reply(200, undefined, { 'docker-content-digest': '' });
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-new-value'
+        'some-new-value',
       );
       expect(res).toBeNull();
     });
@@ -111,7 +111,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .reply(200, { token: 'some-token' });
 
@@ -157,21 +157,21 @@ describe('modules/datasource/docker/index', () => {
        }`,
           {
             'content-type': 'text/plain',
-          }
+          },
         );
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .twice()
         .reply(200, { token: 'some-token' });
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-new-value'
+        'some-new-value',
       );
       expect(res).toBe(
-        'sha256:b3d6068234f3a18ebeedd2dab81e67b6a192e81192a099df4112ecfc7c3be84f'
+        'sha256:b3d6068234f3a18ebeedd2dab81e67b6a192e81192a099df4112ecfc7c3be84f',
       );
     });
 
@@ -201,12 +201,12 @@ describe('modules/datasource/docker/index', () => {
         .head('/library/some-dep/manifests/some-tag')
         .matchHeader(
           'authorization',
-          'Basic c29tZS11c2VybmFtZTpzb21lLXBhc3N3b3Jk'
+          'Basic c29tZS11c2VybmFtZTpzb21lLXBhc3N3b3Jk',
         )
         .reply(200, '', { 'docker-content-digest': 'some-digest' });
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
     });
@@ -222,7 +222,7 @@ describe('modules/datasource/docker/index', () => {
         .reply(403);
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBeNull();
     });
@@ -248,8 +248,8 @@ describe('modules/datasource/docker/index', () => {
             datasource: 'docker',
             packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
           },
-          'some-tag'
-        )
+          'some-tag',
+        ),
       ).toBe('some-digest');
 
       const ecr = ecrMock.call(0).thisValue as ECRClient;
@@ -287,8 +287,8 @@ describe('modules/datasource/docker/index', () => {
             datasource: 'docker',
             packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
           },
-          'some-tag'
-        )
+          'some-tag',
+        ),
       ).toBe('some-digest');
 
       const ecr = ecrMock.call(0).thisValue as ECRClient;
@@ -320,7 +320,7 @@ describe('modules/datasource/docker/index', () => {
           datasource: 'docker',
           packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
         },
-        'some-tag'
+        'some-tag',
       );
 
       expect(res).toBe('some-digest');
@@ -337,7 +337,7 @@ describe('modules/datasource/docker/index', () => {
           datasource: 'docker',
           packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBeNull();
     });
@@ -353,7 +353,7 @@ describe('modules/datasource/docker/index', () => {
           datasource: 'docker',
           packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBeNull();
     });
@@ -368,14 +368,14 @@ describe('modules/datasource/docker/index', () => {
         .head('/some-project/some-package/manifests/some-tag')
         .matchHeader(
           'authorization',
-          'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg=='
+          'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg==',
         )
         .reply(200, '', { 'docker-content-digest': 'some-digest' });
 
       googleAuth.GoogleAuth.mockImplementationOnce(
         jest.fn().mockImplementationOnce(() => ({
           getAccessToken: jest.fn().mockResolvedValue('some-token'),
-        }))
+        })),
       );
 
       hostRules.find.mockReturnValue({});
@@ -384,7 +384,7 @@ describe('modules/datasource/docker/index', () => {
           datasource: 'docker',
           packageName: 'eu.gcr.io/some-project/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(1);
@@ -400,14 +400,14 @@ describe('modules/datasource/docker/index', () => {
         .head('/some-project/some-repo/some-package/manifests/some-tag')
         .matchHeader(
           'authorization',
-          'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg=='
+          'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg==',
         )
         .reply(200, '', { 'docker-content-digest': 'some-digest' });
 
       googleAuth.GoogleAuth.mockImplementationOnce(
         jest.fn().mockImplementationOnce(() => ({
           getAccessToken: jest.fn().mockResolvedValue('some-token'),
-        }))
+        })),
       );
 
       hostRules.find.mockReturnValue({});
@@ -417,7 +417,7 @@ describe('modules/datasource/docker/index', () => {
           packageName:
             'europe-docker.pkg.dev/some-project/some-repo/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(1);
@@ -433,14 +433,14 @@ describe('modules/datasource/docker/index', () => {
         .head('/some-project/some-package/manifests/some-tag')
         .matchHeader(
           'authorization',
-          'Basic c29tZS11c2VybmFtZTpzb21lLXBhc3N3b3Jk'
+          'Basic c29tZS11c2VybmFtZTpzb21lLXBhc3N3b3Jk',
         )
         .reply(200, '', { 'docker-content-digest': 'some-digest' });
 
       googleAuth.GoogleAuth.mockImplementationOnce(
         jest.fn().mockImplementationOnce(() => ({
           getAccessToken: jest.fn().mockResolvedValue('some-token'),
-        }))
+        })),
       );
 
       const res = await getDigest(
@@ -448,7 +448,7 @@ describe('modules/datasource/docker/index', () => {
           datasource: 'docker',
           packageName: 'eu.gcr.io/some-project/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(0);
@@ -464,14 +464,14 @@ describe('modules/datasource/docker/index', () => {
         .head('/some-project/some-repo/some-package/manifests/some-tag')
         .matchHeader(
           'authorization',
-          'Basic c29tZS11c2VybmFtZTpzb21lLXBhc3N3b3Jk'
+          'Basic c29tZS11c2VybmFtZTpzb21lLXBhc3N3b3Jk',
         )
         .reply(200, '', { 'docker-content-digest': 'some-digest' });
 
       googleAuth.GoogleAuth.mockImplementationOnce(
         jest.fn().mockImplementationOnce(() => ({
           getAccessToken: jest.fn().mockResolvedValue('some-token'),
-        }))
+        })),
       );
 
       const res = await getDigest(
@@ -480,7 +480,7 @@ describe('modules/datasource/docker/index', () => {
           packageName:
             'europe-docker.pkg.dev/some-project/some-repo/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(0);
@@ -500,7 +500,7 @@ describe('modules/datasource/docker/index', () => {
           datasource: 'docker',
           packageName: 'eu.gcr.io/some-project/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(0);
@@ -521,7 +521,7 @@ describe('modules/datasource/docker/index', () => {
           packageName:
             'europe-docker.pkg.dev/some-project/some-repo/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBe('some-digest');
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(0);
@@ -535,14 +535,14 @@ describe('modules/datasource/docker/index', () => {
       googleAuth.GoogleAuth.mockImplementationOnce(
         jest.fn().mockImplementationOnce(() => ({
           getAccessToken: jest.fn().mockResolvedValue(undefined),
-        }))
+        })),
       );
       const res = await getDigest(
         {
           datasource: 'docker',
           packageName: 'eu.gcr.io/some-project/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBeNull();
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(1);
@@ -556,7 +556,7 @@ describe('modules/datasource/docker/index', () => {
       googleAuth.GoogleAuth.mockImplementationOnce(
         jest.fn().mockImplementationOnce(() => ({
           getAccessToken: jest.fn().mockRejectedValue('some-error'),
-        }))
+        })),
       );
       const res = await getDigest(
         {
@@ -564,7 +564,7 @@ describe('modules/datasource/docker/index', () => {
           packageName:
             'europe-docker.pkg.dev/some-project/some-repo/some-package',
         },
-        'some-tag'
+        'some-tag',
       );
       expect(res).toBeNull();
       expect(googleAuth.GoogleAuth).toHaveBeenCalledTimes(1);
@@ -581,7 +581,7 @@ describe('modules/datasource/docker/index', () => {
         .reply(200, {}, { 'docker-content-digest': 'some-digest' });
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-dep' },
-        'some-new-value'
+        'some-new-value',
       );
       expect(res).toBe('some-digest');
     });
@@ -602,7 +602,7 @@ describe('modules/datasource/docker/index', () => {
         .reply(200, { access_token: 'test' });
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-other-dep' },
-        '8.0.0-alpine'
+        '8.0.0-alpine',
       );
       expect(res).toBe('some-digest');
     });
@@ -620,12 +620,12 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-other-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-other-dep:pull',
         )
         .reply(200, { access_token: 'test' });
       const res = await getDigest(
         { datasource: 'docker', packageName: 'some-other-dep' },
-        '8.0.0-alpine'
+        '8.0.0-alpine',
       );
       expect(res).toBe('some-digest');
     });
@@ -633,14 +633,14 @@ describe('modules/datasource/docker/index', () => {
     it('should throw error for 429', async () => {
       httpMock.scope(baseUrl).get('/').replyWithError({ statusCode: 429 });
       await expect(
-        getDigest({ datasource: 'docker', packageName: 'some-dep' }, 'latest')
+        getDigest({ datasource: 'docker', packageName: 'some-dep' }, 'latest'),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
     it('should throw error for 5xx', async () => {
       httpMock.scope(baseUrl).get('/').replyWithError({ statusCode: 504 });
       await expect(
-        getDigest({ datasource: 'docker', packageName: 'some-dep' }, 'latest')
+        getDigest({ datasource: 'docker', packageName: 'some-dep' }, 'latest'),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -651,7 +651,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .times(4)
         .reply(200, { token: 'some-token' });
@@ -731,14 +731,14 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           currentDigest,
         },
-        'some-new-value'
+        'some-new-value',
       );
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        `Current digest ${currentDigest} relates to architecture amd64`
+        `Current digest ${currentDigest} relates to architecture amd64`,
       );
       expect(res).toBe(
-        'sha256:81093b981e72a54d488d5a60780006d82f7cc02d248d88ff71ff4137b0f51176'
+        'sha256:81093b981e72a54d488d5a60780006d82f7cc02d248d88ff71ff4137b0f51176',
       );
     });
 
@@ -749,7 +749,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .times(5)
         .reply(200, { token: 'some-token' });
@@ -830,14 +830,14 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           currentDigest,
         },
-        'some-new-value'
+        'some-new-value',
       );
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        `Current digest ${currentDigest} relates to architecture null`
+        `Current digest ${currentDigest} relates to architecture null`,
       );
       expect(res).toBe(
-        'sha256:5194622ded36da4097a53c4ec9d85bba370d9e826e88a74fa910c46ddbf3208c'
+        'sha256:5194622ded36da4097a53c4ec9d85bba370d9e826e88a74fa910c46ddbf3208c',
       );
     });
 
@@ -848,7 +848,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .times(4)
         .reply(200, { token: 'some-token' });
@@ -902,7 +902,7 @@ describe('modules/datasource/docker/index', () => {
           },
           {
             'content-type': 'text/plain',
-          }
+          },
         );
 
       const res = await getDigest(
@@ -911,11 +911,11 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           currentDigest,
         },
-        'some-new-value'
+        'some-new-value',
       );
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        `Current digest ${currentDigest} relates to architecture amd64`
+        `Current digest ${currentDigest} relates to architecture amd64`,
       );
       expect(res).toBe('some-new-image-digest');
     });
@@ -927,7 +927,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .times(4)
         .reply(200, { token: 'some-token' });
@@ -982,11 +982,11 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           currentDigest,
         },
-        'some-new-value'
+        'some-new-value',
       );
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        `Current digest ${currentDigest} relates to architecture amd64`
+        `Current digest ${currentDigest} relates to architecture amd64`,
       );
       expect(res).toBe('some-new-image-digest');
     });
@@ -998,7 +998,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .times(4)
         .reply(200, { token: 'some-token' });
@@ -1067,11 +1067,11 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           currentDigest,
         },
-        'some-new-value'
+        'some-new-value',
       );
 
       expect(res).toBe(
-        'sha256:ee75deb1a41bb998e52a116707a6e22a91904cba0c1d6e6c76cf04923efff2d8'
+        'sha256:ee75deb1a41bb998e52a116707a6e22a91904cba0c1d6e6c76cf04923efff2d8',
       );
     });
 
@@ -1082,7 +1082,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull'
+          '/token?service=registry.docker.io&scope=repository:library/some-dep:pull',
         )
         .times(3)
         .reply(200, { token: 'some-token' });
@@ -1123,7 +1123,7 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           currentDigest,
         },
-        'some-new-value'
+        'some-new-value',
       );
       expect(res).toBeNull();
     });
@@ -1134,7 +1134,7 @@ describe('modules/datasource/docker/index', () => {
         .get('/', undefined, { badheaders: ['authorization'] })
         .reply(200, { token: 'some-token' })
         .head(
-          '/library/some-dep/manifests/sha256:0101010101010101010101010101010101010101010101010101010101010101'
+          '/library/some-dep/manifests/sha256:0101010101010101010101010101010101010101010101010101010101010101',
         )
         .reply(404, {});
       httpMock
@@ -1146,7 +1146,7 @@ describe('modules/datasource/docker/index', () => {
           undefined,
           {
             badheaders: ['authorization'],
-          }
+          },
         )
         .reply(401);
 
@@ -1157,7 +1157,7 @@ describe('modules/datasource/docker/index', () => {
           currentDigest:
             'sha256:0101010101010101010101010101010101010101010101010101010101010101',
         },
-        'sha256:fafafafafafafafafafafafafafafafafafafafafafafafafafafafafafafafa'
+        'sha256:fafafafafafafafafafafafafafafafafafafafafafafafafafafafafafafafa',
       );
       expect(res).toBeNull();
     });
@@ -1198,7 +1198,7 @@ describe('modules/datasource/docker/index', () => {
           currentDigest,
           registryUrls: ['https://registry.company.com'],
         },
-        '3.17'
+        '3.17',
       );
 
       expect(res).toBe(newDigest);
@@ -1229,7 +1229,7 @@ describe('modules/datasource/docker/index', () => {
           packageName: 'some-dep',
           registryUrls: ['https://registry.company.com'],
         },
-        '3.17'
+        '3.17',
       );
 
       expect(res).toBe(newDigest);
@@ -1264,7 +1264,7 @@ describe('modules/datasource/docker/index', () => {
           { tags },
           {
             link: '<https://api.github.com/user/9287/repos?page=3&per_page=1000>; rel="next", ',
-          }
+          },
         )
         .get('/')
         .reply(200)
@@ -1295,7 +1295,7 @@ describe('modules/datasource/docker/index', () => {
           { tags: ['1.0.0'] },
           {
             link: `<${baseUrl}/library/node/tags/list?n=1&page=1>; rel="next", `,
-          }
+          },
         )
         .get('/library/node/tags/list?n=1&page=1')
         .reply(
@@ -1303,7 +1303,7 @@ describe('modules/datasource/docker/index', () => {
           { tags: ['1.0.1'] },
           {
             link: `<${baseUrl}/library/node/tags/list?n=1&page=2>; rel="next", `,
-          }
+          },
         );
 
       const config = {
@@ -1338,11 +1338,11 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope('https://quay.io')
         .get(
-          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=1&onlyActiveTags=true'
+          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=1&onlyActiveTags=true',
         )
         .reply(200, { tags, has_additional: true })
         .get(
-          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=2&onlyActiveTags=true'
+          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=2&onlyActiveTags=true',
         )
         .reply(200, { tags: [], has_additional: false })
         .get('/v2/')
@@ -1363,11 +1363,11 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope('https://quay.io')
         .get(
-          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=1&onlyActiveTags=true'
+          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=1&onlyActiveTags=true',
         )
         .reply(200, { tags, has_additional: true })
         .get(
-          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=2&onlyActiveTags=true'
+          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=2&onlyActiveTags=true',
         )
         .reply(200, { tags: [], has_additional: false })
         .get('/v2/')
@@ -1387,7 +1387,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope('https://quay.io')
         .get(
-          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=1&onlyActiveTags=true'
+          '/api/v1/repository/bitnami/redis/tag/?limit=100&page=1&onlyActiveTags=true',
         )
         .reply(500);
       const config = {
@@ -1419,13 +1419,13 @@ describe('modules/datasource/docker/index', () => {
           {
             'x-jfrog-version': 'Artifactory/7.42.2 74202900',
             link: '</library/node/tags/list?n=10000&last=10000>; rel="next", ',
-          }
+          },
         )
         .get('/virtual-mirror/library/node/tags/list?n=10000&last=10000')
         .reply(
           200,
           { tags: tags2 },
-          { 'x-jfrog-version': 'Artifactory/7.42.2 74202900' }
+          { 'x-jfrog-version': 'Artifactory/7.42.2 74202900' },
         )
         .get('/')
         .reply(200, '', {})
@@ -1455,7 +1455,7 @@ describe('modules/datasource/docker/index', () => {
         await getPkgReleases({
           datasource: DockerDatasource.id,
           packageName: '123456789.dkr.ecr.us-east-1.amazonaws.com/node',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://123456789.dkr.ecr.us-east-1.amazonaws.com',
         releases: [],
@@ -1491,7 +1491,7 @@ describe('modules/datasource/docker/index', () => {
             'Bearer realm="https://public.ecr.aws/token",service="public.ecr.aws",scope="aws"',
         })
         .get(
-          '/token?service=public.ecr.aws&scope=repository:amazonlinux/amazonlinux:pull'
+          '/token?service=public.ecr.aws&scope=repository:amazonlinux/amazonlinux:pull',
         )
         .reply(200, { token: 'test' });
       httpMock
@@ -1507,7 +1507,7 @@ describe('modules/datasource/docker/index', () => {
         await getPkgReleases({
           datasource: DockerDatasource.id,
           packageName: 'public.ecr.aws/amazonlinux/amazonlinux',
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://public.ecr.aws',
         releases: [],
@@ -1534,7 +1534,7 @@ describe('modules/datasource/docker/index', () => {
             },
             {
               'Docker-Distribution-Api-Version': 'registry/2.0',
-            }
+            },
           )
           .get('/')
           .reply(200)
@@ -1565,7 +1565,7 @@ describe('modules/datasource/docker/index', () => {
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toEqual({
           registryUrl: 'https://ecr-proxy.company.com',
           releases: [],
@@ -1600,7 +1600,7 @@ describe('modules/datasource/docker/index', () => {
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1625,13 +1625,13 @@ describe('modules/datasource/docker/index', () => {
             },
             {
               'Docker-Distribution-Api-Version': 'registry/2.0',
-            }
+            },
           );
         expect(
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1654,7 +1654,7 @@ describe('modules/datasource/docker/index', () => {
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1677,13 +1677,13 @@ describe('modules/datasource/docker/index', () => {
             },
             {
               'Irrelevant-Header': 'irrelevant-value',
-            }
+            },
           );
         expect(
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1698,13 +1698,13 @@ describe('modules/datasource/docker/index', () => {
             {},
             {
               'Docker-Distribution-Api-Version': 'registry/2.0',
-            }
+            },
           );
         expect(
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1721,13 +1721,13 @@ describe('modules/datasource/docker/index', () => {
             },
             {
               'Docker-Distribution-Api-Version': 'registry/2.0',
-            }
+            },
           );
         expect(
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1748,13 +1748,13 @@ describe('modules/datasource/docker/index', () => {
             },
             {
               'Docker-Distribution-Api-Version': 'registry/2.0',
-            }
+            },
           );
         expect(
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
 
@@ -1776,13 +1776,13 @@ describe('modules/datasource/docker/index', () => {
             },
             {
               'Docker-Distribution-Api-Version': 'registry/2.0',
-            }
+            },
           );
         expect(
           await getPkgReleases({
             datasource: DockerDatasource.id,
             packageName: 'ecr-proxy.company.com/node',
-          })
+          }),
         ).toBeNull();
       });
     });
@@ -1806,7 +1806,7 @@ describe('modules/datasource/docker/index', () => {
       httpMock
         .scope(authUrl)
         .get(
-          '/token?service=registry.docker.io&scope=repository:library/node:pull'
+          '/token?service=registry.docker.io&scope=repository:library/node:pull',
         )
         .reply(200, { token: 'test' });
       const res = await getPkgReleases({
@@ -1867,7 +1867,7 @@ describe('modules/datasource/docker/index', () => {
             'Bearer realm="https://k8s.gcr.io/v2/token",service="k8s.gcr.io"',
         })
         .get(
-          '/token?service=k8s.gcr.io&scope=repository:kubernetes-dashboard-amd64:pull'
+          '/token?service=k8s.gcr.io&scope=repository:kubernetes-dashboard-amd64:pull',
         )
         .reply(200, { token: 'some-token ' })
         .get('/kubernetes-dashboard-amd64/tags/list?n=10000')
@@ -2047,11 +2047,11 @@ describe('modules/datasource/docker/index', () => {
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
         expect.anything(),
-        `manifest blob response body missing the "config" property`
+        `manifest blob response body missing the "config" property`,
       );
       expect(logger.logger.info).not.toHaveBeenCalledWith(
         expect.anything(),
-        'Unknown error getting Docker labels'
+        'Unknown error getting Docker labels',
       );
     });
 
@@ -2457,7 +2457,7 @@ describe('modules/datasource/docker/index', () => {
             'https://github.com/renovatebot/renovate',
           'org.opencontainers.image.revision':
             'ab7ddb5e3c5c3b402acd7c3679d4e415f8092dde',
-        }
+        },
       );
     });
 
@@ -2488,7 +2488,7 @@ describe('modules/datasource/docker/index', () => {
             'https://github.com/renovatebot/renovate',
           'org.opencontainers.image.revision':
             'ab7ddb5e3c5c3b402acd7c3679d4e415f8092dde',
-        }
+        },
       );
     });
 
@@ -2518,7 +2518,7 @@ describe('modules/datasource/docker/index', () => {
         {
           'org.opencontainers.image.source':
             'https://github.com/bitnami/charts/tree/main/bitnami/harbor',
-        }
+        },
       );
     });
   });
diff --git a/lib/modules/datasource/docker/index.ts b/lib/modules/datasource/docker/index.ts
index df29be785e5cf64fa9559f07bd99f0b374e9bdb5..77def2090785e36b95d0236a41ff63c44d243d89 100644
--- a/lib/modules/datasource/docker/index.ts
+++ b/lib/modules/datasource/docker/index.ts
@@ -88,16 +88,16 @@ export class DockerDatasource extends Datasource {
     registryHost: string,
     dockerRepository: string,
     tag: string,
-    mode: 'head' | 'get' = 'get'
+    mode: 'head' | 'get' = 'get',
   ): Promise<HttpResponse | null> {
     logger.debug(
-      `getManifestResponse(${registryHost}, ${dockerRepository}, ${tag}, ${mode})`
+      `getManifestResponse(${registryHost}, ${dockerRepository}, ${tag}, ${mode})`,
     );
     try {
       const headers = await getAuthHeaders(
         this.http,
         registryHost,
-        dockerRepository
+        dockerRepository,
       );
       if (!headers) {
         logger.warn('No docker auth found - returning');
@@ -122,7 +122,7 @@ export class DockerDatasource extends Datasource {
       if (err.statusCode === 401) {
         logger.debug(
           { registryHost, dockerRepository },
-          'Unauthorized docker lookup'
+          'Unauthorized docker lookup',
         );
         logger.debug({ err });
         return null;
@@ -135,7 +135,7 @@ export class DockerDatasource extends Datasource {
             dockerRepository,
             tag,
           },
-          'Docker Manifest is unknown'
+          'Docker Manifest is unknown',
         );
         return null;
       }
@@ -148,7 +148,7 @@ export class DockerDatasource extends Datasource {
       if (err.code === 'ETIMEDOUT') {
         logger.debug(
           { registryHost },
-          'Timeout when attempting to connect to docker registry'
+          'Timeout when attempting to connect to docker registry',
         );
         logger.debug({ err });
         return null;
@@ -160,7 +160,7 @@ export class DockerDatasource extends Datasource {
           dockerRepository,
           tag,
         },
-        'Unknown Error looking up docker manifest'
+        'Unknown Error looking up docker manifest',
       );
       return null;
     }
@@ -171,23 +171,23 @@ export class DockerDatasource extends Datasource {
     key: (
       registryHost: string,
       dockerRepository: string,
-      configDigest: string
+      configDigest: string,
     ) => `${registryHost}:${dockerRepository}@${configDigest}`,
     ttlMinutes: 1440 * 28,
   })
   public async getImageConfig(
     registryHost: string,
     dockerRepository: string,
-    configDigest: string
+    configDigest: string,
   ): Promise<HttpResponse<OciImageConfig> | undefined> {
     logger.trace(
-      `getImageConfig(${registryHost}, ${dockerRepository}, ${configDigest})`
+      `getImageConfig(${registryHost}, ${dockerRepository}, ${configDigest})`,
     );
 
     const headers = await getAuthHeaders(
       this.http,
       registryHost,
-      dockerRepository
+      dockerRepository,
     );
     // istanbul ignore if: Should never happen
     if (!headers) {
@@ -199,7 +199,7 @@ export class DockerDatasource extends Datasource {
       'v2',
       dockerRepository,
       'blobs',
-      configDigest
+      configDigest,
     );
     return await this.http.getJson(
       url,
@@ -207,7 +207,7 @@ export class DockerDatasource extends Datasource {
         headers,
         noAuth: true,
       },
-      OciImageConfig
+      OciImageConfig,
     );
   }
 
@@ -216,23 +216,23 @@ export class DockerDatasource extends Datasource {
     key: (
       registryHost: string,
       dockerRepository: string,
-      configDigest: string
+      configDigest: string,
     ) => `${registryHost}:${dockerRepository}@${configDigest}`,
     ttlMinutes: 1440 * 28,
   })
   public async getHelmConfig(
     registryHost: string,
     dockerRepository: string,
-    configDigest: string
+    configDigest: string,
   ): Promise<HttpResponse<OciHelmConfig> | undefined> {
     logger.trace(
-      `getImageConfig(${registryHost}, ${dockerRepository}, ${configDigest})`
+      `getImageConfig(${registryHost}, ${dockerRepository}, ${configDigest})`,
     );
 
     const headers = await getAuthHeaders(
       this.http,
       registryHost,
-      dockerRepository
+      dockerRepository,
     );
     // istanbul ignore if: Should never happen
     if (!headers) {
@@ -244,7 +244,7 @@ export class DockerDatasource extends Datasource {
       'v2',
       dockerRepository,
       'blobs',
-      configDigest
+      configDigest,
     );
     return await this.http.getJson(
       url,
@@ -252,14 +252,14 @@ export class DockerDatasource extends Datasource {
         headers,
         noAuth: true,
       },
-      OciHelmConfig
+      OciHelmConfig,
     );
   }
 
   private async getConfigDigest(
     registry: string,
     dockerRepository: string,
-    tag: string
+    tag: string,
   ): Promise<string | null> {
     return (
       (await this.getManifest(registry, dockerRepository, tag))?.config
@@ -270,12 +270,12 @@ export class DockerDatasource extends Datasource {
   private async getManifest(
     registry: string,
     dockerRepository: string,
-    tag: string
+    tag: string,
   ): Promise<OciImageManifest | DistributionManifest | null> {
     const manifestResponse = await this.getManifestResponse(
       registry,
       dockerRepository,
-      tag
+      tag,
     );
 
     // If getting the manifest fails here, then abort
@@ -291,7 +291,7 @@ export class DockerDatasource extends Datasource {
     if (!parsed.success) {
       logger.debug(
         { registry, dockerRepository, tag, err: parsed.error },
-        'Invalid manifest response'
+        'Invalid manifest response',
       );
       return null;
     }
@@ -307,18 +307,18 @@ export class DockerDatasource extends Datasource {
         if (!manifest.manifests.length) {
           logger.debug(
             { manifest },
-            'Invalid manifest list with no manifests - returning'
+            'Invalid manifest list with no manifests - returning',
           );
           return null;
         }
         logger.trace(
           { registry, dockerRepository, tag },
-          'Found manifest list, using first image'
+          'Found manifest list, using first image',
         );
         return this.getManifest(
           registry,
           dockerRepository,
-          manifest.manifests[0].digest
+          manifest.manifests[0].digest,
         );
       // istanbul ignore next: can't happen
       default:
@@ -331,14 +331,14 @@ export class DockerDatasource extends Datasource {
     key: (
       registryHost: string,
       dockerRepository: string,
-      currentDigest: string
+      currentDigest: string,
     ) => `${registryHost}:${dockerRepository}@${currentDigest}`,
     ttlMinutes: 1440 * 28,
   })
   public async getImageArchitecture(
     registryHost: string,
     dockerRepository: string,
-    currentDigest: string
+    currentDigest: string,
   ): Promise<string | null | undefined> {
     try {
       let manifestResponse: HttpResponse<string> | null;
@@ -348,7 +348,7 @@ export class DockerDatasource extends Datasource {
           registryHost,
           dockerRepository,
           currentDigest,
-          'head'
+          'head',
         );
       } catch (_err) {
         const err =
@@ -381,7 +381,7 @@ export class DockerDatasource extends Datasource {
       const configDigest = await this.getConfigDigest(
         registryHost,
         dockerRepository,
-        currentDigest
+        currentDigest,
       );
       if (!configDigest) {
         return null;
@@ -390,7 +390,7 @@ export class DockerDatasource extends Datasource {
       const configResponse = await this.getImageConfig(
         registryHost,
         dockerRepository,
-        configDigest
+        configDigest,
       );
 
       // TODO: fix me, architecture is required in spec
@@ -403,7 +403,7 @@ export class DockerDatasource extends Datasource {
         logger.debug(
           `Current digest ${currentDigest} relates to architecture ${
             architecture ?? 'null'
-          }`
+          }`,
         );
 
         return architecture;
@@ -414,7 +414,7 @@ export class DockerDatasource extends Datasource {
       }
       logger.debug(
         { registryHost, dockerRepository, currentDigest, err },
-        'Unknown error getting image architecture'
+        'Unknown error getting image architecture',
       );
     }
 
@@ -436,7 +436,7 @@ export class DockerDatasource extends Datasource {
   public async getLabels(
     registryHost: string,
     dockerRepository: string,
-    tag: string
+    tag: string,
   ): Promise<Record<string, string> | undefined> {
     logger.debug(`getLabels(${registryHost}, ${dockerRepository}, ${tag})`);
     // Docker Hub library images don't have labels we need
@@ -452,13 +452,13 @@ export class DockerDatasource extends Datasource {
       const manifest = await this.getManifest(
         registryHost,
         dockerRepository,
-        tag
+        tag,
       );
 
       if (!manifest) {
         logger.debug(
           { registryHost, dockerRepository, tag },
-          'No manifest found'
+          'No manifest found',
         );
         return undefined;
       }
@@ -476,7 +476,7 @@ export class DockerDatasource extends Datasource {
           const configResponse = await this.getHelmConfig(
             registryHost,
             dockerRepository,
-            manifest.config.digest
+            manifest.config.digest,
           );
 
           if (configResponse) {
@@ -497,7 +497,7 @@ export class DockerDatasource extends Datasource {
           const configResponse = await this.getImageConfig(
             registryHost,
             dockerRepository,
-            manifest.config.digest
+            manifest.config.digest,
           );
 
           // istanbul ignore if: should never happen
@@ -511,7 +511,7 @@ export class DockerDatasource extends Datasource {
           } else {
             logger.debug(
               { headers: configResponse.headers, body },
-              `manifest blob response body missing the "config" property`
+              `manifest blob response body missing the "config" property`,
             );
           }
           break;
@@ -523,7 +523,7 @@ export class DockerDatasource extends Datasource {
           {
             labels,
           },
-          'found labels in manifest'
+          'found labels in manifest',
         );
       }
       return labels;
@@ -534,7 +534,7 @@ export class DockerDatasource extends Datasource {
       if (err.statusCode === 400 || err.statusCode === 401) {
         logger.debug(
           { registryHost, dockerRepository, err },
-          'Unauthorized docker lookup'
+          'Unauthorized docker lookup',
         );
       } else if (err.statusCode === 404) {
         logger.warn(
@@ -544,7 +544,7 @@ export class DockerDatasource extends Datasource {
             dockerRepository,
             tag,
           },
-          'Config Manifest is unknown'
+          'Config Manifest is unknown',
         );
       } else if (err.statusCode === 429 && isDockerHost(registryHost)) {
         logger.warn({ err }, 'docker registry failure: too many requests');
@@ -556,7 +556,7 @@ export class DockerDatasource extends Datasource {
             dockerRepository,
             tag,
           },
-          'docker registry failure: internal error'
+          'docker registry failure: internal error',
         );
       } else if (
         err.code === 'ERR_TLS_CERT_ALTNAME_INVALID' ||
@@ -564,17 +564,17 @@ export class DockerDatasource extends Datasource {
       ) {
         logger.debug(
           { registryHost, err },
-          'Error connecting to docker registry'
+          'Error connecting to docker registry',
         );
       } else if (registryHost === 'https://quay.io') {
         // istanbul ignore next
         logger.debug(
-          'Ignoring quay.io errors until they fully support v2 schema'
+          'Ignoring quay.io errors until they fully support v2 schema',
         );
       } else {
         logger.info(
           { registryHost, dockerRepository, tag, err },
-          'Unknown error getting Docker labels'
+          'Unknown error getting Docker labels',
         );
       }
       return {};
@@ -583,7 +583,7 @@ export class DockerDatasource extends Datasource {
 
   private async getTagsQuayRegistry(
     registry: string,
-    repository: string
+    repository: string,
   ): Promise<string[]> {
     let tags: string[] = [];
     const limit = 100;
@@ -604,7 +604,7 @@ export class DockerDatasource extends Datasource {
       // typescript issue :-/
       // eslint-disable-next-line @typescript-eslint/no-unnecessary-type-assertion
       const res = (await this.http.getJson<QuayRestDockerTags>(
-        url
+        url,
       )) as HttpResponse<QuayRestDockerTags>;
       const pageTags = res.body.tags.map((tag) => tag.name);
       tags = tags.concat(pageTags);
@@ -616,7 +616,7 @@ export class DockerDatasource extends Datasource {
 
   private async getDockerApiTags(
     registryHost: string,
-    dockerRepository: string
+    dockerRepository: string,
   ): Promise<string[] | null> {
     let tags: string[] = [];
     // AWS ECR limits the maximum number of results to 1000
@@ -634,7 +634,7 @@ export class DockerDatasource extends Datasource {
       this.http,
       registryHost,
       dockerRepository,
-      url
+      url,
     );
     if (!headers) {
       logger.debug('Failed to get authHeaders for getTags lookup');
@@ -689,11 +689,11 @@ export class DockerDatasource extends Datasource {
   })
   public async getTags(
     registryHost: string,
-    dockerRepository: string
+    dockerRepository: string,
   ): Promise<string[] | null> {
     try {
       const isQuay = regEx(/^https:\/\/quay\.io(?::[1-9][0-9]{0,4})?$/i).test(
-        registryHost
+        registryHost,
       );
       let tags: string[] | null;
       if (isQuay) {
@@ -710,7 +710,7 @@ export class DockerDatasource extends Datasource {
         !dockerRepository.includes('/')
       ) {
         logger.debug(
-          `Retrying Tags for ${registryHost}/${dockerRepository} using library/ prefix`
+          `Retrying Tags for ${registryHost}/${dockerRepository} using library/ prefix`,
         );
         return this.getTags(registryHost, 'library/' + dockerRepository);
       }
@@ -722,7 +722,7 @@ export class DockerDatasource extends Datasource {
         dockerRepository.split('/').length === 2
       ) {
         logger.debug(
-          `JFrog Artifactory: Retrying Tags for ${registryHost}/${dockerRepository} using library/ path between JFrog virtual repository and image`
+          `JFrog Artifactory: Retrying Tags for ${registryHost}/${dockerRepository} using library/ path between JFrog virtual repository and image`,
         );
 
         const dockerRepositoryParts = dockerRepository.split('/');
@@ -731,20 +731,20 @@ export class DockerDatasource extends Datasource {
 
         return this.getTags(
           registryHost,
-          jfrogRepository + '/library/' + dockerImage
+          jfrogRepository + '/library/' + dockerImage,
         );
       }
       if (err.statusCode === 429 && isDockerHost(registryHost)) {
         logger.warn(
           { registryHost, dockerRepository, err },
-          'docker registry failure: too many requests'
+          'docker registry failure: too many requests',
         );
         throw new ExternalHostError(err);
       }
       if (err.statusCode >= 500 && err.statusCode < 600) {
         logger.warn(
           { registryHost, dockerRepository, err },
-          'docker registry failure: internal error'
+          'docker registry failure: internal error',
         );
         throw new ExternalHostError(err);
       }
@@ -752,7 +752,7 @@ export class DockerDatasource extends Datasource {
       if (errorCodes.includes(err.code)) {
         logger.warn(
           { registryHost, dockerRepository, err },
-          'docker registry connection failure'
+          'docker registry connection failure',
         );
         throw new ExternalHostError(err);
       }
@@ -776,12 +776,12 @@ export class DockerDatasource extends Datasource {
     namespace: 'datasource-docker-digest',
     key: (
       { registryUrl, packageName, currentDigest }: DigestConfig,
-      newValue?: string
+      newValue?: string,
     ) => {
       const newTag = newValue ?? 'latest';
       const { registryHost, dockerRepository } = getRegistryRepository(
         packageName,
-        registryUrl!
+        registryUrl!,
       );
       const digest = currentDigest ? `@${currentDigest}` : '';
       return `${registryHost}:${dockerRepository}:${newTag}${digest}`;
@@ -789,15 +789,15 @@ export class DockerDatasource extends Datasource {
   })
   override async getDigest(
     { registryUrl, packageName, currentDigest }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     const { registryHost, dockerRepository } = getRegistryRepository(
       packageName,
-      registryUrl!
+      registryUrl!,
     );
     logger.debug(
       // TODO: types (#22198)
-      `getDigest(${registryHost}, ${dockerRepository}, ${newValue})`
+      `getDigest(${registryHost}, ${dockerRepository}, ${newValue})`,
     );
     const newTag = newValue ?? 'latest';
     let digest: string | null = null;
@@ -807,7 +807,7 @@ export class DockerDatasource extends Datasource {
         architecture = await this.getImageArchitecture(
           registryHost,
           dockerRepository,
-          currentDigest
+          currentDigest,
         );
       }
 
@@ -817,7 +817,7 @@ export class DockerDatasource extends Datasource {
           registryHost,
           dockerRepository,
           newTag,
-          'head'
+          'head',
         );
 
         if (
@@ -837,12 +837,12 @@ export class DockerDatasource extends Datasource {
       ) {
         logger.debug(
           { registryHost, dockerRepository },
-          'Architecture-specific digest or missing docker-content-digest header - pulling full manifest'
+          'Architecture-specific digest or missing docker-content-digest header - pulling full manifest',
         );
         manifestResponse = await this.getManifestResponse(
           registryHost,
           dockerRepository,
-          newTag
+          newTag,
         );
 
         if (architecture && manifestResponse) {
@@ -869,7 +869,7 @@ export class DockerDatasource extends Datasource {
         if (!digest) {
           logger.debug(
             { registryHost, dockerRepository, newTag },
-            'Extraction digest from manifest response body is deprecated'
+            'Extraction digest from manifest response body is deprecated',
           );
           digest = extractDigestFromResponseBody(manifestResponse!);
         }
@@ -881,7 +881,7 @@ export class DockerDatasource extends Datasource {
         !packageName.includes('/')
       ) {
         logger.debug(
-          `Retrying Digest for ${registryHost}/${dockerRepository} using library/ prefix`
+          `Retrying Digest for ${registryHost}/${dockerRepository} using library/ prefix`,
         );
         return this.getDigest(
           {
@@ -889,7 +889,7 @@ export class DockerDatasource extends Datasource {
             packageName: 'library/' + packageName,
             currentDigest,
           },
-          newValue
+          newValue,
         );
       }
 
@@ -907,7 +907,7 @@ export class DockerDatasource extends Datasource {
           packageName,
           newTag,
         },
-        'Unknown Error looking up docker image digest'
+        'Unknown Error looking up docker image digest',
       );
     }
     return digest;
@@ -951,7 +951,7 @@ export class DockerDatasource extends Datasource {
     key: ({ registryUrl, packageName }: GetReleasesConfig) => {
       const { registryHost, dockerRepository } = getRegistryRepository(
         packageName,
-        registryUrl!
+        registryUrl!,
       );
       return `${registryHost}:${dockerRepository}`;
     },
@@ -966,7 +966,7 @@ export class DockerDatasource extends Datasource {
   }: GetReleasesConfig): Promise<ReleaseResult | null> {
     const { registryHost, dockerRepository } = getRegistryRepository(
       packageName,
-      registryUrl!
+      registryUrl!,
     );
 
     type TagsResultType = AsyncResult<
@@ -977,13 +977,13 @@ export class DockerDatasource extends Datasource {
     const getTags = (): TagsResultType =>
       Result.wrapNullable(
         this.getTags(registryHost, dockerRepository),
-        'tags-error' as const
+        'tags-error' as const,
       ).transform((tags) => tags.map((version) => ({ version })));
 
     const getDockerHubTags = (): TagsResultType =>
       Result.wrapNullable(
         this.getDockerHubTags(dockerRepository),
-        'dockerhub-error' as const
+        'dockerhub-error' as const,
       ).catch(getTags);
 
     const tagsResult =
@@ -1016,7 +1016,7 @@ export class DockerDatasource extends Datasource {
     const labels = await this.getLabels(
       registryHost,
       dockerRepository,
-      latestTag
+      latestTag,
     );
     if (labels) {
       if (is.nonEmptyString(labels[gitRefLabel])) {
diff --git a/lib/modules/datasource/docker/schema.ts b/lib/modules/datasource/docker/schema.ts
index cab3fc6aecf821bcda2e8339f1444ab960fbb0a7..1af87da24b49d45e8e3f6c259304f88e592a4aa9 100644
--- a/lib/modules/datasource/docker/schema.ts
+++ b/lib/modules/datasource/docker/schema.ts
@@ -88,7 +88,7 @@ export const OciImageIndexManifest = ManifestObject.extend({
         'application/vnd.oci.image.index.v1+json',
       ]),
       platform: OciPlatform,
-    })
+    }),
   ),
   annotations: z.record(z.string()).nullish(),
 });
@@ -113,15 +113,15 @@ export type DistributionManifest = z.infer<typeof DistributionManifest>;
  */
 export const DistributionListManifest = ManifestObject.extend({
   mediaType: z.literal(
-    'application/vnd.docker.distribution.manifest.list.v2+json'
+    'application/vnd.docker.distribution.manifest.list.v2+json',
   ),
   manifests: z.array(
     Descriptor.extend({
       mediaType: z.literal(
-        'application/vnd.docker.distribution.manifest.v2+json'
+        'application/vnd.docker.distribution.manifest.v2+json',
       ),
       platform: OciPlatform,
-    })
+    }),
   ),
 });
 
@@ -149,7 +149,7 @@ export const Manifest = ManifestObject.passthrough()
       DistributionListManifest,
       OciImageManifest,
       OciImageIndexManifest,
-    ])
+    ]),
   );
 
 export type Manifest = z.infer<typeof Manifest>;
@@ -182,7 +182,7 @@ export const DockerHubTagsPage = z
       onError: /* istanbul ignore next */ ({ error }) => {
         logger.debug(
           { error },
-          'Docker: Failed to parse some tags from Docker Hub'
+          'Docker: Failed to parse some tags from Docker Hub',
         );
       },
     }),
diff --git a/lib/modules/datasource/dotnet-version/index.spec.ts b/lib/modules/datasource/dotnet-version/index.spec.ts
index d6bc44a8790aec5e4f6645a52588a32af8a8a093..d44ac205e4a27186f6b484efa37219d43fe222e7 100644
--- a/lib/modules/datasource/dotnet-version/index.spec.ts
+++ b/lib/modules/datasource/dotnet-version/index.spec.ts
@@ -20,7 +20,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         await getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'non-dotnet',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -31,7 +31,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         await getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'dotnet-sdk',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -47,7 +47,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         await getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'dotnet-sdk',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -58,7 +58,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'dotnet-sdk',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -74,7 +74,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'dotnet-sdk',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -85,7 +85,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         await getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'dotnet-sdk',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -101,7 +101,7 @@ describe('modules/datasource/dotnet-version/index', () => {
         await getPkgReleases({
           datasource: DotnetVersionDatasource.id,
           packageName: 'dotnet-sdk',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/dotnet-version/index.ts b/lib/modules/datasource/dotnet-version/index.ts
index a544e51754fa5f18d3cf299b322e2d967e0a7e9b..c7051630d6223c2aa45f595eb64e9e4fbcfd3559 100644
--- a/lib/modules/datasource/dotnet-version/index.ts
+++ b/lib/modules/datasource/dotnet-version/index.ts
@@ -39,13 +39,13 @@ export class DotnetVersionDatasource extends Datasource {
       const registryUrl = this.defaultRegistryUrls[0];
       const { body: urls } = await this.http.getJson(
         registryUrl,
-        ReleasesIndex
+        ReleasesIndex,
       );
 
       const channelReleases = await p.map(
         urls,
         (url) => this.getChannelReleases(url, packageName),
-        { concurrency: 1, stopOnError: true }
+        { concurrency: 1, stopOnError: true },
       );
       const releases = channelReleases.flat();
 
@@ -68,7 +68,7 @@ export class DotnetVersionDatasource extends Datasource {
   })
   async getChannelReleases(
     releaseUrl: string,
-    packageName: string
+    packageName: string,
   ): Promise<Release[]> {
     const schema =
       packageName === 'dotnet-sdk' ? DotnetSdkReleases : DotnetRuntimeReleases;
diff --git a/lib/modules/datasource/dotnet-version/schema.ts b/lib/modules/datasource/dotnet-version/schema.ts
index b41350695cce54c27d5f477b1abe3445d903c821..48f808c24fe294de31592744f55213bcaa1e6444 100644
--- a/lib/modules/datasource/dotnet-version/schema.ts
+++ b/lib/modules/datasource/dotnet-version/schema.ts
@@ -9,7 +9,7 @@ export const ReleasesIndex = z
         .object({
           'releases.json': z.string(),
         })
-        .transform(({ 'releases.json': releasesUrl }) => releasesUrl)
+        .transform(({ 'releases.json': releasesUrl }) => releasesUrl),
     ).catch([]),
   })
   .transform(({ 'releases-index': releasesIndex }) => releasesIndex);
@@ -27,7 +27,7 @@ export const DotnetSdkReleases = z
     releases: LooseArray(
       ReleaseBase.extend({
         sdk: ReleaseDetails,
-      })
+      }),
     ).catch([]),
   })
   .transform(({ releases }): Release[] =>
@@ -36,8 +36,8 @@ export const DotnetSdkReleases = z
         sdk: { version },
         'release-date': releaseTimestamp,
         'release-notes': changelogUrl,
-      }) => ({ version, releaseTimestamp, changelogUrl })
-    )
+      }) => ({ version, releaseTimestamp, changelogUrl }),
+    ),
   );
 
 export const DotnetRuntimeReleases = z
@@ -45,7 +45,7 @@ export const DotnetRuntimeReleases = z
     releases: LooseArray(
       ReleaseBase.extend({
         runtime: ReleaseDetails,
-      })
+      }),
     ).catch([]),
   })
   .transform(({ releases }): Release[] =>
@@ -54,6 +54,6 @@ export const DotnetRuntimeReleases = z
         runtime: { version },
         'release-date': releaseTimestamp,
         'release-notes': changelogUrl,
-      }) => ({ version, releaseTimestamp, changelogUrl })
-    )
+      }) => ({ version, releaseTimestamp, changelogUrl }),
+    ),
   );
diff --git a/lib/modules/datasource/endoflife-date/index.spec.ts b/lib/modules/datasource/endoflife-date/index.spec.ts
index 2c29e0e3c385724606532a9770348215d6a484b2..d3660718d0031057c5d18e16e9674f66faf0b116 100644
--- a/lib/modules/datasource/endoflife-date/index.spec.ts
+++ b/lib/modules/datasource/endoflife-date/index.spec.ts
@@ -95,7 +95,7 @@ describe('modules/datasource/endoflife-date/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -105,7 +105,7 @@ describe('modules/datasource/endoflife-date/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -115,7 +115,7 @@ describe('modules/datasource/endoflife-date/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
diff --git a/lib/modules/datasource/endoflife-date/schema.ts b/lib/modules/datasource/endoflife-date/schema.ts
index ac4eabdfe85fc154c7bc417feeaee8b2163bc714..4f184a86fc9d72409b091bb601d458bc4b8bf077 100644
--- a/lib/modules/datasource/endoflife-date/schema.ts
+++ b/lib/modules/datasource/endoflife-date/schema.ts
@@ -30,6 +30,6 @@ export const EndoflifeDateVersions = z
       const version = latest ?? cycle;
       const isDeprecated = eol === true || discontinued === true;
       return { version, releaseTimestamp, isDeprecated };
-    }
+    },
   )
   .array();
diff --git a/lib/modules/datasource/flutter-version/index.spec.ts b/lib/modules/datasource/flutter-version/index.spec.ts
index 5af2bb5e92d008ea778afab85e023a74868e9e93..40e10f8ec0c38c1a2b5fbf55cd223434b357a290 100644
--- a/lib/modules/datasource/flutter-version/index.spec.ts
+++ b/lib/modules/datasource/flutter-version/index.spec.ts
@@ -17,7 +17,7 @@ describe('modules/datasource/flutter-version/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -27,7 +27,7 @@ describe('modules/datasource/flutter-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -37,7 +37,7 @@ describe('modules/datasource/flutter-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/flutter-version/index.ts b/lib/modules/datasource/flutter-version/index.ts
index 293134377f733401bf77463b54eb685fdea73a24..bead276e7a3f488f02061d9a31641d677831d0d1 100644
--- a/lib/modules/datasource/flutter-version/index.ts
+++ b/lib/modules/datasource/flutter-version/index.ts
@@ -37,7 +37,7 @@ export class FlutterVersionDatasource extends Datasource {
     try {
       const resp = (
         await this.http.getJson<FlutterResponse>(
-          `${registryUrl}/flutter_infra_release/releases/releases_linux.json`
+          `${registryUrl}/flutter_infra_release/releases/releases_linux.json`,
         )
       ).body;
       result.releases = resp.releases
diff --git a/lib/modules/datasource/galaxy-collection/index.spec.ts b/lib/modules/datasource/galaxy-collection/index.spec.ts
index feccb01d8d5b07a7649f9ef618100723d2055261..4fe355e057f9abcf5493ffb8b2672b0369f4a55c 100644
--- a/lib/modules/datasource/galaxy-collection/index.spec.ts
+++ b/lib/modules/datasource/galaxy-collection/index.spec.ts
@@ -6,16 +6,16 @@ import { GalaxyCollectionDatasource } from '.';
 
 const communityKubernetesBase = Fixtures.get('community_kubernetes_base.json');
 const communityKubernetesVersions = Fixtures.get(
-  'community_kubernetes_versions.json'
+  'community_kubernetes_versions.json',
 );
 const communityKubernetesDetails121 = Fixtures.get(
-  'community_kubernetes_version_details_1.2.1.json'
+  'community_kubernetes_version_details_1.2.1.json',
 );
 const communityKubernetesDetails120 = Fixtures.get(
-  'community_kubernetes_version_details_1.2.0.json'
+  'community_kubernetes_version_details_1.2.0.json',
 );
 const communityKubernetesDetails0111 = Fixtures.get(
-  'community_kubernetes_version_details_0.11.1.json'
+  'community_kubernetes_version_details_0.11.1.json',
 );
 
 const baseUrl = 'https://old-galaxy.ansible.com';
@@ -30,7 +30,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'foo.bar',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -40,7 +40,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'foo.bar',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -53,7 +53,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'community.kubernetes',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -68,7 +68,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'community.kubernetes',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -83,7 +83,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'community.kubernetes',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -116,7 +116,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         await getPkgReleases({
           datasource,
           packageName: '',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -125,7 +125,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         await getPkgReleases({
           datasource,
           packageName: '',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -138,7 +138,7 @@ describe('modules/datasource/galaxy-collection/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'foo.bar',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/galaxy-collection/index.ts b/lib/modules/datasource/galaxy-collection/index.ts
index 8a6ed239f9ee74bbd65cc738422b5ef3f4d3aef5..dcd4ceca31734e1063322f3880fbe3bad98e48d6 100644
--- a/lib/modules/datasource/galaxy-collection/index.ts
+++ b/lib/modules/datasource/galaxy-collection/index.ts
@@ -48,7 +48,7 @@ export class GalaxyCollectionDatasource extends Datasource {
     if (!baseUrlResponse?.body) {
       logger.warn(
         { dependency: packageName },
-        `Received invalid data from ${baseUrl}`
+        `Received invalid data from ${baseUrl}`,
       );
       return null;
     }
@@ -60,7 +60,7 @@ export class GalaxyCollectionDatasource extends Datasource {
     let versionsUrlResponse: HttpResponse<VersionsProjectResult>;
     try {
       versionsUrlResponse = await this.http.getJson<VersionsProjectResult>(
-        versionsUrl
+        versionsUrl,
       );
     } catch (err) {
       this.handleGenericErrors(err);
@@ -83,10 +83,10 @@ export class GalaxyCollectionDatasource extends Datasource {
       (basicRelease) =>
         this.http
           .getJson<VersionsDetailResult>(
-            `${versionsUrl}${basicRelease.version}/`
+            `${versionsUrl}${basicRelease.version}/`,
           )
           .then(
-            (versionDetailResultResponse) => versionDetailResultResponse.body
+            (versionDetailResultResponse) => versionDetailResultResponse.body,
           )
           .then((versionDetails) => {
             try {
@@ -106,11 +106,11 @@ export class GalaxyCollectionDatasource extends Datasource {
             } catch (err) {
               logger.warn(
                 { dependency: packageName, err },
-                `Received invalid data from ${versionsUrl}${basicRelease.version}/`
+                `Received invalid data from ${versionsUrl}${basicRelease.version}/`,
               );
               return null;
             }
-          })
+          }),
     );
     // filter failed versions
     const filteredReleases = enrichedReleases.filter(is.truthy);
diff --git a/lib/modules/datasource/galaxy/index.spec.ts b/lib/modules/datasource/galaxy/index.spec.ts
index 49c37c37f74abe91358728597ef965b80ffe552a..13cea554d13d4ddb6e4d3f86e33f0ca9b87807ab 100644
--- a/lib/modules/datasource/galaxy/index.spec.ts
+++ b/lib/modules/datasource/galaxy/index.spec.ts
@@ -17,7 +17,7 @@ describe('modules/datasource/galaxy/index', () => {
         await getPkgReleases({
           datasource: GalaxyDatasource.id,
           packageName: 'non_existent_crate',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -30,7 +30,7 @@ describe('modules/datasource/galaxy/index', () => {
         await getPkgReleases({
           datasource: GalaxyDatasource.id,
           packageName: 'non_existent_crate',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -43,7 +43,7 @@ describe('modules/datasource/galaxy/index', () => {
         await getPkgReleases({
           datasource: GalaxyDatasource.id,
           packageName: 'non_existent_crate',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -56,7 +56,7 @@ describe('modules/datasource/galaxy/index', () => {
         await getPkgReleases({
           datasource: GalaxyDatasource.id,
           packageName: 'some_crate',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -69,7 +69,7 @@ describe('modules/datasource/galaxy/index', () => {
         await getPkgReleases({
           datasource: GalaxyDatasource.id,
           packageName: 'some_crate',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -108,7 +108,7 @@ describe('modules/datasource/galaxy/index', () => {
         getPkgReleases({
           datasource: GalaxyDatasource.id,
           packageName: 'some_crate',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
diff --git a/lib/modules/datasource/galaxy/index.ts b/lib/modules/datasource/galaxy/index.ts
index 479f4d889269addd2100feb22735e46d658990a9..edc3b7c66d93cb7c11b21047c9a4cf8fa68e01c6 100644
--- a/lib/modules/datasource/galaxy/index.ts
+++ b/lib/modules/datasource/galaxy/index.ts
@@ -47,14 +47,14 @@ export class GalaxyDatasource extends Datasource {
     if (body.results.length > 1) {
       logger.warn(
         { dependency: packageName },
-        `Received multiple results from ${galaxyAPIUrl}`
+        `Received multiple results from ${galaxyAPIUrl}`,
       );
       return null;
     }
     if (body.results.length === 0) {
       logger.info(
         { dependency: packageName },
-        `Received no results from ${galaxyAPIUrl}`
+        `Received no results from ${galaxyAPIUrl}`,
       );
       return null;
     }
@@ -80,7 +80,7 @@ export class GalaxyDatasource extends Datasource {
         };
 
         return release;
-      }
+      },
     );
 
     return result;
diff --git a/lib/modules/datasource/galaxy/schema.ts b/lib/modules/datasource/galaxy/schema.ts
index 8580c319904a8133244b67fe48d7c9592a8e5b88..a5362dbb4204ccc1f9768df1c7aeae82b35d60ad 100644
--- a/lib/modules/datasource/galaxy/schema.ts
+++ b/lib/modules/datasource/galaxy/schema.ts
@@ -9,11 +9,11 @@ export const GalaxyV1 = z.object({
           z.object({
             name: z.string(),
             created: z.string(),
-          })
+          }),
         ),
       }),
       github_user: z.string().optional(),
       github_repo: z.string().optional(),
-    })
+    }),
   ),
 });
diff --git a/lib/modules/datasource/git-refs/index.spec.ts b/lib/modules/datasource/git-refs/index.spec.ts
index a5917fbd0ebed505dec0a0d6635057f30701d8de..83207d8ccd4853606682b6f352548831fe0c82c9 100644
--- a/lib/modules/datasource/git-refs/index.spec.ts
+++ b/lib/modules/datasource/git-refs/index.spec.ts
@@ -83,7 +83,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'a tag to look up' },
-        'v2.0.0'
+        'v2.0.0',
       );
       expect(digest).toBeNull();
     });
@@ -93,7 +93,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'a tag to look up' },
-        'v1.0.4'
+        'v1.0.4',
       );
       expect(digest).toBe('2b52829c7c1bd65b3501c450849c53b90b11fa0e');
     });
@@ -103,7 +103,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'a tag to look up' },
-        'master'
+        'master',
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
     });
@@ -113,7 +113,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
     });
@@ -123,7 +123,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
       expect(gitMock.env).toHaveBeenCalledWith({});
@@ -140,7 +140,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
       expect(gitMock.env).toHaveBeenCalledWith({
@@ -165,7 +165,7 @@ describe('modules/datasource/git-refs/index', () => {
 
       const digest = await new GitRefsDatasource().getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
       expect(gitMock.env).toHaveBeenCalledWith({
diff --git a/lib/modules/datasource/git-refs/index.ts b/lib/modules/datasource/git-refs/index.ts
index d861fa957ac19c871ab768126e15c609187b217f..d6f93ba1a12753f8d10d35c74e1cb2bccafac247 100644
--- a/lib/modules/datasource/git-refs/index.ts
+++ b/lib/modules/datasource/git-refs/index.ts
@@ -60,7 +60,7 @@ export class GitRefsDatasource extends GitDatasource {
 
   override async getDigest(
     { packageName }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     const rawRefs: RawRefs[] | null = await this.getRawRefs({ packageName });
 
@@ -73,11 +73,11 @@ export class GitRefsDatasource extends GitDatasource {
     if (newValue) {
       ref = rawRefs.find(
         (rawRef) =>
-          ['heads', 'tags'].includes(rawRef.type) && rawRef.value === newValue
+          ['heads', 'tags'].includes(rawRef.type) && rawRef.value === newValue,
       );
     } else {
       ref = rawRefs.find(
-        (rawRef) => rawRef.type === '' && rawRef.value === 'HEAD'
+        (rawRef) => rawRef.type === '' && rawRef.value === 'HEAD',
       );
     }
     if (ref) {
diff --git a/lib/modules/datasource/git-tags/index.spec.ts b/lib/modules/datasource/git-tags/index.spec.ts
index 58eb6c084a06904a0f1ab8c3d2036a09abef9d30..d0b345d905fb560dc71002bcde5a3ac415d209aa 100644
--- a/lib/modules/datasource/git-tags/index.spec.ts
+++ b/lib/modules/datasource/git-tags/index.spec.ts
@@ -66,7 +66,7 @@ describe('modules/datasource/git-tags/index', () => {
 
       const digest = await datasourceInstance.getDigest(
         { packageName: 'a tag to look up' },
-        'notfound'
+        'notfound',
       );
       expect(digest).toBeNull();
     });
@@ -76,7 +76,7 @@ describe('modules/datasource/git-tags/index', () => {
 
       const digest = await datasourceInstance.getDigest(
         { packageName: 'a tag to look up' },
-        'v1.0.2'
+        'v1.0.2',
       );
       expect(digest).toBe('9cb93e0b236385a4e2efd089d7c6a458f5ff321f');
     });
@@ -86,7 +86,7 @@ describe('modules/datasource/git-tags/index', () => {
 
       const digest = await datasourceInstance.getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
     });
@@ -102,7 +102,7 @@ describe('modules/datasource/git-tags/index', () => {
 
       const digest = await datasourceInstance.getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
       expect(gitMock.env).toHaveBeenCalledWith({
@@ -127,7 +127,7 @@ describe('modules/datasource/git-tags/index', () => {
 
       const digest = await datasourceInstance.getDigest(
         { packageName: 'another tag to look up' },
-        undefined
+        undefined,
       );
       expect(digest).toBe('a9920c014aebc28dc1b23e7efcc006d0455cc710');
       expect(gitMock.env).toHaveBeenCalledWith({
diff --git a/lib/modules/datasource/git-tags/index.ts b/lib/modules/datasource/git-tags/index.ts
index e8773274f6147e18d1239418e2c2f967ddce589b..09f084d09b345ed22b11a85d83d7a971bd08d696 100644
--- a/lib/modules/datasource/git-tags/index.ts
+++ b/lib/modules/datasource/git-tags/index.ts
@@ -46,7 +46,7 @@ export class GitTagsDatasource extends GitDatasource {
 
   override async getDigest(
     { packageName }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     const rawRefs = await this.getRawRefs({ packageName });
     const findValue = newValue ?? 'HEAD';
diff --git a/lib/modules/datasource/gitea-releases/index.spec.ts b/lib/modules/datasource/gitea-releases/index.spec.ts
index 460200915ed6557d9519a30b2b6614a08fade71a..f12aeab09671576d1cbe580fa4b15b3ed6e4ebf1 100644
--- a/lib/modules/datasource/gitea-releases/index.spec.ts
+++ b/lib/modules/datasource/gitea-releases/index.spec.ts
@@ -260,7 +260,7 @@ describe('modules/datasource/gitea-releases/index', () => {
       httpMock
         .scope('https://codeberg.org')
         .get(
-          '/api/v1/repos/forgejo-contrib/forgejo-helm/commits?stat=false&verification=false&files=false&page=1&limit=1'
+          '/api/v1/repos/forgejo-contrib/forgejo-helm/commits?stat=false&verification=false&files=false&page=1&limit=1',
         )
         .reply(200, body);
 
@@ -278,7 +278,7 @@ describe('modules/datasource/gitea-releases/index', () => {
       httpMock
         .scope('https://gitea.com')
         .get(
-          '/api/v1/repos/some/dep2/commits?stat=false&verification=false&files=false&page=1&limit=1'
+          '/api/v1/repos/some/dep2/commits?stat=false&verification=false&files=false&page=1&limit=1',
         )
         .reply(200, []);
       const res = await getDigest({
@@ -312,7 +312,7 @@ describe('modules/datasource/gitea-releases/index', () => {
           datasource,
           packageName: 'gitea/helm-chart',
         },
-        'v9.0.1'
+        'v9.0.1',
       );
       expect(res).toBe('29c9bbb4bfec04ab22761cc2d999eb0fcb8acbed');
     });
diff --git a/lib/modules/datasource/gitea-releases/index.ts b/lib/modules/datasource/gitea-releases/index.ts
index 4dc33c7a5c4ba19cabec9d4d6e69b4b45bb7a37f..a679c7786c3fda983d284443fe40274f4cffb306 100644
--- a/lib/modules/datasource/gitea-releases/index.ts
+++ b/lib/modules/datasource/gitea-releases/index.ts
@@ -30,7 +30,7 @@ export class GiteaReleasesDatasource extends Datasource {
     packageName: repo,
   }: GetReleasesConfig): Promise<ReleaseResult | null> {
     const url = `${GiteaTagsDatasource.getApiUrl(
-      registryUrl
+      registryUrl,
     )}repos/${repo}/releases?draft=false`;
     const tags = (
       await this.http.getJson(
@@ -38,7 +38,7 @@ export class GiteaReleasesDatasource extends Datasource {
         {
           paginate: true,
         },
-        ReleasesSchema
+        ReleasesSchema,
       )
     ).body;
 
@@ -65,10 +65,10 @@ export class GiteaReleasesDatasource extends Datasource {
   async getTagCommit(
     registryUrl: string | undefined,
     repo: string,
-    tag: string
+    tag: string,
   ): Promise<string | null> {
     const url = `${GiteaTagsDatasource.getApiUrl(
-      registryUrl
+      registryUrl,
     )}repos/${repo}/tags/${tag}`;
 
     const { body } = await this.http.getJson(url, TagSchema);
@@ -85,14 +85,14 @@ export class GiteaReleasesDatasource extends Datasource {
   })
   override async getDigest(
     { packageName: repo, registryUrl }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     if (newValue?.length) {
       return this.getTagCommit(registryUrl, repo, newValue);
     }
 
     const url = `${GiteaTagsDatasource.getApiUrl(
-      registryUrl
+      registryUrl,
     )}repos/${repo}/commits?stat=false&verification=false&files=false&page=1&limit=1`;
     const { body } = await this.http.getJson(url, CommitsSchema);
 
diff --git a/lib/modules/datasource/gitea-tags/index.spec.ts b/lib/modules/datasource/gitea-tags/index.spec.ts
index b0b38c05029439d82082965565b266a227e56172..925232ca83f0a73aa07d6a672827ba0d023fbd7c 100644
--- a/lib/modules/datasource/gitea-tags/index.spec.ts
+++ b/lib/modules/datasource/gitea-tags/index.spec.ts
@@ -239,7 +239,7 @@ describe('modules/datasource/gitea-tags/index', () => {
       httpMock
         .scope('https://codeberg.org')
         .get(
-          '/api/v1/repos/forgejo-contrib/forgejo-helm/commits?stat=false&verification=false&files=false&page=1&limit=1'
+          '/api/v1/repos/forgejo-contrib/forgejo-helm/commits?stat=false&verification=false&files=false&page=1&limit=1',
         )
         .reply(200, body);
 
@@ -257,7 +257,7 @@ describe('modules/datasource/gitea-tags/index', () => {
       httpMock
         .scope('https://gitea.com')
         .get(
-          '/api/v1/repos/some/dep2/commits?stat=false&verification=false&files=false&page=1&limit=1'
+          '/api/v1/repos/some/dep2/commits?stat=false&verification=false&files=false&page=1&limit=1',
         )
         .reply(200, []);
       const res = await getDigest({
@@ -291,7 +291,7 @@ describe('modules/datasource/gitea-tags/index', () => {
           datasource,
           packageName: 'gitea/helm-chart',
         },
-        'v9.0.1'
+        'v9.0.1',
       );
       expect(res).toBe('29c9bbb4bfec04ab22761cc2d999eb0fcb8acbed');
     });
diff --git a/lib/modules/datasource/gitea-tags/index.ts b/lib/modules/datasource/gitea-tags/index.ts
index dc7d2d00d58702b82d70ef4e1f2e3f65c426704b..7dd85b5e6e9391e000cd1799c2ac09e2e6077c55 100644
--- a/lib/modules/datasource/gitea-tags/index.ts
+++ b/lib/modules/datasource/gitea-tags/index.ts
@@ -27,7 +27,7 @@ export class GiteaTagsDatasource extends Datasource {
   static getApiUrl(registryUrl?: string): string {
     const res = GiteaTagsDatasource.getRegistryURL(registryUrl).replace(
       regEx(/\/api\/v1$/),
-      ''
+      '',
     );
     return `${ensureTrailingSlash(res)}api/v1/`;
   }
@@ -35,7 +35,7 @@ export class GiteaTagsDatasource extends Datasource {
   static getCacheKey(
     registryUrl: string | undefined,
     repo: string,
-    type: string
+    type: string,
   ): string {
     return `${GiteaTagsDatasource.getRegistryURL(registryUrl)}:${repo}:${type}`;
   }
@@ -57,7 +57,7 @@ export class GiteaTagsDatasource extends Datasource {
     packageName: repo,
   }: GetReleasesConfig): Promise<ReleaseResult | null> {
     const url = `${GiteaTagsDatasource.getApiUrl(
-      registryUrl
+      registryUrl,
     )}repos/${repo}/tags`;
     const tags = (
       await this.http.getJson(
@@ -65,7 +65,7 @@ export class GiteaTagsDatasource extends Datasource {
         {
           paginate: true,
         },
-        TagsSchema
+        TagsSchema,
       )
     ).body;
 
@@ -92,10 +92,10 @@ export class GiteaTagsDatasource extends Datasource {
   async getTagCommit(
     registryUrl: string | undefined,
     repo: string,
-    tag: string
+    tag: string,
   ): Promise<string | null> {
     const url = `${GiteaTagsDatasource.getApiUrl(
-      registryUrl
+      registryUrl,
     )}repos/${repo}/tags/${tag}`;
 
     const { body } = await this.http.getJson(url, TagSchema);
@@ -112,14 +112,14 @@ export class GiteaTagsDatasource extends Datasource {
   })
   override async getDigest(
     { packageName: repo, registryUrl }: DigestConfig,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     if (newValue?.length) {
       return this.getTagCommit(registryUrl, repo, newValue);
     }
 
     const url = `${GiteaTagsDatasource.getApiUrl(
-      registryUrl
+      registryUrl,
     )}repos/${repo}/commits?stat=false&verification=false&files=false&page=1&limit=1`;
     const { body } = await this.http.getJson(url, CommitsSchema);
 
diff --git a/lib/modules/datasource/github-release-attachments/digest.spec.ts b/lib/modules/datasource/github-release-attachments/digest.spec.ts
index 40fedd2b1461c92480f62f2a4d5dff30d7d183c0..8635dce3f90fa8155f6c9ee0e9d66ac187c8981d 100644
--- a/lib/modules/datasource/github-release-attachments/digest.spec.ts
+++ b/lib/modules/datasource/github-release-attachments/digest.spec.ts
@@ -9,7 +9,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
   const packageName = 'some/dep';
   const releaseMock = new GitHubReleaseAttachmentMocker(
     'https://api.github.com',
-    packageName
+    packageName,
   );
   const githubReleaseAttachments = new GithubReleaseAttachmentsDatasource();
 
@@ -18,12 +18,12 @@ describe('modules/datasource/github-release-attachments/digest', () => {
       const release = releaseMock.withDigestFileAsset(
         'v1.0.0',
         'test-digest    linux-amd64.tar.gz',
-        'another-digest linux-arm64.tar.gz'
+        'another-digest linux-arm64.tar.gz',
       );
 
       const digestAsset = await githubReleaseAttachments.findDigestAsset(
         release,
-        'test-digest'
+        'test-digest',
       );
       expect(digestAsset?.assetName).toBe('SHASUMS.txt');
       expect(digestAsset?.digestedFileName).toBe('linux-amd64.tar.gz');
@@ -32,7 +32,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
     it('returns null when not found in digest file asset', async () => {
       const release = releaseMock.withDigestFileAsset(
         'v1.0.0',
-        'another-digest linux-arm64.tar.gz'
+        'another-digest linux-arm64.tar.gz',
       );
       // Small assets like this digest file may be downloaded twice
       httpMock
@@ -42,7 +42,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
 
       const digestAsset = await githubReleaseAttachments.findDigestAsset(
         release,
-        'test-digest'
+        'test-digest',
       );
       expect(digestAsset).toBeNull();
     });
@@ -59,7 +59,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
 
       const digestAsset = await githubReleaseAttachments.findDigestAsset(
         release,
-        contentDigest
+        contentDigest,
       );
       expect(digestAsset?.assetName).toBe('asset.zip');
       expect(digestAsset?.digestedFileName).toBeUndefined();
@@ -69,7 +69,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
       const release = releaseMock.release('v1.0.0');
       const digestAsset = await githubReleaseAttachments.findDigestAsset(
         release,
-        'test-digest'
+        'test-digest',
       );
       expect(digestAsset).toBeNull();
     });
@@ -87,11 +87,11 @@ describe('modules/datasource/github-release-attachments/digest', () => {
       it('downloads updated digest file', async () => {
         const release = releaseMock.withDigestFileAsset(
           'v1.0.1',
-          'updated-digest  asset.zip'
+          'updated-digest  asset.zip',
         );
         const digest = await githubReleaseAttachments.mapDigestAssetToRelease(
           digestAsset,
-          release
+          release,
         );
         expect(digest).toBe('updated-digest');
       });
@@ -104,11 +104,11 @@ describe('modules/datasource/github-release-attachments/digest', () => {
 
         const release = releaseMock.withDigestFileAsset(
           'v1.0.1',
-          'updated-digest  asset-1.0.1.zip'
+          'updated-digest  asset-1.0.1.zip',
         );
         const digest = await githubReleaseAttachments.mapDigestAssetToRelease(
           digestAssetWithVersion,
-          release
+          release,
         );
         expect(digest).toBe('updated-digest');
       });
@@ -116,11 +116,11 @@ describe('modules/datasource/github-release-attachments/digest', () => {
       it('returns null when not found in digest file', async () => {
         const release = releaseMock.withDigestFileAsset(
           'v1.0.1',
-          'moot-digest asset.tar.gz'
+          'moot-digest asset.tar.gz',
         );
         const digest = await githubReleaseAttachments.mapDigestAssetToRelease(
           digestAsset,
-          release
+          release,
         );
         expect(digest).toBeNull();
       });
@@ -129,7 +129,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
         const release = releaseMock.release('v1.0.1');
         const digest = await githubReleaseAttachments.mapDigestAssetToRelease(
           digestAsset,
-          release
+          release,
         );
         expect(digest).toBeNull();
       });
@@ -151,7 +151,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
 
         const digest = await githubReleaseAttachments.mapDigestAssetToRelease(
           digestAsset,
-          release
+          release,
         );
         expect(digest).toEqual(contentDigest);
       });
@@ -160,7 +160,7 @@ describe('modules/datasource/github-release-attachments/digest', () => {
         const release = releaseMock.release('v1.0.1');
         const digest = await githubReleaseAttachments.mapDigestAssetToRelease(
           digestAsset,
-          release
+          release,
         );
         expect(digest).toBeNull();
       });
diff --git a/lib/modules/datasource/github-release-attachments/index.spec.ts b/lib/modules/datasource/github-release-attachments/index.spec.ts
index 6494eb3bc88cf8348132a91af99cb836e21e1ec3..f1c17ad42c9102617f02fbea065438c94f363776 100644
--- a/lib/modules/datasource/github-release-attachments/index.spec.ts
+++ b/lib/modules/datasource/github-release-attachments/index.spec.ts
@@ -93,13 +93,13 @@ describe('modules/datasource/github-release-attachments/index', () => {
 
     const releaseMock = new GitHubReleaseAttachmentMocker(
       githubApiHost,
-      packageName
+      packageName,
     );
 
     it('requires currentDigest', async () => {
       const digest = await getDigest(
         { datasource: GithubReleaseAttachmentsDatasource.id, packageName },
-        currentValue
+        currentValue,
       );
       expect(digest).toBeNull();
     });
@@ -111,7 +111,7 @@ describe('modules/datasource/github-release-attachments/index', () => {
           packageName,
           currentDigest,
         },
-        currentValue
+        currentValue,
       );
       expect(digest).toEqual(currentDigest);
     });
@@ -119,7 +119,7 @@ describe('modules/datasource/github-release-attachments/index', () => {
     it('returns updated digest in new release', async () => {
       releaseMock.withDigestFileAsset(
         currentValue,
-        `${currentDigest} asset.zip`
+        `${currentDigest} asset.zip`,
       );
       const nextValue = 'v1.0.1';
       const nextDigest = 'updated-digest';
@@ -131,7 +131,7 @@ describe('modules/datasource/github-release-attachments/index', () => {
           currentValue,
           currentDigest,
         },
-        nextValue
+        nextValue,
       );
       expect(digest).toEqual(nextDigest);
     });
@@ -147,7 +147,7 @@ describe('modules/datasource/github-release-attachments/index', () => {
           currentValue,
           currentDigest,
         },
-        currentValue
+        currentValue,
       );
       expect(digest).toEqual(currentDigest);
     });
diff --git a/lib/modules/datasource/github-release-attachments/index.ts b/lib/modules/datasource/github-release-attachments/index.ts
index 5df62728d1402e814fa303a8a24df613a5ecf7a8..d79242124e5acb9d103601a52b64c8dfac84e4c6 100644
--- a/lib/modules/datasource/github-release-attachments/index.ts
+++ b/lib/modules/datasource/github-release-attachments/index.ts
@@ -51,10 +51,10 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
   })
   async findDigestFile(
     release: GithubRestRelease,
-    digest: string
+    digest: string,
   ): Promise<GithubDigestFile | null> {
     const smallAssets = release.assets.filter(
-      (a: GithubRestAsset) => a.size < 5 * 1024
+      (a: GithubRestAsset) => a.size < 5 * 1024,
     );
     for (const asset of smallAssets) {
       const res = await this.http.get(asset.browser_download_url);
@@ -81,7 +81,7 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
   })
   async downloadAndDigest(
     asset: GithubRestAsset,
-    algorithm: string
+    algorithm: string,
   ): Promise<string> {
     const res = this.http.stream(asset.browser_download_url);
     const digest = await hashStream(res, algorithm);
@@ -90,7 +90,7 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
 
   async findAssetWithDigest(
     release: GithubRestRelease,
-    digest: string
+    digest: string,
   ): Promise<GithubDigestFile | null> {
     const algorithm = inferHashAlg(digest);
     const assetsBySize = release.assets.sort(
@@ -102,7 +102,7 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
           return 1;
         }
         return 0;
-      }
+      },
     );
 
     for (const asset of assetsBySize) {
@@ -121,7 +121,7 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
   /** Identify the asset associated with a known digest. */
   async findDigestAsset(
     release: GithubRestRelease,
-    digest: string
+    digest: string,
   ): Promise<GithubDigestFile | null> {
     const digestFile = await this.findDigestFile(release, digest);
     if (digestFile) {
@@ -135,16 +135,16 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
   /** Given a digest asset, find the equivalent digest in a different release. */
   async mapDigestAssetToRelease(
     digestAsset: GithubDigestFile,
-    release: GithubRestRelease
+    release: GithubRestRelease,
   ): Promise<string | null> {
     const current = digestAsset.currentVersion.replace(regEx(/^v/), '');
     const next = release.tag_name.replace(regEx(/^v/), '');
     const releaseChecksumAssetName = digestAsset.assetName.replace(
       current,
-      next
+      next,
     );
     const releaseAsset = release.assets.find(
-      (a: GithubRestAsset) => a.name === releaseChecksumAssetName
+      (a: GithubRestAsset) => a.name === releaseChecksumAssetName,
     );
     if (!releaseAsset) {
       return null;
@@ -152,7 +152,7 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
     if (digestAsset.digestedFileName) {
       const releaseFilename = digestAsset.digestedFileName.replace(
         current,
-        next
+        next,
       );
       const res = await this.http.get(releaseAsset.browser_download_url);
       for (const line of res.body.split(newlineRegex)) {
@@ -188,11 +188,11 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
       currentDigest,
       registryUrl,
     }: DigestConfig,
-    newValue: string
+    newValue: string,
   ): Promise<string | null> {
     logger.debug(
       { repo, currentValue, currentDigest, registryUrl, newValue },
-      'getDigest'
+      'getDigest',
     );
     if (!currentDigest) {
       return null;
@@ -203,18 +203,18 @@ export class GithubReleaseAttachmentsDatasource extends Datasource {
 
     const apiBaseUrl = getApiBaseUrl(registryUrl);
     const { body: currentRelease } = await this.http.getJson<GithubRestRelease>(
-      `${apiBaseUrl}repos/${repo}/releases/tags/${currentValue}`
+      `${apiBaseUrl}repos/${repo}/releases/tags/${currentValue}`,
     );
     const digestAsset = await this.findDigestAsset(
       currentRelease,
-      currentDigest
+      currentDigest,
     );
     let newDigest: string | null;
     if (!digestAsset || newValue === currentValue) {
       newDigest = currentDigest;
     } else {
       const { body: newRelease } = await this.http.getJson<GithubRestRelease>(
-        `${apiBaseUrl}repos/${repo}/releases/tags/${newValue}`
+        `${apiBaseUrl}repos/${repo}/releases/tags/${newValue}`,
       );
       newDigest = await this.mapDigestAssetToRelease(digestAsset, newRelease);
     }
diff --git a/lib/modules/datasource/github-release-attachments/test/index.ts b/lib/modules/datasource/github-release-attachments/test/index.ts
index 84f6f3086c1e285f05a63a03a9445c250f652237..066268558b30ff546724444f458aa52679e164f9 100644
--- a/lib/modules/datasource/github-release-attachments/test/index.ts
+++ b/lib/modules/datasource/github-release-attachments/test/index.ts
@@ -5,7 +5,7 @@ import type { GithubRestRelease } from '../../../../util/github/types';
 export class GitHubReleaseAttachmentMocker {
   constructor(
     private readonly githubApiHost: string,
-    private readonly packageName: string
+    private readonly packageName: string,
   ) {}
 
   release(version: string): GithubRestRelease {
@@ -14,7 +14,7 @@ export class GitHubReleaseAttachmentMocker {
 
   withAssets(
     version: string,
-    assets: { [key: string]: string }
+    assets: { [key: string]: string },
   ): GithubRestRelease {
     const releaseData = partial<GithubRestRelease>({
       tag_name: version,
diff --git a/lib/modules/datasource/github-releases/index.spec.ts b/lib/modules/datasource/github-releases/index.spec.ts
index 999c708fbc99f15363f6b59ae7de098b0366bc70..e6f93f07803bcdc4b282810b88b0a05e23dd8729 100644
--- a/lib/modules/datasource/github-releases/index.spec.ts
+++ b/lib/modules/datasource/github-releases/index.spec.ts
@@ -114,7 +114,7 @@ describe('modules/datasource/github-releases/index', () => {
           packageName,
           currentValue,
         },
-        newValue
+        newValue,
       );
       expect(digest).toBe(newDigest);
     });
@@ -122,7 +122,7 @@ describe('modules/datasource/github-releases/index', () => {
     it('should be independent of the current value', async () => {
       const digest = await getDigest(
         { datasource: GithubReleasesDatasource.id, packageName },
-        newValue
+        newValue,
       );
       expect(digest).toBe(newDigest);
     });
@@ -135,7 +135,7 @@ describe('modules/datasource/github-releases/index', () => {
           currentValue,
           currentDigest,
         },
-        newValue
+        newValue,
       );
       expect(digest).toEqual(newDigest);
     });
@@ -148,7 +148,7 @@ describe('modules/datasource/github-releases/index', () => {
           currentValue,
           currentDigest,
         },
-        'unknown-tag'
+        'unknown-tag',
       );
       expect(digest).toBeNull();
     });
diff --git a/lib/modules/datasource/github-releases/index.ts b/lib/modules/datasource/github-releases/index.ts
index 11714a8593be37023c3061018d23ab7ac017bb65..b13d0e15000eed95a90fc7dfd630a9262ea53ce8 100644
--- a/lib/modules/datasource/github-releases/index.ts
+++ b/lib/modules/datasource/github-releases/index.ts
@@ -43,11 +43,11 @@ export class GithubReleasesDatasource extends Datasource {
       currentDigest,
       registryUrl,
     }: DigestConfig,
-    newValue: string
+    newValue: string,
   ): Promise<string | null> {
     logger.debug(
       { repo, currentValue, currentDigest, registryUrl, newValue },
-      'getDigest'
+      'getDigest',
     );
 
     return findCommitOfTag(registryUrl, repo, newValue, this.http);
diff --git a/lib/modules/datasource/github-runners/index.ts b/lib/modules/datasource/github-runners/index.ts
index 37dda6fdba8052d476253e46f85712bc5a802587..0d33ddc0f172c35fddb48a50cebc1aa69c0d5192 100644
--- a/lib/modules/datasource/github-runners/index.ts
+++ b/lib/modules/datasource/github-runners/index.ts
@@ -23,7 +23,7 @@ export class GithubRunnersDatasource extends Datasource {
 
   public static isValidRunner(
     runnerName: string,
-    runnerVersion: string
+    runnerVersion: string,
   ): boolean {
     const runnerReleases = GithubRunnersDatasource.releases[runnerName];
     if (!runnerReleases) {
@@ -31,7 +31,7 @@ export class GithubRunnersDatasource extends Datasource {
     }
 
     const versionExists = runnerReleases.some(
-      ({ version }) => version === runnerVersion
+      ({ version }) => version === runnerVersion,
     );
 
     return runnerVersion === 'latest' || versionExists;
diff --git a/lib/modules/datasource/github-tags/index.ts b/lib/modules/datasource/github-tags/index.ts
index fca0e7f1a59cdeb4adc640f7e6868c9eff0a3371..328374cb91db5d5b033f76601d6aa97d48907177 100644
--- a/lib/modules/datasource/github-tags/index.ts
+++ b/lib/modules/datasource/github-tags/index.ts
@@ -29,7 +29,7 @@ export class GithubTagsDatasource extends Datasource {
 
   async getCommit(
     registryUrl: string | undefined,
-    githubRepo: string
+    githubRepo: string,
   ): Promise<string | null> {
     const apiBaseUrl = getApiBaseUrl(registryUrl);
     let digest: string | null = null;
@@ -40,7 +40,7 @@ export class GithubTagsDatasource extends Datasource {
     } catch (err) {
       logger.debug(
         { githubRepo, err, registryUrl },
-        'Error getting latest commit from GitHub repo'
+        'Error getting latest commit from GitHub repo',
       );
     }
     return digest;
@@ -55,7 +55,7 @@ export class GithubTagsDatasource extends Datasource {
    */
   override getDigest(
     { packageName: repo, registryUrl }: Partial<DigestConfig>,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     return newValue
       ? findCommitOfTag(registryUrl, repo!, newValue, this.http)
@@ -63,7 +63,7 @@ export class GithubTagsDatasource extends Datasource {
   }
 
   override async getReleases(
-    config: GetReleasesConfig
+    config: GetReleasesConfig,
   ): Promise<ReleaseResult> {
     const { registryUrl, packageName: repo } = config;
     const sourceUrl = getSourceUrl(repo, registryUrl);
@@ -74,7 +74,7 @@ export class GithubTagsDatasource extends Datasource {
         version,
         releaseTimestamp,
         gitRef,
-      })
+      }),
     );
 
     try {
diff --git a/lib/modules/datasource/gitlab-packages/index.spec.ts b/lib/modules/datasource/gitlab-packages/index.spec.ts
index 366fd9a7cc51c7f83d4184c6d883950f19578859..a2fce223c466d51a7e3e2b136b16aecb65f4282a 100644
--- a/lib/modules/datasource/gitlab-packages/index.spec.ts
+++ b/lib/modules/datasource/gitlab-packages/index.spec.ts
@@ -59,7 +59,7 @@ describe('modules/datasource/gitlab-packages/index', () => {
           datasource,
           registryUrls: ['https://gitlab.com'],
           packageName: 'user/project1:mypkg',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -77,7 +77,7 @@ describe('modules/datasource/gitlab-packages/index', () => {
           datasource,
           registryUrls: ['https://gitlab.com'],
           packageName: 'user/project1:mypkg',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -95,7 +95,7 @@ describe('modules/datasource/gitlab-packages/index', () => {
           datasource,
           registryUrls: ['https://gitlab.com'],
           packageName: 'user/project1:mypkg',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
   });
diff --git a/lib/modules/datasource/gitlab-packages/index.ts b/lib/modules/datasource/gitlab-packages/index.ts
index fce9566ef23f59dac21dca0286aba7c470803016..e30bc997a3e0c38568a131901b29b62670c63e74 100644
--- a/lib/modules/datasource/gitlab-packages/index.ts
+++ b/lib/modules/datasource/gitlab-packages/index.ts
@@ -27,7 +27,7 @@ export class GitlabPackagesDatasource extends Datasource {
   static getGitlabPackageApiUrl(
     registryUrl: string,
     projectName: string,
-    packageName: string
+    packageName: string,
   ): string {
     const projectNameEncoded = encodeURIComponent(projectName);
     const packageNameEncoded = encodeURIComponent(packageName);
@@ -36,7 +36,7 @@ export class GitlabPackagesDatasource extends Datasource {
       registryUrl,
       `api/v4/projects`,
       projectNameEncoded,
-      `packages?package_name=${packageNameEncoded}&per_page=100`
+      `packages?package_name=${packageNameEncoded}&per_page=100`,
     );
   }
 
@@ -60,7 +60,7 @@ export class GitlabPackagesDatasource extends Datasource {
     const apiUrl = GitlabPackagesDatasource.getGitlabPackageApiUrl(
       registryUrl,
       projectPart,
-      packagePart
+      packagePart,
     );
 
     const result: ReleaseResult = {
diff --git a/lib/modules/datasource/gitlab-releases/index.spec.ts b/lib/modules/datasource/gitlab-releases/index.spec.ts
index 2a1d498aa9c3b9de8718ae2501ec33b3f51c8f51..04364f4bb9f63b1a99c15b01efa459e63d36e25a 100644
--- a/lib/modules/datasource/gitlab-releases/index.spec.ts
+++ b/lib/modules/datasource/gitlab-releases/index.spec.ts
@@ -51,7 +51,7 @@ describe('modules/datasource/gitlab-releases/index', () => {
         await getPkgReleases({
           datasource: GitlabReleasesDatasource.id,
           packageName: 'some/dep2',
-        })
+        }),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/datasource/gitlab-tags/index.spec.ts b/lib/modules/datasource/gitlab-tags/index.spec.ts
index f9b333fe7e4587d5fd94aaa0a1f7983299ca6129..7f8888a3be9f6afef78bd3652972ebceee60b0f5 100644
--- a/lib/modules/datasource/gitlab-tags/index.spec.ts
+++ b/lib/modules/datasource/gitlab-tags/index.spec.ts
@@ -114,7 +114,7 @@ describe('modules/datasource/gitlab-tags/index', () => {
           registryUrls: ['https://gitlab.company.com/api/v4/'],
           packageName: 'some/dep2',
         },
-        'branch'
+        'branch',
       );
       expect(res).toBe(digest);
     });
@@ -143,7 +143,7 @@ describe('modules/datasource/gitlab-tags/index', () => {
           registryUrls: ['https://gitlab.company.com/api/v4/'],
           packageName: 'some/dep2',
         },
-        'unknown-branch'
+        'unknown-branch',
       );
       expect(res).toBeNull();
     });
diff --git a/lib/modules/datasource/gitlab-tags/index.ts b/lib/modules/datasource/gitlab-tags/index.ts
index c80b9b4911530fbc00fc111d22630da92f052019..d1317c5f8035698232a74b5e6b1c832fffb79933 100644
--- a/lib/modules/datasource/gitlab-tags/index.ts
+++ b/lib/modules/datasource/gitlab-tags/index.ts
@@ -37,7 +37,7 @@ export class GitlabTagsDatasource extends Datasource {
       depHost,
       `api/v4/projects`,
       urlEncodedRepo,
-      `repository/tags?per_page=100`
+      `repository/tags?per_page=100`,
     );
 
     const gitlabTags = (
@@ -71,7 +71,7 @@ export class GitlabTagsDatasource extends Datasource {
   })
   override async getDigest(
     { packageName: repo, registryUrl }: Partial<DigestConfig>,
-    newValue?: string
+    newValue?: string,
   ): Promise<string | null> {
     const depHost = getDepHost(registryUrl);
 
@@ -85,7 +85,7 @@ export class GitlabTagsDatasource extends Datasource {
           `api/v4/projects`,
           urlEncodedRepo,
           `repository/commits/`,
-          newValue
+          newValue,
         );
         const gitlabCommits = await this.http.getJson<GitlabCommit>(url);
         digest = gitlabCommits.body.id;
@@ -94,7 +94,7 @@ export class GitlabTagsDatasource extends Datasource {
           depHost,
           `api/v4/projects`,
           urlEncodedRepo,
-          `repository/commits?per_page=1`
+          `repository/commits?per_page=1`,
         );
         const gitlabCommits = await this.http.getJson<GitlabCommit[]>(url);
         digest = gitlabCommits.body[0].id;
@@ -102,7 +102,7 @@ export class GitlabTagsDatasource extends Datasource {
     } catch (err) {
       logger.debug(
         { gitlabRepo: repo, err, registryUrl },
-        'Error getting latest commit from Gitlab repo'
+        'Error getting latest commit from Gitlab repo',
       );
     }
 
diff --git a/lib/modules/datasource/gitlab-tags/util.spec.ts b/lib/modules/datasource/gitlab-tags/util.spec.ts
index ff675a788417105641c72e3c528f36568a517260..6ed503c06be623a1dfe110106748663bb06e2d7f 100644
--- a/lib/modules/datasource/gitlab-tags/util.spec.ts
+++ b/lib/modules/datasource/gitlab-tags/util.spec.ts
@@ -5,10 +5,10 @@ describe('modules/datasource/gitlab-tags/util', () => {
     it('works', () => {
       expect(getDepHost()).toBe('https://gitlab.com');
       expect(getDepHost('https://gitlab.domain.test/api/v4')).toBe(
-        'https://gitlab.domain.test'
+        'https://gitlab.domain.test',
       );
       expect(getDepHost('https://domain.test/gitlab/api/v4')).toBe(
-        'https://domain.test/gitlab'
+        'https://domain.test/gitlab',
       );
     });
   });
@@ -17,7 +17,7 @@ describe('modules/datasource/gitlab-tags/util', () => {
     it('works', () => {
       expect(getSourceUrl('some/repo')).toBe('https://gitlab.com/some/repo');
       expect(
-        getSourceUrl('some/repo', 'https://gitlab.domain.test/api/v4')
+        getSourceUrl('some/repo', 'https://gitlab.domain.test/api/v4'),
       ).toBe('https://gitlab.domain.test/some/repo');
     });
   });
diff --git a/lib/modules/datasource/gitlab-tags/util.ts b/lib/modules/datasource/gitlab-tags/util.ts
index bb0a6e439dc4f91b6babb6b0e48b841a81429613..301de922674b52ed5eb1b3e5e98212601e270565 100644
--- a/lib/modules/datasource/gitlab-tags/util.ts
+++ b/lib/modules/datasource/gitlab-tags/util.ts
@@ -9,7 +9,7 @@ export function getDepHost(registryUrl: string = defaultRegistryUrl): string {
 
 export function getSourceUrl(
   packageName: string,
-  registryUrl?: string
+  registryUrl?: string,
 ): string {
   const depHost = getDepHost(registryUrl);
   return joinUrlParts(depHost, packageName);
diff --git a/lib/modules/datasource/go/base.spec.ts b/lib/modules/datasource/go/base.spec.ts
index bdc3036fbfd5198e76226516f2ce99d757c00e88..4ef0db7ab6a83fbac0177430de729dd5e6bed896 100644
--- a/lib/modules/datasource/go/base.spec.ts
+++ b/lib/modules/datasource/go/base.spec.ts
@@ -26,7 +26,7 @@ describe('modules/datasource/go/base', () => {
       async ({ module, datasource, packageName }) => {
         const res = await BaseGoDatasource.getDatasource(module);
         expect(res).toMatchObject({ datasource, packageName });
-      }
+      },
     );
   });
 
@@ -57,7 +57,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200);
 
         const res = await BaseGoDatasource.getDatasource(
-          'example.com/example/module'
+          'example.com/example/module',
         );
 
         expect(res).toBeNull();
@@ -66,7 +66,7 @@ describe('modules/datasource/go/base', () => {
       it('returns null for go-import prefix mismatch', async () => {
         const mismatchResponse = Fixtures.get('go-get-github-ee.html').replace(
           'git.enterprise.com/example/module',
-          'git.enterprise.com/badexample/badmodule'
+          'git.enterprise.com/badexample/badmodule',
         );
         httpMock
           .scope('https://example.com')
@@ -74,7 +74,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, mismatchResponse);
 
         const res = await BaseGoDatasource.getDatasource(
-          'example.com/example/module'
+          'example.com/example/module',
         );
 
         expect(res).toBeNull();
@@ -103,7 +103,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-github-ee.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'git.enterprise.com/example/module'
+          'git.enterprise.com/example/module',
         );
 
         expect(res).toEqual({
@@ -120,7 +120,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'gitlab.com/group/subgroup'
+          'gitlab.com/group/subgroup',
         );
 
         expect(res).toEqual({
@@ -137,7 +137,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'gitlab.com/group/subgroup/private.git/v3'
+          'gitlab.com/group/subgroup/private.git/v3',
         );
 
         expect(res).toEqual({
@@ -154,7 +154,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'gitlab.com/group/subgroup/my.git.module'
+          'gitlab.com/group/subgroup/my.git.module',
         );
 
         expect(res).toEqual({
@@ -167,7 +167,7 @@ describe('modules/datasource/go/base', () => {
       it('supports GitLab with URL mismatch', async () => {
         const mismatchingResponse = Fixtures.get('go-get-github.html').replace(
           'https://github.com/golang/text/',
-          'https://gitlab.com/golang/text/'
+          'https://gitlab.com/golang/text/',
         );
         httpMock
           .scope('https://golang.org')
@@ -190,7 +190,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'gitlab.com/group/subgroup/v2'
+          'gitlab.com/group/subgroup/v2',
         );
 
         expect(res).toEqual({
@@ -208,7 +208,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab-ee.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'my.custom.domain/golang/myrepo'
+          'my.custom.domain/golang/myrepo',
         );
 
         expect(res).toEqual({
@@ -226,7 +226,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab-ee-subgroup.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'my.custom.domain/golang/subgroup/myrepo'
+          'my.custom.domain/golang/subgroup/myrepo',
         );
 
         expect(res).toEqual({
@@ -244,7 +244,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab-ee-subgroup.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'my.custom.domain/golang/subgroup/myrepo/v2'
+          'my.custom.domain/golang/subgroup/myrepo/v2',
         );
 
         expect(res).toEqual({
@@ -262,7 +262,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab-ee-private-subgroup.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'my.custom.domain/golang/subgroup/myrepo.git/v2'
+          'my.custom.domain/golang/subgroup/myrepo.git/v2',
         );
 
         expect(res).toEqual({
@@ -280,7 +280,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, Fixtures.get('go-get-gitlab-ee-subgroup.html'));
 
         const res = await BaseGoDatasource.getDatasource(
-          'my.custom.domain/golang/subgroup/myrepo/monorepo'
+          'my.custom.domain/golang/subgroup/myrepo/monorepo',
         );
 
         expect(res).toEqual({
@@ -333,7 +333,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, meta);
 
         const res = await BaseGoDatasource.getDatasource(
-          'my.custom.domain/golang/myrepo'
+          'my.custom.domain/golang/myrepo',
         );
 
         expect(res).toEqual({
@@ -352,7 +352,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, meta);
 
         const res = await BaseGoDatasource.getDatasource(
-          'dev.azure.com/my-organization/my-project/_git/my-repo.git'
+          'dev.azure.com/my-organization/my-project/_git/my-repo.git',
         );
 
         expect(res).toEqual({
@@ -371,7 +371,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, meta);
 
         const res = await BaseGoDatasource.getDatasource(
-          'example.com/uncommon'
+          'example.com/uncommon',
         );
 
         expect(res).toEqual({
@@ -389,7 +389,7 @@ describe('modules/datasource/go/base', () => {
           .reply(200, meta);
 
         const res = await BaseGoDatasource.getDatasource(
-          'buf.build/gen/go/gogo/protobuf/protocolbuffers/go'
+          'buf.build/gen/go/gogo/protobuf/protocolbuffers/go',
         );
 
         expect(res).toBeNull();
diff --git a/lib/modules/datasource/go/base.ts b/lib/modules/datasource/go/base.ts
index 967095b86b164fb0f43c1f4948c0e74220fa259b..876f8745eb7c3becee73bc271aee8cd60d297a09 100644
--- a/lib/modules/datasource/go/base.ts
+++ b/lib/modules/datasource/go/base.ts
@@ -15,13 +15,13 @@ import type { DataSource } from './types';
 // TODO: figure out class hierarchy (#10532)
 export class BaseGoDatasource {
   private static readonly gitlabHttpsRegExp = regEx(
-    /^(?<httpsRegExpUrl>https:\/\/[^/]*gitlab\.[^/]*)\/(?<httpsRegExpName>.+?)(?:\/v\d+)?[/]?$/
+    /^(?<httpsRegExpUrl>https:\/\/[^/]*gitlab\.[^/]*)\/(?<httpsRegExpName>.+?)(?:\/v\d+)?[/]?$/,
   );
   private static readonly gitlabRegExp = regEx(
-    /^(?<regExpUrl>gitlab\.[^/]*)\/(?<regExpPath>.+?)(?:\/v\d+)?[/]?$/
+    /^(?<regExpUrl>gitlab\.[^/]*)\/(?<regExpPath>.+?)(?:\/v\d+)?[/]?$/,
   );
   private static readonly gitVcsRegexp = regEx(
-    /^(?:[^/]+)\/(?<module>.*)\.git(?:$|\/)/
+    /^(?:[^/]+)\/(?<module>.*)\.git(?:$|\/)/,
   );
 
   private static readonly id = 'go';
@@ -61,7 +61,7 @@ export class BaseGoDatasource {
     if (goModule.startsWith('code.cloudfoundry.org/')) {
       const packageName = goModule.replace(
         'code.cloudfoundry.org',
-        'cloudfoundry'
+        'cloudfoundry',
       );
       return {
         datasource: GithubTagsDatasource.id,
@@ -74,7 +74,7 @@ export class BaseGoDatasource {
   }
 
   private static async goGetDatasource(
-    goModule: string
+    goModule: string,
   ): Promise<DataSource | null> {
     const pkgUrl = `https://${goModule}?go-get=1`;
     // GitHub Enterprise only returns a go-import meta
@@ -87,10 +87,10 @@ export class BaseGoDatasource {
 
   private static goSourceHeader(
     res: string,
-    goModule: string
+    goModule: string,
   ): DataSource | null {
     const sourceMatch = regEx(
-      `<meta\\s+name="?go-source"?\\s+content="([^\\s]+)\\s+([^\\s]+)`
+      `<meta\\s+name="?go-source"?\\s+content="([^\\s]+)\\s+([^\\s]+)`,
     ).exec(res);
     if (!sourceMatch) {
       return null;
@@ -106,7 +106,7 @@ export class BaseGoDatasource {
 
   private static detectDatasource(
     goSourceUrl: string,
-    goModule: string
+    goModule: string,
   ): DataSource | null {
     if (goSourceUrl?.startsWith('https://github.com/')) {
       return {
@@ -180,10 +180,10 @@ export class BaseGoDatasource {
 
   private static goImportHeader(
     res: string,
-    goModule: string
+    goModule: string,
   ): DataSource | null {
     const importMatch = regEx(
-      `<meta\\s+name="?go-import"?\\s+content="([^\\s]+)\\s+([^\\s]+)\\s+([^\\s]+)"\\s*\\/?>`
+      `<meta\\s+name="?go-import"?\\s+content="([^\\s]+)\\s+([^\\s]+)\\s+([^\\s]+)"\\s*\\/?>`,
     ).exec(res);
 
     if (!importMatch) {
@@ -208,7 +208,7 @@ export class BaseGoDatasource {
 
     const datasource = this.detectDatasource(
       goImportURL.replace(regEx(/\.git$/), ''),
-      goModule
+      goModule,
     );
     if (datasource !== null) {
       return datasource;
diff --git a/lib/modules/datasource/go/common.ts b/lib/modules/datasource/go/common.ts
index 593eca50b7522f052f8b759a67327f9199abca90..70c10cd676f32fc55f2a0821e74c915f17e582ae 100644
--- a/lib/modules/datasource/go/common.ts
+++ b/lib/modules/datasource/go/common.ts
@@ -11,7 +11,7 @@ export type GoproxyFallback =
   | '|'; // Always
 
 export function getSourceUrl(
-  dataSource?: DataSource | null
+  dataSource?: DataSource | null,
 ): string | undefined {
   if (dataSource) {
     const { datasource, registryUrl, packageName } = dataSource;
diff --git a/lib/modules/datasource/go/index.spec.ts b/lib/modules/datasource/go/index.spec.ts
index b836ff8805808f75b7a4669bb4346162be656ac8..dad705a914dc1813d0208661fe49b1c518b7de61 100644
--- a/lib/modules/datasource/go/index.spec.ts
+++ b/lib/modules/datasource/go/index.spec.ts
@@ -82,7 +82,7 @@ describe('modules/datasource/go/index', () => {
         .reply(200, '');
       const res = await datasource.getDigest(
         { packageName: 'golang.org/y/text' },
-        null
+        null,
       );
       expect(res).toBeNull();
     });
@@ -94,7 +94,7 @@ describe('modules/datasource/go/index', () => {
         .reply(200, Fixtures.get('go-get-github.html'));
       const res = await datasource.getDigest(
         { packageName: 'golang.org/y/text' },
-        null
+        null,
       );
       expect(res).toBeNull();
     });
@@ -107,7 +107,7 @@ describe('modules/datasource/go/index', () => {
       getDigestGitlabMock.mockResolvedValue('abcdefabcdefabcdefabcdef');
       const res = await datasource.getDigest(
         { packageName: 'gitlab.com/group/subgroup' },
-        null
+        null,
       );
       expect(res).toBe('abcdefabcdefabcdefabcdef');
     });
@@ -120,7 +120,7 @@ describe('modules/datasource/go/index', () => {
       getDigestGitMock.mockResolvedValue('abcdefabcdefabcdefabcdef');
       const res = await datasource.getDigest(
         { packageName: 'renovatebot.com/abc/def' },
-        null
+        null,
       );
       expect(res).toBe('abcdefabcdefabcdefabcdef');
     });
@@ -134,7 +134,7 @@ describe('modules/datasource/go/index', () => {
       getDigestGitlabMock.mockResolvedValue('abcdefabcdefabcdefabcdef');
       const res = await datasource.getDigest(
         { packageName: 'gitlab.com/group/subgroup' },
-        branch
+        branch,
       );
       expect(res).toBe('abcdefabcdefabcdefabcdef');
     });
@@ -147,7 +147,7 @@ describe('modules/datasource/go/index', () => {
       getDigestGithubMock.mockResolvedValueOnce('abcdefabcdefabcdefabcdef');
       const res = await datasource.getDigest(
         { packageName: 'golang.org/x/text' },
-        'v1.2.3'
+        'v1.2.3',
       );
       expect(res).toBe('abcdefabcdefabcdefabcdef');
       expect(getDigestGithubMock).toHaveBeenCalledWith(
@@ -156,7 +156,7 @@ describe('modules/datasource/go/index', () => {
           packageName: 'golang/text',
           registryUrl: 'https://github.com',
         },
-        'v1.2.3'
+        'v1.2.3',
       );
     });
 
@@ -168,7 +168,7 @@ describe('modules/datasource/go/index', () => {
       getDigestGithubMock.mockResolvedValueOnce('abcdefabcdefabcdefabcdef');
       const res = await datasource.getDigest(
         { packageName: 'golang.org/x/text' },
-        'v0.0.0'
+        'v0.0.0',
       );
       expect(res).toBe('abcdefabcdefabcdefabcdef');
       expect(getDigestGithubMock).toHaveBeenCalledWith(
@@ -177,7 +177,7 @@ describe('modules/datasource/go/index', () => {
           packageName: 'golang/text',
           registryUrl: 'https://github.com',
         },
-        undefined
+        undefined,
       );
     });
 
@@ -187,7 +187,7 @@ describe('modules/datasource/go/index', () => {
         {
           packageName: 'bitbucket.org/golang/text',
         },
-        null
+        null,
       );
       expect(res).toMatchSnapshot();
       expect(res).not.toBeNull();
diff --git a/lib/modules/datasource/go/index.ts b/lib/modules/datasource/go/index.ts
index 8468524b904bbbd46d2fe94adc430f8f940d90d6..48702e2d2090b908a10b4a9c732863ff158b0d3e 100644
--- a/lib/modules/datasource/go/index.ts
+++ b/lib/modules/datasource/go/index.ts
@@ -34,7 +34,7 @@ export class GoDatasource extends Datasource {
 
   // Pseudo versions https://go.dev/ref/mod#pseudo-versions
   static readonly pversionRegexp = regEx(
-    /v\d+\.\d+\.\d+-(?:\w+\.)?(?:0\.)?\d{14}-(?<digest>[a-f0-9]{12})/
+    /v\d+\.\d+\.\d+-(?:\w+\.)?(?:0\.)?\d{14}-(?<digest>[a-f0-9]{12})/,
   );
   @cache({
     namespace: `datasource-${GoDatasource.id}`,
@@ -61,7 +61,7 @@ export class GoDatasource extends Datasource {
   })
   override async getDigest(
     { packageName }: DigestConfig,
-    value?: string | null
+    value?: string | null,
   ): Promise<string | null> {
     const source = await BaseGoDatasource.getDatasource(packageName);
     if (!source) {
diff --git a/lib/modules/datasource/go/releases-direct.spec.ts b/lib/modules/datasource/go/releases-direct.spec.ts
index fa83dd4655ef4df1430597875a4eb036e45e3e15..c730b9492c7a4c2968f2e7e7f62b175bac857d8e 100644
--- a/lib/modules/datasource/go/releases-direct.spec.ts
+++ b/lib/modules/datasource/go/releases-direct.spec.ts
@@ -18,7 +18,7 @@ describe('modules/datasource/go/releases-direct', () => {
   const gitGetTags = jest.spyOn(GitTagsDatasource.prototype, 'getReleases');
   const githubGetTags = jest.spyOn(
     GithubTagsDatasource.prototype,
-    'getReleases'
+    'getReleases',
   );
 
   beforeEach(() => {
@@ -40,7 +40,7 @@ describe('modules/datasource/go/releases-direct', () => {
       await expect(
         datasource.getReleases({
           packageName: 'golang.org/foo/something',
-        })
+        }),
       ).rejects.toThrow();
     });
 
@@ -215,7 +215,7 @@ describe('modules/datasource/go/releases-direct', () => {
       httpMock
         .scope('https://gitlab.com/')
         .get(
-          '/api/v4/projects/group%2Fsubgroup%2Frepo/repository/tags?per_page=100'
+          '/api/v4/projects/group%2Fsubgroup%2Frepo/repository/tags?per_page=100',
         )
         .reply(200, [{ name: 'v1.0.0' }, { name: 'v2.0.0' }]);
       const res = await datasource.getReleases({
diff --git a/lib/modules/datasource/go/releases-direct.ts b/lib/modules/datasource/go/releases-direct.ts
index 13950d682f5632aaf058faea4666a24d8879827d..b9cb6c2e092d73cc1351d044392c188840b235b2 100644
--- a/lib/modules/datasource/go/releases-direct.ts
+++ b/lib/modules/datasource/go/releases-direct.ts
@@ -52,7 +52,7 @@ export class GoDirectDatasource extends Datasource {
     if (!source) {
       logger.info(
         { packageName },
-        'Unsupported go host - cannot look up versions'
+        'Unsupported go host - cannot look up versions',
       );
       return null;
     }
@@ -127,7 +127,7 @@ export class GoDirectDatasource extends Datasource {
 
     if (res.releases) {
       res.releases = res.releases.filter((release) =>
-        release.version?.startsWith('v')
+        release.version?.startsWith('v'),
       );
     }
 
diff --git a/lib/modules/datasource/go/releases-goproxy.spec.ts b/lib/modules/datasource/go/releases-goproxy.spec.ts
index bcbb07221e7add28065040f4f594f3a55f5ca189..230db9e37682336c4ef6ff1a557238e72d9d8c3f 100644
--- a/lib/modules/datasource/go/releases-goproxy.spec.ts
+++ b/lib/modules/datasource/go/releases-goproxy.spec.ts
@@ -10,12 +10,12 @@ const datasource = new GoProxyDatasource();
 describe('modules/datasource/go/releases-goproxy', () => {
   const githubGetReleases = jest.spyOn(
     GithubReleasesDatasource.prototype,
-    'getReleases'
+    'getReleases',
   );
 
   const githubGetTags = jest.spyOn(
     GithubTagsDatasource.prototype,
-    'getReleases'
+    'getReleases',
   );
 
   it('encodeCase', () => {
@@ -49,7 +49,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
       const release = await datasource.versionInfo(
         baseUrl,
         packageName,
-        'v0.5.0'
+        'v0.5.0',
       );
 
       expect(release).toEqual({
@@ -101,177 +101,177 @@ describe('modules/datasource/go/releases-goproxy', () => {
       expect(GoProxyDatasource.parseNoproxy('')).toBeNull();
       expect(GoProxyDatasource.parseNoproxy('/')).toBeNull();
       expect(GoProxyDatasource.parseNoproxy('*')?.source).toBe(
-        '^(?:[^\\/]*)(?:\\/.*)?$'
+        '^(?:[^\\/]*)(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('?')?.source).toBe(
-        '^(?:[^\\/])(?:\\/.*)?$'
+        '^(?:[^\\/])(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('foo')?.source).toBe(
-        '^(?:foo)(?:\\/.*)?$'
+        '^(?:foo)(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('\\f\\o\\o')?.source).toBe(
-        '^(?:foo)(?:\\/.*)?$'
+        '^(?:foo)(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('foo,bar')?.source).toBe(
-        '^(?:foo|bar)(?:\\/.*)?$'
+        '^(?:foo|bar)(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('[abc]')?.source).toBe(
-        '^(?:[abc])(?:\\/.*)?$'
+        '^(?:[abc])(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('[a-c]')?.source).toBe(
-        '^(?:[a-c])(?:\\/.*)?$'
+        '^(?:[a-c])(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('[\\a-\\c]')?.source).toBe(
-        '^(?:[a-c])(?:\\/.*)?$'
+        '^(?:[a-c])(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('a.b.c')?.source).toBe(
-        '^(?:a\\.b\\.c)(?:\\/.*)?$'
+        '^(?:a\\.b\\.c)(?:\\/.*)?$',
       );
       expect(GoProxyDatasource.parseNoproxy('trailing/')?.source).toBe(
-        '^(?:trailing)(?:\\/.*)?$'
+        '^(?:trailing)(?:\\/.*)?$',
       );
     });
 
     it('matches on real package prefixes', () => {
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('ex.co')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('ex.co/')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo/*')?.test('example.com/foo/bar')
+        GoProxyDatasource.parseNoproxy('*/foo/*')?.test('example.com/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('ex.co')?.test('ex.co/foo/v2'),
       ).toBeTrue();
 
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/bar')?.test('ex.co/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo/*')?.test('example.com/foo/bar')
+        GoProxyDatasource.parseNoproxy('*/foo/*')?.test('example.com/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/bar'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz')
+        GoProxyDatasource.parseNoproxy('ex.co/foo/*')?.test('ex.co/foo/baz'),
       ).toBeTrue();
       expect(
         GoProxyDatasource.parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test(
-          'ex.co/foo/bar'
-        )
+          'ex.co/foo/bar',
+        ),
       ).toBeTrue();
       expect(
         GoProxyDatasource.parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test(
-          'ex.co/foo/baz'
-        )
+          'ex.co/foo/baz',
+        ),
       ).toBeTrue();
       expect(
         GoProxyDatasource.parseNoproxy('ex.co/foo/bar,ex.co/foo/baz')?.test(
-          'ex.co/foo/qux'
-        )
+          'ex.co/foo/qux',
+        ),
       ).toBeFalse();
 
       expect(
-        GoProxyDatasource.parseNoproxy('ex')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('ex')?.test('ex.co/foo'),
       ).toBeFalse();
 
       expect(GoProxyDatasource.parseNoproxy('aba')?.test('x/aba')).toBeFalse();
       expect(GoProxyDatasource.parseNoproxy('x/b')?.test('x/aba')).toBeFalse();
       expect(GoProxyDatasource.parseNoproxy('x/ab')?.test('x/aba')).toBeFalse();
       expect(
-        GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/aba')
+        GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/aba'),
       ).toBeTrue();
     });
 
     it('matches on wildcards', () => {
       expect(
-        GoProxyDatasource.parseNoproxy('/*/')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('/*/')?.test('ex.co/foo'),
       ).toBeFalse();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*/foo')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/fo')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*/fo')?.test('ex.co/foo'),
       ).toBeFalse();
       expect(
-        GoProxyDatasource.parseNoproxy('*/fo?')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*/fo?')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/fo*')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*/fo*')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*fo*')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*fo*')?.test('ex.co/foo'),
       ).toBeFalse();
 
       expect(
-        GoProxyDatasource.parseNoproxy('*.co')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*.co')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('ex*')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('ex*')?.test('ex.co/foo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/foo')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo/')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/foo/')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo/*')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/foo/*')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/foo/*/')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/foo/*/')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/v2')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/v2')?.test('ex.co/foo/v2'),
       ).toBeFalse();
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/v2')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/*/v2')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/*')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/*/*')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/*/')?.test('ex.co/foo/v2')
+        GoProxyDatasource.parseNoproxy('*/*/*/')?.test('ex.co/foo/v2'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/*')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*/*/*')?.test('ex.co/foo'),
       ).toBeFalse();
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/*/')?.test('ex.co/foo')
+        GoProxyDatasource.parseNoproxy('*/*/*/')?.test('ex.co/foo'),
       ).toBeFalse();
 
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/*,,')?.test('ex.co/repo')
+        GoProxyDatasource.parseNoproxy('*/*/*,,')?.test('ex.co/repo'),
       ).toBeFalse();
       expect(
-        GoProxyDatasource.parseNoproxy('*/*/*,,*/repo')?.test('ex.co/repo')
+        GoProxyDatasource.parseNoproxy('*/*/*,,*/repo')?.test('ex.co/repo'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy(',,*/repo')?.test('ex.co/repo')
+        GoProxyDatasource.parseNoproxy(',,*/repo')?.test('ex.co/repo'),
       ).toBeTrue();
     });
 
     it('matches on character ranges', () => {
       expect(
-        GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/aba')
+        GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/aba'),
       ).toBeTrue();
       expect(
-        GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/abc')
+        GoProxyDatasource.parseNoproxy('x/ab[a-b]')?.test('x/abc'),
       ).toBeFalse();
     });
   });
@@ -344,7 +344,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
           codeBlock`
             v1.0.0 2018-08-13T15:31:12Z
             v1.0.1
-          `
+          `,
         )
         .get('/@v/v1.0.1.info')
         .reply(200, { Version: 'v1.0.1', Time: '2019-10-16T16:15:28Z' })
@@ -378,7 +378,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
           codeBlock`
             v1.0.0
             v1.0.1
-          `
+          `,
         )
         .get('/@v/v1.0.0.info')
         .replyWithError('unknown')
@@ -416,7 +416,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
           codeBlock`
             v1.0.0
             v1.0.1
-          `
+          `,
         )
         .get('/@v/v1.0.0.info')
         .reply(200, { Version: 'v1.0.0', Time: '2018-08-13T15:31:12Z' })
@@ -466,7 +466,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
           codeBlock`
             v1.0.0
             v1.0.1
-          `
+          `,
         )
         .get('/@v/v1.0.0.info')
         .reply(200, { Version: 'v1.0.0', Time: '2018-08-13T15:31:12Z' })
@@ -560,7 +560,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
 
     it('supports "off" keyword', async () => {
       process.env.GOPROXY = ['https://foo.com', 'https://bar.com', 'off'].join(
-        ','
+        ',',
       );
 
       httpMock
@@ -622,7 +622,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
           codeBlock`
             v1.0.0
             v1.0.1
-          `
+          `,
         )
         .get('/@v/v1.0.0.info')
         .reply(200, { Version: 'v1.0.0', Time: '2018-08-13T15:31:12Z' })
@@ -635,7 +635,7 @@ describe('modules/datasource/go/releases-goproxy', () => {
           200,
           codeBlock`
             v2.0.0
-          `
+          `,
         )
         .get('/v2/@v/v2.0.0.info')
         .reply(200, { Version: 'v2.0.0', Time: '2020-10-16T16:15:28Z' })
diff --git a/lib/modules/datasource/go/releases-goproxy.ts b/lib/modules/datasource/go/releases-goproxy.ts
index bec01647ce2660eab54ebe879a6abf4373ce551c..82fa68f43e54a2256810633b6c06363d05c1a4d4 100644
--- a/lib/modules/datasource/go/releases-goproxy.ts
+++ b/lib/modules/datasource/go/releases-goproxy.ts
@@ -178,7 +178,7 @@ export class GoProxyDatasource extends Datasource {
   static parsedNoproxy: Record<string, RegExp | null> = {};
 
   static parseNoproxy(
-    input: unknown = process.env.GONOPROXY ?? process.env.GOPRIVATE
+    input: unknown = process.env.GONOPROXY ?? process.env.GOPRIVATE,
   ): RegExp | null {
     if (!is.string(input)) {
       return null;
@@ -221,7 +221,7 @@ export class GoProxyDatasource extends Datasource {
   async versionInfo(
     baseUrl: string,
     packageName: string,
-    version: string
+    version: string,
   ): Promise<Release> {
     const url = `${baseUrl}/${this.encodeCase(packageName)}/@v/${version}.info`;
     const res = await this.http.getJson<VersionInfo>(url);
@@ -239,7 +239,7 @@ export class GoProxyDatasource extends Datasource {
 
   async getLatestVersion(
     baseUrl: string,
-    packageName: string
+    packageName: string,
   ): Promise<string | null> {
     try {
       const url = `${baseUrl}/${this.encodeCase(packageName)}/@latest`;
@@ -253,7 +253,7 @@ export class GoProxyDatasource extends Datasource {
 
   async getVersionsWithInfo(
     baseUrl: string,
-    packageName: string
+    packageName: string,
   ): Promise<ReleaseResult> {
     const isGopkgin = packageName.startsWith('gopkg.in/');
     const majorSuffixSeparator = isGopkgin ? '.' : '/';
diff --git a/lib/modules/datasource/golang-version/index.spec.ts b/lib/modules/datasource/golang-version/index.spec.ts
index 092600d22f5719ea09a20c3591d6b879b942e9f8..846d731c160148e2e55a8c0299643e894cd973a3 100644
--- a/lib/modules/datasource/golang-version/index.spec.ts
+++ b/lib/modules/datasource/golang-version/index.spec.ts
@@ -62,7 +62,7 @@ describe('modules/datasource/golang-version/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'golang',
-        })
+        }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -75,7 +75,7 @@ describe('modules/datasource/golang-version/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'golang',
-        })
+        }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -85,7 +85,7 @@ describe('modules/datasource/golang-version/index', () => {
         .get('/golang/website/HEAD/internal/history/release.go')
         .reply(200, {});
       await expect(
-        getPkgReleases({ datasource, packageName: 'golang' })
+        getPkgReleases({ datasource, packageName: 'golang' }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -95,7 +95,7 @@ describe('modules/datasource/golang-version/index', () => {
         .get('/golang/website/HEAD/internal/history/release.go')
         .reply(200, golangReleasesInvalidContent3);
       await expect(
-        getPkgReleases({ datasource, packageName: 'golang' })
+        getPkgReleases({ datasource, packageName: 'golang' }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -105,7 +105,7 @@ describe('modules/datasource/golang-version/index', () => {
         .get('/golang/website/HEAD/internal/history/release.go')
         .reply(200, golangReleasesInvalidContent4);
       await expect(
-        getPkgReleases({ datasource, packageName: 'golang' })
+        getPkgReleases({ datasource, packageName: 'golang' }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -115,7 +115,7 @@ describe('modules/datasource/golang-version/index', () => {
         .get('/golang/website/HEAD/internal/history/release.go')
         .reply(404);
       expect(
-        await getPkgReleases({ datasource, packageName: 'golang' })
+        await getPkgReleases({ datasource, packageName: 'golang' }),
       ).toBeNull();
     });
 
@@ -125,7 +125,7 @@ describe('modules/datasource/golang-version/index', () => {
         .get('/golang/website/HEAD/internal/history/release.go')
         .reply(200, golangReleasesInvalidContent5);
       await expect(
-        getPkgReleases({ datasource, packageName: 'golang' })
+        getPkgReleases({ datasource, packageName: 'golang' }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -135,7 +135,7 @@ describe('modules/datasource/golang-version/index', () => {
         .get('/golang/website/HEAD/internal/history/release.go')
         .reply(200, golangReleasesInvalidContent6);
       await expect(
-        getPkgReleases({ datasource, packageName: 'golang' })
+        getPkgReleases({ datasource, packageName: 'golang' }),
       ).rejects.toThrow(ExternalHostError);
     });
   });
diff --git a/lib/modules/datasource/golang-version/index.ts b/lib/modules/datasource/golang-version/index.ts
index 33a9959da861f2a784c48a2803431d3bf6fcbc60..79e907f3418136d0e8dfa67002eba2e79dc2096b 100644
--- a/lib/modules/datasource/golang-version/index.ts
+++ b/lib/modules/datasource/golang-version/index.ts
@@ -10,10 +10,10 @@ const lineTerminationRegex = regEx(`\r?\n`);
 const releaseBeginningChar = '\t{';
 const releaseTerminationChar = '\t},';
 const releaseDateRegex = regEx(
-  `Date\\{(?<year>\\d+),\\s+(?<month>\\d+),\\s+(?<day>\\d+)\\}`
+  `Date\\{(?<year>\\d+),\\s+(?<month>\\d+),\\s+(?<day>\\d+)\\}`,
 );
 const releaseVersionRegex = regEx(
-  `Version\\{(?<versionMajor>\\d+),\\s+(?<versionMinor>\\d+),\\s+(?<patch>\\d+)\\}`
+  `Version\\{(?<versionMajor>\\d+),\\s+(?<versionMinor>\\d+),\\s+(?<patch>\\d+)\\}`,
 );
 const releaseFutureRegex = regEx(`Future:\\s+true`);
 
@@ -49,7 +49,7 @@ export class GolangVersionDatasource extends Datasource {
 
     const golangVersionsUrl = joinUrlParts(
       registryUrl,
-      '/HEAD/internal/history/release.go'
+      '/HEAD/internal/history/release.go',
     );
 
     const response = await this.http.get(golangVersionsUrl);
@@ -59,7 +59,7 @@ export class GolangVersionDatasource extends Datasource {
     const startOfReleases = lines.indexOf('var Releases = []*Release{');
     if (startOfReleases === -1) {
       throw new ExternalHostError(
-        new Error('Invalid file - could not find the Releases section')
+        new Error('Invalid file - could not find the Releases section'),
       );
     }
 
@@ -76,7 +76,9 @@ export class GolangVersionDatasource extends Datasource {
       if (line === releaseBeginningChar) {
         if (release.version !== undefined) {
           throw new ExternalHostError(
-            new Error('Invalid file - unexpected error while parsing a release')
+            new Error(
+              'Invalid file - unexpected error while parsing a release',
+            ),
           );
         }
       } else if (line === releaseTerminationChar) {
@@ -85,7 +87,7 @@ export class GolangVersionDatasource extends Datasource {
         } else {
           if (release.version === undefined) {
             throw new ExternalHostError(
-              new Error('Invalid file - release has empty version')
+              new Error('Invalid file - release has empty version'),
             );
           }
           res.releases.push(release as Release);
@@ -109,7 +111,7 @@ export class GolangVersionDatasource extends Datasource {
           release.version = `${releaseVersionMatch.groups.versionMajor}.${releaseVersionMatch.groups.versionMinor}.${releaseVersionMatch.groups.patch}`;
           if (!isVersion(release.version)) {
             throw new ExternalHostError(
-              new Error(`Version ${release.version} is not a valid semver`)
+              new Error(`Version ${release.version} is not a valid semver`),
             );
           }
         }
@@ -118,7 +120,7 @@ export class GolangVersionDatasource extends Datasource {
 
     if (res.releases.length === 0) {
       throw new ExternalHostError(
-        new Error(`Invalid file - zero releases extracted`)
+        new Error(`Invalid file - zero releases extracted`),
       );
     }
 
diff --git a/lib/modules/datasource/gradle-version/index.spec.ts b/lib/modules/datasource/gradle-version/index.spec.ts
index 6bcd65dec8ad9922132f887af8bf25e23641ed31..7db80457e1b4de28ecec06e5e9b801b64b18996f 100644
--- a/lib/modules/datasource/gradle-version/index.spec.ts
+++ b/lib/modules/datasource/gradle-version/index.spec.ts
@@ -32,7 +32,7 @@ describe('modules/datasource/gradle-version/index', () => {
       expect(res).not.toBeNull();
       expect(res?.releases).toHaveLength(300);
       expect(
-        res?.releases.filter(({ isDeprecated }) => isDeprecated)
+        res?.releases.filter(({ isDeprecated }) => isDeprecated),
       ).toHaveLength(1);
     });
 
@@ -80,16 +80,16 @@ describe('modules/datasource/gradle-version/index', () => {
         gradleVersionDatasource.getReleases(
           partial<GetReleasesConfig>({
             registryUrl: 'https://services.gradle.org/versions/all',
-          })
-        )
+          }),
+        ),
       ).rejects.toThrow(ExternalHostError);
 
       await expect(
         gradleVersionDatasource.getReleases(
           partial<GetReleasesConfig>({
             registryUrl: 'http://baz.qux',
-          })
-        )
+          }),
+        ),
       ).rejects.toThrow(ExternalHostError);
     });
   });
diff --git a/lib/modules/datasource/gradle-version/index.ts b/lib/modules/datasource/gradle-version/index.ts
index 7deac775b835331a58dab75bb9bd6a72a0a54162..069b51f7190a775e8d3ca0fb4a7b8b25fcba0fce 100644
--- a/lib/modules/datasource/gradle-version/index.ts
+++ b/lib/modules/datasource/gradle-version/index.ts
@@ -21,7 +21,7 @@ export class GradleVersionDatasource extends Datasource {
   override readonly registryStrategy = 'merge';
 
   private static readonly buildTimeRegex = regEx(
-    '^(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\+\\d\\d\\d\\d)$'
+    '^(\\d\\d\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\d\\d)(\\+\\d\\d\\d\\d)$',
   );
 
   @cache({
@@ -80,7 +80,7 @@ export class GradleVersionDatasource extends Datasource {
     if (GradleVersionDatasource.buildTimeRegex.test(timeStr)) {
       return timeStr.replace(
         GradleVersionDatasource.buildTimeRegex,
-        '$1-$2-$3T$4:$5:$6$7'
+        '$1-$2-$3T$4:$5:$6$7',
       );
     }
     return null;
diff --git a/lib/modules/datasource/helm/common.spec.ts b/lib/modules/datasource/helm/common.spec.ts
index cae8bbaab64b1108744b189e92dc887bc43f4944..2de9e3d13b21a3df3c50cdf653bdc834614957bd 100644
--- a/lib/modules/datasource/helm/common.spec.ts
+++ b/lib/modules/datasource/helm/common.spec.ts
@@ -22,7 +22,7 @@ describe('modules/datasource/helm/common', () => {
       '$input -> $output',
       ({ input, output }: { input: string; output: string }) => {
         expect(findSourceUrl(repo.entries[input][0])).toEqual(output);
-      }
+      },
     );
   });
 });
diff --git a/lib/modules/datasource/helm/common.ts b/lib/modules/datasource/helm/common.ts
index dc85e672f6f0728eedbe9e8aef1e10a52fae25c6..0b32a3c33fa971e2429ea4fcf86b4df69f6d6878 100644
--- a/lib/modules/datasource/helm/common.ts
+++ b/lib/modules/datasource/helm/common.ts
@@ -5,7 +5,7 @@ import type { HelmRelease } from './types';
 
 const chartRepo = regEx(/charts?|helm|helm-charts/i);
 const githubRelease = regEx(
-  /^(https:\/\/github\.com\/[^/]+\/[^/]+)\/releases\//
+  /^(https:\/\/github\.com\/[^/]+\/[^/]+)\/releases\//,
 );
 
 function isPossibleChartRepo(url: string): boolean {
diff --git a/lib/modules/datasource/helm/index.spec.ts b/lib/modules/datasource/helm/index.spec.ts
index 93366c38bcb4b10c2c2ac68b943a7087c53e78f4..3ee646164ba59a3b6861a67e0fefba3e8d7ddbea 100644
--- a/lib/modules/datasource/helm/index.spec.ts
+++ b/lib/modules/datasource/helm/index.spec.ts
@@ -15,7 +15,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: undefined as never, // #22198
           registryUrls: ['https://example-repository.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -30,7 +30,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: 'some_chart',
           registryUrls: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -44,7 +44,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: 'non_existent_chart',
           registryUrls: ['https://example-repository.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -58,7 +58,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: 'non_existent_chart',
           registryUrls: ['https://example-repository.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -72,7 +72,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: 'some_chart',
           registryUrls: ['https://example-repository.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -86,7 +86,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: 'some_chart',
           registryUrls: ['https://example-repository.com'],
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -100,7 +100,7 @@ describe('modules/datasource/helm/index', () => {
           datasource: HelmDatasource.id,
           packageName: 'some_chart',
           registryUrls: ['https://example-repository.com'],
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/helm/index.ts b/lib/modules/datasource/helm/index.ts
index 1a8dd5717797c6f4e4c87b17d8bf46ef8d667da2..5f5eb92fdef39ae7ea749e567a4ab0e6aa172d68 100644
--- a/lib/modules/datasource/helm/index.ts
+++ b/lib/modules/datasource/helm/index.ts
@@ -30,7 +30,7 @@ export class HelmDatasource extends Datasource {
     key: (helmRepository: string) => helmRepository,
   })
   async getRepositoryData(
-    helmRepository: string
+    helmRepository: string,
   ): Promise<HelmRepositoryData | null> {
     let res: HttpResponse<string>;
     try {
@@ -40,7 +40,7 @@ export class HelmDatasource extends Datasource {
       if (!res?.body) {
         logger.warn(
           { helmRepository },
-          `Received invalid response from helm repository`
+          `Received invalid response from helm repository`,
         );
         return null;
       }
@@ -54,7 +54,7 @@ export class HelmDatasource extends Datasource {
       if (!is.plainObject<HelmRepository>(doc)) {
         logger.warn(
           { helmRepository },
-          `Failed to parse index.yaml from helm repository`
+          `Failed to parse index.yaml from helm repository`,
         );
         return null;
       }
@@ -81,7 +81,7 @@ export class HelmDatasource extends Datasource {
     } catch (err) {
       logger.debug(
         { helmRepository, err },
-        `Failed to parse index.yaml from helm repository`
+        `Failed to parse index.yaml from helm repository`,
       );
       return null;
     }
@@ -105,7 +105,7 @@ export class HelmDatasource extends Datasource {
     if (!releases) {
       logger.debug(
         { dependency: packageName },
-        `Entry ${packageName} doesn't exist in index.yaml from ${helmRepository}`
+        `Entry ${packageName} doesn't exist in index.yaml from ${helmRepository}`,
       );
       return null;
     }
diff --git a/lib/modules/datasource/hermit/index.spec.ts b/lib/modules/datasource/hermit/index.spec.ts
index 328dd6f9429aabaafea8d67bb9d173cfa4afdfe7..b0713314829f6bf27433bb875f7e01fabca2876b 100644
--- a/lib/modules/datasource/hermit/index.spec.ts
+++ b/lib/modules/datasource/hermit/index.spec.ts
@@ -99,7 +99,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl,
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -126,7 +126,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl,
-        })
+        }),
       ).rejects.toThrow();
     });
 
@@ -135,7 +135,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl: 'https://gitlab.com/owner/project',
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -144,13 +144,13 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl: 'https://github.com/test',
-        })
+        }),
       ).resolves.toBeNull();
       await expect(
         datasource.getReleases({
           packageName: 'go',
           registryUrl: 'https://github.com/',
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -159,7 +159,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl: 'https://github.com/test/repo/extra-path',
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -167,7 +167,7 @@ describe('modules/datasource/hermit/index', () => {
       await expect(
         datasource.getReleases({
           packageName: 'go',
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -188,7 +188,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl,
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -214,7 +214,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl,
-        })
+        }),
       ).resolves.toBeNull();
     });
 
@@ -223,7 +223,7 @@ describe('modules/datasource/hermit/index', () => {
         datasource.getReleases({
           packageName: 'go',
           registryUrl: 'invalid url',
-        })
+        }),
       ).resolves.toBeNull();
     });
   });
diff --git a/lib/modules/datasource/hermit/index.ts b/lib/modules/datasource/hermit/index.ts
index 5fd91d5b4ae682e92c8e285693b21b359daf88ac..4db90e81b975ecdd7c1ba9453908258cad9a3efd 100644
--- a/lib/modules/datasource/hermit/index.ts
+++ b/lib/modules/datasource/hermit/index.ts
@@ -75,7 +75,7 @@ export class HermitDatasource extends Datasource {
 
     if (!res) {
       logger.debug(
-        `Could not find hermit package ${packageName} at URL ${registryUrl}`
+        `Could not find hermit package ${packageName} at URL ${registryUrl}`,
       );
       return null;
     }
@@ -112,7 +112,7 @@ export class HermitDatasource extends Datasource {
     if (!groups) {
       logger.warn(
         { registryUrl },
-        'failed to get owner and repo from given url'
+        'failed to get owner and repo from given url',
       );
       return null;
     }
@@ -122,18 +122,18 @@ export class HermitDatasource extends Datasource {
     const apiBaseUrl = getApiBaseUrl(`https://${host}`);
 
     const indexRelease = await this.http.getJson<GithubRestRelease>(
-      `${apiBaseUrl}repos/${owner}/${repo}/releases/tags/index`
+      `${apiBaseUrl}repos/${owner}/${repo}/releases/tags/index`,
     );
 
     // finds asset with name index.json
     const asset = indexRelease.body.assets.find(
-      (asset) => asset.name === 'index.json'
+      (asset) => asset.name === 'index.json',
     );
 
     if (!asset) {
       logger.warn(
         { registryUrl },
-        `can't find asset index.json in the given registryUrl`
+        `can't find asset index.json in the given registryUrl`,
       );
       return null;
     }
@@ -149,7 +149,7 @@ export class HermitDatasource extends Datasource {
         headers: {
           accept: 'application/octet-stream',
         },
-      })
+      }),
     );
 
     try {
diff --git a/lib/modules/datasource/hex/index.spec.ts b/lib/modules/datasource/hex/index.spec.ts
index 96f435c6b88204c0c543e850ec1d77597fe61a53..70556645629f5e048b35efe1d49fd00e43897222 100644
--- a/lib/modules/datasource/hex/index.spec.ts
+++ b/lib/modules/datasource/hex/index.spec.ts
@@ -27,7 +27,7 @@ describe('modules/datasource/hex/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'non_existent_package',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -40,42 +40,42 @@ describe('modules/datasource/hex/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'non_existent_package',
-        })
+        }),
       ).toBeNull();
     });
 
     it('returns null for 404', async () => {
       httpMock.scope(baseUrl).get('/packages/some_package').reply(404);
       expect(
-        await getPkgReleases({ datasource, packageName: 'some_package' })
+        await getPkgReleases({ datasource, packageName: 'some_package' }),
       ).toBeNull();
     });
 
     it('returns null for 401', async () => {
       httpMock.scope(baseUrl).get('/packages/some_package').reply(401);
       expect(
-        await getPkgReleases({ datasource, packageName: 'some_package' })
+        await getPkgReleases({ datasource, packageName: 'some_package' }),
       ).toBeNull();
     });
 
     it('throws for 429', async () => {
       httpMock.scope(baseUrl).get('/packages/some_crate').reply(429);
       await expect(
-        getPkgReleases({ datasource, packageName: 'some_crate' })
+        getPkgReleases({ datasource, packageName: 'some_crate' }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
     it('throws for 5xx', async () => {
       httpMock.scope(baseUrl).get('/packages/some_crate').reply(502);
       await expect(
-        getPkgReleases({ datasource, packageName: 'some_crate' })
+        getPkgReleases({ datasource, packageName: 'some_crate' }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
     it('returns null for unknown error', async () => {
       httpMock.scope(baseUrl).get('/packages/some_package').replyWithError('');
       expect(
-        await getPkgReleases({ datasource, packageName: 'some_package' })
+        await getPkgReleases({ datasource, packageName: 'some_package' }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/hex/index.ts b/lib/modules/datasource/hex/index.ts
index a76ec9367cb59fea74778bbf6fd0a5a4c9602245..8e45bd491ab6c3d1de5853502b521b31d2fde568 100644
--- a/lib/modules/datasource/hex/index.ts
+++ b/lib/modules/datasource/hex/index.ts
@@ -71,7 +71,7 @@ export class HexDatasource extends Datasource {
               version,
               releaseTimestamp: inserted_at,
             }
-          : { version }
+          : { version },
       ),
     };
 
diff --git a/lib/modules/datasource/hexpm-bob/index.spec.ts b/lib/modules/datasource/hexpm-bob/index.spec.ts
index 6f494597d0333bf8b8cf9b1f0638cd955ba5a38e..d2efe89c3fc31ae742236475b340e56d6810b1b3 100644
--- a/lib/modules/datasource/hexpm-bob/index.spec.ts
+++ b/lib/modules/datasource/hexpm-bob/index.spec.ts
@@ -15,7 +15,7 @@ describe('modules/datasource/hexpm-bob/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'elixir',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -28,7 +28,7 @@ describe('modules/datasource/hexpm-bob/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'elixir',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -41,7 +41,7 @@ describe('modules/datasource/hexpm-bob/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'elixir',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -54,7 +54,7 @@ describe('modules/datasource/hexpm-bob/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'elixir',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -67,7 +67,7 @@ describe('modules/datasource/hexpm-bob/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'elixir',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -174,7 +174,7 @@ describe('modules/datasource/hexpm-bob/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'invalid',
-        })
+        }),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/datasource/hexpm-bob/index.ts b/lib/modules/datasource/hexpm-bob/index.ts
index f1154cdf517332f2af2ad66cc9e259ea822316d2..5d1f0a89ee10862de51891fff1f1359e1cc31d73 100644
--- a/lib/modules/datasource/hexpm-bob/index.ts
+++ b/lib/modules/datasource/hexpm-bob/index.ts
@@ -41,7 +41,7 @@ export class HexpmBobDatasource extends Datasource {
 
     logger.trace(
       { registryUrl, packageName },
-      `fetching hex.pm bob ${packageName} release`
+      `fetching hex.pm bob ${packageName} release`,
     );
 
     const url = `${registryUrl!}/builds/${packageName}/builds.txt`;
@@ -88,7 +88,7 @@ export class HexpmBobDatasource extends Datasource {
 
   private static cleanVersion(
     version: string,
-    packageType: PackageType
+    packageType: PackageType,
   ): string {
     switch (packageType) {
       case 'elixir':
@@ -108,7 +108,7 @@ export class HexpmBobDatasource extends Datasource {
   }
 
   private static getPackageDetails(
-    packageType: PackageType
+    packageType: PackageType,
   ): Omit<ReleaseResult, 'releases'> {
     switch (packageType) {
       case 'elixir':
diff --git a/lib/modules/datasource/index.spec.ts b/lib/modules/datasource/index.spec.ts
index 28c673dc5d0a9d98b71ab8bbbe050810141ce70e..969ef5eb8244f486517954f2c887b0f7bd37b653 100644
--- a/lib/modules/datasource/index.spec.ts
+++ b/lib/modules/datasource/index.spec.ts
@@ -147,7 +147,7 @@ describe('modules/datasource/index', () => {
       const managerList = fs
         .readdirSync(__dirname, { withFileTypes: true })
         .filter(
-          (dirent) => dirent.isDirectory() && !dirent.name.startsWith('_')
+          (dirent) => dirent.isDirectory() && !dirent.name.startsWith('_'),
         )
         .map((dirent) => dirent.name)
         .sort();
@@ -157,7 +157,7 @@ describe('modules/datasource/index', () => {
     it('validates datasource', () => {
       function validateDatasource(
         module: DatasourceApi,
-        name: string
+        name: string,
       ): boolean {
         if (!module.getReleases) {
           return false;
@@ -180,7 +180,7 @@ describe('modules/datasource/index', () => {
       const loadedDs = loadModules(
         __dirname,
         validateDatasource,
-        filterClassBasedDatasources
+        filterClassBasedDatasources,
       );
       expect(Array.from(dss.keys())).toEqual(Object.keys(loadedDs));
 
@@ -195,7 +195,7 @@ describe('modules/datasource/index', () => {
         await getPkgReleases({
           datasource: null as never, // #22198
           packageName: 'some/dep',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -205,7 +205,7 @@ describe('modules/datasource/index', () => {
         await getPkgReleases({
           datasource,
           packageName: null as never, // #22198
-        })
+        }),
       ).toBeNull();
     });
 
@@ -214,7 +214,7 @@ describe('modules/datasource/index', () => {
         await getPkgReleases({
           datasource: 'some-unknown-datasource',
           packageName: 'some/dep',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -233,7 +233,7 @@ describe('modules/datasource/index', () => {
 
       expect(logger.logger.warn).toHaveBeenCalledWith(
         { datasource: 'dummy', registryUrls, defaultRegistryUrls: undefined },
-        'Custom registries are not allowed for this datasource and will be ignored'
+        'Custom registries are not allowed for this datasource and will be ignored',
       );
       expect(res).toMatchObject({ releases: [{ version: '1.2.3' }] });
     });
@@ -261,7 +261,7 @@ describe('modules/datasource/index', () => {
       class TestDatasource extends DummyDatasource {
         override getDigest(
           config: DigestConfig,
-          newValue?: string
+          newValue?: string,
         ): Promise<string> {
           return Promise.resolve(config.packageName);
         }
@@ -273,7 +273,7 @@ describe('modules/datasource/index', () => {
           datasource,
           packageName: 'pkgName',
           replacementName: 'replacement',
-        })
+        }),
       ).toBe('replacement');
     });
   });
@@ -370,7 +370,7 @@ describe('modules/datasource/index', () => {
             sourceUrl: '   https://abc.com   ',
             releases: [{ version: '1.0.0' }],
           },
-        })
+        }),
       );
       const res = await getPkgReleases({
         datasource,
@@ -387,7 +387,7 @@ describe('modules/datasource/index', () => {
             sourceUrl: 'scm:git@github.com:Jasig/cas.git',
             releases: [{ version: '1.0.0' }],
           },
-        })
+        }),
       );
       const res = await getPkgReleases({
         datasource,
@@ -457,7 +457,7 @@ describe('modules/datasource/index', () => {
               packageName: 'package',
               registryUrls,
             },
-            'Excess registryUrls found for datasource lookup - using first configured only'
+            'Excess registryUrls found for datasource lookup - using first configured only',
           );
         });
 
@@ -478,7 +478,7 @@ describe('modules/datasource/index', () => {
           expect(res).toBeNull();
           expect(logger.logger.warn).toHaveBeenCalledWith(
             { datasource, packageName, registryUrls },
-            'Excess registryUrls found for datasource lookup - using first configured only'
+            'Excess registryUrls found for datasource lookup - using first configured only',
           );
         });
       });
@@ -514,7 +514,7 @@ describe('modules/datasource/index', () => {
         beforeEach(() => {
           datasources.set(
             datasource,
-            new MergeRegistriesDatasource(registries)
+            new MergeRegistriesDatasource(registries),
           );
         });
 
@@ -583,7 +583,7 @@ describe('modules/datasource/index', () => {
                 'https://reg2.com',
                 'https://reg3.com',
               ],
-            })
+            }),
           ).rejects.toThrow(EXTERNAL_HOST_ERROR);
         });
 
@@ -593,7 +593,7 @@ describe('modules/datasource/index', () => {
               datasource,
               packageName,
               registryUrls: ['https://reg4.com', 'https://reg5.com'],
-            })
+            }),
           ).toBeNull();
         });
       });
@@ -658,7 +658,7 @@ describe('modules/datasource/index', () => {
           datasources.set(datasource, new HuntRegistriyDatasource(registries));
 
           await expect(
-            getPkgReleases({ datasource, packageName, registryUrls })
+            getPkgReleases({ datasource, packageName, registryUrls }),
           ).rejects.toThrow(EXTERNAL_HOST_ERROR);
         });
 
diff --git a/lib/modules/datasource/index.ts b/lib/modules/datasource/index.ts
index 29fc0a39a06d92d316b263ec500d30ecfb0e3aa9..743552e57ea2b87cdbcd99bf1825ee9f0d7ada76 100644
--- a/lib/modules/datasource/index.ts
+++ b/lib/modules/datasource/index.ts
@@ -50,7 +50,7 @@ function logError(datasource: string, packageName: string, err: any): void {
   } else if (errCode) {
     logger.debug(
       { datasource, packageName, url, errCode },
-      'Datasource connection error'
+      'Datasource connection error',
     );
   } else {
     logger.debug({ datasource, packageName, err }, 'Datasource unknown error');
@@ -60,13 +60,13 @@ function logError(datasource: string, packageName: string, err: any): void {
 async function getRegistryReleases(
   datasource: DatasourceApi,
   config: GetReleasesConfig,
-  registryUrl: string
+  registryUrl: string,
 ): Promise<ReleaseResult | null> {
   const cacheKey = `${datasource.id} ${registryUrl} ${config.packageName}`;
   if (datasource.caching) {
     const cachedResult = await packageCache.get<ReleaseResult>(
       cacheNamespace,
-      cacheKey
+      cacheKey,
     );
     // istanbul ignore if
     if (cachedResult) {
@@ -90,7 +90,7 @@ async function getRegistryReleases(
 function firstRegistry(
   config: GetReleasesInternalConfig,
   datasource: DatasourceApi,
-  registryUrls: string[]
+  registryUrls: string[],
 ): Promise<ReleaseResult | null> {
   if (registryUrls.length > 1) {
     logger.warn(
@@ -99,7 +99,7 @@ function firstRegistry(
         packageName: config.packageName,
         registryUrls,
       },
-      'Excess registryUrls found for datasource lookup - using first configured only'
+      'Excess registryUrls found for datasource lookup - using first configured only',
     );
   }
   const registryUrl = registryUrls[0];
@@ -109,7 +109,7 @@ function firstRegistry(
 async function huntRegistries(
   config: GetReleasesInternalConfig,
   datasource: DatasourceApi,
-  registryUrls: string[]
+  registryUrls: string[],
 ): Promise<ReleaseResult | null> {
   let res: ReleaseResult | null = null;
   let caughtError: Error | undefined;
@@ -140,7 +140,7 @@ async function huntRegistries(
 async function mergeRegistries(
   config: GetReleasesInternalConfig,
   datasource: DatasourceApi,
-  registryUrls: string[]
+  registryUrls: string[],
 ): Promise<ReleaseResult | null> {
   let combinedRes: ReleaseResult | undefined;
   let caughtError: Error | undefined;
@@ -200,7 +200,7 @@ function resolveRegistryUrls(
   datasource: DatasourceApi,
   defaultRegistryUrls: string[] | undefined,
   registryUrls: string[] | undefined | null,
-  additionalRegistryUrls: string[] | undefined
+  additionalRegistryUrls: string[] | undefined,
 ): string[] {
   if (!datasource.customRegistrySupport) {
     if (
@@ -215,7 +215,7 @@ function resolveRegistryUrls(
           defaultRegistryUrls,
           additionalRegistryUrls,
         },
-        'Custom registries are not allowed for this datasource and will be ignored'
+        'Custom registries are not allowed for this datasource and will be ignored',
       );
     }
     return is.function_(datasource.defaultRegistryUrls)
@@ -240,7 +240,7 @@ function resolveRegistryUrls(
 }
 
 function applyReplacements(
-  config: GetReleasesInternalConfig
+  config: GetReleasesInternalConfig,
 ): Pick<ReleaseResult, 'replacementName' | 'replacementVersion'> | undefined {
   if (config.replacementName && config.replacementVersion) {
     return {
@@ -252,7 +252,7 @@ function applyReplacements(
 }
 
 async function fetchReleases(
-  config: GetReleasesInternalConfig
+  config: GetReleasesInternalConfig,
 ): Promise<ReleaseResult | null> {
   const { datasource: datasourceName } = config;
   let { registryUrls } = config;
@@ -279,7 +279,7 @@ async function fetchReleases(
     datasource,
     config.defaultRegistryUrls,
     registryUrls,
-    config.additionalRegistryUrls
+    config.additionalRegistryUrls,
   );
   let dep: ReleaseResult | null = null;
   const registryStrategy = datasource.registryStrategy ?? 'hunt';
@@ -313,11 +313,11 @@ async function fetchReleases(
 }
 
 function fetchCachedReleases(
-  config: GetReleasesInternalConfig
+  config: GetReleasesInternalConfig,
 ): Promise<ReleaseResult | null> {
   const { datasource, packageName, registryUrls } = config;
   const cacheKey = `${cacheNamespace}${datasource}${packageName}${String(
-    registryUrls
+    registryUrls,
   )}`;
   // By returning a Promise and reusing it, we should only fetch each package at most once
   const cachedResult = memCache.get<Promise<ReleaseResult | null>>(cacheKey);
@@ -331,7 +331,7 @@ function fetchCachedReleases(
 }
 
 export function getRawPkgReleases(
-  config: GetPkgReleasesConfig
+  config: GetPkgReleasesConfig,
 ): AsyncResult<
   ReleaseResult,
   Error | 'no-datasource' | 'no-package-name' | 'no-result'
@@ -360,14 +360,14 @@ export function getRawPkgReleases(
 
 export function applyDatasourceFilters(
   releaseResult: ReleaseResult,
-  config: GetPkgReleasesConfig
+  config: GetPkgReleasesConfig,
 ): ReleaseResult {
   let res = releaseResult;
   res = applyExtractVersion(res, config.extractVersion);
   res = applyVersionCompatibility(
     res,
     config.versionCompatibility,
-    config.currentCompatibility
+    config.currentCompatibility,
   );
   res = filterValidVersions(res, config);
   res = sortAndRemoveDuplicates(res, config);
@@ -376,7 +376,7 @@ export function applyDatasourceFilters(
 }
 
 export async function getPkgReleases(
-  config: GetPkgReleasesConfig
+  config: GetPkgReleasesConfig,
 ): Promise<ReleaseResult | null> {
   const { val = null, err } = await getRawPkgReleases(config)
     .transform((res) => applyDatasourceFilters(res, config))
@@ -396,7 +396,7 @@ export function supportsDigests(datasource: string | undefined): boolean {
 
 function getDigestConfig(
   datasource: DatasourceApi,
-  config: GetDigestInputConfig
+  config: GetDigestInputConfig,
 ): DigestConfig {
   const { currentValue, currentDigest } = config;
   const packageName = config.replacementName ?? config.packageName;
@@ -404,14 +404,14 @@ function getDigestConfig(
     datasource,
     config.defaultRegistryUrls,
     config.registryUrls,
-    config.additionalRegistryUrls
+    config.additionalRegistryUrls,
   );
   return { packageName, registryUrl, currentValue, currentDigest };
 }
 
 export function getDigest(
   config: GetDigestInputConfig,
-  value?: string
+  value?: string,
 ): Promise<string | null> {
   const datasource = getDatasourceFor(config.datasource);
   // istanbul ignore if: need test
@@ -423,10 +423,10 @@ export function getDigest(
 }
 
 export function getDefaultConfig(
-  datasource: string
+  datasource: string,
 ): Promise<Record<string, unknown>> {
   const loadedDatasource = getDatasourceFor(datasource);
   return Promise.resolve<Record<string, unknown>>(
-    loadedDatasource?.defaultConfig ?? Object.create({})
+    loadedDatasource?.defaultConfig ?? Object.create({}),
   );
 }
diff --git a/lib/modules/datasource/java-version/index.spec.ts b/lib/modules/datasource/java-version/index.spec.ts
index a383933bde5187beef7053406f869422091b616b..c32eaeb0c222bf361d8beb24e4fbdc3b8157308d 100644
--- a/lib/modules/datasource/java-version/index.spec.ts
+++ b/lib/modules/datasource/java-version/index.spec.ts
@@ -22,7 +22,7 @@ describe('modules/datasource/java-version/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -32,7 +32,7 @@ describe('modules/datasource/java-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -42,7 +42,7 @@ describe('modules/datasource/java-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -55,7 +55,7 @@ describe('modules/datasource/java-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -65,7 +65,7 @@ describe('modules/datasource/java-version/index', () => {
         getPkgReleases({
           datasource,
           packageName,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
diff --git a/lib/modules/datasource/java-version/index.ts b/lib/modules/datasource/java-version/index.ts
index e52d087449b651c9fe1a9df615da3d8b7693dcf9..1e1d68d4a2d7e9a0717c3d513525d931a7b38dea 100644
--- a/lib/modules/datasource/java-version/index.ts
+++ b/lib/modules/datasource/java-version/index.ts
@@ -27,7 +27,7 @@ export class JavaVersionDatasource extends Datasource {
 
   private async getPageReleases(
     url: string,
-    page: number
+    page: number,
   ): Promise<Release[] | null> {
     const pgUrl = `${url}&page=${page}`;
     try {
@@ -64,7 +64,7 @@ export class JavaVersionDatasource extends Datasource {
     const imageType = getImageType(packageName);
     logger.trace(
       { registryUrl, packageName, imageType },
-      'fetching java release'
+      'fetching java release',
     );
     // TODO: types (#22198)
     const url = `${registryUrl!}v3/info/release_versions?page_size=${pageSize}&image_type=${imageType}&project=jdk&release_type=ga&sort_method=DATE&sort_order=DESC`;
diff --git a/lib/modules/datasource/jenkins-plugins/index.ts b/lib/modules/datasource/jenkins-plugins/index.ts
index d663c14c7d329ea72374a5608ee601a372285069..699c74439b86b78393ba1266b30483c4e0957cf2 100644
--- a/lib/modules/datasource/jenkins-plugins/index.ts
+++ b/lib/modules/datasource/jenkins-plugins/index.ts
@@ -54,11 +54,11 @@ export class JenkinsPluginsDatasource extends Datasource {
     ttlMinutes: 1440,
   })
   async getJenkinsPluginInfo(
-    updateSiteUrl: string
+    updateSiteUrl: string,
   ): Promise<Record<string, ReleaseResult>> {
     const { plugins } =
       await this.getJenkinsUpdateCenterResponse<JenkinsPluginsInfoResponse>(
-        `${updateSiteUrl}${JenkinsPluginsDatasource.packageInfoPath}`
+        `${updateSiteUrl}${JenkinsPluginsDatasource.packageInfoPath}`,
       );
 
     const info: Record<string, ReleaseResult> = {};
@@ -73,11 +73,11 @@ export class JenkinsPluginsDatasource extends Datasource {
 
   @cache({ namespace: JenkinsPluginsDatasource.id, key: 'versions' })
   async getJenkinsPluginVersions(
-    updateSiteUrl: string
+    updateSiteUrl: string,
   ): Promise<Record<string, Release[]>> {
     const { plugins } =
       await this.getJenkinsUpdateCenterResponse<JenkinsPluginsVersionsResponse>(
-        `${updateSiteUrl}${JenkinsPluginsDatasource.packageVersionsPath}`
+        `${updateSiteUrl}${JenkinsPluginsDatasource.packageVersionsPath}`,
       );
 
     const versions: Record<string, Release[]> = {};
@@ -108,7 +108,7 @@ export class JenkinsPluginsDatasource extends Datasource {
       const durationMs = Math.round(Date.now() - startTime);
       logger.debug(
         { durationMs },
-        `jenkins-plugins: Fetched Jenkins plugins from ${url}`
+        `jenkins-plugins: Fetched Jenkins plugins from ${url}`,
       );
     } catch (err) /* istanbul ignore next */ {
       this.handleGenericErrors(err);
diff --git a/lib/modules/datasource/kubernetes-api/index.ts b/lib/modules/datasource/kubernetes-api/index.ts
index 30c130973d6501ae49255101575233c04db8ff8b..d84b8f4ad3b8a76542eeee499101f5d42cfb9fec 100644
--- a/lib/modules/datasource/kubernetes-api/index.ts
+++ b/lib/modules/datasource/kubernetes-api/index.ts
@@ -5,7 +5,7 @@ import { Datasource } from '../datasource';
 import type { GetReleasesConfig, ReleaseResult } from '../types';
 
 const apiData: Record<string, string[]> = JSON5.parse(
-  dataFiles.get('data/kubernetes-api.json5')!
+  dataFiles.get('data/kubernetes-api.json5')!,
 );
 
 export const supportedApis = new Set(Object.keys(apiData));
diff --git a/lib/modules/datasource/maven/index.spec.ts b/lib/modules/datasource/maven/index.spec.ts
index ab4741d35f3db848033459202a4f3bb71c33fd55..f0d931a336450e9bbd5728e6b4f090e1d9256654 100644
--- a/lib/modules/datasource/maven/index.spec.ts
+++ b/lib/modules/datasource/maven/index.spec.ts
@@ -102,8 +102,8 @@ function mockGenericPackage(opts: MockOpts = {}) {
         .get(
           `/${packagePath}/${latest}/${artifact}-${latest.replace(
             '-SNAPSHOT',
-            ''
-          )}-20200101.${major}${minor}${patch}-${parseInt(patch, 10)}.pom`
+            '',
+          )}-20200101.${major}${minor}${patch}-${parseInt(patch, 10)}.pom`,
         )
         .reply(200, pom);
     } else {
@@ -155,14 +155,14 @@ function mockGenericPackage(opts: MockOpts = {}) {
               snapshot.version
             }/${artifact}-${snapshot.version.replace(
               '-SNAPSHOT',
-              ''
-            )}-20200101.${major}${minor}${patch}-${parseInt(patch, 10)}.pom`
+              '',
+            )}-20200101.${major}${minor}${patch}-${parseInt(patch, 10)}.pom`,
           )
           .reply(snapshot.jarStatus, '', { 'Last-Modified': timestamp });
       } else {
         scope
           .head(
-            `/${packagePath}/${snapshot.version}/${artifact}-${snapshot.version}.pom`
+            `/${packagePath}/${snapshot.version}/${artifact}-${snapshot.version}.pom`,
           )
           .reply(404, '');
       }
@@ -311,7 +311,7 @@ describe('modules/datasource/maven/index', () => {
       'https://unauthorized_repo/',
       'https://empty_repo',
       'https://unknown_error',
-      baseUrl
+      baseUrl,
     );
 
     expect(res).toMatchSnapshot();
@@ -333,7 +333,7 @@ describe('modules/datasource/maven/index', () => {
     const res = await get(
       'org.example:package',
       'ftp://protocol_error_repo',
-      base
+      base,
     );
 
     expect(res?.releases).toMatchSnapshot();
@@ -349,7 +349,7 @@ describe('modules/datasource/maven/index', () => {
     const res = await get(
       'org.example:package',
       'https://invalid_metadata_repo',
-      baseUrl
+      baseUrl,
     );
 
     expect(res).toMatchSnapshot();
@@ -365,7 +365,7 @@ describe('modules/datasource/maven/index', () => {
     const res = await get(
       'org.example:package',
       'https://invalid_metadata_repo',
-      baseUrl
+      baseUrl,
     );
 
     expect(res).toMatchSnapshot();
@@ -385,7 +385,7 @@ describe('modules/datasource/maven/index', () => {
     const res = await get(
       'org.example:package',
 
-      '${project.baseUri}../../repository/'
+      '${project.baseUri}../../repository/',
     );
     expect(res).toBeNull();
   });
@@ -457,7 +457,7 @@ describe('modules/datasource/maven/index', () => {
         .get(path)
         .matchHeader(
           'authorization',
-          'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg=='
+          'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg==',
         )
         .reply(200, Fixtures.get('metadata.xml'));
     }
@@ -467,14 +467,14 @@ describe('modules/datasource/maven/index', () => {
       .get(pomfilePath)
       .matchHeader(
         'authorization',
-        'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg=='
+        'Basic b2F1dGgyYWNjZXNzdG9rZW46c29tZS10b2tlbg==',
       )
       .reply(200, Fixtures.get('pom.xml'));
 
     googleAuth.mockImplementation(
       jest.fn().mockImplementation(() => ({
         getAccessToken: jest.fn().mockResolvedValue('some-token'),
-      }))
+      })),
     );
 
     const res = await get('org.example:package', baseUrlAR);
@@ -525,7 +525,7 @@ describe('modules/datasource/maven/index', () => {
     googleAuth.mockImplementation(
       jest.fn().mockImplementation(() => ({
         getAccessToken: jest.fn().mockResolvedValue(undefined),
-      }))
+      })),
     );
 
     const res = await get('org.example:package', baseUrlAR);
diff --git a/lib/modules/datasource/maven/index.ts b/lib/modules/datasource/maven/index.ts
index f4e2862b78134c460fa5b8b3a69fbd803a74f47c..7e9f3e5faf74a87de82bf919a8095b2d02d772ac 100644
--- a/lib/modules/datasource/maven/index.ts
+++ b/lib/modules/datasource/maven/index.ts
@@ -39,7 +39,7 @@ function getLatestSuitableVersion(releases: Release[]): string | null {
   return versions.reduce((latestVersion, version) =>
     compare(version, latestVersion) === 1
       ? version
-      : /* istanbul ignore next: hard to test */ latestVersion
+      : /* istanbul ignore next: hard to test */ latestVersion,
   );
 }
 
@@ -54,7 +54,7 @@ function extractVersions(metadata: XmlDocument): string[] {
 
 const mavenCentralHtmlVersionRegex = regEx(
   '^<a href="(?<version>[^"]+)/" title="(?:[^"]+)/">(?:[^"]+)/</a>\\s+(?<releaseTimestamp>\\d\\d\\d\\d-\\d\\d-\\d\\d \\d\\d:\\d\\d)\\s+-$',
-  'i'
+  'i',
 );
 
 export const defaultRegistryUrls = [MAVEN_REPO];
@@ -74,7 +74,7 @@ export class MavenDatasource extends Datasource {
 
   async fetchReleasesFromMetadata(
     dependency: MavenDependency,
-    repoUrl: string
+    repoUrl: string,
   ): Promise<ReleaseMap> {
     const metadataUrl = getMavenUrl(dependency, repoUrl, 'maven-metadata.xml');
 
@@ -82,7 +82,7 @@ export class MavenDatasource extends Datasource {
     const cacheKey = metadataUrl.toString();
     const cachedVersions = await packageCache.get<ReleaseMap>(
       cacheNamespace,
-      cacheKey
+      cacheKey,
     );
     /* istanbul ignore if */
     if (cachedVersions) {
@@ -91,7 +91,7 @@ export class MavenDatasource extends Datasource {
 
     const { isCacheable, xml: mavenMetadata } = await downloadMavenXml(
       this.http,
-      metadataUrl
+      metadataUrl,
     );
     if (!mavenMetadata) {
       return {};
@@ -100,7 +100,7 @@ export class MavenDatasource extends Datasource {
     const versions = extractVersions(mavenMetadata);
     const releaseMap = versions.reduce(
       (acc, version) => ({ ...acc, [version]: null }),
-      {}
+      {},
     );
     if (isCacheable) {
       await packageCache.set(cacheNamespace, cacheKey, releaseMap, 30);
@@ -111,13 +111,13 @@ export class MavenDatasource extends Datasource {
   async addReleasesFromIndexPage(
     inputReleaseMap: ReleaseMap,
     dependency: MavenDependency,
-    repoUrl: string
+    repoUrl: string,
   ): Promise<ReleaseMap> {
     const cacheNs = 'datasource-maven:index-html-releases';
     const cacheKey = `${repoUrl}${dependency.dependencyUrl}`;
     let workingReleaseMap = await packageCache.get<ReleaseMap>(
       cacheNs,
-      cacheKey
+      cacheKey,
     );
     if (!workingReleaseMap) {
       workingReleaseMap = {};
@@ -138,7 +138,7 @@ export class MavenDatasource extends Datasource {
                   'yyyy-MM-dd HH:mm',
                   {
                     zone: 'UTC',
-                  }
+                  },
                 );
                 if (date.isValid) {
                   const releaseTimestamp = date.toISO();
@@ -152,7 +152,7 @@ export class MavenDatasource extends Datasource {
         retryEarlier = true;
         logger.debug(
           { dependency, err },
-          'Failed to get releases from index.html'
+          'Failed to get releases from index.html',
         );
       }
       const cacheTTL = retryEarlier
@@ -203,7 +203,7 @@ export class MavenDatasource extends Datasource {
   async addReleasesUsingHeadRequests(
     inputReleaseMap: ReleaseMap,
     dependency: MavenDependency,
-    repoUrl: string
+    repoUrl: string,
   ): Promise<ReleaseMap> {
     const releaseMap = { ...inputReleaseMap };
 
@@ -253,7 +253,7 @@ export class MavenDatasource extends Datasource {
           this.http,
           version,
           dependency,
-          repoUrl
+          repoUrl,
         );
         const artifactUrl = getMavenUrl(dependency, repoUrl, pomUrl);
         const release: Release = { version };
@@ -312,12 +312,12 @@ export class MavenDatasource extends Datasource {
     releaseMap = await this.addReleasesFromIndexPage(
       releaseMap,
       dependency,
-      repoUrl
+      repoUrl,
     );
     releaseMap = await this.addReleasesUsingHeadRequests(
       releaseMap,
       dependency,
-      repoUrl
+      repoUrl,
     );
     const releases = this.getReleasesFromMap(releaseMap);
     if (!releases?.length) {
@@ -325,7 +325,7 @@ export class MavenDatasource extends Datasource {
     }
 
     logger.debug(
-      `Found ${releases.length} new releases for ${dependency.display} in repository ${repoUrl}`
+      `Found ${releases.length} new releases for ${dependency.display} in repository ${repoUrl}`,
     );
 
     const latestSuitableVersion = getLatestSuitableVersion(releases);
@@ -335,7 +335,7 @@ export class MavenDatasource extends Datasource {
         this.http,
         dependency,
         repoUrl,
-        latestSuitableVersion
+        latestSuitableVersion,
       ));
 
     return { ...dependency, ...dependencyInfo, releases };
diff --git a/lib/modules/datasource/maven/s3.spec.ts b/lib/modules/datasource/maven/s3.spec.ts
index a73c29a17c944e5e381c2e1ca1e73814dead25de..68a88161a48cb60747164145120211ddd11a045f 100644
--- a/lib/modules/datasource/maven/s3.spec.ts
+++ b/lib/modules/datasource/maven/s3.spec.ts
@@ -116,7 +116,7 @@ describe('modules/datasource/maven/s3', () => {
           {
             failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
           },
-          'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars'
+          'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars',
         );
       });
 
@@ -135,7 +135,7 @@ describe('modules/datasource/maven/s3', () => {
           {
             failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
           },
-          'Dependency lookup failed. Please a correct AWS_REGION env var'
+          'Dependency lookup failed. Please a correct AWS_REGION env var',
         );
       });
 
@@ -154,7 +154,7 @@ describe('modules/datasource/maven/s3', () => {
           {
             failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
           },
-          'S3 url not found'
+          'S3 url not found',
         );
       });
 
@@ -173,7 +173,7 @@ describe('modules/datasource/maven/s3', () => {
           {
             failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
           },
-          'S3 url not found'
+          'S3 url not found',
         );
       });
 
@@ -193,7 +193,7 @@ describe('modules/datasource/maven/s3', () => {
             failedUrl: 's3://repobucket/org/example/package/maven-metadata.xml',
             message: 'Unknown error',
           },
-          'Unknown S3 download error'
+          'Unknown S3 download error',
         );
       });
 
@@ -206,7 +206,7 @@ describe('modules/datasource/maven/s3', () => {
           .resolvesOnce({});
         expect(await get('org.example:package', baseUrlS3)).toBeNull();
         expect(logger.debug).toHaveBeenCalledWith(
-          "Expecting Readable response type got 'undefined' type instead"
+          "Expecting Readable response type got 'undefined' type instead",
         );
       });
     });
diff --git a/lib/modules/datasource/maven/util.spec.ts b/lib/modules/datasource/maven/util.spec.ts
index 557ab1eecd24733d3487bbdbce669a112417be40..eefe823637293d6e0117dccd0188554d0d13dcb3 100644
--- a/lib/modules/datasource/maven/util.spec.ts
+++ b/lib/modules/datasource/maven/util.spec.ts
@@ -15,7 +15,7 @@ describe('modules/datasource/maven/util', () => {
     it('returns empty object for unsupported protocols', async () => {
       const res = await downloadMavenXml(
         null as never, // #22198
-        parseUrl('unsupported://server.com/')
+        parseUrl('unsupported://server.com/'),
       );
       expect(res).toEqual({});
     });
@@ -23,7 +23,7 @@ describe('modules/datasource/maven/util', () => {
     it('returns empty object for invalid URLs', async () => {
       const res = await downloadMavenXml(
         null as never, // #22198
-        null
+        null,
       );
       expect(res).toEqual({});
     });
@@ -83,7 +83,7 @@ describe('modules/datasource/maven/util', () => {
     it('returns not found for unsupported protocols', async () => {
       const res = await checkResource(
         null as never, // #22198
-        'unsupported://server.com/'
+        'unsupported://server.com/',
       );
       expect(res).toBe('not-found');
     });
@@ -91,7 +91,7 @@ describe('modules/datasource/maven/util', () => {
     it('returns error for invalid URLs', async () => {
       const res = await checkResource(
         null as never, // #22198
-        'not-a-valid-url'
+        'not-a-valid-url',
       );
       expect(res).toBe('error');
     });
diff --git a/lib/modules/datasource/maven/util.ts b/lib/modules/datasource/maven/util.ts
index 9dee2d60fde052709ed836dbb3439091eb68a827..072699cc2cb2ab514c79db0024e684eba9199e7a 100644
--- a/lib/modules/datasource/maven/util.ts
+++ b/lib/modules/datasource/maven/util.ts
@@ -69,7 +69,7 @@ function isUnsupportedHostError(err: { name: string }): boolean {
 export async function downloadHttpProtocol(
   http: Http,
   pkgUrl: URL | string,
-  opts: HttpOptions & HttpRequestOptions<string> = {}
+  opts: HttpOptions & HttpRequestOptions<string> = {},
 ): Promise<Partial<HttpResponse>> {
   let raw: HttpResponse;
   try {
@@ -85,7 +85,7 @@ export async function downloadHttpProtocol(
       logger.debug(`Cannot connect to host ${failedUrl}`);
     } else if (isPermissionsIssue(err)) {
       logger.debug(
-        `Dependency lookup unauthorized. Please add authentication with a hostRule for ${failedUrl}`
+        `Dependency lookup unauthorized. Please add authentication with a hostRule for ${failedUrl}`,
       );
     } else if (isTemporalError(err)) {
       logger.debug({ failedUrl, err }, 'Temporary error');
@@ -119,26 +119,26 @@ export async function downloadS3Protocol(pkgUrl: URL): Promise<string | null> {
       return streamToString(res);
     }
     logger.debug(
-      `Expecting Readable response type got '${typeof res}' type instead`
+      `Expecting Readable response type got '${typeof res}' type instead`,
     );
   } catch (err) {
     const failedUrl = pkgUrl.toString();
     if (err.name === 'CredentialsProviderError') {
       logger.debug(
         { failedUrl },
-        'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars'
+        'Dependency lookup authorization failed. Please correct AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY env vars',
       );
     } else if (err.message === 'Region is missing') {
       logger.debug(
         { failedUrl },
-        'Dependency lookup failed. Please a correct AWS_REGION env var'
+        'Dependency lookup failed. Please a correct AWS_REGION env var',
       );
     } else if (isS3NotFound(err)) {
       logger.trace({ failedUrl }, `S3 url not found`);
     } else {
       logger.debug(
         { failedUrl, message: err.message },
-        'Unknown S3 download error'
+        'Unknown S3 download error',
       );
     }
   }
@@ -147,7 +147,7 @@ export async function downloadS3Protocol(pkgUrl: URL): Promise<string | null> {
 
 export async function downloadArtifactRegistryProtocol(
   http: Http,
-  pkgUrl: URL
+  pkgUrl: URL,
 ): Promise<Partial<HttpResponse>> {
   const opts: HttpOptions = {};
   const host = pkgUrl.host;
@@ -160,7 +160,7 @@ export async function downloadArtifactRegistryProtocol(
   } else {
     logger.once.debug(
       { host, path },
-      'Could not get Google access token, using no auth'
+      'Could not get Google access token, using no auth',
     );
   }
 
@@ -171,7 +171,7 @@ export async function downloadArtifactRegistryProtocol(
 
 async function checkHttpResource(
   http: Http,
-  pkgUrl: URL
+  pkgUrl: URL,
 ): Promise<HttpResourceCheckResult> {
   try {
     const res = await http.head(pkgUrl.toString());
@@ -194,14 +194,14 @@ async function checkHttpResource(
     const failedUrl = pkgUrl.toString();
     logger.debug(
       { failedUrl, statusCode: err.statusCode },
-      `Can't check HTTP resource existence`
+      `Can't check HTTP resource existence`,
     );
     return 'error';
   }
 }
 
 export async function checkS3Resource(
-  pkgUrl: URL
+  pkgUrl: URL,
 ): Promise<HttpResourceCheckResult> {
   try {
     const s3Url = parseS3Url(pkgUrl);
@@ -222,7 +222,7 @@ export async function checkS3Resource(
     } else {
       logger.debug(
         { pkgUrl, name: err.name, message: err.message },
-        `Can't check S3 resource existence`
+        `Can't check S3 resource existence`,
       );
     }
     return 'error';
@@ -231,7 +231,7 @@ export async function checkS3Resource(
 
 export async function checkResource(
   http: Http,
-  pkgUrl: URL | string
+  pkgUrl: URL | string,
 ): Promise<HttpResourceCheckResult> {
   const parsedUrl = typeof pkgUrl === 'string' ? parseUrl(pkgUrl) : pkgUrl;
   if (parsedUrl === null) {
@@ -246,7 +246,7 @@ export async function checkResource(
     default:
       logger.debug(
         { url: pkgUrl.toString() },
-        `Unsupported Maven protocol in check resource`
+        `Unsupported Maven protocol in check resource`,
       );
       return 'not-found';
   }
@@ -259,14 +259,14 @@ function containsPlaceholder(str: string): boolean {
 export function getMavenUrl(
   dependency: MavenDependency,
   repoUrl: string,
-  path: string
+  path: string,
 ): URL {
   return new URL(`${dependency.dependencyUrl}/${path}`, repoUrl);
 }
 
 export async function downloadMavenXml(
   http: Http,
-  pkgUrl: URL | null
+  pkgUrl: URL | null,
 ): Promise<MavenXml> {
   if (!pkgUrl) {
     return {};
@@ -304,7 +304,7 @@ export async function downloadMavenXml(
   if (!rawContent) {
     logger.debug(
       { url: pkgUrl.toString(), statusCode },
-      `Content is not found for Maven url`
+      `Content is not found for Maven url`,
     );
     return {};
   }
@@ -355,14 +355,14 @@ async function getSnapshotFullVersion(
   http: Http,
   version: string,
   dependency: MavenDependency,
-  repoUrl: string
+  repoUrl: string,
 ): Promise<string | null> {
   // To determine what actual files are available for the snapshot, first we have to fetch and parse
   // the metadata located at http://<repo>/<group>/<artifact>/<version-SNAPSHOT>/maven-metadata.xml
   const metadataUrl = getMavenUrl(
     dependency,
     repoUrl,
-    `${version}/maven-metadata.xml`
+    `${version}/maven-metadata.xml`,
   );
 
   const { xml: mavenMetadata } = await downloadMavenXml(http, metadataUrl);
@@ -384,7 +384,7 @@ export async function createUrlForDependencyPom(
   http: Http,
   version: string,
   dependency: MavenDependency,
-  repoUrl: string
+  repoUrl: string,
 ): Promise<string> {
   if (isSnapshotVersion(version)) {
     // By default, Maven snapshots are deployed to the repository with fixed file names.
@@ -393,7 +393,7 @@ export async function createUrlForDependencyPom(
       http,
       version,
       dependency,
-      repoUrl
+      repoUrl,
     );
 
     // If we were able to resolve the version, use that, otherwise fall back to using -SNAPSHOT
@@ -412,14 +412,14 @@ export async function getDependencyInfo(
   dependency: MavenDependency,
   repoUrl: string,
   version: string,
-  recursionLimit = 5
+  recursionLimit = 5,
 ): Promise<Partial<ReleaseResult>> {
   const result: Partial<ReleaseResult> = {};
   const path = await createUrlForDependencyPom(
     http,
     version,
     dependency,
-    repoUrl
+    repoUrl,
   );
 
   const pomUrl = getMavenUrl(dependency, repoUrl, path);
@@ -466,7 +466,7 @@ export async function getDependencyInfo(
         parentDependency,
         repoUrl,
         parentVersion,
-        recursionLimit - 1
+        recursionLimit - 1,
       );
       if (!result.sourceUrl && parentInformation.sourceUrl) {
         result.sourceUrl = parentInformation.sourceUrl;
diff --git a/lib/modules/datasource/metadata.spec.ts b/lib/modules/datasource/metadata.spec.ts
index 36661f2490078e4012cb8ffcc8f87cafd5bcc43e..907825ee4da9183a271c8959b8be2d3db5170199 100644
--- a/lib/modules/datasource/metadata.spec.ts
+++ b/lib/modules/datasource/metadata.spec.ts
@@ -97,7 +97,7 @@ describe('modules/datasource/metadata', () => {
       expect(dep).toMatchObject({
         sourceUrl: expectedSourceUrl,
       });
-    }
+    },
   );
 
   it.each`
@@ -119,7 +119,7 @@ describe('modules/datasource/metadata', () => {
       addMetaData(dep, datasource, packageName);
       expect(dep.sourceDirectory).toBeUndefined();
       expect(dep).toMatchObject({ sourceUrl });
-    }
+    },
   );
 
   it('Should not overwrite any existing sourceDirectory', () => {
@@ -340,31 +340,31 @@ describe('modules/datasource/metadata', () => {
 
   it('Should massage github git@ url to valid https url', () => {
     expect(massageGithubUrl('git@example.com:foo/bar')).toMatch(
-      'https://example.com/foo/bar'
+      'https://example.com/foo/bar',
     );
   });
 
   it('Should massage github http url to valid https url', () => {
     expect(massageGithubUrl('http://example.com/foo/bar')).toMatch(
-      'https://example.com/foo/bar'
+      'https://example.com/foo/bar',
     );
   });
 
   it('Should massage github http and git url to valid https url', () => {
     expect(massageGithubUrl('http+git://example.com/foo/bar')).toMatch(
-      'https://example.com/foo/bar'
+      'https://example.com/foo/bar',
     );
   });
 
   it('Should massage github ssh git@ url to valid https url', () => {
     expect(massageGithubUrl('ssh://git@example.com/foo/bar')).toMatch(
-      'https://example.com/foo/bar'
+      'https://example.com/foo/bar',
     );
   });
 
   it('Should massage github git url to valid https url', () => {
     expect(massageGithubUrl('git://example.com/foo/bar')).toMatch(
-      'https://example.com/foo/bar'
+      'https://example.com/foo/bar',
     );
   });
 
@@ -502,7 +502,7 @@ describe('modules/datasource/metadata', () => {
     'shouldDeleteHomepage($sourceUrl, $homepage) -> $expected',
     ({ sourceUrl, homepage, expected }) => {
       expect(shouldDeleteHomepage(sourceUrl, homepage)).toBe(expected);
-    }
+    },
   );
 
   // for coverage
@@ -528,13 +528,13 @@ describe('modules/datasource/metadata', () => {
 
     it('works for string input', () => {
       expect(normalizeDate('2021-01-01')).toBe(
-        new Date('2021-01-01').toISOString()
+        new Date('2021-01-01').toISOString(),
       );
     });
 
     it('works for Date instance', () => {
       expect(normalizeDate(new Date('2021-01-01'))).toBe(
-        new Date('2021-01-01').toISOString()
+        new Date('2021-01-01').toISOString(),
       );
     });
   });
diff --git a/lib/modules/datasource/metadata.ts b/lib/modules/datasource/metadata.ts
index ce7f2132641d0aef6728d734e7092d16948b1264..523677e9b3b42959503d3da46a01e512935dfdd3 100644
--- a/lib/modules/datasource/metadata.ts
+++ b/lib/modules/datasource/metadata.ts
@@ -116,7 +116,7 @@ function massageTimestamps(dep: ReleaseResult): void {
 export function addMetaData(
   dep: ReleaseResult,
   datasource: string,
-  packageName: string
+  packageName: string,
 ): void {
   massageTimestamps(dep);
 
@@ -205,7 +205,7 @@ export function addMetaData(
  */
 export function shouldDeleteHomepage(
   sourceUrl: string | null | undefined,
-  homepage: string | undefined
+  homepage: string | undefined,
 ): boolean {
   if (is.nullOrUndefined(sourceUrl) || is.undefined(homepage)) {
     return false;
diff --git a/lib/modules/datasource/node-version/index.spec.ts b/lib/modules/datasource/node-version/index.spec.ts
index ac1934ce3317c7895affb47d20d4cbfe6fefb046..905f365d9fe84f33a39887c1c25b6b565929b5a6 100644
--- a/lib/modules/datasource/node-version/index.spec.ts
+++ b/lib/modules/datasource/node-version/index.spec.ts
@@ -12,7 +12,7 @@ describe('modules/datasource/node-version/index', () => {
         getPkgReleases({
           datasource,
           packageName: 'node',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -25,7 +25,7 @@ describe('modules/datasource/node-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'node',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -35,7 +35,7 @@ describe('modules/datasource/node-version/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'node',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/node-version/index.ts b/lib/modules/datasource/node-version/index.ts
index 1fc805d04d162e7f4d52843794d51e93f4018358..f93b88bfa8716dc85758236d9d6cb1d38882f40a 100644
--- a/lib/modules/datasource/node-version/index.ts
+++ b/lib/modules/datasource/node-version/index.ts
@@ -42,7 +42,7 @@ export class NodeVersionDatasource extends Datasource {
     try {
       const resp = (
         await this.http.getJson<NodeRelease[]>(
-          joinUrlParts(registryUrl, 'index.json')
+          joinUrlParts(registryUrl, 'index.json'),
         )
       ).body;
       result.releases.push(
@@ -50,7 +50,7 @@ export class NodeVersionDatasource extends Datasource {
           version,
           releaseTimestamp: date,
           isStable: lts !== false,
-        }))
+        })),
       );
     } catch (err) {
       this.handleGenericErrors(err);
diff --git a/lib/modules/datasource/npm/get.spec.ts b/lib/modules/datasource/npm/get.spec.ts
index 907ba35ab9d8a20a23fe02f710f9b06886ec2f8a..b101911166ee7b0f14454365d636d49c1fb84831 100644
--- a/lib/modules/datasource/npm/get.spec.ts
+++ b/lib/modules/datasource/npm/get.spec.ts
@@ -235,7 +235,7 @@ describe('modules/datasource/npm/get', () => {
       .reply(200, 'not-a-json');
     registryUrl = resolveRegistryUrl('npm-parse-error');
     await expect(
-      getDependency(http, registryUrl, 'npm-parse-error')
+      getDependency(http, registryUrl, 'npm-parse-error'),
     ).rejects.toThrow(ExternalHostError);
 
     httpMock
@@ -266,7 +266,7 @@ describe('modules/datasource/npm/get', () => {
     const dep = await getDependency(http, registryUrl, '@neutrinojs/react');
 
     expect(dep?.sourceUrl).toBe(
-      'https://github.com/neutrinojs/neutrino/tree/master/packages/react'
+      'https://github.com/neutrinojs/neutrino/tree/master/packages/react',
     );
     expect(dep?.sourceDirectory).toBeUndefined();
 
@@ -305,7 +305,7 @@ describe('modules/datasource/npm/get', () => {
     const dep = await getDependency(http, registryUrl, '@neutrinojs/react');
 
     expect(dep?.sourceUrl).toBe(
-      'https://github.com/neutrinojs/neutrino/tree/master/packages/react'
+      'https://github.com/neutrinojs/neutrino/tree/master/packages/react',
     );
     expect(dep?.sourceDirectory).toBeUndefined();
   });
@@ -347,7 +347,7 @@ describe('modules/datasource/npm/get', () => {
     expect(dep?.sourceUrl).toBe('https://github.com/vuejs/vue.git');
     expect(dep?.releases[0].sourceUrl).toBeUndefined();
     expect(dep?.releases[1].sourceUrl).toBe(
-      'https://github.com/vuejs/vue-next.git'
+      'https://github.com/vuejs/vue-next.git',
     );
   });
 
@@ -412,7 +412,7 @@ describe('modules/datasource/npm/get', () => {
     const dep = await getDependency(http, registryUrl, '@neutrinojs/react');
 
     expect(dep?.sourceUrl).toBe(
-      'https://github.com/neutrinojs/neutrino/tree/master/packages/react'
+      'https://github.com/neutrinojs/neutrino/tree/master/packages/react',
     );
     expect(dep?.sourceDirectory).toBe('packages/foo');
 
@@ -452,7 +452,7 @@ describe('modules/datasource/npm/get', () => {
     const dep = await getDependency(http, registryUrl, '@neutrinojs/react');
 
     expect(dep?.sourceUrl).toBe(
-      'https://bitbucket.org/neutrinojs/neutrino/tree/master/packages/react'
+      'https://bitbucket.org/neutrinojs/neutrino/tree/master/packages/react',
     );
     expect(dep?.sourceDirectory).toBeUndefined();
 
diff --git a/lib/modules/datasource/npm/get.ts b/lib/modules/datasource/npm/get.ts
index 611ee6d491981365a4e54f3ffb11c6b299671fdd..620942534a7f8e3db262f3c859ff40f4d12335fc 100644
--- a/lib/modules/datasource/npm/get.ts
+++ b/lib/modules/datasource/npm/get.ts
@@ -15,7 +15,7 @@ import type { Release, ReleaseResult } from '../types';
 import type { CachedReleaseResult, NpmResponse } from './types';
 
 const SHORT_REPO_REGEX = regEx(
-  /^((?<platform>bitbucket|github|gitlab):)?(?<shortRepo>[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+)$/
+  /^((?<platform>bitbucket|github|gitlab):)?(?<shortRepo>[A-Za-z0-9_.-]+\/[A-Za-z0-9_.-]+)$/,
 );
 
 const platformMapping: Record<string, string> = {
@@ -70,7 +70,7 @@ const PackageSource = z
 export async function getDependency(
   http: Http,
   registryUrl: string,
-  packageName: string
+  packageName: string,
 ): Promise<ReleaseResult | null> {
   logger.trace(`npm.getDependency(${packageName})`);
 
@@ -80,12 +80,12 @@ export async function getDependency(
   const cacheNamespace = 'datasource-npm:data';
   const cachedResult = await packageCache.get<CachedReleaseResult>(
     cacheNamespace,
-    packageUrl
+    packageUrl,
   );
   if (cachedResult) {
     if (cachedResult.cacheData) {
       const softExpireAt = DateTime.fromISO(
-        cachedResult.cacheData.softExpireAt
+        cachedResult.cacheData.softExpireAt,
       );
       if (softExpireAt.isValid && softExpireAt > DateTime.local()) {
         logger.trace('Cached result is not expired - reusing');
@@ -130,7 +130,7 @@ export async function getDependency(
         cacheNamespace,
         packageUrl,
         cachedResult,
-        cacheHardTtlMinutes
+        cacheHardTtlMinutes,
       );
       delete cachedResult.cacheData;
       return cachedResult;
@@ -211,7 +211,7 @@ export async function getDependency(
         { ...dep, cacheData },
         etag
           ? /* istanbul ignore next: needs test */ cacheHardTtlMinutes
-          : cacheMinutes
+          : cacheMinutes,
       );
     } else {
       dep.isPrivate = true;
@@ -231,7 +231,7 @@ export async function getDependency(
       if (cachedResult) {
         logger.warn(
           { err },
-          'npmjs error, reusing expired cached result instead'
+          'npmjs error, reusing expired cached result instead',
         );
         delete cachedResult.cacheData;
         return cachedResult;
diff --git a/lib/modules/datasource/npm/index.spec.ts b/lib/modules/datasource/npm/index.spec.ts
index ed71b13b93f1d9b322368fd9beb88e676df4f18c..9e249734afedff7e54d4041b6284e81569782e91 100644
--- a/lib/modules/datasource/npm/index.spec.ts
+++ b/lib/modules/datasource/npm/index.spec.ts
@@ -182,35 +182,35 @@ describe('modules/datasource/npm/index', () => {
       .get('/foobar')
       .reply(200, 'oops');
     await expect(
-      getPkgReleases({ datasource, packageName: 'foobar' })
+      getPkgReleases({ datasource, packageName: 'foobar' }),
     ).rejects.toThrow();
   });
 
   it('should throw error for 429', async () => {
     httpMock.scope('https://registry.npmjs.org').get('/foobar').reply(429);
     await expect(
-      getPkgReleases({ datasource, packageName: 'foobar' })
+      getPkgReleases({ datasource, packageName: 'foobar' }),
     ).rejects.toThrow();
   });
 
   it('should throw error for 5xx', async () => {
     httpMock.scope('https://registry.npmjs.org').get('/foobar').reply(503);
     await expect(
-      getPkgReleases({ datasource, packageName: 'foobar' })
+      getPkgReleases({ datasource, packageName: 'foobar' }),
     ).rejects.toThrow(EXTERNAL_HOST_ERROR);
   });
 
   it('should throw error for 408', async () => {
     httpMock.scope('https://registry.npmjs.org').get('/foobar').reply(408);
     await expect(
-      getPkgReleases({ datasource, packageName: 'foobar' })
+      getPkgReleases({ datasource, packageName: 'foobar' }),
     ).rejects.toThrow(EXTERNAL_HOST_ERROR);
   });
 
   it('should throw error for others', async () => {
     httpMock.scope('https://registry.npmjs.org').get('/foobar').reply(451);
     await expect(
-      getPkgReleases({ datasource, packageName: 'foobar' })
+      getPkgReleases({ datasource, packageName: 'foobar' }),
     ).rejects.toThrow();
   });
 
@@ -273,7 +273,7 @@ describe('modules/datasource/npm/index', () => {
         'https://npm.mycustomregistry.com/_packaging/mycustomregistry/npm/registry',
         {
           reqheaders: { authorization: 'Bearer abc' },
-        }
+        },
       )
       .get('/foobar')
       .reply(200, npmResponse);
@@ -345,7 +345,7 @@ describe('modules/datasource/npm/index', () => {
     GlobalConfig.set({ exposeAllEnv: true });
 
     expect(() => setNpmrc('registry=${REGISTRY_MISSING}')).toThrow(
-      Error('env-replace')
+      Error('env-replace'),
     );
   });
 });
diff --git a/lib/modules/datasource/npm/npmrc.spec.ts b/lib/modules/datasource/npm/npmrc.spec.ts
index 3b9c4f381c1e1a3287e26315e3c7bd1b31bbcee0..707270507eb05be81c0db305f90c8de6a7d639a9 100644
--- a/lib/modules/datasource/npm/npmrc.spec.ts
+++ b/lib/modules/datasource/npm/npmrc.spec.ts
@@ -21,19 +21,19 @@ describe('modules/datasource/npm/npmrc', () => {
   describe('getMatchHostFromNpmrcHost()', () => {
     it('parses //host', () => {
       expect(getMatchHostFromNpmrcHost('//registry.npmjs.org')).toBe(
-        'registry.npmjs.org'
+        'registry.npmjs.org',
       );
     });
 
     it('parses //host/path', () => {
       expect(
-        getMatchHostFromNpmrcHost('//registry.company.com/some/path')
+        getMatchHostFromNpmrcHost('//registry.company.com/some/path'),
       ).toBe('https://registry.company.com/some/path');
     });
 
     it('parses https://host', () => {
       expect(getMatchHostFromNpmrcHost('https://registry.npmjs.org')).toBe(
-        'https://registry.npmjs.org'
+        'https://registry.npmjs.org',
       );
     });
   });
@@ -41,7 +41,7 @@ describe('modules/datasource/npm/npmrc', () => {
   describe('convertNpmrcToRules()', () => {
     it('rejects invalid registries', () => {
       const res = convertNpmrcToRules(
-        ini.parse('registry=1\n@scope:registry=2\n')
+        ini.parse('registry=1\n@scope:registry=2\n'),
       );
       expect(res.hostRules).toHaveLength(0);
       expect(res.packageRules).toHaveLength(0);
@@ -65,7 +65,7 @@ describe('modules/datasource/npm/npmrc', () => {
 
     it('handles host, path and auth', () => {
       expect(
-        convertNpmrcToRules(ini.parse('//some.test/with/path:_auth=abc123'))
+        convertNpmrcToRules(ini.parse('//some.test/with/path:_auth=abc123')),
       ).toMatchInlineSnapshot(`
         {
           "hostRules": [
@@ -84,8 +84,8 @@ describe('modules/datasource/npm/npmrc', () => {
     it('handles host, path, port and auth', () => {
       expect(
         convertNpmrcToRules(
-          ini.parse('//some.test:8080/with/path:_authToken=abc123')
-        )
+          ini.parse('//some.test:8080/with/path:_authToken=abc123'),
+        ),
       ).toMatchInlineSnapshot(`
         {
           "hostRules": [
@@ -119,9 +119,9 @@ describe('modules/datasource/npm/npmrc', () => {
       expect(
         convertNpmrcToRules(
           ini.parse(
-            '@fontawesome:registry=https://npm.fontawesome.com/\n//npm.fontawesome.com/:_authToken=abc123'
-          )
-        )
+            '@fontawesome:registry=https://npm.fontawesome.com/\n//npm.fontawesome.com/:_authToken=abc123',
+          ),
+        ),
       ).toMatchInlineSnapshot(`
         {
           "hostRules": [
@@ -152,9 +152,9 @@ describe('modules/datasource/npm/npmrc', () => {
       expect(
         convertNpmrcToRules(
           ini.parse(
-            `//my-registry.example.com/npm-private/:_password=dGVzdA==\n//my-registry.example.com/npm-private/:username=bot\n//my-registry.example.com/npm-private/:always-auth=true`
-          )
-        )
+            `//my-registry.example.com/npm-private/:_password=dGVzdA==\n//my-registry.example.com/npm-private/:username=bot\n//my-registry.example.com/npm-private/:always-auth=true`,
+          ),
+        ),
       ).toMatchInlineSnapshot(`
         {
           "hostRules": [
@@ -185,12 +185,12 @@ describe('modules/datasource/npm/npmrc', () => {
 
   it('sanitize _password', () => {
     setNpmrc(
-      `registry=https://test.org\n//test.org/:username=test\n//test.org/:_password=dGVzdA==`
+      `registry=https://test.org\n//test.org/:username=test\n//test.org/:_password=dGVzdA==`,
     );
     expect(sanitize.addSecretForSanitizing).toHaveBeenNthCalledWith(1, 'test');
     expect(sanitize.addSecretForSanitizing).toHaveBeenNthCalledWith(
       2,
-      'dGVzdDp0ZXN0'
+      'dGVzdDp0ZXN0',
     );
     expect(sanitize.addSecretForSanitizing).toHaveBeenCalledTimes(2);
   });
@@ -199,7 +199,7 @@ describe('modules/datasource/npm/npmrc', () => {
     GlobalConfig.set({ exposeAllEnv: true });
     process.env.TEST_TOKEN = 'test';
     setNpmrc(
-      '//registry.test.com:_authToken=${TEST_TOKEN}\n_authToken=\nregistry=http://localhost'
+      '//registry.test.com:_authToken=${TEST_TOKEN}\n_authToken=\nregistry=http://localhost',
     );
     expect(sanitize.addSecretForSanitizing).toHaveBeenCalledWith('test');
     expect(sanitize.addSecretForSanitizing).toHaveBeenCalledTimes(1);
diff --git a/lib/modules/datasource/npm/npmrc.ts b/lib/modules/datasource/npm/npmrc.ts
index 3cbbd0d897b4cb8145be40c53ab11084cd303fb5..ad605fe4ff2bc727c8cdd5e5bf27dcfb40b20ca7 100644
--- a/lib/modules/datasource/npm/npmrc.ts
+++ b/lib/modules/datasource/npm/npmrc.ts
@@ -141,7 +141,7 @@ export function setNpmrc(input?: string): void {
       ) {
         logger.debug(
           { key, val },
-          'Detected localhost registry - rejecting npmrc file'
+          'Detected localhost registry - rejecting npmrc file',
         );
         npmrc = existingNpmrc;
         return;
@@ -182,10 +182,10 @@ export function resolveRegistryUrl(packageName: string): string {
 
 export function resolvePackageUrl(
   registryUrl: string,
-  packageName: string
+  packageName: string,
 ): string {
   return url.resolve(
     ensureTrailingSlash(registryUrl),
-    encodeURIComponent(packageName).replace(regEx(/^%40/), '@')
+    encodeURIComponent(packageName).replace(regEx(/^%40/), '@'),
   );
 }
diff --git a/lib/modules/datasource/nuget/common.ts b/lib/modules/datasource/nuget/common.ts
index 570ed9f777def732521b467f0909c853ec6f984b..55ce19a78f768b34b4485a12ae28520f7110b82b 100644
--- a/lib/modules/datasource/nuget/common.ts
+++ b/lib/modules/datasource/nuget/common.ts
@@ -28,13 +28,13 @@ export function parseRegistryUrl(registryUrl: string): ParsedRegistryUrl {
   if (!parsedUrl) {
     logger.debug(
       { urL: registryUrl },
-      `nuget registry failure: can't parse ${registryUrl}`
+      `nuget registry failure: can't parse ${registryUrl}`,
     );
     return { feedUrl: registryUrl, protocolVersion: null };
   }
   let protocolVersion = 2;
   const protocolVersionMatch = protocolVersionRegExp.exec(
-    parsedUrl.hash
+    parsedUrl.hash,
   )?.groups;
   if (protocolVersionMatch) {
     const { protocol } = protocolVersionMatch;
diff --git a/lib/modules/datasource/nuget/index.spec.ts b/lib/modules/datasource/nuget/index.spec.ts
index b1c05b17bf6e2f1eae7a06d35796c786e1113289..6dc521ac76322abfcde4ad1c5d2665f075fe6760 100644
--- a/lib/modules/datasource/nuget/index.spec.ts
+++ b/lib/modules/datasource/nuget/index.spec.ts
@@ -19,11 +19,11 @@ const pkgListV3Registration = Fixtures.get('nunit/v3_registration.json');
 
 const pkgListV2 = Fixtures.get('nunit/v2.xml');
 const pkgListV2NoGitHubProjectUrl = Fixtures.get(
-  'nunit/v2_noGitHubProjectUrl.xml'
+  'nunit/v2_noGitHubProjectUrl.xml',
 );
 const pkgListV2NoRelease = Fixtures.get('nunit/v2_no_release.xml');
 const pkgListV2WithoutProjectUrl = Fixtures.get(
-  'nunit/v2_withoutProjectUrl.xml'
+  'nunit/v2_withoutProjectUrl.xml',
 );
 
 const pkgListV2Page1of2 = Fixtures.get('nunit/v2_paginated_1.xml');
@@ -152,7 +152,7 @@ describe('modules/datasource/nuget/index', () => {
       expect(
         await getPkgReleases({
           ...config,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -167,7 +167,7 @@ describe('modules/datasource/nuget/index', () => {
       expect(
         await getPkgReleases({
           ...config,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -209,13 +209,13 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200);
       expect(
         await getPkgReleases({
           ...configV3V2,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -223,13 +223,13 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200, {});
       expect(
         await getPkgReleases({
           ...configV2,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -298,7 +298,7 @@ describe('modules/datasource/nuget/index', () => {
           url: 'https://api.nuget.org/v3/index.json',
           servicesIndexRaw: JSON.parse(nugetIndex),
         },
-        'no PackageBaseAddress services found'
+        'no PackageBaseAddress services found',
       );
     });
 
@@ -307,13 +307,13 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(500);
       expect(
         await getPkgReleases({
           ...configV3V2,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -322,7 +322,7 @@ describe('modules/datasource/nuget/index', () => {
       expect(
         await getPkgReleases({
           ...configV3,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -330,13 +330,13 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(500);
       expect(
         await getPkgReleases({
           ...configV2,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -348,13 +348,13 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .replyWithError('');
       expect(
         await getPkgReleases({
           ...configV3V2,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -392,7 +392,7 @@ describe('modules/datasource/nuget/index', () => {
       expect(
         await getPkgReleases({
           ...configV3,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -406,7 +406,7 @@ describe('modules/datasource/nuget/index', () => {
       expect(
         await getPkgReleases({
           ...configV3,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -414,13 +414,13 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .replyWithError('');
       expect(
         await getPkgReleases({
           ...configV2,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -446,16 +446,16 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://pkgs.dev.azure.com')
         .get(
-          '/organisationName/_packaging/2745c5e9-610a-4537-9032-978c66527b51/nuget/v3/index.json'
+          '/organisationName/_packaging/2745c5e9-610a-4537-9032-978c66527b51/nuget/v3/index.json',
         )
         .twice()
         .reply(200, Fixtures.get('azure_devops/v3_index.json'))
         .get(
-          '/organisationName/_packaging/2745c5e9-610a-4537-9032-978c66527b51/nuget/v3/registrations2-semver2/nunit/index.json'
+          '/organisationName/_packaging/2745c5e9-610a-4537-9032-978c66527b51/nuget/v3/registrations2-semver2/nunit/index.json',
         )
         .reply(200, Fixtures.get('azure_devops/nunit/v3_registration.json'))
         .get(
-          '/organisationName/_packaging/2745c5e9-610a-4537-9032-978c66527b51/nuget/v3/flat2/nunit/3.13.2/nunit.nuspec'
+          '/organisationName/_packaging/2745c5e9-610a-4537-9032-978c66527b51/nuget/v3/flat2/nunit/3.13.2/nunit.nuspec',
         )
         .reply(200, Fixtures.get('azure_devops/nunit/nuspec.xml'));
       const res = await getPkgReleases({
@@ -513,12 +513,12 @@ describe('modules/datasource/nuget/index', () => {
           200,
           pkgListV3Registration
             .replace(/"http:\/\/nunit\.org"/g, '""')
-            .replace('"published": "2012-10-23T15:37:48+00:00",', '')
+            .replace('"published": "2012-10-23T15:37:48+00:00",', ''),
         )
         .get('/v3-flatcontainer/nunit/3.12.0/nunit.nuspec')
         .reply(
           200,
-          pkgInfoV3FromNuget.replace('https://github.com/nunit/nunit', '')
+          pkgInfoV3FromNuget.replace('https://github.com/nunit/nunit', ''),
         );
       httpMock
         .scope('https://myprivatefeed')
@@ -574,7 +574,7 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200, pkgListV2);
       const res = await getPkgReleases({
@@ -589,7 +589,7 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200, pkgListV2NoRelease);
       const res = await getPkgReleases({
@@ -602,7 +602,7 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200, pkgListV2WithoutProjectUrl);
       const res = await getPkgReleases({
@@ -617,7 +617,7 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200, pkgListV2NoGitHubProjectUrl);
       const res = await getPkgReleases({
@@ -631,7 +631,7 @@ describe('modules/datasource/nuget/index', () => {
       httpMock
         .scope('https://www.nuget.org')
         .get(
-          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published'
+          '/api/v2/FindPackagesById()?id=%27nunit%27&$select=Version,IsLatestVersion,ProjectUrl,Published',
         )
         .reply(200, pkgListV2Page1of2);
       httpMock
diff --git a/lib/modules/datasource/nuget/v2.ts b/lib/modules/datasource/nuget/v2.ts
index c92db3da7fd8c2c121cee5a1239ed077ac1bfb19..79c19161a1ab8b1b2ddda3c6fb6521a3caa6736f 100644
--- a/lib/modules/datasource/nuget/v2.ts
+++ b/lib/modules/datasource/nuget/v2.ts
@@ -13,14 +13,14 @@ function getPkgProp(pkgInfo: XmlElement, propName: string): string | undefined {
 export async function getReleases(
   http: Http,
   feedUrl: string,
-  pkgName: string
+  pkgName: string,
 ): Promise<ReleaseResult | null> {
   const dep: ReleaseResult = {
     releases: [],
   };
   let pkgUrlList: string | null = `${feedUrl.replace(
     regEx(/\/+$/),
-    ''
+    '',
   )}/FindPackagesById()?id=%27${pkgName}%27&$select=Version,IsLatestVersion,ProjectUrl,Published`;
   while (pkgUrlList !== null) {
     // typescript issue
@@ -48,7 +48,7 @@ export async function getReleases(
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { err, pkgName, feedUrl },
-          `nuget registry failure: can't parse pkg info for project url`
+          `nuget registry failure: can't parse pkg info for project url`,
         );
       }
     }
diff --git a/lib/modules/datasource/nuget/v3.spec.ts b/lib/modules/datasource/nuget/v3.spec.ts
index 61ae82b9838a68e5cf671b48e74a3b3cf875d4da..0a2bd8f315ec53c8530ecd53d4e20504cf8fefa1 100644
--- a/lib/modules/datasource/nuget/v3.spec.ts
+++ b/lib/modules/datasource/nuget/v3.spec.ts
@@ -14,6 +14,6 @@ describe('modules/datasource/nuget/v3', () => {
     ({ version, other, result }) => {
       const res = sortNugetVersions(version, other);
       expect(res).toBe(result);
-    }
+    },
   );
 });
diff --git a/lib/modules/datasource/nuget/v3.ts b/lib/modules/datasource/nuget/v3.ts
index 5da279f235d0d130268f3643d479409c92f076e4..6f53906b3afc6a63e2dbc0fa08df2400046e1a4c 100644
--- a/lib/modules/datasource/nuget/v3.ts
+++ b/lib/modules/datasource/nuget/v3.ts
@@ -23,13 +23,13 @@ const cacheNamespace = 'datasource-nuget';
 export async function getResourceUrl(
   http: Http,
   url: string,
-  resourceType = 'RegistrationsBaseUrl'
+  resourceType = 'RegistrationsBaseUrl',
 ): Promise<string | null> {
   // https://docs.microsoft.com/en-us/nuget/api/service-index
   const resultCacheKey = `${url}:${resourceType}`;
   const cachedResult = await packageCache.get<string>(
     cacheNamespace,
-    resultCacheKey
+    resultCacheKey,
   );
 
   // istanbul ignore if
@@ -41,7 +41,7 @@ export async function getResourceUrl(
     const responseCacheKey = url;
     servicesIndexRaw = await packageCache.get<ServicesIndexRaw>(
       cacheNamespace,
-      responseCacheKey
+      responseCacheKey,
     );
     // istanbul ignore else: currently not testable
     if (!servicesIndexRaw) {
@@ -50,7 +50,7 @@ export async function getResourceUrl(
         cacheNamespace,
         responseCacheKey,
         servicesIndexRaw,
-        3 * 24 * 60
+        3 * 24 * 60,
       );
     }
 
@@ -61,19 +61,19 @@ export async function getResourceUrl(
         version: t?.split('/')?.pop(),
       }))
       .filter(
-        ({ type, version }) => type === resourceType && semver.valid(version)
+        ({ type, version }) => type === resourceType && semver.valid(version),
       )
       .sort((x, y) =>
         x.version && y.version
           ? semver.compare(x.version, y.version)
-          : /* istanbul ignore next: hard to test */ 0
+          : /* istanbul ignore next: hard to test */ 0,
       );
 
     if (services.length === 0) {
       await packageCache.set(cacheNamespace, resultCacheKey, null, 60);
       logger.debug(
         { url, servicesIndexRaw },
-        `no ${resourceType} services found`
+        `no ${resourceType} services found`,
       );
       return null;
     }
@@ -89,7 +89,7 @@ export async function getResourceUrl(
     ) {
       logger.warn(
         { url, version },
-        `Nuget: Unknown version returned. Only v3 is supported`
+        `Nuget: Unknown version returned. Only v3 is supported`,
       );
     }
 
@@ -102,7 +102,7 @@ export async function getResourceUrl(
     }
     logger.debug(
       { err, url, servicesIndexRaw },
-      `nuget registry failure: can't get ${resourceType}`
+      `nuget registry failure: can't get ${resourceType}`,
     );
     return null;
   }
@@ -110,7 +110,7 @@ export async function getResourceUrl(
 
 async function getCatalogEntry(
   http: Http,
-  catalogPage: CatalogPage
+  catalogPage: CatalogPage,
 ): Promise<CatalogEntry[]> {
   let items = catalogPage.items;
   if (!items) {
@@ -145,14 +145,14 @@ export async function getReleases(
   http: Http,
   registryUrl: string,
   feedUrl: string,
-  pkgName: string
+  pkgName: string,
 ): Promise<ReleaseResult | null> {
   const baseUrl = feedUrl.replace(regEx(/\/*$/), '');
   const url = `${baseUrl}/${pkgName.toLowerCase()}/index.json`;
   const packageRegistration = await http.getJson<PackageRegistration>(url);
   const catalogPages = packageRegistration.body.items || [];
   const catalogPagesQueue = catalogPages.map(
-    (page) => (): Promise<CatalogEntry[]> => getCatalogEntry(http, page)
+    (page) => (): Promise<CatalogEntry[]> => getCatalogEntry(http, page),
   );
   const catalogEntries = (await p.all(catalogPagesQueue))
     .flat()
@@ -174,7 +174,7 @@ export async function getReleases(
         release.isDeprecated = true;
       }
       return release;
-    }
+    },
   );
 
   if (!releases.length) {
@@ -196,12 +196,12 @@ export async function getReleases(
     const packageBaseAddress = await getResourceUrl(
       http,
       registryUrl,
-      'PackageBaseAddress'
+      'PackageBaseAddress',
     );
     // istanbul ignore else: this is a required v3 api
     if (is.nonEmptyString(packageBaseAddress)) {
       const nuspecUrl = `${ensureTrailingSlash(
-        packageBaseAddress
+        packageBaseAddress,
       )}${pkgName.toLowerCase()}/${
         // TODO: types (#22198)
         latestStable
@@ -222,13 +222,13 @@ export async function getReleases(
     if (err instanceof HttpError && err.response?.statusCode === 404) {
       logger.debug(
         { registryUrl, pkgName, pkgVersion: latestStable },
-        `package manifest (.nuspec) not found`
+        `package manifest (.nuspec) not found`,
       );
       return dep;
     }
     logger.debug(
       { err, registryUrl, pkgName, pkgVersion: latestStable },
-      `Cannot obtain sourceUrl`
+      `Cannot obtain sourceUrl`,
     );
     return dep;
   }
diff --git a/lib/modules/datasource/orb/index.spec.ts b/lib/modules/datasource/orb/index.spec.ts
index 2e66d028740b4cda48c16178669d832e67981ed8..e2dd13dd7b817c99b5aaf7621b6877b9cb69703e 100644
--- a/lib/modules/datasource/orb/index.spec.ts
+++ b/lib/modules/datasource/orb/index.spec.ts
@@ -35,7 +35,7 @@ describe('modules/datasource/orb/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hyper-expanse/library-release-workflows',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -48,7 +48,7 @@ describe('modules/datasource/orb/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hyper-expanse/library-release-wonkflows',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -58,7 +58,7 @@ describe('modules/datasource/orb/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hyper-expanse/library-release-workflows',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -68,7 +68,7 @@ describe('modules/datasource/orb/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hyper-expanse/library-release-workflows',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/packagist/index.spec.ts b/lib/modules/datasource/packagist/index.spec.ts
index e445de1790fb52c5819592291236201f9945f40d..6b4c7685e4c515bc5f9c114b34ecefe0907a92e5 100644
--- a/lib/modules/datasource/packagist/index.spec.ts
+++ b/lib/modules/datasource/packagist/index.spec.ts
@@ -163,7 +163,7 @@ describe('modules/datasource/packagist/index', () => {
         .get('/packages.json')
         .reply(200, packagesJson)
         .get(
-          '/include/all$093530b127abe74defbf21affc9589bf713e4e08f898bf11986842f9956eda86.json'
+          '/include/all$093530b127abe74defbf21affc9589bf713e4e08f898bf11986842f9956eda86.json',
         )
         .reply(200, includesJson);
       const res = await getPkgReleases({
@@ -304,11 +304,11 @@ describe('modules/datasource/packagist/index', () => {
         .get('/packages.json')
         .reply(200, packagesJson)
         .get(
-          '/p/providers-2018-09$14346045d7a7261cb3a12a6b7a1a7c4151982530347b115e5e277d879cad1942.json'
+          '/p/providers-2018-09$14346045d7a7261cb3a12a6b7a1a7c4151982530347b115e5e277d879cad1942.json',
         )
         .reply(200, fileJson)
         .get(
-          '/p/wpackagist-plugin/1beyt$b574a802b5bf20a58c0f027e73aea2a75d23a6f654afc298a8dc467331be316a.json'
+          '/p/wpackagist-plugin/1beyt$b574a802b5bf20a58c0f027e73aea2a75d23a6f654afc298a8dc467331be316a.json',
         )
         .reply(200, beytJson);
       const res = await getPkgReleases({
@@ -349,7 +349,7 @@ describe('modules/datasource/packagist/index', () => {
         .get('/packages.json')
         .reply(200, packagesJson)
         .get(
-          '/p/providers-2018-09$14346045d7a7261cb3a12a6b7a1a7c4151982530347b115e5e277d879cad1942.json'
+          '/p/providers-2018-09$14346045d7a7261cb3a12a6b7a1a7c4151982530347b115e5e277d879cad1942.json',
         )
         .reply(200, fileJson);
       httpMock
@@ -389,7 +389,7 @@ describe('modules/datasource/packagist/index', () => {
         .get('/packages.json')
         .reply(200, packagesJson)
         .get(
-          '/p/wpackagist-plugin/1beyt$b574a802b5bf20a58c0f027e73aea2a75d23a6f654afc298a8dc467331be316a.json'
+          '/p/wpackagist-plugin/1beyt$b574a802b5bf20a58c0f027e73aea2a75d23a6f654afc298a8dc467331be316a.json',
         )
         .reply(200, beytJson);
       const res = await getPkgReleases({
@@ -483,7 +483,7 @@ describe('modules/datasource/packagist/index', () => {
           datasource,
           versioning,
           packageName: 'drewm/mailchimp-api',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -503,7 +503,7 @@ describe('modules/datasource/packagist/index', () => {
           datasource,
           versioning,
           packageName: 'drewm/mailchimp-api',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
diff --git a/lib/modules/datasource/packagist/index.ts b/lib/modules/datasource/packagist/index.ts
index c87991de3849d00604842e838a8b72ff90d15bc9..1c4d4719ae717ec7cd455cf4594c3076b87e3bef 100644
--- a/lib/modules/datasource/packagist/index.ts
+++ b/lib/modules/datasource/packagist/index.ts
@@ -43,7 +43,7 @@ export class PackagistDatasource extends Datasource {
 
   private async getJson<T, U extends z.ZodSchema<T>>(
     url: string,
-    schema: U
+    schema: U,
   ): Promise<z.infer<typeof schema>> {
     const opts = PackagistDatasource.getHostOpts(url);
     const { body } = await this.http.getJson(url, opts);
@@ -67,7 +67,7 @@ export class PackagistDatasource extends Datasource {
 
   private static getPackagistFileUrl(
     regUrl: string,
-    regFile: RegistryFile
+    regFile: RegistryFile,
   ): string {
     const { key, hash } = regFile;
     const fileName = hash
@@ -87,7 +87,7 @@ export class PackagistDatasource extends Datasource {
   })
   async getPackagistFile(
     regUrl: string,
-    regFile: RegistryFile
+    regFile: RegistryFile,
   ): Promise<PackagistFile> {
     const url = PackagistDatasource.getPackagistFileUrl(regUrl, regFile);
     const packagistFile = await this.getJson(url, PackagistFile);
@@ -96,7 +96,7 @@ export class PackagistDatasource extends Datasource {
 
   async fetchProviderPackages(
     regUrl: string,
-    meta: RegistryMeta
+    meta: RegistryMeta,
   ): Promise<void> {
     await p.map(meta.files, async (file) => {
       const res = await this.getPackagistFile(regUrl, file);
@@ -106,7 +106,7 @@ export class PackagistDatasource extends Datasource {
 
   async fetchIncludesPackages(
     regUrl: string,
-    meta: RegistryMeta
+    meta: RegistryMeta,
   ): Promise<void> {
     await p.map(meta.includesFiles, async (file) => {
       const res = await this.getPackagistFile(regUrl, file);
@@ -125,21 +125,21 @@ export class PackagistDatasource extends Datasource {
   async packagistV2Lookup(
     registryUrl: string,
     metadataUrl: string,
-    packageName: string
+    packageName: string,
   ): Promise<ReleaseResult | null> {
     const pkgUrl = replaceUrlPath(
       registryUrl,
-      metadataUrl.replace('%package%', packageName)
+      metadataUrl.replace('%package%', packageName),
     );
     const pkgPromise = this.getJson(pkgUrl, z.unknown());
 
     const devUrl = replaceUrlPath(
       registryUrl,
-      metadataUrl.replace('%package%', `${packageName}~dev`)
+      metadataUrl.replace('%package%', `${packageName}~dev`),
     );
     const devPromise = this.getJson(devUrl, z.unknown()).then(
       (x) => x,
-      () => null
+      () => null,
     );
 
     const responses: NonNullable<unknown>[] = await Promise.all([
@@ -152,7 +152,7 @@ export class PackagistDatasource extends Datasource {
   public getPkgUrl(
     packageName: string,
     registryUrl: string,
-    registryMeta: RegistryMeta
+    registryMeta: RegistryMeta,
   ): string | null {
     if (
       registryMeta.providersUrl &&
@@ -169,7 +169,7 @@ export class PackagistDatasource extends Datasource {
     if (registryMeta.providersLazyUrl) {
       return replaceUrlPath(
         registryUrl,
-        registryMeta.providersLazyUrl.replace('%package%', packageName)
+        registryMeta.providersLazyUrl.replace('%package%', packageName),
       );
     }
 
@@ -201,7 +201,7 @@ export class PackagistDatasource extends Datasource {
         const packagistResult = await this.packagistV2Lookup(
           registryUrl,
           meta.metadataUrl,
-          packageName
+          packageName,
         );
         return packagistResult;
       }
diff --git a/lib/modules/datasource/packagist/schema.spec.ts b/lib/modules/datasource/packagist/schema.spec.ts
index 78d90f49f6193de901c2cad178e5ede16cff900e..a83918d124f34e6f1143833e1f3862cc79b83c72 100644
--- a/lib/modules/datasource/packagist/schema.spec.ts
+++ b/lib/modules/datasource/packagist/schema.spec.ts
@@ -38,7 +38,7 @@ describe('modules/datasource/packagist/schema', () => {
             version_normalized: '1.0.0.0',
             homepage: '__unset',
           },
-        ])
+        ]),
       ).toEqual([
         {
           name: 'foo/bar',
@@ -110,11 +110,11 @@ describe('modules/datasource/packagist/schema', () => {
           source: null,
           time: null,
           require: null,
-        }
+        },
       );
 
       expect(
-        ComposerRelease.parse({ version: '1.2.3', homepage: 'example.com' })
+        ComposerRelease.parse({ version: '1.2.3', homepage: 'example.com' }),
       ).toEqual({
         version: '1.2.3',
         homepage: 'example.com',
@@ -124,7 +124,7 @@ describe('modules/datasource/packagist/schema', () => {
       });
 
       expect(
-        ComposerRelease.parse({ version: '1.2.3', source: 'nonsense' })
+        ComposerRelease.parse({ version: '1.2.3', source: 'nonsense' }),
       ).toEqual({
         version: '1.2.3',
         homepage: null,
@@ -134,7 +134,7 @@ describe('modules/datasource/packagist/schema', () => {
       });
 
       expect(
-        ComposerRelease.parse({ version: '1.2.3', source: { url: 'foobar' } })
+        ComposerRelease.parse({ version: '1.2.3', source: { url: 'foobar' } }),
       ).toEqual({
         version: '1.2.3',
         source: { url: 'foobar' },
@@ -144,7 +144,7 @@ describe('modules/datasource/packagist/schema', () => {
       });
 
       expect(
-        ComposerRelease.parse({ version: '1.2.3', time: '12345' })
+        ComposerRelease.parse({ version: '1.2.3', time: '12345' }),
       ).toEqual({
         version: '1.2.3',
         time: '12345',
@@ -166,7 +166,7 @@ describe('modules/datasource/packagist/schema', () => {
       expect(ComposerReleases.parse([1, 2, 3])).toBeEmptyArray();
       expect(ComposerReleases.parse(['foobar'])).toBeEmptyArray();
       expect(
-        ComposerReleases.parse([{ version: '1.2.3' }, { version: 'dev-main' }])
+        ComposerReleases.parse([{ version: '1.2.3' }, { version: 'dev-main' }]),
       ).toEqual([
         {
           version: '1.2.3',
@@ -199,7 +199,7 @@ describe('modules/datasource/packagist/schema', () => {
             'foo/bar': [{ version: '1.2.3' }],
             'baz/qux': [{ version: '4.5.6' }],
           },
-        })
+        }),
       ).toEqual([
         {
           version: '1.2.3',
@@ -216,7 +216,7 @@ describe('modules/datasource/packagist/schema', () => {
             'foo/bar': { '1.2.3': { version: '1.2.3' } },
             'baz/qux': { '4.5.6': { version: '4.5.6' } },
           },
-        })
+        }),
       ).toEqual([
         {
           version: '1.2.3',
@@ -242,7 +242,7 @@ describe('modules/datasource/packagist/schema', () => {
               { version: '0.0.1' },
             ],
           },
-        })
+        }),
       ).toEqual([
         {
           version: '3.3.3',
@@ -302,7 +302,7 @@ describe('modules/datasource/packagist/schema', () => {
       expect(parsePackagesResponses('foo/bar', [null])).toBeNull();
       expect(parsePackagesResponses('foo/bar', [{}])).toBeNull();
       expect(
-        parsePackagesResponses('foo/bar', [{ packages: '123' }])
+        parsePackagesResponses('foo/bar', [{ packages: '123' }]),
       ).toBeNull();
       expect(parsePackagesResponses('foo/bar', [{ packages: {} }])).toBeNull();
       expect(
@@ -351,7 +351,7 @@ describe('modules/datasource/packagist/schema', () => {
               ],
             },
           },
-        ] satisfies { packages: Record<string, ComposerRelease[]> }[])
+        ] satisfies { packages: Record<string, ComposerRelease[]> }[]),
       ).toEqual({
         homepage: 'https://example.com/1',
         sourceUrl: 'git@example.com:foo/bar-1',
diff --git a/lib/modules/datasource/packagist/schema.ts b/lib/modules/datasource/packagist/schema.ts
index 3da1757c593e591241fb1d9d963f52b8fb7b09a8..2d95b2ad95d6115fc7e73c4dedbe314a2b8aa849 100644
--- a/lib/modules/datasource/packagist/schema.ts
+++ b/lib/modules/datasource/packagist/schema.ts
@@ -66,21 +66,21 @@ export const ComposerPackagesResponse = z
   })
   .transform(
     ({ packageName, packagesResponse }) =>
-      packagesResponse.packages[packageName]
+      packagesResponse.packages[packageName],
   )
   .transform((xs) => ComposerReleases.parse(xs));
 export type ComposerPackagesResponse = z.infer<typeof ComposerPackagesResponse>;
 
 export function parsePackagesResponse(
   packageName: string,
-  packagesResponse: unknown
+  packagesResponse: unknown,
 ): ComposerReleases {
   try {
     return ComposerPackagesResponse.parse({ packageName, packagesResponse });
   } catch (err) {
     logger.debug(
       { packageName, err },
-      `Error parsing packagist response for ${packageName}`
+      `Error parsing packagist response for ${packageName}`,
     );
     return [];
   }
@@ -138,7 +138,7 @@ export function extractReleaseResult(
 }
 
 export function extractDepReleases(
-  composerReleases: unknown
+  composerReleases: unknown,
 ): ReleaseResult | null {
   const parsedReleases = ComposerReleases.parse(composerReleases);
   return extractReleaseResult(parsedReleases);
@@ -146,10 +146,10 @@ export function extractDepReleases(
 
 export function parsePackagesResponses(
   packageName: string,
-  packagesResponses: unknown[]
+  packagesResponses: unknown[],
 ): ReleaseResult | null {
   const releaseArrays = packagesResponses.map((pkgResp) =>
-    parsePackagesResponse(packageName, pkgResp)
+    parsePackagesResponse(packageName, pkgResp),
   );
   return extractReleaseResult(...releaseArrays);
 }
@@ -166,7 +166,7 @@ export type HashSpec = z.infer<typeof HashSpec>;
 
 export const RegistryFile = z.intersection(
   HashSpec,
-  z.object({ key: z.string() })
+  z.object({ key: z.string() }),
 );
 export type RegistryFile = z.infer<typeof RegistryFile>;
 
@@ -180,11 +180,11 @@ export const PackagistFile = PackagesResponse.merge(
     providers: LooseRecord(HashSpec)
       .transform((x) =>
         Object.fromEntries(
-          Object.entries(x).map(([key, { hash }]) => [key, hash])
-        )
+          Object.entries(x).map(([key, { hash }]) => [key, hash]),
+        ),
       )
       .catch({}),
-  })
+  }),
 );
 export type PackagistFile = z.infer<typeof PackagistFile>;
 
@@ -196,20 +196,20 @@ export const RegistryMeta = z
       z.object({
         ['includes']: LooseRecord(HashSpec)
           .transform((x) =>
-            Object.entries(x).map(([name, { hash }]) => ({ key: name, hash }))
+            Object.entries(x).map(([name, { hash }]) => ({ key: name, hash })),
           )
           .catch([]),
         ['provider-includes']: LooseRecord(HashSpec)
           .transform((x) =>
-            Object.entries(x).map(([key, { hash }]) => ({ key, hash }))
+            Object.entries(x).map(([key, { hash }]) => ({ key, hash })),
           )
           .catch([]),
         ['providers-lazy-url']: z.string().nullable().catch(null),
         ['providers-url']: z.string().nullable().catch(null),
         ['metadata-url']: z.string().nullable().catch(null),
         ['available-packages']: z.array(z.string()).nullable().catch(null),
-      })
-    )
+      }),
+    ),
   )
   .transform(
     ({
@@ -231,6 +231,6 @@ export const RegistryMeta = z
       metadataUrl,
       includesPackages: {} as Record<string, ReleaseResult | null>,
       availablePackages,
-    })
+    }),
   );
 export type RegistryMeta = z.infer<typeof RegistryMeta>;
diff --git a/lib/modules/datasource/pod/index.spec.ts b/lib/modules/datasource/pod/index.spec.ts
index 366cd0886ebb5c129c075ad5659b7272fc3e34c3..385c729788dadf2eea2117d5a264973a3d558e9a 100644
--- a/lib/modules/datasource/pod/index.spec.ts
+++ b/lib/modules/datasource/pod/index.spec.ts
@@ -34,7 +34,7 @@ describe('modules/datasource/pod/index', () => {
           datasource: PodDatasource.id,
           packageName: 'foobar',
           registryUrls: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -44,7 +44,7 @@ describe('modules/datasource/pod/index', () => {
         await getPkgReleases({
           datasource: PodDatasource.id,
           packageName: 'foobar',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -155,7 +155,7 @@ describe('modules/datasource/pod/index', () => {
         await getPkgReleases({
           ...config,
           registryUrls: ['https://github.com/CocoaPods/Specs'],
-        })
+        }),
       ).toEqual({
         registryUrl: 'https://github.com/CocoaPods/Specs',
         releases: [
diff --git a/lib/modules/datasource/pod/index.ts b/lib/modules/datasource/pod/index.ts
index 92cb4aadd85827bdf0d8905f44855a4a4e39669a..86aa0449081bf1d2044d4d055fb9418ee0a13db7 100644
--- a/lib/modules/datasource/pod/index.ts
+++ b/lib/modules/datasource/pod/index.ts
@@ -26,7 +26,7 @@ function shardParts(packageName: string): string[] {
 }
 
 const githubRegex = regEx(
-  /(?<hostURL>^https:\/\/[a-zA-Z0-9-.]+)\/(?<account>[^/]+)\/(?<repo>[^/]+?)(?:\.git|\/.*)?$/
+  /(?<hostURL>^https:\/\/[a-zA-Z0-9-.]+)\/(?<account>[^/]+)\/(?<repo>[^/]+?)(?:\.git|\/.*)?$/,
 );
 
 function releasesGithubUrl(
@@ -37,7 +37,7 @@ function releasesGithubUrl(
     repo: string;
     useShard: boolean;
     useSpecs: boolean;
-  }
+  },
 ): string {
   const { hostURL, account, repo, useShard, useSpecs } = opts;
   const prefix =
@@ -106,7 +106,7 @@ export class PodDatasource extends Datasource {
 
   private async requestCDN(
     url: string,
-    packageName: string
+    packageName: string,
   ): Promise<string | null> {
     try {
       const resp = await this.http.get(url);
@@ -122,7 +122,7 @@ export class PodDatasource extends Datasource {
 
   private async requestGithub<T = unknown>(
     url: string,
-    packageName: string
+    packageName: string,
   ): Promise<T | null> {
     try {
       const resp = await this.githubHttp.getJson<T>(url);
@@ -141,7 +141,7 @@ export class PodDatasource extends Datasource {
     opts: { hostURL: string; account: string; repo: string },
     useShard = true,
     useSpecs = true,
-    urlFormatOptions: URLFormatOptions = 'withShardWithSpec'
+    urlFormatOptions: URLFormatOptions = 'withShardWithSpec',
   ): Promise<ReleaseResult | null> {
     const url = releasesGithubUrl(packageName, { ...opts, useShard, useSpecs });
     const resp = await this.requestGithub<{ name: string }[]>(url, packageName);
@@ -158,7 +158,7 @@ export class PodDatasource extends Datasource {
           opts,
           true,
           false,
-          'withShardWithoutSpec'
+          'withShardWithoutSpec',
         );
       case 'withShardWithoutSpec':
         return this.getReleasesFromGithub(
@@ -166,7 +166,7 @@ export class PodDatasource extends Datasource {
           opts,
           false,
           true,
-          'withSpecsWithoutShard'
+          'withSpecsWithoutShard',
         );
       case 'withSpecsWithoutShard':
         return this.getReleasesFromGithub(
@@ -174,7 +174,7 @@ export class PodDatasource extends Datasource {
           opts,
           false,
           false,
-          'withoutSpecsWithoutShard'
+          'withoutSpecsWithoutShard',
         );
       case 'withoutSpecsWithoutShard':
       default:
@@ -184,7 +184,7 @@ export class PodDatasource extends Datasource {
 
   private async getReleasesFromCDN(
     packageName: string,
-    registryUrl: string
+    registryUrl: string,
   ): Promise<ReleaseResult | null> {
     const url = releasesCDNUrl(packageName, registryUrl);
     const resp = await this.requestCDN(url, packageName);
diff --git a/lib/modules/datasource/puppet-forge/index.ts b/lib/modules/datasource/puppet-forge/index.ts
index cb2a0ca687f6d1aa3b5732c1116800669c6f42ab..fed7e182eca36ebce500a6e43e836aecc1e9f83f 100644
--- a/lib/modules/datasource/puppet-forge/index.ts
+++ b/lib/modules/datasource/puppet-forge/index.ts
@@ -46,7 +46,7 @@ export class PuppetForgeDatasource extends Datasource {
 
   static createReleaseResult(
     releases: Release[],
-    module: PuppetModule
+    module: PuppetModule,
   ): ReleaseResult {
     const result: ReleaseResult = {
       releases,
diff --git a/lib/modules/datasource/pypi/index.spec.ts b/lib/modules/datasource/pypi/index.spec.ts
index 1627305641296d59b18b996d73d40efecf96c688..4172900b268d96754c5886f0e6730286a29fcdf2 100644
--- a/lib/modules/datasource/pypi/index.spec.ts
+++ b/lib/modules/datasource/pypi/index.spec.ts
@@ -9,7 +9,7 @@ const res2 = Fixtures.get('azure-cli-monitor-updated.json');
 const htmlResponse = Fixtures.get('versions-html.html');
 const badResponse = Fixtures.get('versions-html-badfile.html');
 const dataRequiresPythonResponse = Fixtures.get(
-  'versions-html-data-requires-python.html'
+  'versions-html-data-requires-python.html',
 );
 const mixedHyphensResponse = Fixtures.get('versions-html-mixed-hyphens.html');
 const mixedCaseResponse = Fixtures.get('versions-html-mixed-case.html');
@@ -38,7 +38,7 @@ describe('modules/datasource/pypi/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'something',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -49,7 +49,7 @@ describe('modules/datasource/pypi/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'something',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -59,7 +59,7 @@ describe('modules/datasource/pypi/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'azure-cli-monitor',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -76,7 +76,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           datasource,
           packageName: 'azure-cli-monitor',
-        })
+        }),
       ).toMatchObject({
         registryUrl: 'https://custom.pypi.net/foo',
         releases: expect.toBeArrayOfSize(22),
@@ -149,7 +149,7 @@ describe('modules/datasource/pypi/index', () => {
             datasource,
             packageName: 'something',
           })
-        )?.homepage
+        )?.homepage,
       ).toBe('https://microsoft.com');
     });
 
@@ -271,7 +271,7 @@ describe('modules/datasource/pypi/index', () => {
           constraints: { python: '2.7' },
           packageName: 'doit',
           constraintsFiltering: 'strict',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -289,7 +289,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           constraints: { python: '2.7' },
           packageName: 'dj-database-url',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -307,7 +307,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           constraints: { python: '2.7' },
           packageName: 'dj-database-url',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -366,7 +366,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           constraints: { python: '2.7' },
           packageName: 'image-collector',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -444,7 +444,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           constraints: { python: '2.7' },
           packageName: 'dj-database-url',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -462,7 +462,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           constraints: { python: '2.7' },
           packageName: 'dj-database-url',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -480,7 +480,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           constraints: { python: '2.7' },
           packageName: 'dj-database-url',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -519,7 +519,7 @@ describe('modules/datasource/pypi/index', () => {
           ...config,
           packageName: 'dj-database-url',
           constraintsFiltering: 'strict',
-        })
+        }),
       ).toMatchSnapshot();
     });
   });
@@ -535,7 +535,7 @@ describe('modules/datasource/pypi/index', () => {
         ...config,
         constraints: { python: '2.7' },
         packageName: 'azure-cli-monitor',
-      })
+      }),
     ).toMatchSnapshot();
   });
 });
diff --git a/lib/modules/datasource/pypi/index.ts b/lib/modules/datasource/pypi/index.ts
index 78231497ceac33550a4491e6eb2900be38881a42..11e4c2dd5fb805a2e8033a1148a6b80127232167 100644
--- a/lib/modules/datasource/pypi/index.ts
+++ b/lib/modules/datasource/pypi/index.ts
@@ -37,7 +37,7 @@ export class PypiDatasource extends Datasource {
     let dependency: ReleaseResult | null = null;
     // TODO: null check (#22198)
     const hostUrl = ensureTrailingSlash(
-      registryUrl!.replace('https://pypi.org/simple', 'https://pypi.org/pypi')
+      registryUrl!.replace('https://pypi.org/simple', 'https://pypi.org/pypi'),
     );
     const normalizedLookupName = PypiDatasource.normalizeName(packageName);
 
@@ -45,11 +45,11 @@ export class PypiDatasource extends Datasource {
     if (hostUrl.endsWith('/simple/') || hostUrl.endsWith('/+simple/')) {
       logger.trace(
         { packageName, hostUrl },
-        'Looking up pypi simple dependency'
+        'Looking up pypi simple dependency',
       );
       dependency = await this.getSimpleDependency(
         normalizedLookupName,
-        hostUrl
+        hostUrl,
       );
     } else {
       logger.trace({ packageName, hostUrl }, 'Looking up pypi api dependency');
@@ -64,11 +64,11 @@ export class PypiDatasource extends Datasource {
         // error contacting json-style api -- attempt to fallback to a simple-style api
         logger.trace(
           { packageName, hostUrl },
-          'Looking up pypi simple dependency via fallback'
+          'Looking up pypi simple dependency via fallback',
         );
         dependency = await this.getSimpleDependency(
           normalizedLookupName,
-          hostUrl
+          hostUrl,
         );
       }
     }
@@ -85,11 +85,11 @@ export class PypiDatasource extends Datasource {
 
   private async getDependency(
     packageName: string,
-    hostUrl: string
+    hostUrl: string,
   ): Promise<ReleaseResult | null> {
     const lookupUrl = url.resolve(
       hostUrl,
-      `${PypiDatasource.normalizeNameForUrlLookup(packageName)}/json`
+      `${PypiDatasource.normalizeNameForUrlLookup(packageName)}/json`,
     );
     const dependency: ReleaseResult = { releases: [] };
     logger.trace({ lookupUrl }, 'Pypi api got lookup');
@@ -109,7 +109,7 @@ export class PypiDatasource extends Datasource {
       if (isGitHubRepo(dep.info.home_page)) {
         dependency.sourceUrl = dep.info.home_page.replace(
           'http://',
-          'https://'
+          'https://',
         );
       }
     }
@@ -172,7 +172,7 @@ export class PypiDatasource extends Datasource {
 
   private static extractVersionFromLinkText(
     text: string,
-    packageName: string
+    packageName: string,
   ): string | null {
     // source packages
     const srcText = PypiDatasource.normalizeName(text);
@@ -212,22 +212,24 @@ export class PypiDatasource extends Datasource {
         // Certain simple repositories like artifactory don't escape > and <
         .replace(
           regEx(/data-requires-python="([^"]*?)>([^"]*?)"/g),
-          'data-requires-python="$1&gt;$2"'
+          'data-requires-python="$1&gt;$2"',
         )
         .replace(
           regEx(/data-requires-python="([^"]*?)<([^"]*?)"/g),
-          'data-requires-python="$1&lt;$2"'
+          'data-requires-python="$1&lt;$2"',
         )
     );
   }
 
   private async getSimpleDependency(
     packageName: string,
-    hostUrl: string
+    hostUrl: string,
   ): Promise<ReleaseResult | null> {
     const lookupUrl = url.resolve(
       hostUrl,
-      ensureTrailingSlash(PypiDatasource.normalizeNameForUrlLookup(packageName))
+      ensureTrailingSlash(
+        PypiDatasource.normalizeNameForUrlLookup(packageName),
+      ),
     );
     const dependency: ReleaseResult = { releases: [] };
     const response = await this.http.get(lookupUrl);
@@ -245,7 +247,7 @@ export class PypiDatasource extends Datasource {
     for (const link of Array.from(links)) {
       const version = PypiDatasource.extractVersionFromLinkText(
         link.text,
-        packageName
+        packageName,
       );
       if (version) {
         const release: PypiJSONRelease = {
@@ -273,7 +275,7 @@ export class PypiDatasource extends Datasource {
       result.constraints = {
         // TODO: string[] isn't allowed here
         python: versionReleases.map(
-          ({ requires_python }) => requires_python
+          ({ requires_python }) => requires_python,
         ) as any,
       };
       return result;
diff --git a/lib/modules/datasource/repology/index.spec.ts b/lib/modules/datasource/repology/index.spec.ts
index f102a95da374d6ccf026eb072d2172cf55e72436..a89687544a253dcb8ea0ac62f341fb18ce6219ff 100644
--- a/lib/modules/datasource/repology/index.spec.ts
+++ b/lib/modules/datasource/repology/index.spec.ts
@@ -28,7 +28,7 @@ const mockResolverCall = (
   repo: string,
   name: string,
   name_type: string,
-  response: ResponseMock
+  response: ResponseMock,
 ) => {
   const query = {
     repo,
@@ -77,7 +77,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -94,7 +94,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'this_should/never-exist',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -113,7 +113,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -127,7 +127,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -145,7 +145,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -164,7 +164,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -178,7 +178,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -193,7 +193,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'ubuntu_20_04/git',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -203,7 +203,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'invalid-lookup-name',
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
 
@@ -214,7 +214,7 @@ describe('modules/datasource/repology/index', () => {
           datasource,
           versioning,
           packageName: 'debian_stable/nginx',
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/repology/index.ts b/lib/modules/datasource/repology/index.ts
index 86e026bff0b1e67af1c1dd6968956c43a1ba78ca..d7f6c3a5148eead4c6e34e4639a21b1ee1861bc4 100644
--- a/lib/modules/datasource/repology/index.ts
+++ b/lib/modules/datasource/repology/index.ts
@@ -14,7 +14,7 @@ function findPackageInResponse(
   response: RepologyPackage[],
   repoName: string,
   pkgName: string,
-  types: RepologyPackageType[]
+  types: RepologyPackageType[],
 ): RepologyPackage[] | null {
   const repoPackages = response.filter((pkg) => pkg.repo === repoName);
 
@@ -76,7 +76,7 @@ export class RepologyDatasource extends Datasource {
     registryUrl: string,
     repoName: string,
     packageName: string,
-    packageType: RepologyPackageType
+    packageType: RepologyPackageType,
   ): Promise<RepologyPackage[]> {
     const query = getQueryString({
       repo: repoName,
@@ -88,7 +88,7 @@ export class RepologyDatasource extends Datasource {
 
     // Retrieve list of packages by looking up Repology project
     const packages = await this.queryPackages(
-      joinUrlParts(registryUrl, `tools/project-by?${query}`)
+      joinUrlParts(registryUrl, `tools/project-by?${query}`),
     );
 
     return packages;
@@ -96,12 +96,12 @@ export class RepologyDatasource extends Datasource {
 
   private async queryPackagesViaAPI(
     registryUrl: string,
-    packageName: string
+    packageName: string,
   ): Promise<RepologyPackage[]> {
     // Directly query the package via the API. This will only work if `packageName` has the
     // same name as the repology project
     const packages = await this.queryPackages(
-      joinUrlParts(registryUrl, `api/v1/project`, packageName)
+      joinUrlParts(registryUrl, `api/v1/project`, packageName),
     );
 
     return packages;
@@ -116,7 +116,7 @@ export class RepologyDatasource extends Datasource {
   async queryPackage(
     registryUrl: string,
     repoName: string,
-    pkgName: string
+    pkgName: string,
   ): Promise<RepologyPackage[] | undefined> {
     let response: RepologyPackage[];
     // Try getting the packages from tools/project-by first for type binname and
@@ -131,7 +131,7 @@ export class RepologyDatasource extends Datasource {
           registryUrl,
           repoName,
           pkgName,
-          pkgType
+          pkgType,
         );
 
         if (response) {
@@ -148,7 +148,7 @@ export class RepologyDatasource extends Datasource {
       if (err.statusCode === 403) {
         logger.debug(
           { repoName, pkgName },
-          'Repology does not support tools/project-by lookups for repository. Will try direct API access now'
+          'Repology does not support tools/project-by lookups for repository. Will try direct API access now',
         );
 
         // If the repository is not supported in tools/project-by we try directly accessing the
@@ -160,7 +160,7 @@ export class RepologyDatasource extends Datasource {
           response,
           repoName,
           pkgName,
-          packageTypes
+          packageTypes,
         );
         if (is.nonEmptyArray(pkg)) {
           // exit immediately if package found
@@ -169,7 +169,7 @@ export class RepologyDatasource extends Datasource {
       } else if (err.statusCode === 300) {
         logger.warn(
           { repoName, pkgName },
-          'Ambiguous redirection from package name to project name in Repology. Skipping this package'
+          'Ambiguous redirection from package name to project name in Repology. Skipping this package',
         );
         return undefined;
       }
@@ -179,7 +179,7 @@ export class RepologyDatasource extends Datasource {
 
     logger.debug(
       { repoName, pkgName },
-      'Repository or package not found on Repology'
+      'Repository or package not found on Repology',
     );
 
     return undefined;
@@ -198,8 +198,8 @@ export class RepologyDatasource extends Datasource {
     if (!repoName || !pkgName) {
       throw new ExternalHostError(
         new Error(
-          'Repology lookup name must contain repository and package separated by slash (<repo>/<pkg>)'
-        )
+          'Repology lookup name must contain repository and package separated by slash (<repo>/<pkg>)',
+        ),
       );
     }
 
@@ -223,7 +223,7 @@ export class RepologyDatasource extends Datasource {
       } else {
         logger.warn(
           { packageName, err },
-          'Repology lookup failed with unexpected error'
+          'Repology lookup failed with unexpected error',
         );
       }
 
diff --git a/lib/modules/datasource/ruby-version/index.spec.ts b/lib/modules/datasource/ruby-version/index.spec.ts
index b020219607ec8c1d2ece44ef8c205e6609f364bb..04ab255a5a9541639129a96ae9bc22ab85fd9c39 100644
--- a/lib/modules/datasource/ruby-version/index.spec.ts
+++ b/lib/modules/datasource/ruby-version/index.spec.ts
@@ -37,7 +37,7 @@ describe('modules/datasource/ruby-version/index', () => {
         .get('/en/downloads/releases/')
         .reply(404);
       await expect(
-        getPkgReleases({ datasource, packageName: 'ruby' })
+        getPkgReleases({ datasource, packageName: 'ruby' }),
       ).rejects.toThrow();
     });
   });
diff --git a/lib/modules/datasource/rubygems/common.ts b/lib/modules/datasource/rubygems/common.ts
index 5dd38e90df81d5c7ee86515464a1816fb18280fb..319a53b3ca8433faa88026b6ea4b30661089380d 100644
--- a/lib/modules/datasource/rubygems/common.ts
+++ b/lib/modules/datasource/rubygems/common.ts
@@ -7,7 +7,7 @@ import { GemMetadata, GemVersions } from './schema';
 
 export function assignMetadata(
   releases: ReleaseResult,
-  metadata: GemMetadata
+  metadata: GemMetadata,
 ): ReleaseResult {
   return assignKeys(releases, metadata, [
     'changelogUrl',
@@ -19,7 +19,7 @@ export function assignMetadata(
 export function getV1Metadata(
   http: Http,
   registryUrl: string,
-  packageName: string
+  packageName: string,
 ): AsyncResult<GemMetadata, SafeJsonError> {
   const metadataUrl = join(registryUrl, '/api/v1/gems', `${packageName}.json`);
   return http.getJsonSafe(metadataUrl, GemMetadata);
@@ -28,17 +28,17 @@ export function getV1Metadata(
 export function getV1Releases(
   http: Http,
   registryUrl: string,
-  packageName: string
+  packageName: string,
 ): AsyncResult<ReleaseResult, SafeJsonError | 'unsupported-api'> {
   const versionsUrl = join(
     registryUrl,
     '/api/v1/versions',
-    `${packageName}.json`
+    `${packageName}.json`,
   );
 
   return http.getJsonSafe(versionsUrl, GemVersions).transform((releaseResult) =>
     getV1Metadata(http, registryUrl, packageName)
       .transform((metadata) => assignMetadata(releaseResult, metadata))
-      .unwrapOrElse(releaseResult)
+      .unwrapOrElse(releaseResult),
   );
 }
diff --git a/lib/modules/datasource/rubygems/index.spec.ts b/lib/modules/datasource/rubygems/index.spec.ts
index 942ced25dbcdf62ed46ffe89ebddecad481e1df9..d4a3580ad38cb99080a35f517833afac2e2aa98d 100644
--- a/lib/modules/datasource/rubygems/index.spec.ts
+++ b/lib/modules/datasource/rubygems/index.spec.ts
@@ -36,7 +36,7 @@ describe('modules/datasource/rubygems/index', () => {
           datasource: RubyGemsDatasource.id,
           packageName: 'foobar',
           registryUrls: ['https://example.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -168,7 +168,7 @@ describe('modules/datasource/rubygems/index', () => {
             { number: '1.0.0' },
             { number: '2.0.0' },
             { number: '3.0.0' },
-          ])
+          ]),
         );
 
       const res = await getPkgReleases({
@@ -200,7 +200,7 @@ describe('modules/datasource/rubygems/index', () => {
             1.0.0 |checksum:aaa
             2.0.0 |checksum:bbb
             3.0.0 |checksum:ccc
-          `
+          `,
         );
 
       const res = await getPkgReleases({
@@ -231,7 +231,7 @@ describe('modules/datasource/rubygems/index', () => {
           datasource: RubyGemsDatasource.id,
           packageName: 'foobar',
           registryUrls: ['https://example.com'],
-        })
+        }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -251,7 +251,7 @@ describe('modules/datasource/rubygems/index', () => {
           datasource: RubyGemsDatasource.id,
           packageName: 'foobar',
           registryUrls: ['https://example.com'],
-        })
+        }),
       ).rejects.toThrow(ExternalHostError);
     });
 
@@ -281,7 +281,7 @@ describe('modules/datasource/rubygems/index', () => {
             { number: '1.0.0' },
             { number: '2.0.0' },
             { number: '3.0.0' },
-          ])
+          ]),
         );
 
       const res = await getPkgReleases({
diff --git a/lib/modules/datasource/rubygems/index.ts b/lib/modules/datasource/rubygems/index.ts
index 670cd75bbe6ea8fea8bcc0bd63d5361bdfa488af..58833d513b67dc861e7926411d854742d36e902e 100644
--- a/lib/modules/datasource/rubygems/index.ts
+++ b/lib/modules/datasource/rubygems/index.ts
@@ -16,7 +16,7 @@ import { VersionsEndpointCache } from './versions-endpoint-cache';
 
 function unlessServerSide<
   T extends NonNullable<unknown>,
-  E extends NonNullable<unknown>
+  E extends NonNullable<unknown>,
 >(err: E, cb: () => AsyncResult<T, E>): AsyncResult<T, E> {
   if (err instanceof HttpError && err.response?.statusCode) {
     const code = err.response.statusCode;
@@ -71,9 +71,9 @@ export class RubyGemsDatasource extends Datasource {
     let result: AsyncResult<ReleaseResult, Error | string>;
     if (registryHostname === 'rubygems.org') {
       result = Result.wrap(
-        this.versionsEndpointCache.getVersions(registryUrl, packageName)
+        this.versionsEndpointCache.getVersions(registryUrl, packageName),
       ).transform((versions) =>
-        this.metadataCache.getRelease(registryUrl, packageName, versions)
+        this.metadataCache.getRelease(registryUrl, packageName, versions),
       );
     } else if (
       registryHostname === 'rubygems.pkg.github.com' ||
@@ -84,13 +84,13 @@ export class RubyGemsDatasource extends Datasource {
       result = getV1Releases(this.http, registryUrl, packageName)
         .catch((err) =>
           unlessServerSide(err, () =>
-            this.getReleasesViaInfoEndpoint(registryUrl, packageName)
-          )
+            this.getReleasesViaInfoEndpoint(registryUrl, packageName),
+          ),
         )
         .catch((err) =>
           unlessServerSide(err, () =>
-            this.getReleasesViaDeprecatedAPI(registryUrl, packageName)
-          )
+            this.getReleasesViaDeprecatedAPI(registryUrl, packageName),
+          ),
         );
     }
 
@@ -109,7 +109,7 @@ export class RubyGemsDatasource extends Datasource {
 
   private getReleasesViaInfoEndpoint(
     registryUrl: string,
-    packageName: string
+    packageName: string,
   ): AsyncResult<ReleaseResult, Error | ZodError> {
     const url = joinUrlParts(registryUrl, '/info', packageName);
     return Result.wrap(this.http.get(url))
@@ -119,14 +119,14 @@ export class RubyGemsDatasource extends Datasource {
 
   private getReleasesViaDeprecatedAPI(
     registryUrl: string,
-    packageName: string
+    packageName: string,
   ): AsyncResult<ReleaseResult, Error | ZodError> {
     const path = joinUrlParts(registryUrl, `/api/v1/dependencies`);
     const query = getQueryString({ gems: packageName });
     const url = `${path}?${query}`;
     const bufPromise = this.http.getBuffer(url);
     return Result.wrap(bufPromise).transform(({ body }) =>
-      MarshalledVersionInfo.safeParse(Marshal.parse(body))
+      MarshalledVersionInfo.safeParse(Marshal.parse(body)),
     );
   }
 }
diff --git a/lib/modules/datasource/rubygems/metadata-cache.spec.ts b/lib/modules/datasource/rubygems/metadata-cache.spec.ts
index be203610a91ab5413f293f11986e70301c658673..d1d611ac3c588279d2e58eeea0727578272daff7 100644
--- a/lib/modules/datasource/rubygems/metadata-cache.spec.ts
+++ b/lib/modules/datasource/rubygems/metadata-cache.spec.ts
@@ -15,7 +15,7 @@ describe('modules/datasource/rubygems/metadata-cache', () => {
 
     packageCache.get.mockImplementation(
       (ns, key) =>
-        Promise.resolve(packageCacheMock.get(`${ns}::${key}`)) as never
+        Promise.resolve(packageCacheMock.get(`${ns}::${key}`)) as never,
     );
 
     packageCache.set.mockImplementation((ns, key, value) => {
@@ -148,7 +148,7 @@ describe('modules/datasource/rubygems/metadata-cache', () => {
             { version: '3.0.0' },
           ],
         },
-      }
+      },
     );
     const cache = new MetadataCache(new Http('test'));
 
@@ -198,7 +198,7 @@ describe('modules/datasource/rubygems/metadata-cache', () => {
         hash: '123',
         isFallback: true,
       },
-      24 * 60
+      24 * 60,
     );
   });
 
@@ -221,12 +221,12 @@ describe('modules/datasource/rubygems/metadata-cache', () => {
     const res1 = await cache.getRelease(
       'https://rubygems.org',
       'foobar',
-      versions
+      versions,
     );
     const res2 = await cache.getRelease(
       'https://rubygems.org',
       'foobar',
-      versions
+      versions,
     );
 
     expect(res1).toEqual(res2);
@@ -293,7 +293,7 @@ describe('modules/datasource/rubygems/metadata-cache', () => {
     const res = await cache.getRelease(
       'https://rubygems.org',
       'foobar',
-      versions
+      versions,
     );
 
     expect(res).toEqual({
@@ -313,7 +313,7 @@ describe('modules/datasource/rubygems/metadata-cache', () => {
     const res = await cache.getRelease(
       'https://rubygems.org',
       'foobar',
-      versions
+      versions,
     );
 
     expect(res).toEqual({
diff --git a/lib/modules/datasource/rubygems/metadata-cache.ts b/lib/modules/datasource/rubygems/metadata-cache.ts
index 834b8cdc4cdb92bbb4d3a366c9def8395a99aab9..28e66cc29ede186d1112bda748819990363ee772 100644
--- a/lib/modules/datasource/rubygems/metadata-cache.ts
+++ b/lib/modules/datasource/rubygems/metadata-cache.ts
@@ -36,7 +36,7 @@ export class MetadataCache {
   async getRelease(
     registryUrl: string,
     packageName: string,
-    versions: string[]
+    versions: string[],
   ): Promise<ReleaseResult> {
     const cacheNs = `datasource-rubygems`;
     const cacheKey = `metadata-cache:${registryUrl}:${packageName}`;
@@ -45,7 +45,7 @@ export class MetadataCache {
     const loadCache = (): AsyncResult<ReleaseResult, CacheLoadError> =>
       Result.wrapNullable<CacheRecord, CacheLoadError, CacheLoadError>(
         packageCache.get<CacheRecord>(cacheNs, cacheKey),
-        { type: 'cache-not-found' }
+        { type: 'cache-not-found' },
       ).transform((cache) => {
         return versionsHash === cache.hash
           ? Result.ok(cache.data)
@@ -55,7 +55,7 @@ export class MetadataCache {
     const saveCache = async (
       cache: CacheRecord,
       ttlMinutes = 100 * 24 * 60,
-      ttlDelta = 10 * 24 * 60
+      ttlDelta = 10 * 24 * 60,
     ): Promise<void> => {
       const registryHostname = parseUrl(registryUrl)?.hostname;
       if (registryHostname === 'rubygems.org') {
@@ -69,7 +69,7 @@ export class MetadataCache {
       .catch((err) =>
         getV1Releases(this.http, registryUrl, packageName).transform(
           async (
-            data: ReleaseResult
+            data: ReleaseResult,
           ): Promise<Result<ReleaseResult, CacheError>> => {
             const dataHash = hashReleases(data);
             if (dataHash === versionsHash) {
@@ -90,20 +90,20 @@ export class MetadataCache {
                 await saveCache(
                   { ...staleCache, isFallback: true },
                   24 * 60,
-                  0
+                  0,
                 );
               }
               return Result.ok(staleCache.data);
             }
 
             return Result.err({ type: 'cache-invalid' });
-          }
-        )
+          },
+        ),
       )
       .catch((err) => {
         logger.debug(
           { err },
-          'Rubygems: error fetching rubygems data, falling back to versions-only result'
+          'Rubygems: error fetching rubygems data, falling back to versions-only result',
         );
         const releases = versions.map((version) => ({ version }));
         return Result.ok({ releases } as ReleaseResult);
diff --git a/lib/modules/datasource/rubygems/schema.spec.ts b/lib/modules/datasource/rubygems/schema.spec.ts
index a881d5cf46431a332467bc873f7fbd7640812cbf..5ece346c68d547bb7c735c3bfdbe7e7b85db618f 100644
--- a/lib/modules/datasource/rubygems/schema.spec.ts
+++ b/lib/modules/datasource/rubygems/schema.spec.ts
@@ -26,7 +26,7 @@ describe('modules/datasource/rubygems/schema', () => {
 
     it('errors on empty input', () => {
       expect(() => MarshalledVersionInfo.parse([])).toThrow(
-        'Empty response from `/v1/dependencies` endpoint'
+        'Empty response from `/v1/dependencies` endpoint',
       );
     });
   });
@@ -153,7 +153,7 @@ describe('modules/datasource/rubygems/schema', () => {
 
     it('errors on empty input', () => {
       expect(() => GemInfo.parse('')).toThrow(
-        'Empty response from `/info` endpoint'
+        'Empty response from `/info` endpoint',
       );
     });
   });
diff --git a/lib/modules/datasource/rubygems/schema.ts b/lib/modules/datasource/rubygems/schema.ts
index 3e244120950848af24a8740b4b437cea8e3167f0..86c856cfebd37698f00a1666d013ec0fea03821c 100644
--- a/lib/modules/datasource/rubygems/schema.ts
+++ b/lib/modules/datasource/rubygems/schema.ts
@@ -8,11 +8,11 @@ import type { Release } from '../types';
 export const MarshalledVersionInfo = LooseArray(
   z
     .object({ number: z.string() })
-    .transform(({ number: version }): Release => ({ version }))
+    .transform(({ number: version }): Release => ({ version })),
 )
   .refine(
     (value) => !is.emptyArray(value),
-    'Empty response from `/v1/dependencies` endpoint'
+    'Empty response from `/v1/dependencies` endpoint',
   )
   .transform((releases) => ({ releases }));
 type MarshalledVersionInfo = z.infer<typeof MarshalledVersionInfo>;
@@ -28,7 +28,7 @@ export const GemMetadata = z
       changelog_uri: changelogUrl,
       homepage_uri: homepage,
       source_code_uri: sourceUrl,
-    }) => ({ changelogUrl, homepage, sourceUrl })
+    }) => ({ changelogUrl, homepage, sourceUrl }),
   );
 export type GemMetadata = z.infer<typeof GemMetadata>;
 
@@ -84,12 +84,12 @@ export const GemVersions = LooseArray(
         }
 
         return result;
-      }
-    )
+      },
+    ),
 )
   .refine(
     (value) => !is.emptyArray(value),
-    'Empty response from `/v1/gems` endpoint'
+    'Empty response from `/v1/gems` endpoint',
   )
   .transform((releases) => ({ releases }));
 export type GemVersions = z.infer<typeof GemVersions>;
@@ -100,11 +100,11 @@ export const GemInfo = z
     filterMap(body.split(newlineRegex), (line) => {
       const spaceIdx = line.indexOf(' ');
       return spaceIdx > 0 ? line.slice(0, spaceIdx) : null;
-    }).map((version): Release => ({ version }))
+    }).map((version): Release => ({ version })),
   )
   .refine(
     (value) => !is.emptyArray(value),
-    'Empty response from `/info` endpoint'
+    'Empty response from `/info` endpoint',
   )
   .transform((releases) => ({ releases }));
 export type GemInfo = z.infer<typeof GemInfo>;
diff --git a/lib/modules/datasource/rubygems/versions-endpoint-cache.spec.ts b/lib/modules/datasource/rubygems/versions-endpoint-cache.spec.ts
index dee86fe7e1a3f38548ec05a61d78d43b09206082..776289a7a9276460374904c4299c6cc0ca5a87d6 100644
--- a/lib/modules/datasource/rubygems/versions-endpoint-cache.spec.ts
+++ b/lib/modules/datasource/rubygems/versions-endpoint-cache.spec.ts
@@ -73,7 +73,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
         .replyWithError('Unknown error');
 
       await expect(rubygems.getVersions(registryUrl, 'foo')).rejects.toThrow(
-        'Unknown error'
+        'Unknown error',
       );
       expect(memCache.size).toBe(0);
     });
@@ -103,7 +103,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
           codeBlock`
             33333333333333333333333333333333
             foo -1.1.1,1.2.3 44444444444444444444444444444444
-          ` + '\n'
+          ` + '\n',
         );
 
       const res2 = await rubygems.getVersions(registryUrl, 'foo');
@@ -129,7 +129,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
           codeBlock`
             01010101010101010101010101010101
             foo -1.1.1,1.2.3 44444444444444444444444444444444
-          ` + '\n'
+          ` + '\n',
         )
         .get('/versions')
         .reply(
@@ -140,7 +140,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
             foo 1.2.3 11111111111111111111111111111111
             bar 2.2.2 22222222222222222222222222222222
             baz 3.3.3 01010101010101010101010101010101
-          ` + '\n'
+          ` + '\n',
         );
 
       const res2 = await rubygems.getVersions(registryUrl, 'foo');
@@ -163,7 +163,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
         .get('/versions')
         .reply(
           200,
-          fullBody + `foo -1.1.1,1.2.3 44444444444444444444444444444444\n`
+          fullBody + `foo -1.1.1,1.2.3 44444444444444444444444444444444\n`,
         );
 
       const res2 = await rubygems.getVersions(registryUrl, 'foo');
@@ -205,7 +205,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
               created_at: 2021-05-05T00:00:00.000Z
               ---
               foo 9.9.9 99999999999999999999999999999999
-            ` + '\n'
+            ` + '\n',
           );
 
         const res = await rubygems.getVersions(registryUrl, 'foo');
@@ -220,7 +220,7 @@ describe('modules/datasource/rubygems/versions-endpoint-cache', () => {
           .replyWithError('Unknown error');
 
         await expect(rubygems.getVersions(registryUrl, 'foo')).rejects.toThrow(
-          'Unknown error'
+          'Unknown error',
         );
 
         expect(memCache.get('https://rubygems.org')).toBeUndefined();
diff --git a/lib/modules/datasource/rubygems/versions-endpoint-cache.ts b/lib/modules/datasource/rubygems/versions-endpoint-cache.ts
index ecc4222bcc5b4a6f0d281f9279b28d9aea1b4935..24270848419ec6afd61adb309eb0160a76646f8c 100644
--- a/lib/modules/datasource/rubygems/versions-endpoint-cache.ts
+++ b/lib/modules/datasource/rubygems/versions-endpoint-cache.ts
@@ -36,7 +36,7 @@ function stripContentHead(content: string): string {
 
 function reconcilePackageVersions(
   packageVersions: PackageVersions,
-  versionLines: VersionLines
+  versionLines: VersionLines,
 ): PackageVersions {
   for (const line of versionLines) {
     const packageName = copystr(line.packageName);
@@ -67,7 +67,7 @@ function reconcilePackageVersions(
 function parseFullBody(body: string): VersionsEndpointResult {
   const packageVersions = reconcilePackageVersions(
     new Map<string, string[]>(),
-    VersionLines.parse(body)
+    VersionLines.parse(body),
   );
   const syncedAt = new Date();
   const contentLength = body.length;
@@ -87,7 +87,7 @@ export const memCache = new Map<string, VersionsEndpointResult>();
 
 function cacheResult(
   registryUrl: string,
-  result: VersionsEndpointResult
+  result: VersionsEndpointResult,
 ): void {
   const registryHostname = parseUrl(registryUrl)?.hostname;
   if (registryHostname === 'rubygems.org') {
@@ -119,8 +119,8 @@ const VersionLines = z
             }
           }
           return { packageName, deletedVersions, addedVersions };
-        })
-    )
+        }),
+    ),
   );
 type VersionLines = z.infer<typeof VersionLines>;
 
@@ -166,7 +166,7 @@ export class VersionsEndpointCache {
 
   async getVersions(
     registryUrl: string,
-    packageName: string
+    packageName: string,
   ): Promise<VersionsResult> {
     /**
      * Ensure that only one request for a given registryUrl is in flight at a time.
@@ -187,7 +187,7 @@ export class VersionsEndpointCache {
     if (!cachedData) {
       logger.debug(
         { packageName, registryUrl },
-        'Rubygems: endpoint not supported'
+        'Rubygems: endpoint not supported',
       );
       return Result.err('unsupported-api');
     }
@@ -196,7 +196,7 @@ export class VersionsEndpointCache {
     if (!versions?.length) {
       logger.debug(
         { packageName, registryUrl },
-        'Rubygems: versions not found'
+        'Rubygems: versions not found',
       );
       return Result.err('package-not-found');
     }
@@ -221,7 +221,7 @@ export class VersionsEndpointCache {
 
   private async deltaSync(
     oldCache: VersionsEndpointData,
-    registryUrl: string
+    registryUrl: string,
   ): Promise<VersionsEndpointResult> {
     try {
       const url = `${registryUrl}/versions`;
@@ -259,7 +259,7 @@ export class VersionsEndpointCache {
       const delta = stripContentHead(body);
       const packageVersions = reconcilePackageVersions(
         oldCache.packageVersions,
-        VersionLines.parse(delta)
+        VersionLines.parse(delta),
       );
       const syncedAt = new Date();
       const contentLength = oldCache.contentLength + delta.length;
diff --git a/lib/modules/datasource/sbt-package/index.spec.ts b/lib/modules/datasource/sbt-package/index.spec.ts
index b1335ced6e25656aa177993f08cda951cc0f414e..05e015d651994f9f58703098e3df6edfc3626289 100644
--- a/lib/modules/datasource/sbt-package/index.spec.ts
+++ b/lib/modules/datasource/sbt-package/index.spec.ts
@@ -13,7 +13,7 @@ describe('modules/datasource/sbt-package/index', () => {
 
   it('parses sbt index directory', () => {
     expect(
-      parseIndexDir(Fixtures.get(`sbt-plugins-index.html`))
+      parseIndexDir(Fixtures.get(`sbt-plugins-index.html`)),
     ).toMatchSnapshot();
   });
 
@@ -81,7 +81,7 @@ describe('modules/datasource/sbt-package/index', () => {
             `<a href="example_2.12/" title='example_2.12/'>example_2.12/</a>`,
             `<a href="example_native/" title='example_native/'>example_native/</a>`,
             `<a href="example_sjs/" title='example_sjs/'>example_sjs/</a>`,
-          ].join('\n')
+          ].join('\n'),
         )
         .get('/org/example/example/')
         .reply(200, `<a href='1.2.0/'>1.2.0/</a>`)
@@ -115,7 +115,7 @@ describe('modules/datasource/sbt-package/index', () => {
         .get('/org/example/')
         .reply(
           200,
-          `<a href="example_2.12/" title='example_2.12/'>example_2.12/</a>`
+          `<a href="example_2.12/" title='example_2.12/'>example_2.12/</a>`,
         )
         .get('/org/example/example_2.12/')
         .reply(200, `<a href='1.2.3/'>1.2.3/</a>`)
@@ -144,12 +144,12 @@ describe('modules/datasource/sbt-package/index', () => {
         .get('/')
         .reply(
           200,
-          '<a href="/maven/io/confluent/kafka-avro-serializer/">kafka-avro-serializer/</a>'
+          '<a href="/maven/io/confluent/kafka-avro-serializer/">kafka-avro-serializer/</a>',
         )
         .get('/kafka-avro-serializer/')
         .reply(
           200,
-          '<a href="/maven/io/confluent/kafka-avro-serializer/7.0.1/">7.0.1/</a>'
+          '<a href="/maven/io/confluent/kafka-avro-serializer/7.0.1/">7.0.1/</a>',
         )
         .get('/kafka-avro-serializer/7.0.1/kafka-avro-serializer-7.0.1.pom')
         .reply(
@@ -162,7 +162,7 @@ describe('modules/datasource/sbt-package/index', () => {
             <packaging>jar</packaging>
             <name>kafka-avro-serializer</name>
           </project>
-        `
+        `,
         );
 
       const res = await getPkgReleases({
@@ -195,7 +195,7 @@ describe('modules/datasource/sbt-package/index', () => {
                 <url>https://example.org/repo.git</url>
               </scm>
             </project>
-          `
+          `,
         );
 
       const res = await getPkgReleases({
@@ -235,7 +235,7 @@ describe('modules/datasource/sbt-package/index', () => {
                 </versions>
               </versioning>
             </metadata>
-          `
+          `,
         )
         .head('/org/example/example_2.13/1.2.3/example_2.13-1.2.3.pom')
         .reply(200)
diff --git a/lib/modules/datasource/sbt-package/index.ts b/lib/modules/datasource/sbt-package/index.ts
index 36818864ed581feb6d223d28241aeec2c42cfd81..c0b83d6c95014667ae24fd12783bc3a12bf7b763 100644
--- a/lib/modules/datasource/sbt-package/index.ts
+++ b/lib/modules/datasource/sbt-package/index.ts
@@ -32,12 +32,12 @@ export class SbtPackageDatasource extends MavenDatasource {
   async getArtifactSubdirs(
     searchRoot: string,
     artifact: string,
-    scalaVersion: string
+    scalaVersion: string,
   ): Promise<string[] | null> {
     const pkgUrl = ensureTrailingSlash(searchRoot);
     const { body: indexContent } = await downloadHttpProtocol(
       this.http,
-      pkgUrl
+      pkgUrl,
     );
     if (indexContent) {
       const parseSubdirs = (content: string): string[] =>
@@ -69,7 +69,7 @@ export class SbtPackageDatasource extends MavenDatasource {
 
   async getPackageReleases(
     searchRoot: string,
-    artifactSubdirs: string[] | null
+    artifactSubdirs: string[] | null,
   ): Promise<string[] | null> {
     if (artifactSubdirs) {
       const releases: string[] = [];
@@ -95,7 +95,7 @@ export class SbtPackageDatasource extends MavenDatasource {
   async getUrls(
     searchRoot: string,
     artifactDirs: string[] | null,
-    version: string | null
+    version: string | null,
   ): Promise<Partial<ReleaseResult>> {
     const result: Partial<ReleaseResult> = {};
 
@@ -144,7 +144,7 @@ export class SbtPackageDatasource extends MavenDatasource {
   }
 
   override async getReleases(
-    config: GetReleasesConfig
+    config: GetReleasesConfig,
   ): Promise<ReleaseResult | null> {
     const { packageName, registryUrl } = config;
     // istanbul ignore if
@@ -168,17 +168,17 @@ export class SbtPackageDatasource extends MavenDatasource {
       const artifactSubdirs = await this.getArtifactSubdirs(
         searchRoot,
         artifact,
-        scalaVersion
+        scalaVersion,
       );
       const versions = await this.getPackageReleases(
         searchRoot,
-        artifactSubdirs
+        artifactSubdirs,
       );
       const latestVersion = getLatestVersion(versions);
       const urls = await this.getUrls(
         searchRoot,
         artifactSubdirs,
-        latestVersion
+        latestVersion,
       );
 
       const dependencyUrl = searchRoot;
@@ -194,7 +194,7 @@ export class SbtPackageDatasource extends MavenDatasource {
     }
 
     logger.debug(
-      `No versions discovered for ${packageName} listing organization root package folder, fallback to maven datasource for version discovery`
+      `No versions discovered for ${packageName} listing organization root package folder, fallback to maven datasource for version discovery`,
     );
     const mavenReleaseResult = await super.getReleases(config);
     if (mavenReleaseResult) {
@@ -202,7 +202,7 @@ export class SbtPackageDatasource extends MavenDatasource {
     }
 
     logger.debug(
-      `No versions found for ${packageName} in ${searchRoots.length} repositories`
+      `No versions found for ${packageName} in ${searchRoots.length} repositories`,
     );
     return null;
   }
diff --git a/lib/modules/datasource/sbt-package/util.ts b/lib/modules/datasource/sbt-package/util.ts
index c67973a4efd7fd23a5862be1fab5416e664db65c..b2d017972e4c4589d0e26c6cc72ca394c79c5749 100644
--- a/lib/modules/datasource/sbt-package/util.ts
+++ b/lib/modules/datasource/sbt-package/util.ts
@@ -6,7 +6,7 @@ const linkRegExp = /(?<=href=['"])[^'"]*(?=\/['"])/gi;
 
 export function parseIndexDir(
   content: string,
-  filterFn = (x: string): boolean => !regEx(/^\.+/).test(x)
+  filterFn = (x: string): boolean => !regEx(/^\.+/).test(x),
 ): string[] {
   const unfiltered = coerceArray(content.match(linkRegExp));
   return unfiltered.filter(filterFn);
@@ -14,18 +14,18 @@ export function parseIndexDir(
 
 export function normalizeRootRelativeUrls(
   content: string,
-  rootUrl: string | URL
+  rootUrl: string | URL,
 ): string {
   const rootRelativePath = new URL(rootUrl.toString()).pathname;
   return content.replace(linkRegExp, (href: string) =>
-    href.replace(rootRelativePath, '')
+    href.replace(rootRelativePath, ''),
   );
 }
 
 export function getLatestVersion(versions: string[] | null): string | null {
   if (versions?.length) {
     return versions.reduce((latestVersion, version) =>
-      compare(version, latestVersion) === 1 ? version : latestVersion
+      compare(version, latestVersion) === 1 ? version : latestVersion,
     );
   }
   return null;
diff --git a/lib/modules/datasource/sbt-plugin/index.spec.ts b/lib/modules/datasource/sbt-plugin/index.spec.ts
index 4a8c99d76bfa30aebfce1bc7512749554a407855..f2b3daaaa81df9a8bd64e570ab3bdb321aadef31 100644
--- a/lib/modules/datasource/sbt-plugin/index.spec.ts
+++ b/lib/modules/datasource/sbt-plugin/index.spec.ts
@@ -40,7 +40,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           '<a href="scalatest/" title=\'scalatest/\'>scalatest_2.12/</a>\n' +
             '<a href="scalatest_2.12/" title=\'scalatest_2.12/\'>scalatest_2.12/</a>\n' +
             "<a href='scalatest_sjs2.12/'>scalatest_2.12/</a>" +
-            "<a href='scalatest_native2.12/'>scalatest_2.12/</a>"
+            "<a href='scalatest_native2.12/'>scalatest_2.12/</a>",
         );
       httpMock
         .scope('https://repo.maven.apache.org')
@@ -66,12 +66,12 @@ describe('modules/datasource/sbt-plugin/index', () => {
             '<body>\n' +
             '<pre><a href="scala_2.12/">scala_2.12/</a></pre>\n' +
             '</body>\n' +
-            '</html>'
+            '</html>',
         );
       httpMock
         .scope('https://repo.scala-sbt.org')
         .get(
-          '/scalasbt/sbt-plugin-releases/org.foundweekends/sbt-bintray/scala_2.12/'
+          '/scalasbt/sbt-plugin-releases/org.foundweekends/sbt-bintray/scala_2.12/',
         )
         .reply(
           200,
@@ -82,12 +82,12 @@ describe('modules/datasource/sbt-plugin/index', () => {
             '<body>\n' +
             '<pre><a href="sbt_1.0/">sbt_1.0/</a></pre>\n' +
             '</body>\n' +
-            '</html>\n'
+            '</html>\n',
         );
       httpMock
         .scope('https://repo.scala-sbt.org')
         .get(
-          '/scalasbt/sbt-plugin-releases/org.foundweekends/sbt-bintray/scala_2.12/sbt_1.0/'
+          '/scalasbt/sbt-plugin-releases/org.foundweekends/sbt-bintray/scala_2.12/sbt_1.0/',
         )
         .reply(
           200,
@@ -98,7 +98,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
             '<body>\n' +
             '<pre><a href="0.5.5/">0.5.5/</a></pre>\n' +
             '</body>\n' +
-            '</html>\n'
+            '</html>\n',
         );
 
       httpMock
@@ -109,7 +109,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           '<a href="sbt-coursier_2.10_0.13/">sbt-coursier_2.10_0.13/</a>\n' +
             '<a href="sbt-coursier_2.12_1.0/">sbt-coursier_2.12_1.0/</a>\n' +
             '<a href="sbt-coursier_2.12_1.0.0-M5/">sbt-coursier_2.12_1.0.0-M5/</a>\n' +
-            '<a href="sbt-coursier_2.12_1.0.0-M6/">sbt-coursier_2.12_1.0.0-M6/</a>\n'
+            '<a href="sbt-coursier_2.12_1.0.0-M6/">sbt-coursier_2.12_1.0.0-M6/</a>\n',
         );
       httpMock
         .scope('https://repo.maven.apache.org')
@@ -119,12 +119,12 @@ describe('modules/datasource/sbt-plugin/index', () => {
           '<a href="2.0.0-RC2/">2.0.0-RC2/</a>\n' +
             '<a href="2.0.0-RC6-1/">2.0.0-RC6-1/</a>\n' +
             '<a href="2.0.0-RC6-2/">2.0.0-RC6-2/</a>\n' +
-            '<a href="2.0.0-RC6-6/">2.0.0-RC6-6/</a>\n'
+            '<a href="2.0.0-RC6-6/">2.0.0-RC6-6/</a>\n',
         );
       httpMock
         .scope('https://repo.maven.apache.org')
         .get(
-          '/maven2/io/get-coursier/sbt-coursier_2.12_1.0/2.0.0-RC6-6/sbt-coursier-2.0.0-RC6-6.pom'
+          '/maven2/io/get-coursier/sbt-coursier_2.12_1.0/2.0.0-RC6-6/sbt-coursier-2.0.0-RC6-6.pom',
         )
         .reply(
           200,
@@ -133,7 +133,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
             '<scm>\n' +
             '<url>https://github.com/coursier/sbt-coursier</url>\n' +
             '</scm>\n' +
-            '</project>\n'
+            '</project>\n',
         );
     });
 
@@ -147,7 +147,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           datasource: SbtPluginDatasource.id,
           packageName: 'org.scalatest:scalatest',
           registryUrls: ['https://failed_repo/maven'],
-        })
+        }),
       ).toBeNull();
       expect(
         await getPkgReleases({
@@ -155,7 +155,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           datasource: SbtPluginDatasource.id,
           packageName: 'org.scalatest:scalaz',
           registryUrls: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -166,7 +166,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           datasource: SbtPluginDatasource.id,
           packageName: 'org.foundweekends:sbt-bintray',
           registryUrls: [],
-        })
+        }),
       ).toEqual({
         dependencyUrl:
           'https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/org.foundweekends/sbt-bintray',
@@ -182,7 +182,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           datasource: SbtPluginDatasource.id,
           packageName: 'org.foundweekends:sbt-bintray_2.12',
           registryUrls: [],
-        })
+        }),
       ).toEqual({
         dependencyUrl:
           'https://repo.scala-sbt.org/scalasbt/sbt-plugin-releases/org.foundweekends/sbt-bintray',
@@ -198,7 +198,7 @@ describe('modules/datasource/sbt-plugin/index', () => {
           datasource: SbtPluginDatasource.id,
           packageName: 'io.get-coursier:sbt-coursier',
           registryUrls: [MAVEN_REPO],
-        })
+        }),
       ).toEqual({
         dependencyUrl:
           'https://repo.maven.apache.org/maven2/io/get-coursier/sbt-coursier',
diff --git a/lib/modules/datasource/sbt-plugin/index.ts b/lib/modules/datasource/sbt-plugin/index.ts
index ee815340dab956768ae6f28e36ec247ae2042982..8fd574e56d227e6026322d8241889e0c53dd70c0 100644
--- a/lib/modules/datasource/sbt-plugin/index.ts
+++ b/lib/modules/datasource/sbt-plugin/index.ts
@@ -31,20 +31,20 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
   async resolvePluginReleases(
     rootUrl: string,
     artifact: string,
-    scalaVersion: string
+    scalaVersion: string,
   ): Promise<string[] | null> {
     const searchRoot = `${rootUrl}/${artifact}`;
     const parse = (content: string): string[] =>
       parseIndexDir(content, (x) => !regEx(/^\.+$/).test(x));
     const { body: indexContent } = await downloadHttpProtocol(
       this.http,
-      ensureTrailingSlash(searchRoot)
+      ensureTrailingSlash(searchRoot),
     );
     if (indexContent) {
       const releases: string[] = [];
       const scalaVersionItems = parse(indexContent);
       const scalaVersions = scalaVersionItems.map((x) =>
-        x.replace(regEx(/^scala_/), '')
+        x.replace(regEx(/^scala_/), ''),
       );
       const searchVersions = scalaVersions.includes(scalaVersion)
         ? [scalaVersion]
@@ -53,7 +53,7 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
         const searchSubRoot = `${searchRoot}/scala_${searchVersion}`;
         const { body: subRootContent } = await downloadHttpProtocol(
           this.http,
-          ensureTrailingSlash(searchSubRoot)
+          ensureTrailingSlash(searchSubRoot),
         );
         if (subRootContent) {
           const sbtVersionItems = parse(subRootContent);
@@ -61,7 +61,7 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
             const releasesRoot = `${searchSubRoot}/${sbtItem}`;
             const { body: releasesIndexContent } = await downloadHttpProtocol(
               this.http,
-              ensureTrailingSlash(releasesRoot)
+              ensureTrailingSlash(releasesRoot),
             );
             if (releasesIndexContent) {
               const releasesParsed = parse(releasesIndexContent);
@@ -102,7 +102,7 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
       let versions = await this.resolvePluginReleases(
         searchRoot,
         artifact,
-        scalaVersion
+        scalaVersion,
       );
       let urls = {};
 
@@ -110,7 +110,7 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
         const artifactSubdirs = await this.getArtifactSubdirs(
           searchRoot,
           artifact,
-          scalaVersion
+          scalaVersion,
         );
         versions = await this.getPackageReleases(searchRoot, artifactSubdirs);
         const latestVersion = getLatestVersion(versions);
@@ -130,7 +130,7 @@ export class SbtPluginDatasource extends SbtPackageDatasource {
     }
 
     logger.debug(
-      `No versions found for ${packageName} in ${searchRoots.length} repositories`
+      `No versions found for ${packageName} in ${searchRoots.length} repositories`,
     );
     return null;
   }
diff --git a/lib/modules/datasource/terraform-module/base.ts b/lib/modules/datasource/terraform-module/base.ts
index 405b6dcc501ab85fd49cf31298ca43609a150cbd..e88b16eaf15a96afb4d2ddc5a5fce2f981d0cf1a 100644
--- a/lib/modules/datasource/terraform-module/base.ts
+++ b/lib/modules/datasource/terraform-module/base.ts
@@ -16,7 +16,7 @@ export abstract class TerraformDatasource extends Datasource {
     ttlMinutes: 1440,
   })
   async getTerraformServiceDiscoveryResult(
-    registryUrl: string
+    registryUrl: string,
   ): Promise<ServiceDiscoveryResult> {
     const discoveryURL = TerraformDatasource.getDiscoveryUrl(registryUrl);
     const serviceDiscovery = (
diff --git a/lib/modules/datasource/terraform-module/index.spec.ts b/lib/modules/datasource/terraform-module/index.spec.ts
index 8360e066faba4efc618d1c33f23aae9d57a05a0f..399e2cf9c123b9d833d8844e5fed3c6e28234766 100644
--- a/lib/modules/datasource/terraform-module/index.spec.ts
+++ b/lib/modules/datasource/terraform-module/index.spec.ts
@@ -6,11 +6,11 @@ import { TerraformModuleDatasource } from '.';
 const consulData = Fixtures.get('registry-consul.json');
 const consulVersionsData = Fixtures.get('registry-consul-versions.json');
 const versionsDataWithSourceUrl = Fixtures.get(
-  'registry-versions-with-source.json'
+  'registry-versions-with-source.json',
 );
 const serviceDiscoveryResult = Fixtures.get('service-discovery.json');
 const serviceDiscoveryCustomResult = Fixtures.get(
-  'service-custom-discovery.json'
+  'service-custom-discovery.json',
 );
 
 const datasource = TerraformModuleDatasource.id;
@@ -30,7 +30,7 @@ describe('modules/datasource/terraform-module/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hashicorp/consul/aws',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -45,7 +45,7 @@ describe('modules/datasource/terraform-module/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hashicorp/consul/aws',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -60,7 +60,7 @@ describe('modules/datasource/terraform-module/index', () => {
         await getPkgReleases({
           datasource,
           packageName: 'hashicorp/consul/aws',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -109,7 +109,7 @@ describe('modules/datasource/terraform-module/index', () => {
           datasource,
           packageName: 'hashicorp/consul/aws',
           registryUrls: ['https://terraform.company.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -125,7 +125,7 @@ describe('modules/datasource/terraform-module/index', () => {
           datasource,
           packageName: 'hashicorp/consul/aws',
           registryUrls: ['https://terraform.company.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -141,7 +141,7 @@ describe('modules/datasource/terraform-module/index', () => {
           datasource,
           packageName: 'hashicorp/consul/aws',
           registryUrls: ['https://terraform.company.com'],
-        })
+        }),
       ).toBeNull();
     });
 
diff --git a/lib/modules/datasource/terraform-module/index.ts b/lib/modules/datasource/terraform-module/index.ts
index 81e3e2a6c36ad9ebc0f1a109455a03e9d29f56fb..9476cef7e5c3c35caf59f9ef670e2754b9c63fcf 100644
--- a/lib/modules/datasource/terraform-module/index.ts
+++ b/lib/modules/datasource/terraform-module/index.ts
@@ -53,24 +53,24 @@ export class TerraformModuleDatasource extends TerraformDatasource {
       TerraformModuleDatasource.getRegistryRepository(packageName, registryUrl);
     logger.trace(
       { registryUrlNormalized, terraformRepository: repository },
-      'terraform-module.getReleases()'
+      'terraform-module.getReleases()',
     );
 
     const serviceDiscovery = await this.getTerraformServiceDiscoveryResult(
-      registryUrlNormalized
+      registryUrlNormalized,
     );
     if (this.extendedApiRegistryUrls.includes(registryUrlNormalized)) {
       return await this.queryRegistryExtendedApi(
         serviceDiscovery,
         registryUrlNormalized,
-        repository
+        repository,
       );
     }
 
     return await this.queryRegistryVersions(
       serviceDiscovery,
       registryUrlNormalized,
-      repository
+      repository,
     );
   }
 
@@ -82,7 +82,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
   private async queryRegistryExtendedApi(
     serviceDiscovery: ServiceDiscoveryResult,
     registryUrl: string,
-    repository: string
+    repository: string,
   ): Promise<ReleaseResult | null> {
     let res: TerraformRelease;
     let pkgUrl: string;
@@ -94,7 +94,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
         registryUrl,
         'modules.v1',
         serviceDiscovery,
-        repository
+        repository,
       );
       res = (await this.http.getJson<TerraformRelease>(pkgUrl)).body;
       const returnedName = res.namespace + '/' + res.name + '/' + res.provider;
@@ -118,7 +118,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
     dep.homepage = `${registryUrl}/modules/${repository}`;
     // set published date for latest release
     const latestVersion = dep.releases.find(
-      (release) => res.version === release.version
+      (release) => res.version === release.version,
     );
     if (latestVersion) {
       latestVersion.releaseTimestamp = res.published_at;
@@ -133,7 +133,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
   private async queryRegistryVersions(
     serviceDiscovery: ServiceDiscoveryResult,
     registryUrl: string,
-    repository: string
+    repository: string,
   ): Promise<ReleaseResult | null> {
     let res: TerraformModuleVersions;
     let pkgUrl: string;
@@ -143,7 +143,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
         registryUrl,
         'modules.v1',
         serviceDiscovery,
-        `${repository}/versions`
+        `${repository}/versions`,
       );
       res = (await this.http.getJson<TerraformModuleVersions>(pkgUrl)).body;
       if (res.modules.length < 1) {
@@ -171,7 +171,7 @@ export class TerraformModuleDatasource extends TerraformDatasource {
 
   private static getRegistryRepository(
     packageName: string,
-    registryUrl: string | undefined
+    registryUrl: string | undefined,
   ): RegistryRepository {
     let registry: string;
     const split = packageName.split('/');
diff --git a/lib/modules/datasource/terraform-module/utils.spec.ts b/lib/modules/datasource/terraform-module/utils.spec.ts
index 5477b6247335c6d5761dda7bcc0e89e029a3689c..5a7ebb61478a48dd6ec84b8fa6aad39f66d81325 100644
--- a/lib/modules/datasource/terraform-module/utils.spec.ts
+++ b/lib/modules/datasource/terraform-module/utils.spec.ts
@@ -11,10 +11,10 @@ describe('modules/datasource/terraform-module/utils', () => {
         {
           'modules.v1': '/v1/modules/',
         },
-        'hashicorp/consul/aws'
+        'hashicorp/consul/aws',
       );
       expect(result).toBe(
-        'https://registry.example.com/v1/modules/hashicorp/consul/aws'
+        'https://registry.example.com/v1/modules/hashicorp/consul/aws',
       );
     });
 
@@ -25,10 +25,10 @@ describe('modules/datasource/terraform-module/utils', () => {
         {
           'providers.v1': '/v1/providers/',
         },
-        'hashicorp/azure'
+        'hashicorp/azure',
       );
       expect(result).toBe(
-        'https://registry.example.com/v1/providers/hashicorp/azure'
+        'https://registry.example.com/v1/providers/hashicorp/azure',
       );
     });
 
@@ -39,10 +39,10 @@ describe('modules/datasource/terraform-module/utils', () => {
         {
           'modules.v1': 'https://other.example.com/v1/modules/',
         },
-        'hashicorp/consul/aws'
+        'hashicorp/consul/aws',
       );
       expect(result).toBe(
-        'https://other.example.com/v1/modules/hashicorp/consul/aws'
+        'https://other.example.com/v1/modules/hashicorp/consul/aws',
       );
     });
 
@@ -53,10 +53,10 @@ describe('modules/datasource/terraform-module/utils', () => {
         {
           'providers.v1': 'https://other.example.com/providers',
         },
-        'hashicorp/azure'
+        'hashicorp/azure',
       );
       expect(result).toBe(
-        'https://other.example.com/providers/hashicorp/azure'
+        'https://other.example.com/providers/hashicorp/azure',
       );
     });
 
@@ -67,7 +67,7 @@ describe('modules/datasource/terraform-module/utils', () => {
         {
           'providers.v1': '',
         },
-        'hashicorp/azure'
+        'hashicorp/azure',
       );
       expect(result).toBe('https://registry.example.com/hashicorp/azure');
     });
@@ -77,7 +77,7 @@ describe('modules/datasource/terraform-module/utils', () => {
         defaultRegistryURL,
         'providers.v1',
         {},
-        'hashicorp/azure'
+        'hashicorp/azure',
       );
       expect(result).toBe('https://registry.example.com/hashicorp/azure');
     });
diff --git a/lib/modules/datasource/terraform-module/utils.ts b/lib/modules/datasource/terraform-module/utils.ts
index 02195cd8253c37b18ef9d84d5807ee3616112321..e3449faa23f039a11bdc6e2507568be54bffe418 100644
--- a/lib/modules/datasource/terraform-module/utils.ts
+++ b/lib/modules/datasource/terraform-module/utils.ts
@@ -8,7 +8,7 @@ export function createSDBackendURL(
   registryURL: string,
   sdType: ServiceDiscoveryEndpointType,
   sdResult: ServiceDiscoveryResult,
-  subPath: string
+  subPath: string,
 ): string {
   const sdEndpoint = sdResult[sdType] ?? '';
   const fullPath = joinUrlParts(sdEndpoint, subPath);
diff --git a/lib/modules/datasource/terraform-provider/index.spec.ts b/lib/modules/datasource/terraform-provider/index.spec.ts
index 531861d27dad692fe80d6d6d91725e5be35197ac..65f0f2c3803ae7a4f244ded93e32819cca918fea 100644
--- a/lib/modules/datasource/terraform-provider/index.spec.ts
+++ b/lib/modules/datasource/terraform-provider/index.spec.ts
@@ -6,11 +6,11 @@ import { TerraformProviderDatasource } from '.';
 const azurermData = Fixtures.get('azurerm-provider.json');
 const azurermVersionsData = Fixtures.get('azurerm-provider-versions.json');
 const hashicorpGoogleBetaReleases = Fixtures.get(
-  'releaseBackendIndexGoogleBeta.json'
+  'releaseBackendIndexGoogleBeta.json',
 );
 const serviceDiscoveryResult = Fixtures.get('service-discovery.json');
 const telmateProxmoxVersions = Fixtures.get(
-  'telmate-proxmox-versions-response.json'
+  'telmate-proxmox-versions-response.json',
 );
 
 const terraformProviderDatasource = new TerraformProviderDatasource();
@@ -34,7 +34,7 @@ describe('modules/datasource/terraform-provider/index', () => {
         await getPkgReleases({
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -53,7 +53,7 @@ describe('modules/datasource/terraform-provider/index', () => {
         await getPkgReleases({
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -72,7 +72,7 @@ describe('modules/datasource/terraform-provider/index', () => {
         await getPkgReleases({
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -116,7 +116,7 @@ describe('modules/datasource/terraform-provider/index', () => {
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
           registryUrls: ['https://registry.company.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -132,7 +132,7 @@ describe('modules/datasource/terraform-provider/index', () => {
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
           registryUrls: ['https://registry.company.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -148,7 +148,7 @@ describe('modules/datasource/terraform-provider/index', () => {
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
           registryUrls: ['https://registry.company.com'],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -247,7 +247,7 @@ describe('modules/datasource/terraform-provider/index', () => {
         await getPkgReleases({
           datasource: TerraformProviderDatasource.id,
           packageName: 'azurerm',
-        })
+        }),
       ).toBeNull();
     });
   });
@@ -264,7 +264,7 @@ describe('modules/datasource/terraform-provider/index', () => {
       const result = await terraformProviderDatasource.getBuilds(
         terraformProviderDatasource.defaultRegistryUrls[0],
         'hashicorp/azurerm',
-        '2.50.0'
+        '2.50.0',
       );
       expect(result).toBeNull();
     });
@@ -273,7 +273,7 @@ describe('modules/datasource/terraform-provider/index', () => {
       const result = await terraformProviderDatasource.getBuilds(
         terraformProviderDatasource.defaultRegistryUrls[1],
         'test/azurerm',
-        '2.50.0'
+        '2.50.0',
       );
       expect(result).toBeNull();
     });
@@ -288,7 +288,7 @@ describe('modules/datasource/terraform-provider/index', () => {
       const result = await terraformProviderDatasource.getBuilds(
         terraformProviderDatasource.defaultRegistryUrls[0],
         'Telmate/proxmox',
-        '2.8.0'
+        '2.8.0',
       );
       expect(result).toBeNull();
     });
@@ -331,7 +331,7 @@ describe('modules/datasource/terraform-provider/index', () => {
       const res = await terraformProviderDatasource.getBuilds(
         terraformProviderDatasource.defaultRegistryUrls[0],
         'Telmate/proxmox',
-        '2.6.1'
+        '2.6.1',
       );
       expect(res).toEqual([
         {
@@ -406,7 +406,7 @@ describe('modules/datasource/terraform-provider/index', () => {
       const res = await terraformProviderDatasource.getBuilds(
         terraformProviderDatasource.defaultRegistryUrls[0],
         'Telmate/proxmox',
-        '2.6.1'
+        '2.6.1',
       );
       expect(res).toBeNull();
     });
@@ -417,17 +417,17 @@ describe('modules/datasource/terraform-provider/index', () => {
       httpMock
         .scope(secondaryUrl)
         .get(
-          '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS'
+          '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS',
         )
         .reply(
           200,
           '500d4e787bf046bbe64c4853530aff3dfddee2fdbff0087d7b1e7a8c24388628 terraform-provider-azurerm_2.56.0_darwin_amd64.zip\n' +
             '766ff42596d643f9945b3aab2e83e306fe77c3020a5196366bbbb77eeea13b71 terraform-provider-azurerm_2.56.0_linux_amd64.zip\n' +
-            'fbdb892d9822ed0e4cb60f2fedbdbb556e4da0d88d3b942ae963ed6ff091e48f terraform-provider-azurerm_2.56.0_manifest.json'
+            'fbdb892d9822ed0e4cb60f2fedbdbb556e4da0d88d3b942ae963ed6ff091e48f terraform-provider-azurerm_2.56.0_manifest.json',
         );
 
       const res = await terraformProviderDatasource.getZipHashes(
-        'https://releases.hashicorp.com/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS'
+        'https://releases.hashicorp.com/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS',
       );
 
       expect(res).toMatchObject([
@@ -441,12 +441,12 @@ describe('modules/datasource/terraform-provider/index', () => {
       httpMock
         .scope(secondaryUrl)
         .get(
-          '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS'
+          '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS',
         )
         .reply(404);
 
       const res = await terraformProviderDatasource.getZipHashes(
-        'https://releases.hashicorp.com/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS'
+        'https://releases.hashicorp.com/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS',
       );
 
       expect(res).toBeUndefined();
diff --git a/lib/modules/datasource/terraform-provider/index.ts b/lib/modules/datasource/terraform-provider/index.ts
index 5e66240a50724129a66219a011cf790170261664..09cb34092dcb9177e8ac2102ae2047db42a78226 100644
--- a/lib/modules/datasource/terraform-provider/index.ts
+++ b/lib/modules/datasource/terraform-provider/index.ts
@@ -58,7 +58,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       return null;
     }
     logger.trace(
-      `terraform-provider.getDependencies() packageName: ${packageName}`
+      `terraform-provider.getDependencies() packageName: ${packageName}`,
     );
 
     if (registryUrl === this.defaultRegistryUrls[1]) {
@@ -68,21 +68,21 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       packageName,
     });
     const serviceDiscovery = await this.getTerraformServiceDiscoveryResult(
-      registryUrl
+      registryUrl,
     );
 
     if (registryUrl === this.defaultRegistryUrls[0]) {
       return await this.queryRegistryExtendedApi(
         serviceDiscovery,
         registryUrl,
-        repository
+        repository,
       );
     }
 
     return await this.queryRegistryVersions(
       serviceDiscovery,
       registryUrl,
-      repository
+      repository,
     );
   }
 
@@ -98,13 +98,13 @@ export class TerraformProviderDatasource extends TerraformDatasource {
   private async queryRegistryExtendedApi(
     serviceDiscovery: ServiceDiscoveryResult,
     registryUrl: string,
-    repository: string
+    repository: string,
   ): Promise<ReleaseResult> {
     const backendURL = createSDBackendURL(
       registryUrl,
       'providers.v1',
       serviceDiscovery,
-      repository
+      repository,
     );
     const res = (await this.http.getJson<TerraformProvider>(backendURL)).body;
     const dep: ReleaseResult = {
@@ -117,7 +117,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
     }
     // set published date for latest release
     const latestVersion = dep.releases.find(
-      (release) => res.version === release.version
+      (release) => res.version === release.version,
     );
     // istanbul ignore else
     if (latestVersion) {
@@ -134,13 +134,13 @@ export class TerraformProviderDatasource extends TerraformDatasource {
   private async queryRegistryVersions(
     serviceDiscovery: ServiceDiscoveryResult,
     registryUrl: string,
-    repository: string
+    repository: string,
   ): Promise<ReleaseResult> {
     const backendURL = createSDBackendURL(
       registryUrl,
       'providers.v1',
       serviceDiscovery,
-      `${repository}/versions`
+      `${repository}/versions`,
     );
     const res = (await this.http.getJson<TerraformProviderVersions>(backendURL))
       .body;
@@ -154,14 +154,14 @@ export class TerraformProviderDatasource extends TerraformDatasource {
 
   private async queryReleaseBackend(
     packageName: string,
-    registryURL: string
+    registryURL: string,
   ): Promise<ReleaseResult | null> {
     const hashicorpPackage = packageName.replace('hashicorp/', '');
     const backendLookUpName = `terraform-provider-${hashicorpPackage}`;
     const backendURL = joinUrlParts(
       registryURL,
       backendLookUpName,
-      `index.json`
+      `index.json`,
     );
     const res = (
       await this.http.getJson<TerraformProviderReleaseBackend>(backendURL)
@@ -173,7 +173,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       })),
       sourceUrl: joinUrlParts(
         'https://github.com/terraform-providers',
-        backendLookUpName
+        backendLookUpName,
       ),
     };
     return dep;
@@ -187,7 +187,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
   async getBuilds(
     registryURL: string,
     repository: string,
-    version: string
+    version: string,
   ): Promise<TerraformBuild[] | null> {
     if (registryURL === TerraformProviderDatasource.defaultRegistryUrls[1]) {
       // check if registryURL === secondary backend
@@ -203,7 +203,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       try {
         versionReleaseBackend = await this.getReleaseBackendIndex(
           backendLookUpName,
-          version
+          version,
         );
       } catch (err) {
         /* istanbul ignore next */
@@ -212,7 +212,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
         }
         logger.debug(
           { err, backendLookUpName, version },
-          `Failed to retrieve builds for ${backendLookUpName} ${version}`
+          `Failed to retrieve builds for ${backendLookUpName} ${version}`,
         );
         return null;
       }
@@ -221,7 +221,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
 
     // check public or private Terraform registry
     const serviceDiscovery = await this.getTerraformServiceDiscoveryResult(
-      registryURL
+      registryURL,
     );
     if (!serviceDiscovery) {
       logger.trace(`Failed to retrieve service discovery from ${registryURL}`);
@@ -231,11 +231,11 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       registryURL,
       'providers.v1',
       serviceDiscovery,
-      repository
+      repository,
     );
     const versionsResponse = (
       await this.http.getJson<TerraformRegistryVersions>(
-        `${backendURL}/versions`
+        `${backendURL}/versions`,
       )
     ).body;
     if (!versionsResponse.versions) {
@@ -243,11 +243,11 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       return null;
     }
     const builds = versionsResponse.versions.find(
-      (value) => value.version === version
+      (value) => value.version === version,
     );
     if (!builds) {
       logger.trace(
-        `No builds found for ${repository}:${version} on ${registryURL}`
+        `No builds found for ${repository}:${version} on ${registryURL}`,
       );
       return null;
     }
@@ -275,7 +275,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
           return null;
         }
       },
-      { concurrency: 4 }
+      { concurrency: 4 },
     );
 
     const filteredResult = result.filter(is.truthy);
@@ -298,7 +298,7 @@ export class TerraformProviderDatasource extends TerraformDatasource {
       }
       logger.debug(
         { err, zipHashUrl },
-        `Failed to retrieve zip hashes from ${zipHashUrl}`
+        `Failed to retrieve zip hashes from ${zipHashUrl}`,
       );
       return undefined;
     }
@@ -316,11 +316,11 @@ export class TerraformProviderDatasource extends TerraformDatasource {
   })
   async getReleaseBackendIndex(
     backendLookUpName: string,
-    version: string
+    version: string,
   ): Promise<VersionDetailResponse> {
     return (
       await this.http.getJson<VersionDetailResponse>(
-        `${TerraformProviderDatasource.defaultRegistryUrls[1]}/${backendLookUpName}/${version}/index.json`
+        `${TerraformProviderDatasource.defaultRegistryUrls[1]}/${backendLookUpName}/${version}/index.json`,
       )
     ).body;
   }
diff --git a/lib/modules/datasource/util.ts b/lib/modules/datasource/util.ts
index 10e68673e974e8fb17d9803913bd75e9a211d0ff..abd252282ab05c81850756307a9578a2b7fd888e 100644
--- a/lib/modules/datasource/util.ts
+++ b/lib/modules/datasource/util.ts
@@ -7,7 +7,7 @@ import { addSecretForSanitizing } from '../../util/sanitize';
 const JFROG_ARTIFACTORY_RES_HEADER = 'x-jfrog-version';
 
 export function isArtifactoryServer<T = unknown>(
-  res: HttpResponse<T> | undefined
+  res: HttpResponse<T> | undefined,
 ): boolean {
   return is.string(res?.headers[JFROG_ARTIFACTORY_RES_HEADER]);
 }
@@ -24,7 +24,7 @@ export async function getGoogleAuthToken(): Promise<string | null> {
       return Buffer.from(`oauth2accesstoken:${accessToken}`).toString('base64');
     } else {
       logger.warn(
-        'Could not retrieve access token using google-auth-library getAccessToken'
+        'Could not retrieve access token using google-auth-library getAccessToken',
       );
     }
   } catch (err) {
diff --git a/lib/modules/datasource/utils.spec.ts b/lib/modules/datasource/utils.spec.ts
index 395dbcc9120b5f6c280e5cfbdec5d489c9df90ea..0f60f3cd2d57f6c7734215ddbc918530a7f92c59 100644
--- a/lib/modules/datasource/utils.spec.ts
+++ b/lib/modules/datasource/utils.spec.ts
@@ -29,7 +29,7 @@ describe('modules/datasource/utils', () => {
     googleAuth.mockImplementationOnce(
       jest.fn().mockImplementationOnce(() => ({
         getAccessToken: jest.fn().mockResolvedValue('some-token'),
-      }))
+      })),
     );
 
     const res = await getGoogleAuthToken();
@@ -40,7 +40,7 @@ describe('modules/datasource/utils', () => {
     googleAuth.mockImplementationOnce(
       jest.fn().mockImplementationOnce(() => ({
         getAccessToken: jest.fn().mockReturnValue(''),
-      }))
+      })),
     );
 
     const res = await getGoogleAuthToken();
@@ -52,7 +52,7 @@ describe('modules/datasource/utils', () => {
     googleAuth.mockImplementationOnce(
       jest.fn().mockImplementationOnce(() => ({
         getAccessToken: jest.fn().mockRejectedValue(new Error(err)),
-      }))
+      })),
     );
 
     await expect(getGoogleAuthToken()).rejects.toThrow('some-error');
@@ -64,7 +64,7 @@ describe('modules/datasource/utils', () => {
         getAccessToken: jest.fn().mockRejectedValue({
           message: 'Could not load the default credentials',
         }),
-      }))
+      })),
     );
 
     const res = await getGoogleAuthToken();
diff --git a/lib/modules/manager/ansible-galaxy/collections-metadata.ts b/lib/modules/manager/ansible-galaxy/collections-metadata.ts
index 289726b8e4feeaffaf130b9774f8d0ef52ebfc9e..56466ebdbf55934875bd5e7e58ebd2f074f3214d 100644
--- a/lib/modules/manager/ansible-galaxy/collections-metadata.ts
+++ b/lib/modules/manager/ansible-galaxy/collections-metadata.ts
@@ -3,7 +3,7 @@ import type { PackageDependency } from '../types';
 import { dependencyRegex, galaxyRegEx } from './util';
 
 export function extractCollectionsMetaDataFile(
-  lines: string[]
+  lines: string[],
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
   // in a galaxy.yml the dependency map is inside a `dependencies:` block
diff --git a/lib/modules/manager/ansible-galaxy/collections.ts b/lib/modules/manager/ansible-galaxy/collections.ts
index 0846b3da532c1036eb180c908489efe55dff6e07..81d0de6b3ab990f8262348c9520de454c273e3f1 100644
--- a/lib/modules/manager/ansible-galaxy/collections.ts
+++ b/lib/modules/manager/ansible-galaxy/collections.ts
@@ -13,7 +13,7 @@ import {
 
 function interpretLine(
   lineMatch: RegExpMatchArray,
-  dependency: AnsibleGalaxyPackageDependency
+  dependency: AnsibleGalaxyPackageDependency,
 ): void {
   const localDependency = dependency;
   const key = lineMatch[2];
@@ -52,7 +52,7 @@ function interpretLine(
 
 function handleGitDep(
   dep: AnsibleGalaxyPackageDependency,
-  nameMatch: RegExpExecArray | null
+  nameMatch: RegExpExecArray | null,
 ): void {
   dep.datasource = GitTagsDatasource.id;
 
@@ -67,7 +67,7 @@ function handleGitDep(
     const source = nameMatch.groups.source;
     const massagedDepName = nameMatch.groups.depName.replace(
       regEx(/.git$/),
-      ''
+      '',
     );
     dep.depName = `${nameMatch.groups.hostname}/${massagedDepName}`;
     // remove leading `git+` from URLs like `git+https://...`
diff --git a/lib/modules/manager/ansible-galaxy/extract.spec.ts b/lib/modules/manager/ansible-galaxy/extract.spec.ts
index 13c6f630fb6a2ed776d8d18d479eac5366dc6374..86b0116235ff41407db4e220cf5df75fdf3930eb 100644
--- a/lib/modules/manager/ansible-galaxy/extract.spec.ts
+++ b/lib/modules/manager/ansible-galaxy/extract.spec.ts
@@ -49,7 +49,7 @@ describe('modules/manager/ansible-galaxy/extract', () => {
       expect(res?.deps[0].currentValue).toBe('2.7.5');
       expect(res?.deps[0].registryUrls).toBeUndefined();
       expect(res?.deps[0].packageName).toBe(
-        'git@github.com:ansible-collections/community.docker'
+        'git@github.com:ansible-collections/community.docker',
       );
     });
 
diff --git a/lib/modules/manager/ansible-galaxy/extract.ts b/lib/modules/manager/ansible-galaxy/extract.ts
index 70ef1dc02ab51a8aa62a3fa409650fe1da0982ca..a75ffb698f81b28fa37207673f54312053e6896a 100644
--- a/lib/modules/manager/ansible-galaxy/extract.ts
+++ b/lib/modules/manager/ansible-galaxy/extract.ts
@@ -24,7 +24,7 @@ export function getSliceEndNumber(
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   logger.trace(`ansible-galaxy.extractPackageFile(${packageFile})`);
   const galaxyFileNameRegEx = regEx(/galaxy\.ya?ml$/);
@@ -59,8 +59,8 @@ export function extractPackageFile(
           getSliceEndNumber(
             positions.collections,
             lines.length,
-            positions.roles
-          )
+            positions.roles,
+          ),
         );
         const collectionDeps = extractCollections(collectionLines);
         deps.push(...collectionDeps);
@@ -70,8 +70,8 @@ export function extractPackageFile(
           getSliceEndNumber(
             positions.roles,
             lines.length,
-            positions.collections
-          )
+            positions.collections,
+          ),
         );
         const roleDeps = extractRoles(roleLines);
         deps.push(...roleDeps);
diff --git a/lib/modules/manager/ansible-galaxy/roles.ts b/lib/modules/manager/ansible-galaxy/roles.ts
index 0b3cb655f9c92b6d04cbd16fa2ace0fa30bb0514..32eaa3b9b1bc8c444bee03a28b0d3b9bbe6621d7 100644
--- a/lib/modules/manager/ansible-galaxy/roles.ts
+++ b/lib/modules/manager/ansible-galaxy/roles.ts
@@ -13,7 +13,7 @@ import {
 function interpretLine(
   lineMatch: RegExpMatchArray,
   lineNumber: number,
-  dependency: AnsibleGalaxyPackageDependency
+  dependency: AnsibleGalaxyPackageDependency,
 ): AnsibleGalaxyPackageDependency | null {
   const localDependency = dependency;
   const key = lineMatch[2];
diff --git a/lib/modules/manager/ansible-galaxy/util.ts b/lib/modules/manager/ansible-galaxy/util.ts
index a112dc25ab8e1107400f92134c5d585b6c135e78..53595bf0585ea6096a9ce8b27b692d4de4e87398 100644
--- a/lib/modules/manager/ansible-galaxy/util.ts
+++ b/lib/modules/manager/ansible-galaxy/util.ts
@@ -5,8 +5,8 @@ export const blockLineRegEx = /^\s*((\w+):\s*(\S+))\s*$/;
 export const galaxyDepRegex = /[\w-]+\.[\w-]+/;
 export const dependencyRegex = /^dependencies:/;
 export const galaxyRegEx = regEx(
-  /^\s+(?<packageName>[\w.]+):\s*["']?(?<version>.+?)["']?\s*(\s#.*)?$/
+  /^\s+(?<packageName>[\w.]+):\s*["']?(?<version>.+?)["']?\s*(\s#.*)?$/,
 );
 export const nameMatchRegex = regEx(
-  /(?<source>((git\+)?(?:(git|ssh|https?):\/\/)?(.*@)?(?<hostname>[\w.-]+)(?:(:\d+)?\/|:))(?<depName>[\w./-]+)(?:\.git)?)(,(?<version>[\w.]*))?/
+  /(?<source>((git\+)?(?:(git|ssh|https?):\/\/)?(.*@)?(?<hostname>[\w.-]+)(?:(:\d+)?\/|:))(?<depName>[\w./-]+)(?:\.git)?)(,(?<version>[\w.]*))?/,
 );
diff --git a/lib/modules/manager/ansible/extract.spec.ts b/lib/modules/manager/ansible/extract.spec.ts
index 6730ca20f81234d4040d43f4dc260ea0c9840d3a..b807ce7eb5a2fe5d5b59cc75e333de5f24badce8 100644
--- a/lib/modules/manager/ansible/extract.spec.ts
+++ b/lib/modules/manager/ansible/extract.spec.ts
@@ -31,7 +31,7 @@ describe('modules/manager/ansible/extract', () => {
           registryAliases: {
             'quay.io': 'my-quay-mirror.registry.com',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -61,7 +61,7 @@ describe('modules/manager/ansible/extract', () => {
           registryAliases: {
             'index.docker.io': 'my-docker-mirror.registry.com',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -92,7 +92,7 @@ describe('modules/manager/ansible/extract', () => {
             'quay.io': 'my-quay-mirror.registry.com',
             'my-quay-mirror.registry.com': 'quay.io',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
diff --git a/lib/modules/manager/ansible/extract.ts b/lib/modules/manager/ansible/extract.ts
index 7a35a74cdb380cc4241086007548e11506c20e5b..847b1104d69b4bbd09fd7d9ed522ead3d2daa11e 100644
--- a/lib/modules/manager/ansible/extract.ts
+++ b/lib/modules/manager/ansible/extract.ts
@@ -11,7 +11,7 @@ import type {
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   logger.trace(`ansible.extractPackageFile(${packageFile})`);
   let deps: PackageDependency[] = [];
@@ -27,7 +27,7 @@ export function extractPackageFile(
           currentValue: dep.currentValue,
           currentDigest: dep.currentDigest,
         },
-        'Docker image inside ansible'
+        'Docker image inside ansible',
       );
       dep.versioning = dockerVersioning.id;
       deps.push(dep);
diff --git a/lib/modules/manager/argocd/extract.spec.ts b/lib/modules/manager/argocd/extract.spec.ts
index 7d6537b92d067ff0c15e9c7dbcb591d591dea0ab..0d4c70873a44af657c3aa3fc595da4c155becb8c 100644
--- a/lib/modules/manager/argocd/extract.spec.ts
+++ b/lib/modules/manager/argocd/extract.spec.ts
@@ -14,7 +14,7 @@ describe('modules/manager/argocd/extract', () => {
 
     it('returns null for invalid', () => {
       expect(
-        extractPackageFile(`${malformedApplication}\n123`, 'applications.yml')
+        extractPackageFile(`${malformedApplication}\n123`, 'applications.yml'),
       ).toBeNull();
     });
 
@@ -26,7 +26,7 @@ describe('modules/manager/argocd/extract', () => {
     it('return null if deps array would be empty', () => {
       const result = extractPackageFile(
         malformedApplication,
-        'applications.yml'
+        'applications.yml',
       );
       expect(result).toBeNull();
     });
@@ -42,7 +42,7 @@ spec:
     repoURL: https://prometheus-community.github.io/helm-charts
     targetRevision: 2.4.1
         `,
-        'applications.yml'
+        'applications.yml',
       );
       expect(result).toMatchObject({
         deps: [
@@ -69,7 +69,7 @@ spec:
     repoURL: https://prometheus-community.github.io/helm-charts
     targetRevision: 2.4.1
         `,
-        'applications.yml'
+        'applications.yml',
       );
       expect(result).toMatchObject({
         deps: [
@@ -172,7 +172,7 @@ spec:
     it('supports applicationsets', () => {
       const result = extractPackageFile(
         validApplicationSet,
-        'applicationsets.yml'
+        'applicationsets.yml',
       );
       expect(result).toEqual({
         deps: [
diff --git a/lib/modules/manager/argocd/extract.ts b/lib/modules/manager/argocd/extract.ts
index 4bdb704d78bbda101cd7cd3cc37c603e65d8ca32..249fb4a9a9e4f7641d8c41db67a0659c5abe759e 100644
--- a/lib/modules/manager/argocd/extract.ts
+++ b/lib/modules/manager/argocd/extract.ts
@@ -21,12 +21,12 @@ import { fileTestRegex } from './util';
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  _config?: ExtractConfig
+  _config?: ExtractConfig,
 ): PackageFileContent | null {
   // check for argo reference. API version for the kind attribute is used
   if (fileTestRegex.test(content) === false) {
     logger.debug(
-      `Skip file ${packageFile} as no argoproj.io apiVersion could be found in matched file`
+      `Skip file ${packageFile} as no argoproj.io apiVersion could be found in matched file`,
     );
     return null;
   }
@@ -86,7 +86,7 @@ function processSource(source: ApplicationSource): PackageDependency | null {
 }
 
 function processAppSpec(
-  definition: ApplicationDefinition
+  definition: ApplicationDefinition,
 ): PackageDependency[] {
   const spec: ApplicationSpec | null | undefined =
     definition.kind === 'Application'
diff --git a/lib/modules/manager/argocd/util.ts b/lib/modules/manager/argocd/util.ts
index 720db2bddfeea8488a4dd33eb2596756151da99b..453577cbbdee690f7cddcf1ca19c29c2421107dd 100644
--- a/lib/modules/manager/argocd/util.ts
+++ b/lib/modules/manager/argocd/util.ts
@@ -1,7 +1,7 @@
 import { regEx } from '../../../util/regex';
 
 export const keyValueExtractionRegex = regEx(
-  /^\s*(?<key>[^\s]+):\s+"?(?<value>[^"\s]+)"?\s*$/
+  /^\s*(?<key>[^\s]+):\s+"?(?<value>[^"\s]+)"?\s*$/,
 );
 // looks for `apiVersion: argoproj.io/` with optional quoting of the value
 export const fileTestRegex = regEx(/\s*apiVersion:\s*'?"?argoproj.io\//);
diff --git a/lib/modules/manager/asdf/extract.spec.ts b/lib/modules/manager/asdf/extract.spec.ts
index 18af44f9533ed6e2587cc2a64976436da449a908..a844ba386716a0e71d606793d3dcc846c2a43126 100644
--- a/lib/modules/manager/asdf/extract.spec.ts
+++ b/lib/modules/manager/asdf/extract.spec.ts
@@ -112,7 +112,7 @@ yamlfmt 0.9.0
 typos 1.16.1
 steampipe 0.20.10
 dummy 1.2.3
-`
+`,
       );
       expect(res).toEqual({
         deps: [
@@ -578,7 +578,7 @@ dummy 1.2.3
 adr-tools 3.0.0
 argocd    2.5.4
 awscli    2.8.6
-`
+`,
       );
       expect(res).toEqual({
         deps: [
@@ -641,7 +641,7 @@ awscli    2.8.6
         ],
       });
       const adoptOpenJreRes = extractPackageFile(
-        'java adoptopenjdk-jre-16.0.0+36'
+        'java adoptopenjdk-jre-16.0.0+36',
       );
       expect(adoptOpenJreRes).toEqual({
         deps: [
@@ -654,7 +654,7 @@ awscli    2.8.6
         ],
       });
       const semeruJdkRes = extractPackageFile(
-        'java semeru-openj9-17.0.8.1+1_openj9-0.40.0'
+        'java semeru-openj9-17.0.8.1+1_openj9-0.40.0',
       );
       expect(semeruJdkRes).toEqual({
         deps: [
@@ -667,7 +667,7 @@ awscli    2.8.6
         ],
       });
       const semeruJreRes = extractPackageFile(
-        'java semeru-jre-openj9-17.0.8.1+1_openj9-0.40.0'
+        'java semeru-jre-openj9-17.0.8.1+1_openj9-0.40.0',
       );
       expect(semeruJreRes).toEqual({
         deps: [
@@ -774,12 +774,12 @@ awscli    2.8.6
               ],
             });
           });
-        }
+        },
       );
 
       it('invalid comment placements fail to parse', () => {
         const res = extractPackageFile(
-          'nodejs 16.16.0# invalid comment spacing'
+          'nodejs 16.16.0# invalid comment spacing',
         );
         expect(res).toBeNull();
       });
@@ -791,7 +791,7 @@ awscli    2.8.6
 
       it('ignores comments across multiple lines', () => {
         const res = extractPackageFile(
-          '# this is a full line comment\nnodejs 16.16.0 # this is a comment\n'
+          '# this is a full line comment\nnodejs 16.16.0 # this is a comment\n',
         );
         expect(res).toEqual({
           deps: [
diff --git a/lib/modules/manager/asdf/extract.ts b/lib/modules/manager/asdf/extract.ts
index c9ef4a796890a488b02ce94273db47da1f9eeb40..e56693ff8ddd4f62fa9fa9594c278f2babb36914 100644
--- a/lib/modules/manager/asdf/extract.ts
+++ b/lib/modules/manager/asdf/extract.ts
@@ -9,7 +9,7 @@ export function extractPackageFile(content: string): PackageFileContent | null {
   logger.trace(`asdf.extractPackageFile()`);
 
   const regex = regEx(
-    /^(?<toolName>([\w_-]+)) +(?<version>[^\s#]+)(?: +[^\s#]+)* *(?: #(?<comment>.*))?$/gm
+    /^(?<toolName>([\w_-]+)) +(?<version>[^\s#]+)(?: +[^\s#]+)* *(?: #(?<comment>.*))?$/gm,
   );
 
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/asdf/index.spec.ts b/lib/modules/manager/asdf/index.spec.ts
index 6e6f8dc60ff237e66364ba9601bf54c82b45c609..45b06fe83d2c79eda76d2a40b5a2da01f493291b 100644
--- a/lib/modules/manager/asdf/index.spec.ts
+++ b/lib/modules/manager/asdf/index.spec.ts
@@ -14,7 +14,7 @@ scala 3.0.0`)!.deps,
     ];
 
     const usedDatasources = new Set(
-      toolConfigs.map((config) => config.datasource!)
+      toolConfigs.map((config) => config.datasource!),
     );
 
     for (const datasource of usedDatasources) {
diff --git a/lib/modules/manager/asdf/upgradeable-tooling.ts b/lib/modules/manager/asdf/upgradeable-tooling.ts
index 0807836353e1e1c8ab65f12dad342e49ee720218..983703b9927c798090529068986b9fc0d642f279 100644
--- a/lib/modules/manager/asdf/upgradeable-tooling.ts
+++ b/lib/modules/manager/asdf/upgradeable-tooling.ts
@@ -258,7 +258,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
     asdfPluginUrl: 'https://github.com/halcyon/asdf-java',
     config: (version) => {
       const adoptOpenJdkMatches = version.match(
-        /^adoptopenjdk-(?<version>\d\S+)/
+        /^adoptopenjdk-(?<version>\d\S+)/,
       )?.groups;
       if (adoptOpenJdkMatches) {
         return {
@@ -268,7 +268,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
         };
       }
       const adoptOpenJreMatches = version.match(
-        /^adoptopenjdk-jre-(?<version>\d\S+)/
+        /^adoptopenjdk-jre-(?<version>\d\S+)/,
       )?.groups;
       if (adoptOpenJreMatches) {
         return {
@@ -278,7 +278,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
         };
       }
       const semeruJdkMatches = version.match(
-        /^semeru-openj9-(?<version>\d\S+)_openj9-(?<openj9>\d\S+)/
+        /^semeru-openj9-(?<version>\d\S+)_openj9-(?<openj9>\d\S+)/,
       )?.groups;
       if (semeruJdkMatches) {
         return {
@@ -288,7 +288,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
         };
       }
       const semeruJreMatches = version.match(
-        /^semeru-jre-openj9-(?<version>\d\S+)_openj9-\d\S+/
+        /^semeru-jre-openj9-(?<version>\d\S+)_openj9-\d\S+/,
       )?.groups;
       if (semeruJreMatches) {
         return {
@@ -298,7 +298,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
         };
       }
       const temurinJdkMatches = version.match(
-        /^temurin-(?<version>\d\S+)/
+        /^temurin-(?<version>\d\S+)/,
       )?.groups;
       if (temurinJdkMatches) {
         return {
@@ -308,7 +308,7 @@ export const upgradeableTooling: Record<string, ToolingDefinition> = {
         };
       }
       const temurinJreMatches = version.match(
-        /^temurin-jre-(?<version>\d\S+)/
+        /^temurin-jre-(?<version>\d\S+)/,
       )?.groups;
       if (temurinJreMatches) {
         return {
diff --git a/lib/modules/manager/azure-pipelines/extract.spec.ts b/lib/modules/manager/azure-pipelines/extract.spec.ts
index 84b198a997450f03f3abd9b2ce43dc53ee3f3363..7e4f6e40423e03555620d964c2df06e96e8f0f9f 100644
--- a/lib/modules/manager/azure-pipelines/extract.spec.ts
+++ b/lib/modules/manager/azure-pipelines/extract.spec.ts
@@ -13,7 +13,7 @@ const azurePipelinesFilename = 'azure-pipelines.yaml';
 
 const azurePipelines = Fixtures.get('azure-pipelines.yaml');
 const azurePipelinesNoDependency = Fixtures.get(
-  'azure-pipelines-no-dependency.yaml'
+  'azure-pipelines-no-dependency.yaml',
 );
 const azurePipelinesStages = Fixtures.get('azure-pipelines-stages.yaml');
 const azurePipelinesJobs = Fixtures.get('azure-pipelines-jobs.yaml');
@@ -41,7 +41,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'github',
           name: 'user/repo',
           ref: 'refs/tags/v1.0.0',
-        })
+        }),
       ).toMatchObject({
         depName: 'user/repo',
         packageName: 'https://github.com/user/repo.git',
@@ -54,7 +54,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'bitbucket',
           name: 'user/repo',
           ref: 'refs/tags/v1.0.0',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -64,7 +64,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'github',
           name: 'user/repo',
           ref: null,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -74,7 +74,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'github',
           name: 'user/repo',
           ref: 'refs/head/master',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -89,7 +89,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'git',
           name: 'project/repo',
           ref: 'refs/tags/v1.0.0',
-        })
+        }),
       ).toMatchObject({
         depName: 'project/repo',
         packageName: 'https://dev.azure.com/renovate-org/project/_git/repo',
@@ -107,7 +107,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'git',
           name: 'repo',
           ref: 'refs/tags/v1.0.0',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -121,7 +121,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
           type: 'git',
           name: 'project/repo',
           ref: 'refs/tags/v1.0.0',
-        })
+        }),
       ).toBeNull();
     });
   });
@@ -131,7 +131,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
       expect(
         extractContainer({
           image: 'ubuntu:16.04',
-        })
+        }),
       ).toMatchObject({
         depName: 'ubuntu',
         currentValue: '16.04',
@@ -187,14 +187,14 @@ describe('modules/manager/azure-pipelines/extract', () => {
 
     it('should return null when there is no dependency found', () => {
       expect(
-        extractPackageFile(azurePipelinesNoDependency, azurePipelinesFilename)
+        extractPackageFile(azurePipelinesNoDependency, azurePipelinesFilename),
       ).toBeNull();
     });
 
     it('should extract stages', () => {
       const res = extractPackageFile(
         azurePipelinesStages,
-        azurePipelinesFilename
+        azurePipelinesFilename,
       );
       expect(res?.deps).toEqual([
         {
@@ -208,7 +208,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
     it('should extract jobs', () => {
       const res = extractPackageFile(
         azurePipelinesJobs,
-        azurePipelinesFilename
+        azurePipelinesFilename,
       );
       expect(res?.deps).toEqual([
         {
@@ -222,7 +222,7 @@ describe('modules/manager/azure-pipelines/extract', () => {
     it('should extract steps', () => {
       const res = extractPackageFile(
         azurePipelinesSteps,
-        azurePipelinesFilename
+        azurePipelinesFilename,
       );
       expect(res?.deps).toEqual([
         {
diff --git a/lib/modules/manager/azure-pipelines/extract.ts b/lib/modules/manager/azure-pipelines/extract.ts
index 4125141ee879ac1062543c3a8f02686df5182220..33f770c2f9c5e364203d1b3c1545d474bcb55a38 100644
--- a/lib/modules/manager/azure-pipelines/extract.ts
+++ b/lib/modules/manager/azure-pipelines/extract.ts
@@ -13,7 +13,7 @@ import type { AzurePipelines, Container, Repository } from './types';
 const AzurePipelinesTaskRegex = regEx(/^(?<name>[^@]+)@(?<version>.*)$/);
 
 export function extractRepository(
-  repository: Repository
+  repository: Repository,
 ): PackageDependency | null {
   let repositoryUrl = null;
 
@@ -36,11 +36,11 @@ export function extractRepository(
           endpoint,
           encodeURIComponent(projectName),
           '_git',
-          encodeURIComponent(repoName)
+          encodeURIComponent(repoName),
         );
       } else {
         logger.debug(
-          'Renovate cannot update repositories that do not include the project name'
+          'Renovate cannot update repositories that do not include the project name',
         );
       }
     }
@@ -66,7 +66,7 @@ export function extractRepository(
 }
 
 export function extractContainer(
-  container: Container
+  container: Container,
 ): PackageDependency | null {
   if (!container.image) {
     return null;
@@ -79,7 +79,7 @@ export function extractContainer(
       currentValue: dep.currentValue,
       currentDigest: dep.currentDigest,
     },
-    'Azure pipelines docker image'
+    'Azure pipelines docker image',
   );
   dep.depType = 'docker';
 
@@ -87,7 +87,7 @@ export function extractContainer(
 }
 
 export function extractAzurePipelinesTasks(
-  task: string
+  task: string,
 ): PackageDependency | null {
   const match = AzurePipelinesTaskRegex.exec(task);
   if (match?.groups) {
@@ -102,7 +102,7 @@ export function extractAzurePipelinesTasks(
 
 export function parseAzurePipelines(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): AzurePipelines | null {
   let pkg: AzurePipelines | null = null;
   try {
@@ -117,7 +117,7 @@ export function parseAzurePipelines(
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   logger.trace(`azurePipelines.extractPackageFile(${packageFile})`);
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/batect-wrapper/artifacts.spec.ts b/lib/modules/manager/batect-wrapper/artifacts.spec.ts
index 6b501fd432a981e2be3b76535d96bb825f1c45c4..4885a64b3878a93e9659112ab70bdd10f994d250 100644
--- a/lib/modules/manager/batect-wrapper/artifacts.spec.ts
+++ b/lib/modules/manager/batect-wrapper/artifacts.spec.ts
@@ -8,7 +8,7 @@ const newWindowsWrapperContent = `Windows wrapper script for ${defaultTo}`;
 
 function artifactForPath(
   path: string,
-  newVersion: string = defaultTo
+  newVersion: string = defaultTo,
 ): UpdateArtifact {
   return {
     packageFileName: path,
diff --git a/lib/modules/manager/batect-wrapper/artifacts.ts b/lib/modules/manager/batect-wrapper/artifacts.ts
index 06a4ac25632957942244fd9b6968aa64fe8ed5d8..344782786f8fb04117fe2f56a9a921d2f1010e78 100644
--- a/lib/modules/manager/batect-wrapper/artifacts.ts
+++ b/lib/modules/manager/batect-wrapper/artifacts.ts
@@ -7,7 +7,7 @@ const http = new Http('batect-wrapper');
 async function updateArtifact(
   path: string,
   fileName: string,
-  version: string
+  version: string,
 ): Promise<UpdateArtifactsResult> {
   const url = `https://github.com/batect/batect/releases/download/${version}/${fileName}`;
 
@@ -37,7 +37,7 @@ export async function updateArtifacts({
   const version = config.newVersion!;
 
   logger.debug(
-    `Updating Batect wrapper scripts for ${packageFileName} to ${version}`
+    `Updating Batect wrapper scripts for ${packageFileName} to ${version}`,
   );
 
   return [
diff --git a/lib/modules/manager/batect-wrapper/extract.ts b/lib/modules/manager/batect-wrapper/extract.ts
index 6dac56cdad473bdfe3f7ff1934afe09360b9e87b..0774c1e627993c5be0054918f6f011139539b265 100644
--- a/lib/modules/manager/batect-wrapper/extract.ts
+++ b/lib/modules/manager/batect-wrapper/extract.ts
@@ -7,7 +7,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 const VERSION_REGEX = regEx(/^\s+VERSION="(.*)"$/m);
 
 export function extractPackageFile(
-  fileContent: string
+  fileContent: string,
 ): PackageFileContent | null {
   logger.trace('batect.extractPackageFile()');
   const match = VERSION_REGEX.exec(fileContent);
diff --git a/lib/modules/manager/batect/extract.spec.ts b/lib/modules/manager/batect/extract.spec.ts
index 09f34966cd4d60ecede1b4cf11d704c65d2106e8..eaf2075522e355077f12199c550ee1ef240e0c93 100644
--- a/lib/modules/manager/batect/extract.spec.ts
+++ b/lib/modules/manager/batect/extract.spec.ts
@@ -46,7 +46,7 @@ describe('modules/manager/batect/extract', () => {
       expect(
         await extractAllPackageFiles(config, [
           `${fixturesDir}/empty/batect.yml`,
-        ])
+        ]),
       ).toEqual([]);
     });
 
@@ -54,7 +54,7 @@ describe('modules/manager/batect/extract', () => {
       expect(
         await extractAllPackageFiles(config, [
           `${fixturesDir}/invalid/batect.yml`,
-        ])
+        ]),
       ).toEqual([]);
     });
 
@@ -78,7 +78,7 @@ describe('modules/manager/batect/extract', () => {
 
       // TODO: #22198
       expect(
-        result?.sort((a, b) => a.packageFile.localeCompare(b.packageFile))
+        result?.sort((a, b) => a.packageFile.localeCompare(b.packageFile)),
       ).toEqual([
         {
           packageFile: `${fixturesDir}/valid/another-include.yml`,
@@ -86,7 +86,7 @@ describe('modules/manager/batect/extract', () => {
             createDockerDependency('ubuntu:19.10'),
             createGitDependency(
               'https://another-include.com/my-repo.git',
-              '4.5.6'
+              '4.5.6',
             ),
           ],
         },
@@ -97,12 +97,12 @@ describe('modules/manager/batect/extract', () => {
             createDockerDependency('alpine:1.2.3'),
             createDockerDependency('ubuntu:20.04'),
             createDockerDependency(
-              'postgres:9.6.20@sha256:166179811e4c75f8a092367afed6091208c8ecf60b111c7e49f29af45ca05e08'
+              'postgres:9.6.20@sha256:166179811e4c75f8a092367afed6091208c8ecf60b111c7e49f29af45ca05e08',
             ),
             createGitDependency('https://includes.com/my-repo.git', '1.2.3'),
             createGitDependency(
               'https://includes.com/my-other-repo.git',
-              '4.5.6'
+              '4.5.6',
             ),
           ],
         },
diff --git a/lib/modules/manager/batect/extract.ts b/lib/modules/manager/batect/extract.ts
index f41ad3c1d9df5bf3b3934689bb6bb25ec28bd2bd..b2c8105dc29a1cf3d454965a904a99eeb7d12a39 100644
--- a/lib/modules/manager/batect/extract.ts
+++ b/lib/modules/manager/batect/extract.ts
@@ -21,7 +21,7 @@ function loadConfig(content: string): BatectConfig {
 
   if (typeof config !== 'object') {
     throw new Error(
-      `Configuration file does not contain a YAML object (it is ${typeof config}).`
+      `Configuration file does not contain a YAML object (it is ${typeof config}).`,
     );
   }
 
@@ -55,7 +55,7 @@ function extractImageDependencies(config: BatectConfig): PackageDependency[] {
 }
 
 function includeIsGitInclude(
-  include: BatectInclude
+  include: BatectInclude,
 ): include is BatectGitInclude {
   return typeof include === 'object' && include.type === 'git';
 }
@@ -92,14 +92,14 @@ function includeIsStringFileInclude(include: BatectInclude): include is string {
 }
 
 function includeIsObjectFileInclude(
-  include: BatectInclude
+  include: BatectInclude,
 ): include is BatectFileInclude {
   return typeof include === 'object' && include.type === 'file';
 }
 
 function extractReferencedConfigFiles(
   config: BatectConfig,
-  fileName: string
+  fileName: string,
 ): string[] {
   if (config.include === undefined) {
     return [];
@@ -119,7 +119,7 @@ function extractReferencedConfigFiles(
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): ExtractionResult | null {
   logger.trace(`batect.extractPackageFile(${packageFile})`);
 
@@ -132,14 +132,14 @@ export function extractPackageFile(
 
     const referencedConfigFiles = extractReferencedConfigFiles(
       config,
-      packageFile
+      packageFile,
     );
 
     return { deps, referencedConfigFiles };
   } catch (err) {
     logger.debug(
       { err, packageFile },
-      'Extracting dependencies from Batect configuration file failed'
+      'Extracting dependencies from Batect configuration file failed',
     );
 
     return null;
@@ -148,7 +148,7 @@ export function extractPackageFile(
 
 export async function extractAllPackageFiles(
   config: ExtractConfig,
-  packageFiles: string[]
+  packageFiles: string[],
 ): Promise<PackageFile[] | null> {
   const filesToExamine = new Set<string>(packageFiles);
   const filesAlreadyExamined = new Set<string>();
diff --git a/lib/modules/manager/bazel-module/bazelrc.spec.ts b/lib/modules/manager/bazel-module/bazelrc.spec.ts
index 3f35581999709233fbb398493e5031cc970ae64d..c928b722ee153f02fdc589dd8ab0dfdbe694f2fd 100644
--- a/lib/modules/manager/bazel-module/bazelrc.spec.ts
+++ b/lib/modules/manager/bazel-module/bazelrc.spec.ts
@@ -32,7 +32,7 @@ describe('modules/manager/bazel-module/bazelrc', () => {
       ${'--jobs 600'}                         | ${[['jobs', '600']]}
     `('parse($a)', ({ a, expArgs }) => {
       const exp = (expArgs as [string, string | undefined][]).map(
-        (args) => new BazelOption(...args)
+        (args) => new BazelOption(...args),
       );
       const result = BazelOption.parse(a);
       expect(result).toEqual(exp);
@@ -85,7 +85,7 @@ describe('modules/manager/bazel-module/bazelrc', () => {
           new BazelOption('strip', 'never'),
           new BazelOption('test_timeout', '3600'),
         ],
-        'memcheck'
+        'memcheck',
       ),
       new ImportEntry('%workspace%/local.bazelrc', true),
     ]);
@@ -223,8 +223,8 @@ describe('modules/manager/bazel-module/bazelrc', () => {
       expect.assertions(1);
       await expect(read('.')).rejects.toEqual(
         new Error(
-          'Attempted to read a bazelrc multiple times. file: shared.bazelrc'
-        )
+          'Attempted to read a bazelrc multiple times. file: shared.bazelrc',
+        ),
       );
     });
 
diff --git a/lib/modules/manager/bazel-module/bazelrc.ts b/lib/modules/manager/bazel-module/bazelrc.ts
index 52eccba42d29aeb7cc6561cf0a506d430cb4a976..4ea652988d464cd486410c4e3c8c2c45e953cd45 100644
--- a/lib/modules/manager/bazel-module/bazelrc.ts
+++ b/lib/modules/manager/bazel-module/bazelrc.ts
@@ -6,7 +6,7 @@ import { regEx } from '../../../util/regex';
 
 const importRegex = regEx(`^(?<type>(?:try-)?import)\\s+(?<path>\\S+)$`);
 const optionRegex = regEx(
-  `^(?<command>\\w+)(:(?<config>\\S+))?\\s+(?<options>.*)$`
+  `^(?<command>\\w+)(:(?<config>\\S+))?\\s+(?<options>.*)$`,
 );
 const spaceRegex = regEx(`\\s+`);
 
@@ -55,7 +55,7 @@ export class CommandEntry {
   constructor(
     readonly command: string,
     readonly options: BazelOption[],
-    readonly config?: string
+    readonly config?: string,
   ) {}
 
   getOption(name: string): BazelOption | undefined {
@@ -84,7 +84,7 @@ function createEntry(line: string): BazelrcEntries | undefined {
     return new CommandEntry(
       orGroups.command,
       BazelOption.parse(orGroups.options),
-      orGroups.config
+      orGroups.config,
     );
   }
   return undefined;
@@ -102,11 +102,11 @@ export function parse(contents: string): BazelrcEntries[] {
 async function readFile(
   file: string,
   workspaceDir: string,
-  readFiles: Set<string>
+  readFiles: Set<string>,
 ): Promise<CommandEntry[]> {
   if (readFiles.has(file)) {
     throw new Error(
-      `Attempted to read a bazelrc multiple times. file: ${file}`
+      `Attempted to read a bazelrc multiple times. file: ${file}`,
     );
   }
   readFiles.add(file);
@@ -123,7 +123,7 @@ async function readFile(
     }
 
     const importFile = upath.normalize(
-      entry.path.replace('%workspace%', workspaceDir)
+      entry.path.replace('%workspace%', workspaceDir),
     );
     if (fs.isValidLocalPath(importFile)) {
       const importEntries = await readFile(importFile, workspaceDir, readFiles);
diff --git a/lib/modules/manager/bazel-module/context.spec.ts b/lib/modules/manager/bazel-module/context.spec.ts
index 06d22d0b35884e6b03f34115830f07680fcf7202..f2dc8d7b993073aa1ec92761ed6a02ea5dcd71c3 100644
--- a/lib/modules/manager/bazel-module/context.spec.ts
+++ b/lib/modules/manager/bazel-module/context.spec.ts
@@ -19,7 +19,7 @@ describe('modules/manager/bazel-module/context', () => {
             name: fragments.string('rules_foo'),
             version: fragments.string('1.2.3'),
           },
-          true
+          true,
         ),
       ]);
     });
@@ -43,10 +43,10 @@ describe('modules/manager/bazel-module/context', () => {
             name: fragments.string('my_library'),
             srcs: fragments.array(
               [fragments.string('first'), fragments.string('second')],
-              true
+              true,
             ),
           },
-          true
+          true,
         ),
       ]);
     });
@@ -60,14 +60,14 @@ describe('modules/manager/bazel-module/context', () => {
       it('throws if there is no current', () => {
         const ctx = new Ctx();
         expect(() => ctx.currentRecord).toThrow(
-          new Error('Requested current, but no value.')
+          new Error('Requested current, but no value.'),
         );
       });
 
       it('throws if the current is not a record fragment', () => {
         const ctx = new Ctx().startArray();
         expect(() => ctx.currentRecord).toThrow(
-          new Error('Requested current record, but does not exist.')
+          new Error('Requested current record, but does not exist.'),
         );
       });
     });
@@ -81,7 +81,7 @@ describe('modules/manager/bazel-module/context', () => {
       it('throws if the current is not a record fragment', () => {
         const ctx = new Ctx().startRecord();
         expect(() => ctx.currentArray).toThrow(
-          new Error('Requested current array, but does not exist.')
+          new Error('Requested current array, but does not exist.'),
         );
       });
     });
@@ -90,8 +90,8 @@ describe('modules/manager/bazel-module/context', () => {
       const ctx = new Ctx().startAttribute('name');
       expect(() => ctx.addString('chicken')).toThrow(
         new CtxProcessingError(
-          fragments.attribute('name', fragments.string('chicken'))
-        )
+          fragments.attribute('name', fragments.string('chicken')),
+        ),
       );
     });
   });
diff --git a/lib/modules/manager/bazel-module/extract.spec.ts b/lib/modules/manager/bazel-module/extract.spec.ts
index 96197b0f4be9e57a66bc787f2df62d191ce90e87..15776b4035680670d6ff21be95bfc7d9a4c653b0 100644
--- a/lib/modules/manager/bazel-module/extract.spec.ts
+++ b/lib/modules/manager/bazel-module/extract.spec.ts
@@ -22,7 +22,7 @@ describe('modules/manager/bazel-module/extract', () => {
     it('returns null if fails to parse', async () => {
       const result = await extractPackageFile(
         'blahhhhh:foo:@what\n',
-        'MODULE.bazel'
+        'MODULE.bazel',
       );
       expect(result).toBeNull();
     });
@@ -87,7 +87,7 @@ describe('modules/manager/bazel-module/extract', () => {
             currentDigest: '850cb49c8649e463b80ef7984e7c744279746170',
             packageName: 'example/rules_foo',
           },
-        ])
+        ]),
       );
     });
 
@@ -143,7 +143,7 @@ describe('modules/manager/bazel-module/extract', () => {
             depName: 'rules_foo',
             skipReason: 'unsupported-datasource',
           },
-        ])
+        ]),
       );
     });
 
@@ -174,7 +174,7 @@ describe('modules/manager/bazel-module/extract', () => {
             depName: 'rules_foo',
             skipReason: 'unsupported-datasource',
           },
-        ])
+        ]),
       );
     });
 
@@ -209,7 +209,7 @@ describe('modules/manager/bazel-module/extract', () => {
             skipReason: 'ignored',
             registryUrls: ['https://example.com/custom_registry'],
           },
-        ])
+        ]),
       );
     });
 
diff --git a/lib/modules/manager/bazel-module/extract.ts b/lib/modules/manager/bazel-module/extract.ts
index 93a6d0c9b5ea866f543bd3fee07072db9b1e646c..a6b298eb26d4d26e91cd6cfdaaeb1396e4f65eed 100644
--- a/lib/modules/manager/bazel-module/extract.ts
+++ b/lib/modules/manager/bazel-module/extract.ts
@@ -10,12 +10,12 @@ import * as rules from './rules';
 
 export async function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   try {
     const records = parse(content);
     const pfc: PackageFileContent | null = LooseArray(
-      RuleToBazelModulePackageDep
+      RuleToBazelModulePackageDep,
     )
       .transform(rules.toPackageDependencies)
       .transform((deps) => (deps.length ? { deps } : null))
diff --git a/lib/modules/manager/bazel-module/fragments.ts b/lib/modules/manager/bazel-module/fragments.ts
index 0a873a4b2eed4ac998d92bf192404202bd7a20fc..c77a056e3c11aca894d70e75c5e254c43121bb4c 100644
--- a/lib/modules/manager/bazel-module/fragments.ts
+++ b/lib/modules/manager/bazel-module/fragments.ts
@@ -73,7 +73,7 @@ export function boolean(value: string | boolean): BooleanFragment {
 
 export function record(
   children: ChildFragments = {},
-  isComplete = false
+  isComplete = false,
 ): RecordFragment {
   return {
     type: 'record',
@@ -85,7 +85,7 @@ export function record(
 export function attribute(
   name: string,
   value?: ValueFragments,
-  isComplete = false
+  isComplete = false,
 ): AttributeFragment {
   return {
     type: 'attribute',
@@ -97,7 +97,7 @@ export function attribute(
 
 export function array(
   items: PrimitiveFragments[] = [],
-  isComplete = false
+  isComplete = false,
 ): ArrayFragment {
   return {
     type: 'array',
diff --git a/lib/modules/manager/bazel-module/parser.spec.ts b/lib/modules/manager/bazel-module/parser.spec.ts
index 062d199dd9f89b4d3e34bc0eaf01bd9b9985f8b6..4f6faef34e3a636c4a8a1f55884e6b6fdc7cb608 100644
--- a/lib/modules/manager/bazel-module/parser.spec.ts
+++ b/lib/modules/manager/bazel-module/parser.spec.ts
@@ -27,7 +27,7 @@ describe('modules/manager/bazel-module/parser', () => {
             name: fragments.string('rules_foo'),
             version: fragments.string('1.2.3'),
           },
-          true
+          true,
         ),
         fragments.record(
           {
@@ -36,7 +36,7 @@ describe('modules/manager/bazel-module/parser', () => {
             version: fragments.string('1.0.0'),
             dev_dependency: fragments.boolean(true),
           },
-          true
+          true,
         ),
       ]);
     });
@@ -60,20 +60,20 @@ describe('modules/manager/bazel-module/parser', () => {
             name: fragments.string('rules_foo'),
             version: fragments.string('1.2.3'),
           },
-          true
+          true,
         ),
         fragments.record(
           {
             rule: fragments.string('git_override'),
             module_name: fragments.string('rules_foo'),
             remote: fragments.string(
-              'https://github.com/example/rules_foo.git'
+              'https://github.com/example/rules_foo.git',
             ),
             commit: fragments.string(
-              '6a2c2e22849b3e6b33d5ea9aa72222d4803a986a'
+              '6a2c2e22849b3e6b33d5ea9aa72222d4803a986a',
             ),
           },
-          true
+          true,
         ),
       ]);
     });
@@ -96,14 +96,14 @@ describe('modules/manager/bazel-module/parser', () => {
             name: fragments.string('rules_foo'),
             version: fragments.string('1.2.3'),
           },
-          true
+          true,
         ),
         fragments.record(
           {
             rule: fragments.string('archive_override'),
             module_name: fragments.string('rules_foo'),
           },
-          true
+          true,
         ),
       ]);
     });
@@ -124,7 +124,7 @@ describe('modules/manager/bazel-module/parser', () => {
             name: fragments.string('rules_foo'),
             version: fragments.string('1.2.3'),
           },
-          true
+          true,
         ),
         fragments.record(
           {
@@ -132,7 +132,7 @@ describe('modules/manager/bazel-module/parser', () => {
             module_name: fragments.string('rules_foo'),
             urls: fragments.string('/path/to/repo'),
           },
-          true
+          true,
         ),
       ]);
     });
@@ -154,7 +154,7 @@ describe('modules/manager/bazel-module/parser', () => {
             name: fragments.string('rules_foo'),
             version: fragments.string('1.2.3'),
           },
-          true
+          true,
         ),
         fragments.record(
           {
@@ -163,7 +163,7 @@ describe('modules/manager/bazel-module/parser', () => {
             version: fragments.string('1.2.3'),
             registry: fragments.string('https://example.com/custom_registry'),
           },
-          true
+          true,
         ),
       ]);
     });
diff --git a/lib/modules/manager/bazel-module/parser.ts b/lib/modules/manager/bazel-module/parser.ts
index 22bf891502354093db4eb7007ec41b157eb9d716..94391648597e69f594d835e366d52358ca2cf078 100644
--- a/lib/modules/manager/bazel-module/parser.ts
+++ b/lib/modules/manager/bazel-module/parser.ts
@@ -23,7 +23,7 @@ const kvParams = q
   .op('=')
   .alt(
     q.str((ctx, token) => ctx.addString(token.value)),
-    q.sym<Ctx>(booleanValuesRegex, (ctx, token) => ctx.addBoolean(token.value))
+    q.sym<Ctx>(booleanValuesRegex, (ctx, token) => ctx.addBoolean(token.value)),
   );
 
 const moduleRules = q
@@ -34,7 +34,7 @@ const moduleRules = q
       maxDepth: 1,
       search: kvParams,
       postHandler: (ctx, tree) => ctx.endRule(),
-    })
+    }),
   );
 
 const rule = q.alt<Ctx>(moduleRules);
diff --git a/lib/modules/manager/bazel-module/rules.ts b/lib/modules/manager/bazel-module/rules.ts
index 173104facc7ce902c9be3c359c58281bf9422c84..fda2cf253c0fd08f2b49b5b452489ba36f9e04db 100644
--- a/lib/modules/manager/bazel-module/rules.ts
+++ b/lib/modules/manager/bazel-module/rules.ts
@@ -49,7 +49,7 @@ function isMerge(value: BazelModulePackageDep): value is MergePackageDep {
 // be surprising to someone outside the bazel-module code to see the extra
 // properties.
 export function bazelModulePackageDepToPackageDependency(
-  bmpd: BazelModulePackageDep
+  bmpd: BazelModulePackageDep,
 ): PackageDependency {
   const copy: BazelModulePackageDep = clone(bmpd);
   if (isOverride(copy)) {
@@ -77,7 +77,7 @@ const BazelDepToPackageDep = RecordFragmentSchema.extend({
     depType: rule.value,
     depName: name.value,
     currentValue: version.value,
-  })
+  }),
 );
 
 const GitOverrideToPackageDep = RecordFragmentSchema.extend({
@@ -107,7 +107,7 @@ const GitOverrideToPackageDep = RecordFragmentSchema.extend({
       override.skipReason = 'unsupported-datasource';
     }
     return override;
-  }
+  },
 );
 
 const SingleVersionOverrideToPackageDep = RecordFragmentSchema.extend({
@@ -141,7 +141,7 @@ const SingleVersionOverrideToPackageDep = RecordFragmentSchema.extend({
       merge.registryUrls = [registry.value];
     }
     return base;
-  }
+  },
 );
 
 const UnsupportedOverrideToPackageDep = RecordFragmentSchema.extend({
@@ -168,7 +168,7 @@ const UnsupportedOverrideToPackageDep = RecordFragmentSchema.extend({
       skipReason: 'unsupported-datasource',
       bazelDepSkipReason,
     };
-  }
+  },
 );
 
 export const RuleToBazelModulePackageDep = z.union([
@@ -179,14 +179,14 @@ export const RuleToBazelModulePackageDep = z.union([
 ]);
 
 const githubRemoteRegex = regEx(
-  /^https:\/\/github\.com\/(?<packageName>[^/]+\/.+)$/
+  /^https:\/\/github\.com\/(?<packageName>[^/]+\/.+)$/,
 );
 function githubPackageName(remote: string): string | undefined {
   return parseGithubUrl(remote)?.match(githubRemoteRegex)?.groups?.packageName;
 }
 
 function collectByModule(
-  packageDeps: BazelModulePackageDep[]
+  packageDeps: BazelModulePackageDep[],
 ): BazelModulePackageDep[][] {
   const rulesByModule = new Map<string, BasePackageDep[]>();
   for (const pkgDep of packageDeps) {
@@ -198,7 +198,7 @@ function collectByModule(
 }
 
 export function processModulePkgDeps(
-  packageDeps: BazelModulePackageDep[]
+  packageDeps: BazelModulePackageDep[],
 ): PackageDependency[] {
   if (!packageDeps.length) {
     return [];
@@ -227,7 +227,7 @@ export function processModulePkgDeps(
     const depTypes = overrides.map((o) => o.depType);
     logger.debug(
       { depName: moduleName, depTypes },
-      'More than one override for a module was found'
+      'More than one override for a module was found',
     );
     return deps;
   }
@@ -238,7 +238,7 @@ export function processModulePkgDeps(
 }
 
 export function toPackageDependencies(
-  packageDeps: BazelModulePackageDep[]
+  packageDeps: BazelModulePackageDep[],
 ): PackageDependency[] {
   return collectByModule(packageDeps).map(processModulePkgDeps).flat();
 }
diff --git a/lib/modules/manager/bazel-module/starlark.spec.ts b/lib/modules/manager/bazel-module/starlark.spec.ts
index efd3e50ab19a74310080b7da4e3043b6e6c84fcb..f98cce37a12474e9fdef8c81cbb936cc3f537b9c 100644
--- a/lib/modules/manager/bazel-module/starlark.spec.ts
+++ b/lib/modules/manager/bazel-module/starlark.spec.ts
@@ -11,7 +11,7 @@ describe('modules/manager/bazel-module/starlark', () => {
 
   it('asBoolean', () => {
     expect(() => starlark.asBoolean('bad')).toThrow(
-      new Error('Invalid Starlark boolean string: bad')
+      new Error('Invalid Starlark boolean string: bad'),
     );
   });
 });
diff --git a/lib/modules/manager/bazel/artifacts.spec.ts b/lib/modules/manager/bazel/artifacts.spec.ts
index e70c4f0499b68fcfd3ced96c2ecfa6998547f2de..b016eff6d1e6ada154422bf85e28a54ff1757691 100644
--- a/lib/modules/manager/bazel/artifacts.spec.ts
+++ b/lib/modules/manager/bazel/artifacts.spec.ts
@@ -43,7 +43,7 @@ describe('modules/manager/bazel/artifacts', () => {
     httpMock
       .scope('https://github.com')
       .get(
-        '/GoogleContainerTools/distroless/archive/033387ac8853e6cc1cd47df6c346bc53cbc490d8.tar.gz'
+        '/GoogleContainerTools/distroless/archive/033387ac8853e6cc1cd47df6c346bc53cbc490d8.tar.gz',
       )
       .reply(200, tarContent);
 
@@ -52,7 +52,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -110,7 +110,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -169,7 +169,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -216,7 +216,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
     expect(res).toBeNull();
   });
@@ -243,7 +243,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
     expect(res).toBeNull();
   });
@@ -271,7 +271,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
     expect(res).toBeNull();
   });
@@ -298,7 +298,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
     expect(res).toBeNull();
   });
@@ -326,7 +326,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
     expect(res).toBeNull();
   });
@@ -384,7 +384,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -451,7 +451,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -518,7 +518,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -585,7 +585,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -640,7 +640,7 @@ describe('modules/manager/bazel/artifacts', () => {
     httpMock
       .scope('https://github.com')
       .get(
-        '/bazelbuild/rules_nodejs/releases/download/5.5.4/rules_nodejs-core-5.5.4.tar.gz'
+        '/bazelbuild/rules_nodejs/releases/download/5.5.4/rules_nodejs-core-5.5.4.tar.gz',
       )
       .reply(200, tarContent);
 
@@ -655,7 +655,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
@@ -711,14 +711,14 @@ describe('modules/manager/bazel/artifacts', () => {
     httpMock
       .scope('https://github.com')
       .get(
-        '/bazelbuild/rules_nodejs/releases/download/1.2.3/rules_nodejs-core-1.2.3.tar.gz'
+        '/bazelbuild/rules_nodejs/releases/download/1.2.3/rules_nodejs-core-1.2.3.tar.gz',
       )
       .reply(200, tarContent);
 
     const output = input
       .replace(
         `${currentValue}/rules_nodejs-core-${currentValue}`,
-        `${newValue}/rules_nodejs-core-${newValue}`
+        `${newValue}/rules_nodejs-core-${newValue}`,
       )
       .replace(inputHash, outputHash);
 
@@ -727,7 +727,7 @@ describe('modules/manager/bazel/artifacts', () => {
         packageFileName: 'WORKSPACE',
         updatedDeps: [upgrade],
         newPackageFileContent: input,
-      })
+      }),
     );
 
     expect(res).toEqual([
diff --git a/lib/modules/manager/bazel/artifacts.ts b/lib/modules/manager/bazel/artifacts.ts
index 57e066f5eab7ac271775da93a0ca88c6e19630ff..5071fb2e797fa43b21a6a7af7e3eca8df4b3a7d9 100644
--- a/lib/modules/manager/bazel/artifacts.ts
+++ b/lib/modules/manager/bazel/artifacts.ts
@@ -54,7 +54,7 @@ function replaceAll(input: string, from: string, to: string): string {
 function replaceValues(
   content: string,
   from: string | null | undefined,
-  to: string | null | undefined
+  to: string | null | undefined,
 ): string {
   // istanbul ignore if
   if (!from || !to || from === to) {
@@ -69,7 +69,7 @@ async function getHashFromUrl(url: string): Promise<string | null> {
   const cacheNamespace = 'url-sha256';
   const cachedResult = await packageCache.get<string | null>(
     cacheNamespace,
-    url
+    url,
   );
   /* istanbul ignore next line */
   if (cachedResult) {
@@ -104,7 +104,7 @@ async function getHashFromUrls(urls: string[]): Promise<string | null> {
 }
 
 export async function updateArtifacts(
-  updateArtifact: UpdateArtifact
+  updateArtifact: UpdateArtifact,
 ): Promise<UpdateArtifactsResult[] | null> {
   const { packageFileName: path, updatedDeps: upgrades } = updateArtifact;
   let { newPackageFileContent: contents } = updateArtifact;
diff --git a/lib/modules/manager/bazel/common.spec.ts b/lib/modules/manager/bazel/common.spec.ts
index e4946216e58282c583f41871f2fd24ac62b90a6c..9aa1413825881177976dd75a56be724fbee30fce 100644
--- a/lib/modules/manager/bazel/common.spec.ts
+++ b/lib/modules/manager/bazel/common.spec.ts
@@ -36,7 +36,7 @@ describe('modules/manager/bazel/common', () => {
       const input = `git_repository(name = "foo", deps = ["bar", "baz", "qux"])`;
       const output = updateCode(input, [0, 'deps', 1], 'BAZ');
       expect(output).toBe(
-        `git_repository(name = "foo", deps = ["bar", "BAZ", "qux"])`
+        `git_repository(name = "foo", deps = ["bar", "BAZ", "qux"])`,
       );
     });
 
diff --git a/lib/modules/manager/bazel/common.ts b/lib/modules/manager/bazel/common.ts
index b13a23121c394d35bfba4f69c1cdb1cb84ce369b..747cb68b864cde833b5ddf35911222b644861f90 100644
--- a/lib/modules/manager/bazel/common.ts
+++ b/lib/modules/manager/bazel/common.ts
@@ -4,7 +4,7 @@ import type { Fragment, FragmentPath, FragmentUpdater } from './types';
 
 export function findCodeFragment(
   input: string,
-  path: FragmentPath
+  path: FragmentPath,
 ): Fragment | null {
   const parsed = parse(input);
   if (!parsed) {
@@ -35,7 +35,7 @@ export function findCodeFragment(
 export function patchCodeAtFragment(
   input: string,
   fragment: Fragment,
-  updater: FragmentUpdater
+  updater: FragmentUpdater,
 ): string {
   const { value, offset } = fragment;
   const left = input.slice(0, offset);
@@ -48,10 +48,10 @@ export function patchCodeAtFragment(
 export function patchCodeAtFragments(
   input: string,
   fragments: Fragment[],
-  updater: FragmentUpdater
+  updater: FragmentUpdater,
 ): string {
   const sortedFragments = fragments.sort(
-    ({ offset: a }, { offset: b }) => b - a
+    ({ offset: a }, { offset: b }) => b - a,
   );
   let result = input;
   for (const fragment of sortedFragments) {
@@ -63,7 +63,7 @@ export function patchCodeAtFragments(
 export function updateCode(
   input: string,
   path: FragmentPath,
-  updater: FragmentUpdater
+  updater: FragmentUpdater,
 ): string {
   const fragment = findCodeFragment(input, path);
   if (!fragment) {
diff --git a/lib/modules/manager/bazel/extract.spec.ts b/lib/modules/manager/bazel/extract.spec.ts
index 9b3bcf2d3255e990b7c60e1f7b7e757a0d7ee8fd..fbc9ff001c106bf7920dc9d3f658cecedd31da87 100644
--- a/lib/modules/manager/bazel/extract.spec.ts
+++ b/lib/modules/manager/bazel/extract.spec.ts
@@ -73,7 +73,7 @@ describe('modules/manager/bazel/extract', () => {
             digest="sha256:a4e8d8c444ca04fe706649e82263c9f4c2a4229bc30d2a64561b5e1d20cc8548",
             tag="v1.0.0-alpha31.cli-migrations"
           )
-        `
+        `,
       );
       expect(res?.deps).toMatchObject([
         {
@@ -97,7 +97,7 @@ describe('modules/manager/bazel/extract', () => {
             digest="sha256:a4e8d8c444ca04fe706649e82263c9f4c2a4229bc30d2a64561b5e1d20cc8548",
             tag="v1.0.0-alpha31.cli-migrations"
           )
-        `
+        `,
       );
       expect(res?.deps).toMatchObject([
         {
@@ -119,11 +119,11 @@ describe('modules/manager/bazel/extract', () => {
             remote = "https://github.com/test/uuid-fork",
             commit = "dec09d789f3dba190787f8b4454c7d3c936fed9e"
           )
-        `
+        `,
       );
       expect(successStory?.deps[0].datasource).toBe('go');
       expect(successStory?.deps[0].packageName).toBe(
-        'github.com/test/uuid-fork'
+        'github.com/test/uuid-fork',
       );
 
       const badStory = extractPackageFile(
@@ -134,7 +134,7 @@ describe('modules/manager/bazel/extract', () => {
             remote = "https://github.com/test/uuid.git#branch",
             commit = "dec09d789f3dba190787f8b4454c7d3c936fed9e"
           )
-        `
+        `,
       );
       expect(badStory?.deps[0].skipReason).toBe('unsupported-remote');
 
@@ -146,7 +146,7 @@ describe('modules/manager/bazel/extract', () => {
             remote = "https://github.mycompany.com/test/uuid",
             commit = "dec09d789f3dba190787f8b4454c7d3c936fed9e"
           )
-        `
+        `,
       );
       expect(gheStory?.deps[0].skipReason).toBe('unsupported-remote');
 
@@ -158,7 +158,7 @@ describe('modules/manager/bazel/extract', () => {
             remote = "https://gitlab.com/test/uuid",
             commit = "dec09d789f3dba190787f8b4454c7d3c936fed9e"
           )
-        `
+        `,
       );
       expect(gitlabRemote?.deps[0].skipReason).toBe('unsupported-remote');
     });
@@ -180,7 +180,7 @@ describe('modules/manager/bazel/extract', () => {
               sha256 = "5aef09ed3279aa01d5c928e3beb248f9ad32dde6aafe6373a8c994c3ce643064",
               urls = ["https://github.com/bazelbuild/rules_nodejs/releases/download/5.5.3/rules_nodejs-core-5.5.3.tar.gz"],
           )
-        `
+        `,
       );
 
       expect(res?.deps).toHaveLength(2);
@@ -211,7 +211,7 @@ describe('modules/manager/bazel/extract', () => {
                 "https://gitlab.com/libeigen/eigen/-/archive/90ee821c563fa20db4d64d6991ddca256d5c52f2/eigen-90ee821c563fa20db4d64d6991ddca256d5c52f2.tar.gz",
             ],
           )
-        `
+        `,
       );
 
       expect(res?.deps).toHaveLength(2);
diff --git a/lib/modules/manager/bazel/extract.ts b/lib/modules/manager/bazel/extract.ts
index 78349672af895f3493a2c5099e8fd3b6f9d47f8d..cd68dbac716e159467a70243a659084f7b226234 100644
--- a/lib/modules/manager/bazel/extract.ts
+++ b/lib/modules/manager/bazel/extract.ts
@@ -9,7 +9,7 @@ import type { RecordFragment } from './types';
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
 
diff --git a/lib/modules/manager/bazel/parser.ts b/lib/modules/manager/bazel/parser.ts
index 996a05c65e1cfe84d5cfd3ce50cac0335b4e3e3a..072563de75d8752350d8fa107a948098bda56857 100644
--- a/lib/modules/manager/bazel/parser.ts
+++ b/lib/modules/manager/bazel/parser.ts
@@ -30,7 +30,7 @@ function currentFragment(ctx: Ctx): NestedFragment {
 function extractTreeValue(
   source: string,
   tree: parser.Tree,
-  offset: number
+  offset: number,
 ): string {
   if (tree.type === 'wrapped-tree') {
     const { endsWith } = tree;
@@ -133,7 +133,7 @@ const kwParams = q
               return ctx;
             }),
             0,
-            3
+            3,
           )
           .tree({
             type: 'wrapped-tree',
@@ -147,7 +147,7 @@ const kwParams = q
                     ...ctx,
                     subRecordKey,
                   }))
-                  .op('=')
+                  .op('='),
               )
               .str((ctx, { value: subRecordValue, offset }) => {
                 const argIndex = ctx.argIndex ?? 0;
@@ -174,7 +174,7 @@ const kwParams = q
                 callFrag.value = extractTreeValue(
                   ctx.source,
                   tree,
-                  callFrag.offset
+                  callFrag.offset,
                 );
 
                 const parentRecord = currentFragment(ctx);
@@ -188,7 +188,7 @@ const kwParams = q
               }
               return ctx;
             },
-          })
+          }),
       ),
       postHandler: (ctx, tree) => {
         const parentRecord = currentFragment(ctx);
@@ -205,7 +205,7 @@ const kwParams = q
         }
         return ctx;
       },
-    })
+    }),
   )
   .handler((ctx) => {
     delete ctx.recordKey;
@@ -220,7 +220,7 @@ const kwParams = q
  * @param search something to match inside parens
  */
 function ruleCall(
-  search: q.QueryBuilder<Ctx, parser.Node>
+  search: q.QueryBuilder<Ctx, parser.Node>,
 ): q.QueryBuilder<Ctx, parser.Node> {
   return q.tree({
     type: 'wrapped-tree',
@@ -265,7 +265,7 @@ function ruleNameHandler(ctx: Ctx, { value, offset }: lexer.Token): Ctx {
  */
 const regularRule = q
   .sym<Ctx>(supportedRulesRegex, (ctx, token) =>
-    ruleNameHandler(recordStartHandler(ctx, token), token)
+    ruleNameHandler(recordStartHandler(ctx, token), token),
   )
   .join(ruleCall(kwParams));
 
@@ -280,9 +280,9 @@ const maybeRule = q
     ruleCall(
       q.alt(
         q.begin<Ctx>().sym(supportedRulesRegex, ruleNameHandler).op(','),
-        kwParams
-      )
-    )
+        kwParams,
+      ),
+    ),
   );
 
 const rule = q.alt<Ctx>(maybeRule, regularRule);
@@ -302,7 +302,7 @@ const starlark = lang.createLang('starlark');
 
 export function parse(
   input: string,
-  packageFile?: string
+  packageFile?: string,
 ): RecordFragment[] | null {
   const cacheKey = getCacheKey(input);
 
diff --git a/lib/modules/manager/bazel/rules/docker.ts b/lib/modules/manager/bazel/rules/docker.ts
index 6d75313c5ae35e5a5f194106ea8b0300e7f8c68e..c9878dd97447637ffc8f91f56a0aacdaac091d13 100644
--- a/lib/modules/manager/bazel/rules/docker.ts
+++ b/lib/modules/manager/bazel/rules/docker.ts
@@ -33,5 +33,5 @@ export const DockerTarget = z
         currentDigest: digest,
         registryUrls: [registry],
       },
-    ]
+    ],
   );
diff --git a/lib/modules/manager/bazel/rules/git.ts b/lib/modules/manager/bazel/rules/git.ts
index d63f5956b761ed615a1d7af55cd4f21aaad43486..5779a67bac178254e7cca3307eff3c169217feaf 100644
--- a/lib/modules/manager/bazel/rules/git.ts
+++ b/lib/modules/manager/bazel/rules/git.ts
@@ -6,7 +6,7 @@ import { GithubReleasesDatasource } from '../../../datasource/github-releases';
 import type { PackageDependency } from '../../types';
 
 const githubUrlRegex = regEx(
-  /^https:\/\/github\.com\/(?<packageName>[^/]+\/[^/]+)/
+  /^https:\/\/github\.com\/(?<packageName>[^/]+\/[^/]+)/,
 );
 
 function githubPackageName(input: string): string | undefined {
diff --git a/lib/modules/manager/bazel/rules/go.ts b/lib/modules/manager/bazel/rules/go.ts
index 1512001e04a5419f17df3620f28040a1a5f1c8a8..e91ed1bf840cf9d47686e0825a8f77cd06836762 100644
--- a/lib/modules/manager/bazel/rules/go.ts
+++ b/lib/modules/manager/bazel/rules/go.ts
@@ -37,7 +37,7 @@ export const GoTarget = z
 
       if (remote) {
         const remoteMatch = regEx(
-          /https:\/\/github\.com(?:.*\/)(([a-zA-Z]+)([-])?([a-zA-Z]+))/
+          /https:\/\/github\.com(?:.*\/)(([a-zA-Z]+)([-])?([a-zA-Z]+))/,
         ).exec(remote);
         if (remoteMatch && remoteMatch[0].length === remote.length) {
           dep.packageName = remote.replace('https://', '');
@@ -47,5 +47,5 @@ export const GoTarget = z
       }
 
       return [dep];
-    }
+    },
   );
diff --git a/lib/modules/manager/bazel/rules/http.ts b/lib/modules/manager/bazel/rules/http.ts
index fe4b166172d22b151414c48f924020769e3f07e8..1cc0da54c89f8e601452a2a0feb311c5943f1375 100644
--- a/lib/modules/manager/bazel/rules/http.ts
+++ b/lib/modules/manager/bazel/rules/http.ts
@@ -36,7 +36,7 @@ const archives = [
 ];
 
 const archiveSuffixRegex = regEx(
-  `(?:${archives.map(escapeRegExp).join('|')})$`
+  `(?:${archives.map(escapeRegExp).join('|')})$`,
 );
 
 function stripArchiveSuffix(value: string): string {
@@ -48,7 +48,7 @@ function isHash(value: unknown): value is string {
 }
 
 export function parseGithubPath(
-  pathname: string
+  pathname: string,
 ): Partial<PackageDependency> | null {
   const [p0, p1, p2, p3, p4, p5] = pathname.split('/').slice(1);
   const packageName = p0 + '/' + p1;
@@ -100,7 +100,7 @@ function parseGitlabPath(pathname: string): Partial<PackageDependency> | null {
 }
 
 export function parseArchiveUrl(
-  urlString: string | undefined | null
+  urlString: string | undefined | null,
 ): Partial<PackageDependency> | null {
   if (!urlString) {
     return null;
diff --git a/lib/modules/manager/bazel/rules/index.spec.ts b/lib/modules/manager/bazel/rules/index.spec.ts
index 3c932597af7a7d71da5e91eeb89345048721c737..ba1baf2d691159bd5a1566605bdb1709abbfa43f 100644
--- a/lib/modules/manager/bazel/rules/index.spec.ts
+++ b/lib/modules/manager/bazel/rules/index.spec.ts
@@ -12,8 +12,8 @@ describe('modules/manager/bazel/rules/index', () => {
     // Archive of a commit.
     expect(
       parseArchiveUrl(
-        'https://github.com/foo/bar/archive/abcdef0123abcdef0123abcdef0123abcdef0123.tar.gz'
-      )
+        'https://github.com/foo/bar/archive/abcdef0123abcdef0123abcdef0123abcdef0123.tar.gz',
+      ),
     ).toEqual({
       datasource: 'github-tags',
       packageName: 'foo/bar',
@@ -23,8 +23,8 @@ describe('modules/manager/bazel/rules/index', () => {
     // Archive of a release
     expect(
       parseArchiveUrl(
-        'https://github.com/foo/bar/releases/download/1.2.3/foobar-1.2.3.tar.gz'
-      )
+        'https://github.com/foo/bar/releases/download/1.2.3/foobar-1.2.3.tar.gz',
+      ),
     ).toEqual({
       datasource: 'github-releases',
       packageName: 'foo/bar',
@@ -34,8 +34,8 @@ describe('modules/manager/bazel/rules/index', () => {
     // Archive of a tag.
     expect(
       parseArchiveUrl(
-        'https://github.com/aspect-build/rules_js/archive/refs/tags/v1.1.2.tar.gz'
-      )
+        'https://github.com/aspect-build/rules_js/archive/refs/tags/v1.1.2.tar.gz',
+      ),
     ).toEqual({
       datasource: 'github-tags',
       packageName: 'aspect-build/rules_js',
@@ -46,11 +46,14 @@ describe('modules/manager/bazel/rules/index', () => {
   describe('git', () => {
     it('extracts git dependencies', () => {
       expect(
-        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
-        extractDepsFromFragmentData({ rule: 'git_repository', name: 'foo_bar' })
+        extractDepsFromFragmentData({
+          rule: 'git_repository',
+          name: 'foo_bar',
+        }),
       ).toBeEmptyArray();
 
       expect(
@@ -58,7 +61,7 @@ describe('modules/manager/bazel/rules/index', () => {
           rule: 'git_repository',
           name: 'foo_bar',
           tag: '1.2.3',
-        })
+        }),
       ).toBeEmptyArray();
 
       expect(
@@ -67,7 +70,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           tag: '1.2.3',
           remote: 'https://github.com/foo/bar',
-        })
+        }),
       ).toEqual([
         {
           datasource: 'github-releases',
@@ -84,7 +87,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           commit: 'abcdef0123abcdef0123abcdef0123abcdef0123',
           remote: 'https://github.com/foo/bar',
-        })
+        }),
       ).toEqual([
         {
           datasource: 'github-releases',
@@ -101,7 +104,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           tag: '1.2.3',
           remote: 'https://gitlab.com/foo/bar',
-        })
+        }),
       ).toMatchObject([
         {
           currentValue: '1.2.3',
@@ -116,11 +119,11 @@ describe('modules/manager/bazel/rules/index', () => {
   describe('go', () => {
     it('extracts go dependencies', () => {
       expect(
-        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
-        extractDepsFromFragmentData({ rule: 'go_repository', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'go_repository', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
@@ -128,7 +131,7 @@ describe('modules/manager/bazel/rules/index', () => {
           rule: 'go_repository',
           name: 'foo_bar',
           tag: '1.2.3',
-        })
+        }),
       ).toBeEmptyArray();
 
       expect(
@@ -137,7 +140,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           tag: '1.2.3',
           importpath: 'foo/bar/baz',
-        })
+        }),
       ).toEqual([
         {
           datasource: 'go',
@@ -154,7 +157,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           commit: 'abcdef0123abcdef0123abcdef0123abcdef0123',
           importpath: 'foo/bar/baz',
-        })
+        }),
       ).toEqual([
         {
           datasource: 'go',
@@ -173,7 +176,7 @@ describe('modules/manager/bazel/rules/index', () => {
           tag: '1.2.3',
           importpath: 'foo/bar/baz',
           remote: 'https://github.com/foo/bar',
-        })
+        }),
       ).toEqual([
         {
           datasource: 'go',
@@ -191,7 +194,7 @@ describe('modules/manager/bazel/rules/index', () => {
           tag: '1.2.3',
           importpath: 'foo/bar/baz',
           remote: 'https://example.com/foo/bar',
-        })
+        }),
       ).toEqual([
         {
           datasource: 'go',
@@ -208,11 +211,11 @@ describe('modules/manager/bazel/rules/index', () => {
   describe('http', () => {
     it('extracts http dependencies', () => {
       expect(
-        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
-        extractDepsFromFragmentData({ rule: 'http_archive', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'http_archive', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
@@ -220,7 +223,7 @@ describe('modules/manager/bazel/rules/index', () => {
           rule: 'http_archive',
           name: 'foo_bar',
           sha256: 'abcdef0123abcdef0123abcdef0123abcdef0123',
-        })
+        }),
       ).toBeEmptyArray();
 
       expect(
@@ -229,7 +232,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           sha256: 'abcdef0123abcdef0123abcdef0123abcdef0123',
           url: 'https://github.com/foo/bar/archive/abcdef0123abcdef0123abcdef0123abcdef0123.tar.gz',
-        })
+        }),
       ).toEqual([
         {
           currentDigest: 'abcdef0123abcdef0123abcdef0123abcdef0123',
@@ -249,7 +252,7 @@ describe('modules/manager/bazel/rules/index', () => {
             'https://example.com/foo/bar',
             'https://github.com/foo/bar/archive/abcdef0123abcdef0123abcdef0123abcdef0123.tar.gz',
           ],
-        })
+        }),
       ).toEqual([
         {
           currentDigest: 'abcdef0123abcdef0123abcdef0123abcdef0123',
@@ -266,7 +269,7 @@ describe('modules/manager/bazel/rules/index', () => {
           name: 'foo_bar',
           sha256: 'abcdef0123abcdef0123abcdef0123abcdef0123',
           url: 'https://github.com/foo/bar/releases/download/1.2.3/foobar-1.2.3.tar.gz',
-        })
+        }),
       ).toEqual([
         {
           currentValue: '1.2.3',
@@ -286,7 +289,7 @@ describe('modules/manager/bazel/rules/index', () => {
             'https://example.com/foo/bar',
             'https://github.com/foo/bar/releases/download/1.2.3/foobar-1.2.3.tar.gz',
           ],
-        })
+        }),
       ).toEqual([
         {
           currentValue: '1.2.3',
@@ -306,7 +309,7 @@ describe('modules/manager/bazel/rules/index', () => {
           urls: [
             'https://github.com/aspect-build/rules_js/archive/refs/tags/v1.1.2.tar.gz',
           ],
-        })
+        }),
       ).toEqual([
         {
           currentValue: 'v1.1.2',
@@ -322,7 +325,7 @@ describe('modules/manager/bazel/rules/index', () => {
   describe('docker', () => {
     it('extracts docker dependencies', () => {
       expect(
-        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
@@ -333,7 +336,7 @@ describe('modules/manager/bazel/rules/index', () => {
           digest: 'abcdef0123abcdef0123abcdef0123abcdef0123',
           repository: 'example.com/foo/bar',
           registry: 'https://example.com',
-        })
+        }),
       ).toEqual([
         {
           currentDigest: 'abcdef0123abcdef0123abcdef0123abcdef0123',
@@ -352,7 +355,7 @@ describe('modules/manager/bazel/rules/index', () => {
   describe('oci', () => {
     it('extracts oci dependencies', () => {
       expect(
-        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' })
+        extractDepsFromFragmentData({ rule: 'foo_bar', name: 'foo_bar' }),
       ).toBeEmptyArray();
 
       expect(
@@ -362,7 +365,7 @@ describe('modules/manager/bazel/rules/index', () => {
           tag: '1.2.3',
           digest: 'abcdef0123abcdef0123abcdef0123abcdef0123',
           image: 'example.com/foo/bar',
-        })
+        }),
       ).toEqual([
         {
           currentDigest: 'abcdef0123abcdef0123abcdef0123abcdef0123',
@@ -401,7 +404,7 @@ describe('modules/manager/bazel/rules/index', () => {
             'https://example1.com/maven2',
             'https://example2.com/maven2',
           ],
-        })
+        }),
       ).toEqual([
         {
           currentValue: '1.1.1',
diff --git a/lib/modules/manager/bazel/rules/index.ts b/lib/modules/manager/bazel/rules/index.ts
index 30d275fe0f0fc8b3ed5a81bf255da8e5da7ab6a3..5389229934be8431af4e8601216ec25e878b7c0d 100644
--- a/lib/modules/manager/bazel/rules/index.ts
+++ b/lib/modules/manager/bazel/rules/index.ts
@@ -33,7 +33,7 @@ const supportedRules = [
 export const supportedRulesRegex = regEx(`^(?:${supportedRules.join('|')})$`);
 
 export function extractDepsFromFragmentData(
-  fragmentData: FragmentData
+  fragmentData: FragmentData,
 ): PackageDependency[] {
   const res = Target.safeParse(fragmentData);
   if (!res.success) {
@@ -43,7 +43,7 @@ export function extractDepsFromFragmentData(
 }
 
 export function extractDepsFromFragment(
-  fragment: Fragment
+  fragment: Fragment,
 ): PackageDependency[] {
   const fragmentData = extract(fragment);
   return extractDepsFromFragmentData(fragmentData);
diff --git a/lib/modules/manager/bazel/rules/maven.ts b/lib/modules/manager/bazel/rules/maven.ts
index 7622bb7b374ea0c9681954c2eb23f87366fffb27..f1f77bb3ff9450fa62d70273f615fb0cc228a2c5 100644
--- a/lib/modules/manager/bazel/rules/maven.ts
+++ b/lib/modules/manager/bazel/rules/maven.ts
@@ -58,5 +58,5 @@ export const MavenTarget = z
         currentValue,
         depType,
         registryUrls,
-      }))
+      })),
   );
diff --git a/lib/modules/manager/bicep/extract.spec.ts b/lib/modules/manager/bicep/extract.spec.ts
index 7c9d8bcb8ab8b2fa636c975dd69d6079dfa697d0..a20d57c47cb88dd477ae6726c922e17e5ab52072 100644
--- a/lib/modules/manager/bicep/extract.spec.ts
+++ b/lib/modules/manager/bicep/extract.spec.ts
@@ -17,7 +17,7 @@ describe('modules/manager/bicep/extract', () => {
       }
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -49,7 +49,7 @@ describe('modules/manager/bicep/extract', () => {
       // }
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toBeNull();
@@ -70,7 +70,7 @@ describe('modules/manager/bicep/extract', () => {
       }
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -97,7 +97,7 @@ describe('modules/manager/bicep/extract', () => {
       output id string = storageAccount.id
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -129,7 +129,7 @@ describe('modules/manager/bicep/extract', () => {
       }]
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -161,7 +161,7 @@ describe('modules/manager/bicep/extract', () => {
       }]
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -197,7 +197,7 @@ describe('modules/manager/bicep/extract', () => {
       }
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -233,7 +233,7 @@ describe('modules/manager/bicep/extract', () => {
       }
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
@@ -258,7 +258,7 @@ describe('modules/manager/bicep/extract', () => {
       }
       `,
       '',
-      {}
+      {},
     );
 
     expect(result).toEqual({
diff --git a/lib/modules/manager/bicep/extract.ts b/lib/modules/manager/bicep/extract.ts
index 86b77e2c8319c066fdda2a794344174743561955..b9887bcae2eac9b670db8316ad11157ee985cae3 100644
--- a/lib/modules/manager/bicep/extract.ts
+++ b/lib/modules/manager/bicep/extract.ts
@@ -7,13 +7,13 @@ import type {
 } from '../types';
 
 const RESOURCE_REGEX = regEx(
-  /resource\s+[A-Za-z0-9_]+\s+(?<replaceString>'(?<depName>.+\..+\/.+)@(?<currentValue>.+?)')/
+  /resource\s+[A-Za-z0-9_]+\s+(?<replaceString>'(?<depName>.+\..+\/.+)@(?<currentValue>.+?)')/,
 );
 
 export function extractPackageFile(
   content: string,
   _packageFile: string,
-  _config: ExtractConfig
+  _config: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   const deps: PackageDependency[] = [];
 
diff --git a/lib/modules/manager/bitbucket-pipelines/extract.spec.ts b/lib/modules/manager/bitbucket-pipelines/extract.spec.ts
index a0264cdbc04a1b6c5ab1098fc9de1d9498702372..454c7673d46bf498427f7bf4204b4fce6b5cb957 100644
--- a/lib/modules/manager/bitbucket-pipelines/extract.spec.ts
+++ b/lib/modules/manager/bitbucket-pipelines/extract.spec.ts
@@ -5,7 +5,7 @@ describe('modules/manager/bitbucket-pipelines/extract', () => {
   describe('extractPackageFile()', () => {
     it('returns null for empty', () => {
       expect(
-        extractPackageFile('nothing here', 'bitbucket-pipelines.yaml', {})
+        extractPackageFile('nothing here', 'bitbucket-pipelines.yaml', {}),
       ).toBeNull();
     });
 
@@ -14,8 +14,8 @@ describe('modules/manager/bitbucket-pipelines/extract', () => {
         extractPackageFile(
           'image:\n  username: ccc',
           'bitbucket-pipelines.yaml',
-          {}
-        )
+          {},
+        ),
       ).toBeNull();
     });
 
@@ -23,7 +23,7 @@ describe('modules/manager/bitbucket-pipelines/extract', () => {
       const res = extractPackageFile(
         Fixtures.get('bitbucket-pipelines.yaml'),
         'bitbucket-pipelines.yaml',
-        {}
+        {},
       );
       expect(res).toMatchObject({
         deps: [
@@ -87,7 +87,7 @@ describe('modules/manager/bitbucket-pipelines/extract', () => {
           registryAliases: {
             jfrogecosystem: 'some.jfrog.mirror',
           },
-        }
+        },
       );
       expect(res).toMatchObject({
         deps: [
diff --git a/lib/modules/manager/bitbucket-pipelines/extract.ts b/lib/modules/manager/bitbucket-pipelines/extract.ts
index 30f76d4d12cb36465a3db366dd2b12831f4c4a6e..ccfb2da4ef802915d003f29ad84b0c72f62abd53 100644
--- a/lib/modules/manager/bitbucket-pipelines/extract.ts
+++ b/lib/modules/manager/bitbucket-pipelines/extract.ts
@@ -17,7 +17,7 @@ import {
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
 
@@ -40,7 +40,7 @@ export function extractPackageFile(
           lineIdx,
           len,
           dockerImageObjectGroups.spaces,
-          config.registryAliases
+          config.registryAliases,
         );
         continue;
       }
@@ -67,7 +67,7 @@ export function extractPackageFile(
   } catch (err) /* istanbul ignore next */ {
     logger.debug(
       { err, packageFile },
-      'Error extracting Bitbucket Pipes dependencies'
+      'Error extracting Bitbucket Pipes dependencies',
     );
   }
   if (!deps.length) {
diff --git a/lib/modules/manager/bitbucket-pipelines/util.ts b/lib/modules/manager/bitbucket-pipelines/util.ts
index be2b8684d3b1911c9c3a870efb081d4569c2db77..bb156004e811c05130a8265283ea6b32c93761ff 100644
--- a/lib/modules/manager/bitbucket-pipelines/util.ts
+++ b/lib/modules/manager/bitbucket-pipelines/util.ts
@@ -5,13 +5,13 @@ import type { PackageDependency } from '../types';
 
 export const pipeRegex = regEx(`^\\s*-\\s?pipe:\\s*'?"?([^\\s'"]+)'?"?\\s*$`);
 export const dockerImageRegex = regEx(
-  `^\\s*-?\\s?image:\\s*'?"?([^\\s'"]+)'?"?\\s*$`
+  `^\\s*-?\\s?image:\\s*'?"?([^\\s'"]+)'?"?\\s*$`,
 );
 export const dockerImageObjectRegex = regEx('^(?<spaces>\\s*)image:\\s*$');
 
 export function addDepAsBitbucketTag(
   deps: PackageDependency[],
-  pipe: string
+  pipe: string,
 ): void {
   const [depName, currentValue] = pipe.split(':');
   const dep: PackageDependency = {
@@ -26,7 +26,7 @@ export function addDepAsBitbucketTag(
 export function addDepAsDockerImage(
   deps: PackageDependency[],
   currentDockerImage: string,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): void {
   const dep = getDep(currentDockerImage, true, registryAliases);
   dep.depType = 'docker';
@@ -39,10 +39,10 @@ export function addDepFromObject(
   start: number,
   len: number,
   spaces: string,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): number {
   const nameRegex = regEx(
-    `^${spaces}\\s+name:\\s*['"]?(?<image>[^\\s'"]+)['"]?\\s*$`
+    `^${spaces}\\s+name:\\s*['"]?(?<image>[^\\s'"]+)['"]?\\s*$`,
   );
   const indentRegex = regEx(`^${spaces}\\s+`);
 
diff --git a/lib/modules/manager/buildkite/extract.ts b/lib/modules/manager/buildkite/extract.ts
index 87d0fdcc272c287ac2cb4f11c293210d14f4fbcf..a70e90611157983c31112cc2cb2fd8cfb6ed8e86 100644
--- a/lib/modules/manager/buildkite/extract.ts
+++ b/lib/modules/manager/buildkite/extract.ts
@@ -8,7 +8,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   try {
@@ -17,7 +17,7 @@ export function extractPackageFile(
     for (const line of lines) {
       // Search each line for plugin names
       const depLineMatch = regEx(
-        /^\s*(?:-\s+(?:\?\s+)?)?(?<depName>[^#\s]+)#(?<currentValue>[^:]+)/
+        /^\s*(?:-\s+(?:\?\s+)?)?(?<depName>[^#\s]+)#(?<currentValue>[^:]+)/,
       ).exec(line);
 
       if (depLineMatch?.groups) {
@@ -28,7 +28,7 @@ export function extractPackageFile(
         logger.trace(`Found Buildkite plugin ${depName}`);
         // Plugins may simply be git repos. If so, we need to parse out the registry.
         const gitPluginMatch = regEx(
-          /(ssh:\/\/git@|https:\/\/)(?<registry>[^/]+)\/(?<gitPluginName>.*)/
+          /(ssh:\/\/git@|https:\/\/)(?<registry>[^/]+)\/(?<gitPluginName>.*)/,
         ).exec(depName);
         if (gitPluginMatch?.groups) {
           logger.debug('Examining git plugin');
@@ -56,13 +56,13 @@ export function extractPackageFile(
           } else {
             logger.warn(
               { dependency: depName },
-              'Something is wrong with Buildkite plugin name'
+              'Something is wrong with Buildkite plugin name',
             );
             skipReason = 'invalid-dependency-specification';
           }
         } else {
           logger.debug(
-            `Skipping non-pinned Buildkite current version ${currentValue}`
+            `Skipping non-pinned Buildkite current version ${currentValue}`,
           );
           skipReason = 'invalid-version';
         }
diff --git a/lib/modules/manager/bun/artifacts.spec.ts b/lib/modules/manager/bun/artifacts.spec.ts
index 8af6097beb7301ecc023b4fc120ee5b9565e76df..6ebdb9f0014d683685155a09834043f2d8bd79d7 100644
--- a/lib/modules/manager/bun/artifacts.spec.ts
+++ b/lib/modules/manager/bun/artifacts.spec.ts
@@ -111,7 +111,7 @@ describe('modules/manager/bun/artifacts', () => {
       fs.readFile.mockResolvedValueOnce(oldLock as never);
       exec.mockRejectedValueOnce(execError);
       await expect(updateArtifacts(updateArtifact)).rejects.toThrow(
-        TEMPORARY_ERROR
+        TEMPORARY_ERROR,
       );
     });
 
diff --git a/lib/modules/manager/bun/artifacts.ts b/lib/modules/manager/bun/artifacts.ts
index 611439b0690375e088b4c2cd33fea639685af762..d643d6e68accba169692e7ef4e84d85ff3e988ee 100644
--- a/lib/modules/manager/bun/artifacts.ts
+++ b/lib/modules/manager/bun/artifacts.ts
@@ -11,7 +11,7 @@ import {
 import type { UpdateArtifact, UpdateArtifactsResult } from '../types';
 
 export async function updateArtifacts(
-  updateArtifact: UpdateArtifact
+  updateArtifact: UpdateArtifact,
 ): Promise<UpdateArtifactsResult[] | null> {
   const { packageFileName, updatedDeps, newPackageFileContent, config } =
     updateArtifact;
diff --git a/lib/modules/manager/bun/extract.spec.ts b/lib/modules/manager/bun/extract.spec.ts
index 15411f0f492f3f6a67aff9df8531354c6144bccf..8ca869f9c3ad0b5fe9bc85e0abd1b84d58f80772 100644
--- a/lib/modules/manager/bun/extract.spec.ts
+++ b/lib/modules/manager/bun/extract.spec.ts
@@ -26,7 +26,7 @@ describe('modules/manager/bun/extract', () => {
           _id: 1,
           _args: 1,
           _from: 1,
-        })
+        }),
       );
       expect(await extractAllPackageFiles({}, ['bun.lockb'])).toEqual([]);
     });
@@ -40,7 +40,7 @@ describe('modules/manager/bun/extract', () => {
           dependencies: {
             dep1: '1.0.0',
           },
-        })
+        }),
       );
       expect(await extractAllPackageFiles({}, ['bun.lockb'])).toMatchObject([
         {
diff --git a/lib/modules/manager/bun/extract.ts b/lib/modules/manager/bun/extract.ts
index 9509fa210d188b296568aab817be67aa27768127..9f41b23cb9acc6205fb2bbf898e47759a13de0ba 100644
--- a/lib/modules/manager/bun/extract.ts
+++ b/lib/modules/manager/bun/extract.ts
@@ -14,7 +14,7 @@ function matchesFileName(fileNameWithPath: string, fileName: string): boolean {
 
 export async function extractAllPackageFiles(
   config: ExtractConfig,
-  matchedFiles: string[]
+  matchedFiles: string[],
 ): Promise<PackageFile[]> {
   const packageFiles: PackageFile<NpmManagerData>[] = [];
   for (const matchedFile of matchedFiles) {
diff --git a/lib/modules/manager/bundler/artifacts.spec.ts b/lib/modules/manager/bundler/artifacts.spec.ts
index d462310850dedd0c97eb0f9043bd527d696cdaec..df9b06bdad2c7733268322d6540dba32692f692d 100644
--- a/lib/modules/manager/bundler/artifacts.spec.ts
+++ b/lib/modules/manager/bundler/artifacts.spec.ts
@@ -76,7 +76,7 @@ describe('modules/manager/bundler/artifacts', () => {
           updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
           newPackageFileContent: '',
           config,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -87,7 +87,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: [],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -96,7 +96,7 @@ describe('modules/manager/bundler/artifacts', () => {
           updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
           newPackageFileContent: 'Updated Gemfile content',
           config,
-        })
+        }),
       ).toBeNull();
       expect(execSnapshots).toMatchObject([
         { cmd: 'bundler lock --update foo bar' },
@@ -110,7 +110,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: [],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -119,7 +119,7 @@ describe('modules/manager/bundler/artifacts', () => {
           updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
           newPackageFileContent: 'Updated Gemfile content',
           config,
-        })
+        }),
       ).toBeNull();
       expect(execSnapshots).toMatchObject([
         { options: { cwd: '/tmp/github/some/repo' } },
@@ -133,7 +133,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['Gemfile.lock'],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -142,7 +142,7 @@ describe('modules/manager/bundler/artifacts', () => {
           updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
           newPackageFileContent: 'Updated Gemfile content',
           config,
-        })
+        }),
       ).toEqual([updatedGemfileLock]);
       expect(execSnapshots).toMatchObject([
         { cmd: 'bundler lock --update foo bar' },
@@ -157,7 +157,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['Gemfile.lock'],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -166,7 +166,7 @@ describe('modules/manager/bundler/artifacts', () => {
           updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
           newPackageFileContent: 'Updated Gemfile content',
           config,
-        })
+        }),
       ).toEqual([updatedGemfileLock]);
       expect(execSnapshots).toMatchObject([
         { cmd: 'bundler lock --update foo bar' },
@@ -180,7 +180,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['Gemfile.lock'],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -199,7 +199,7 @@ describe('modules/manager/bundler/artifacts', () => {
               'bundlerConservative',
             ],
           },
-        })
+        }),
       ).toEqual([updatedGemfileLock]);
       expect(execSnapshots).toMatchObject([
         expect.objectContaining({
@@ -226,7 +226,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['Gemfile.lock'],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -235,7 +235,7 @@ describe('modules/manager/bundler/artifacts', () => {
           updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
           newPackageFileContent: 'Updated Gemfile content',
           config,
-        })
+        }),
       ).toEqual([updatedGemfileLock]);
       expect(execSnapshots).toMatchObject([
         { cmd: 'install-tool ruby 1.2.0' },
@@ -264,7 +264,7 @@ describe('modules/manager/bundler/artifacts', () => {
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -273,7 +273,7 @@ describe('modules/manager/bundler/artifacts', () => {
             updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
             newPackageFileContent: 'Updated Gemfile content',
             config,
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -317,7 +317,7 @@ describe('modules/manager/bundler/artifacts', () => {
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -332,7 +332,7 @@ describe('modules/manager/bundler/artifacts', () => {
                 bundler: '3.2.1',
               },
             },
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -378,7 +378,7 @@ describe('modules/manager/bundler/artifacts', () => {
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -393,7 +393,7 @@ describe('modules/manager/bundler/artifacts', () => {
                 bundler: 'bar',
               },
             },
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -438,13 +438,13 @@ describe('modules/manager/bundler/artifacts', () => {
           },
         ]);
         bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue(
-          'some-user:some-password'
+          'some-user:some-password',
         );
         const execSnapshots = mockExecAll();
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -453,7 +453,7 @@ describe('modules/manager/bundler/artifacts', () => {
             updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
             newPackageFileContent: 'Updated Gemfile content',
             config,
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -503,13 +503,13 @@ describe('modules/manager/bundler/artifacts', () => {
           },
         ]);
         bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue(
-          'some-user:some-password'
+          'some-user:some-password',
         );
         const execSnapshots = mockExecAll();
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -523,7 +523,7 @@ describe('modules/manager/bundler/artifacts', () => {
                 bundler: '1.2',
               },
             },
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -574,13 +574,13 @@ describe('modules/manager/bundler/artifacts', () => {
           },
         ]);
         bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue(
-          'some-user:some-password'
+          'some-user:some-password',
         );
         const execSnapshots = mockExecAll();
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -594,7 +594,7 @@ describe('modules/manager/bundler/artifacts', () => {
                 bundler: '2.1',
               },
             },
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -649,13 +649,13 @@ describe('modules/manager/bundler/artifacts', () => {
           },
         ]);
         bundlerHostRules.getAuthenticationHeaderValue.mockReturnValue(
-          'some-user:some-password'
+          'some-user:some-password',
         );
         const execSnapshots = mockExecAll();
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
         expect(
@@ -664,7 +664,7 @@ describe('modules/manager/bundler/artifacts', () => {
             updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
             newPackageFileContent: 'Updated Gemfile content',
             config,
-          })
+          }),
         ).toEqual([updatedGemfileLock]);
         expect(execSnapshots).toMatchObject([
           { cmd: 'docker pull ghcr.io/containerbase/sidecar' },
@@ -707,7 +707,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['Gemfile.lock'],
-        })
+        }),
       );
       expect(
         await updateArtifacts({
@@ -718,7 +718,7 @@ describe('modules/manager/bundler/artifacts', () => {
             ...config,
             isLockFileMaintenance: true,
           },
-        })
+        }),
       ).toMatchObject([{ artifactError: { lockFile: 'Gemfile.lock' } }]);
       expect(execSnapshots).toMatchObject([{ cmd: 'bundler lock --update' }]);
     });
@@ -730,7 +730,7 @@ describe('modules/manager/bundler/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['Gemfile.lock'],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('Updated Gemfile.lock');
       expect(
@@ -743,7 +743,7 @@ describe('modules/manager/bundler/artifacts', () => {
             isLockFileMaintenance: true,
             updateType: 'patch', // This will have no effect together with isLockFileMaintenance
           },
-        })
+        }),
       ).not.toBeNull();
       expect(execSnapshots).toMatchObject([{ cmd: 'bundler lock --update' }]);
     });
@@ -761,7 +761,7 @@ describe('modules/manager/bundler/artifacts', () => {
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
         expect(
           await updateArtifacts({
@@ -772,7 +772,7 @@ describe('modules/manager/bundler/artifacts', () => {
               ...config,
               isLockFileMaintenance: true,
             },
-          })
+          }),
         ).toMatchObject([
           {
             artifactError: {
@@ -801,7 +801,7 @@ describe('modules/manager/bundler/artifacts', () => {
               ...config,
               isLockFileMaintenance: true,
             },
-          })
+          }),
         ).rejects.toThrow(TEMPORARY_ERROR);
       });
 
@@ -823,7 +823,7 @@ describe('modules/manager/bundler/artifacts', () => {
               ...config,
               isLockFileMaintenance: true,
             },
-          })
+          }),
         ).toMatchObject([{ artifactError: { lockFile: 'Gemfile.lock' } }]);
       });
 
@@ -845,7 +845,7 @@ describe('modules/manager/bundler/artifacts', () => {
               ...config,
               isLockFileMaintenance: true,
             },
-          })
+          }),
         ).rejects.toThrow(BUNDLER_INVALID_CREDENTIALS);
       });
 
@@ -864,7 +864,7 @@ describe('modules/manager/bundler/artifacts', () => {
         git.getRepoStatus.mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['Gemfile.lock'],
-          })
+          }),
         );
 
         const res = await updateArtifacts({
diff --git a/lib/modules/manager/bundler/artifacts.ts b/lib/modules/manager/bundler/artifacts.ts
index d6fb6e14a0f227f872f344d8ccf9497e09a059bd..94fc4ad54c506c56d63b824075eaae05a8f9ee27 100644
--- a/lib/modules/manager/bundler/artifacts.ts
+++ b/lib/modules/manager/bundler/artifacts.ts
@@ -39,7 +39,7 @@ function buildBundleHostVariable(hostRule: HostRule): Record<string, string> {
     hostRule.resolvedHost
       .split('.')
       .map((term) => term.toUpperCase())
-      .join('__')
+      .join('__'),
   );
   return {
     [varName]: `${getAuthenticationHeaderValue(hostRule)}`,
@@ -47,7 +47,7 @@ function buildBundleHostVariable(hostRule: HostRule): Record<string, string> {
 }
 
 const resolvedPkgRegex = regEx(
-  /(?<pkg>\S+)(?:\s*\([^)]+\)\s*)? was resolved to/
+  /(?<pkg>\S+)(?:\s*\([^)]+\)\s*)? was resolved to/,
 );
 
 function getResolvedPackages(input: string): string[] {
@@ -66,7 +66,7 @@ function getResolvedPackages(input: string): string[] {
 
 export async function updateArtifacts(
   updateArtifact: UpdateArtifact,
-  recursionLimit = 10
+  recursionLimit = 10,
 ): Promise<UpdateArtifactsResult[] | null> {
   const { packageFileName, updatedDeps, newPackageFileContent, config } =
     updateArtifact;
@@ -132,7 +132,7 @@ export async function updateArtifacts(
         ...variables,
         ...buildBundleHostVariable(hostRule),
       }),
-      {} as Record<string, string>
+      {} as Record<string, string>,
     );
 
     // Detect hosts with a hyphen '-' in the url.
@@ -147,12 +147,12 @@ export async function updateArtifacts(
         }
         return authCommands;
       },
-      []
+      [],
     );
 
     const bundler = getBundlerConstraint(
       updateArtifact,
-      existingLockFileContent
+      existingLockFileContent,
     );
     const preCommands = ['ruby --version'];
 
@@ -165,14 +165,14 @@ export async function updateArtifacts(
     ) {
       preCommands.push(
         ...bundlerHostRulesAuthCommands.map(
-          (authCommand) => `bundler config --local ${authCommand}`
-        )
+          (authCommand) => `bundler config --local ${authCommand}`,
+        ),
       );
     } else if (bundlerHostRulesAuthCommands) {
       preCommands.push(
         ...bundlerHostRulesAuthCommands.map(
-          (authCommand) => `bundler config set --local ${authCommand}`
-        )
+          (authCommand) => `bundler config set --local ${authCommand}`,
+        ),
       );
     }
 
@@ -234,19 +234,19 @@ export async function updateArtifacts(
       err.stdout?.includes('Please supply credentials for this source') ||
       err.stderr?.includes('Authentication is required') ||
       err.stderr?.includes(
-        'Please make sure you have the correct access rights'
+        'Please make sure you have the correct access rights',
       )
     ) {
       logger.debug(
         { err },
-        'Gemfile.lock update failed due to missing credentials - skipping branch'
+        'Gemfile.lock update failed due to missing credentials - skipping branch',
       );
       // Do not generate these PRs because we don't yet support Bundler authentication
       memCache.set('bundlerArtifactsError', BUNDLER_INVALID_CREDENTIALS);
       throw new Error(BUNDLER_INVALID_CREDENTIALS);
     }
     const resolveMatches: string[] = getResolvedPackages(output).filter(
-      (depName) => !updatedDepNames.includes(depName)
+      (depName) => !updatedDepNames.includes(depName),
     );
     if (
       recursionLimit > 0 &&
@@ -255,7 +255,7 @@ export async function updateArtifacts(
     ) {
       logger.debug(
         { resolveMatches, updatedDeps },
-        'Found new resolve matches - reattempting recursively'
+        'Found new resolve matches - reattempting recursively',
       );
       const newUpdatedDeps = [
         ...new Set([
@@ -270,7 +270,7 @@ export async function updateArtifacts(
           newPackageFileContent,
           config,
         },
-        recursionLimit - 1
+        recursionLimit - 1,
       );
     }
 
diff --git a/lib/modules/manager/bundler/common.ts b/lib/modules/manager/bundler/common.ts
index 43e55f5f31df90dc3765782de289e58822023c61..cf85fee10eda70a1ff968fb84ffc81289e272fd0 100644
--- a/lib/modules/manager/bundler/common.ts
+++ b/lib/modules/manager/bundler/common.ts
@@ -19,7 +19,7 @@ export function extractRubyVersion(txt: string): string | null {
 }
 
 export async function getRubyConstraint(
-  updateArtifact: UpdateArtifact
+  updateArtifact: UpdateArtifact,
 ): Promise<string | null> {
   const { packageFileName, config, newPackageFileContent } = updateArtifact;
   const { constraints = {} } = config;
@@ -36,7 +36,7 @@ export async function getRubyConstraint(
     }
     const rubyVersionFile = getSiblingFileName(
       packageFileName,
-      '.ruby-version'
+      '.ruby-version',
     );
     const rubyVersionFileContent = await readLocalFile(rubyVersionFile, 'utf8');
     if (rubyVersionFileContent) {
@@ -52,7 +52,7 @@ export async function getRubyConstraint(
 
 export function getBundlerConstraint(
   updateArtifact: Pick<UpdateArtifact, 'config'>,
-  existingLockFileContent: string
+  existingLockFileContent: string,
 ): string | null {
   const { config } = updateArtifact;
   const { constraints = {} } = config;
@@ -63,7 +63,7 @@ export function getBundlerConstraint(
     return bundler;
   } else {
     const bundledWith = regEx(/\nBUNDLED WITH\n\s+(.*?)(\n|$)/).exec(
-      existingLockFileContent
+      existingLockFileContent,
     );
     if (bundledWith) {
       logger.debug('Using bundler version specified in lockfile');
@@ -74,7 +74,7 @@ export function getBundlerConstraint(
 }
 
 export async function getLockFilePath(
-  packageFilePath: string
+  packageFilePath: string,
 ): Promise<string> {
   const lockFilePath = (await localPathExists(`${packageFilePath}.lock`))
     ? `${packageFilePath}.lock`
diff --git a/lib/modules/manager/bundler/extract.spec.ts b/lib/modules/manager/bundler/extract.spec.ts
index c0a054675e4a7eef163dc50694185e1badf14736..fe7916baeebcf1689e3a23b666d06647eef4d605 100644
--- a/lib/modules/manager/bundler/extract.spec.ts
+++ b/lib/modules/manager/bundler/extract.spec.ts
@@ -21,10 +21,10 @@ const gitlabFossGemfileLock = Fixtures.get('Gemfile.gitlab-foss.lock');
 const gitlabFossGemfile = Fixtures.get('Gemfile.gitlab-foss');
 const sourceBlockGemfile = Fixtures.get('Gemfile.sourceBlock');
 const sourceBlockWithNewLinesGemfileLock = Fixtures.get(
-  'Gemfile.sourceBlockWithNewLines.lock'
+  'Gemfile.sourceBlockWithNewLines.lock',
 );
 const sourceBlockWithNewLinesGemfile = Fixtures.get(
-  'Gemfile.sourceBlockWithNewLines'
+  'Gemfile.sourceBlockWithNewLines',
 );
 
 describe('modules/manager/bundler/extract', () => {
@@ -41,11 +41,11 @@ describe('modules/manager/bundler/extract', () => {
       expect(
         res?.deps
           .filter((dep) =>
-            Object.prototype.hasOwnProperty.call(dep, 'lockedVersion')
+            Object.prototype.hasOwnProperty.call(dep, 'lockedVersion'),
           )
           .every(
-            (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion)
-          )
+            (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion),
+          ),
       ).toBeTrue();
       expect(res?.deps).toHaveLength(68);
     });
@@ -62,8 +62,8 @@ describe('modules/manager/bundler/extract', () => {
       expect(res).toMatchSnapshot();
       expect(
         res?.deps.every(
-          (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion)
-        )
+          (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion),
+        ),
       ).toBeTrue();
       expect(res?.deps).toHaveLength(5);
     });
@@ -75,11 +75,11 @@ describe('modules/manager/bundler/extract', () => {
       expect(
         res?.deps
           .filter((dep) =>
-            Object.prototype.hasOwnProperty.call(dep, 'lockedVersion')
+            Object.prototype.hasOwnProperty.call(dep, 'lockedVersion'),
           )
           .every(
-            (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion)
-          )
+            (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion),
+          ),
       ).toBeTrue();
       expect(res?.deps).toHaveLength(125);
     });
@@ -90,8 +90,8 @@ describe('modules/manager/bundler/extract', () => {
       expect(res).toMatchSnapshot();
       expect(
         res?.deps.every(
-          (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion)
-        )
+          (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion),
+        ),
       ).toBeTrue();
       expect(res?.deps).toHaveLength(14);
     });
@@ -103,8 +103,8 @@ describe('modules/manager/bundler/extract', () => {
     expect(res).toMatchSnapshot();
     expect(
       res?.deps.every(
-        (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion)
-      )
+        (dep) => is.string(dep.lockedVersion) && isValid(dep.lockedVersion),
+      ),
     ).toBeTrue();
     expect(res?.deps).toHaveLength(252);
   });
@@ -119,7 +119,7 @@ describe('modules/manager/bundler/extract', () => {
     fs.readLocalFile.mockResolvedValueOnce(sourceBlockWithNewLinesGemfileLock);
     const res = await extractPackageFile(
       sourceBlockWithNewLinesGemfile,
-      'Gemfile'
+      'Gemfile',
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(2);
diff --git a/lib/modules/manager/bundler/extract.ts b/lib/modules/manager/bundler/extract.ts
index 7380919c256c64f319149e3383d21889432eb9d0..167dcaeccb960088ac7f76541d2f4990d5175e6b 100644
--- a/lib/modules/manager/bundler/extract.ts
+++ b/lib/modules/manager/bundler/extract.ts
@@ -14,7 +14,7 @@ function formatContent(input: string): string {
 
 export async function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): Promise<PackageFileContent | null> {
   const res: PackageFileContent = {
     registryUrls: [],
@@ -28,7 +28,7 @@ export async function extractPackageFile(
       sourceMatch =
         sourceMatch ??
         regEx(`^source ${delimiter}([^${delimiter}]+)${delimiter}\\s*$`).exec(
-          line
+          line,
         );
     }
     if (sourceMatch) {
@@ -46,7 +46,7 @@ export async function extractPackageFile(
     }
 
     const gemMatchRegex = regEx(
-      `^\\s*gem\\s+(['"])(?<depName>[^'"]+)(['"])(\\s*,\\s*(?<currentValue>(['"])[^'"]+['"](\\s*,\\s*['"][^'"]+['"])?))?`
+      `^\\s*gem\\s+(['"])(?<depName>[^'"]+)(['"])(\\s*,\\s*(?<currentValue>(['"])[^'"]+['"](\\s*,\\s*['"][^'"]+['"])?))?`,
     );
     const gemMatch = gemMatchRegex.exec(line);
     if (gemMatch) {
@@ -77,7 +77,7 @@ export async function extractPackageFile(
         if (!is.string(groupLine)) {
           logger.debug(
             { content, packageFile, type: 'groupLine' },
-            'Bundler parsing error'
+            'Bundler parsing error',
           );
           groupLine = 'end';
         }
@@ -95,13 +95,13 @@ export async function extractPackageFile(
               lineNumber:
                 Number(dep.managerData?.lineNumber) + groupLineNumber + 1,
             },
-          }))
+          })),
         );
       }
     }
     for (const delimiter of delimiters) {
       const sourceBlockMatch = regEx(
-        `^source\\s+${delimiter}(.*?)${delimiter}\\s+do`
+        `^source\\s+${delimiter}(.*?)${delimiter}\\s+do`,
       ).exec(line);
       if (sourceBlockMatch) {
         const repositoryUrl = sourceBlockMatch[1];
@@ -115,7 +115,7 @@ export async function extractPackageFile(
           if (!is.string(sourceLine)) {
             logger.debug(
               { content, packageFile, type: 'sourceLine' },
-              'Bundler parsing error'
+              'Bundler parsing error',
             );
             sourceLine = 'end';
           }
@@ -133,7 +133,7 @@ export async function extractPackageFile(
                 lineNumber:
                   Number(dep.managerData?.lineNumber) + sourceLineNumber + 1,
               },
-            }))
+            })),
           );
         }
       }
@@ -150,7 +150,7 @@ export async function extractPackageFile(
         if (!is.string(platformsLine)) {
           logger.debug(
             { content, packageFile, type: 'platformsLine' },
-            'Bundler parsing error'
+            'Bundler parsing error',
           );
           platformsLine = 'end';
         }
@@ -167,7 +167,7 @@ export async function extractPackageFile(
               lineNumber:
                 Number(dep.managerData?.lineNumber) + platformsLineNumber + 1,
             },
-          }))
+          })),
         );
       }
     }
@@ -183,7 +183,7 @@ export async function extractPackageFile(
         if (!is.string(ifLine)) {
           logger.debug(
             { content, packageFile, type: 'ifLine' },
-            'Bundler parsing error'
+            'Bundler parsing error',
           );
           ifLine = 'end';
         }
@@ -200,7 +200,7 @@ export async function extractPackageFile(
               lineNumber:
                 Number(dep.managerData?.lineNumber) + ifLineNumber + 1,
             },
-          }))
+          })),
         );
       }
     }
@@ -214,7 +214,7 @@ export async function extractPackageFile(
     const lockContent = await readLocalFile(gemfileLockPath, 'utf8');
     if (lockContent) {
       logger.debug(
-        `Found lock file ${gemfileLockPath} for packageFile: ${packageFile}`
+        `Found lock file ${gemfileLockPath} for packageFile: ${packageFile}`,
       );
       res.lockFiles = [gemfileLockPath];
       const lockedEntries = extractLockFileEntries(lockContent);
diff --git a/lib/modules/manager/bundler/host-rules.spec.ts b/lib/modules/manager/bundler/host-rules.spec.ts
index 943bca95e2449602c89c596d1ddf672e1d623316..f54ae62f969b83b3ec7b06a7604ac919fce47e03 100644
--- a/lib/modules/manager/bundler/host-rules.spec.ts
+++ b/lib/modules/manager/bundler/host-rules.spec.ts
@@ -17,7 +17,7 @@ describe('modules/manager/bundler/host-rules', () => {
         getAuthenticationHeaderValue({
           username: 'test',
           password: 'password',
-        })
+        }),
       ).toBe('test:password');
     });
 
@@ -25,7 +25,7 @@ describe('modules/manager/bundler/host-rules', () => {
       expect(
         getAuthenticationHeaderValue({
           token: 'token',
-        })
+        }),
       ).toBe('token');
     });
   });
@@ -47,7 +47,7 @@ describe('modules/manager/bundler/host-rules', () => {
       delete hostRule.matchHost;
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toBeEmptyArray();
     });
 
@@ -57,7 +57,7 @@ describe('modules/manager/bundler/host-rules', () => {
 
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toBeEmptyArray();
     });
 
@@ -67,7 +67,7 @@ describe('modules/manager/bundler/host-rules', () => {
 
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toBeEmptyArray();
     });
 
@@ -76,7 +76,7 @@ describe('modules/manager/bundler/host-rules', () => {
 
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toMatchObject([hostRule]);
     });
 
@@ -85,7 +85,7 @@ describe('modules/manager/bundler/host-rules', () => {
 
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toMatchObject([hostRule]);
     });
 
@@ -94,7 +94,7 @@ describe('modules/manager/bundler/host-rules', () => {
 
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toMatchObject([hostRule]);
     });
 
@@ -103,7 +103,7 @@ describe('modules/manager/bundler/host-rules', () => {
 
       add(hostRule);
       expect(
-        findAllAuthenticatable({ hostType: 'nuget' } as any)
+        findAllAuthenticatable({ hostType: 'nuget' } as any),
       ).toMatchObject([hostRule]);
     });
   });
diff --git a/lib/modules/manager/bundler/locked-version.ts b/lib/modules/manager/bundler/locked-version.ts
index 5055aede48ef2bbd0b29752598a0777ca3ccdaf8..2ba7a1720374510b5aabc7981541f1e4b35e0b30 100644
--- a/lib/modules/manager/bundler/locked-version.ts
+++ b/lib/modules/manager/bundler/locked-version.ts
@@ -4,7 +4,7 @@ import { isVersion } from '../../versioning/ruby';
 
 const DEP_REGEX = new RegExp('(?<=\\().*(?=\\))'); // TODO #12872  (?<=re)	after text matching
 export function extractLockFileEntries(
-  lockFileContent: string
+  lockFileContent: string,
 ): Map<string, string> {
   const gemLock = new Map<string, string>();
   try {
diff --git a/lib/modules/manager/bundler/update-locked.ts b/lib/modules/manager/bundler/update-locked.ts
index b6c8609ef223da8cc1c2a8d0f0558bf270733cd7..40ae244ae57f665e9946224441034eefd7bcfa66 100644
--- a/lib/modules/manager/bundler/update-locked.ts
+++ b/lib/modules/manager/bundler/update-locked.ts
@@ -3,12 +3,12 @@ import type { UpdateLockedConfig, UpdateLockedResult } from '../types';
 import { extractLockFileEntries } from './locked-version';
 
 export function updateLockedDependency(
-  config: UpdateLockedConfig
+  config: UpdateLockedConfig,
 ): UpdateLockedResult {
   const { depName, currentVersion, newVersion, lockFile, lockFileContent } =
     config;
   logger.debug(
-    `bundler.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+    `bundler.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`,
   );
   try {
     const locked = extractLockFileEntries(lockFileContent ?? '');
diff --git a/lib/modules/manager/cargo/artifacts.spec.ts b/lib/modules/manager/cargo/artifacts.spec.ts
index 44b53dc198bc83d12d3850a4ce2e0ebe5c07e55f..bfc64bdb0c9f7f586632b6a648677231065ccee3 100644
--- a/lib/modules/manager/cargo/artifacts.spec.ts
+++ b/lib/modules/manager/cargo/artifacts.spec.ts
@@ -52,7 +52,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -63,7 +63,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -86,7 +86,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -109,7 +109,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -132,17 +132,17 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
 
   it('returns updated workspace Cargo.lock', async () => {
     fs.statLocalFile.mockRejectedValueOnce(
-      new Error('crates/one/Cargo.lock not found')
+      new Error('crates/one/Cargo.lock not found'),
     );
     fs.statLocalFile.mockRejectedValueOnce(
-      new Error('crates/Cargo.lock not found')
+      new Error('crates/Cargo.lock not found'),
     );
     fs.statLocalFile.mockResolvedValueOnce({ name: 'Cargo.lock' } as any);
 
@@ -161,7 +161,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -178,7 +178,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: { ...config, updateType: 'lockFileMaintenance' },
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -201,7 +201,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config: { ...config, constraints: { rust: '1.65.0' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -266,7 +266,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config: { ...config, constraints: { rust: '1.65.0' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -371,7 +371,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config: { ...config, constraints: { rust: '1.65.0' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -415,7 +415,7 @@ describe('modules/manager/cargo/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -447,7 +447,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config: { ...config, constraints: { rust: '1.65.0' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -474,7 +474,7 @@ describe('modules/manager/cargo/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -506,7 +506,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config: { ...config, constraints: { rust: '1.65.0' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -542,7 +542,7 @@ describe('modules/manager/cargo/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -564,7 +564,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config: { ...config, constraints: { rust: '1.65.0' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -615,7 +615,7 @@ describe('modules/manager/cargo/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'Cargo.lock', stderr: 'not found' } },
     ]);
diff --git a/lib/modules/manager/cargo/artifacts.ts b/lib/modules/manager/cargo/artifacts.ts
index 3c9b4222e35e6393f64fe891f0673913c7e09e55..d340fccf0cde1d6098120a0dc786946953442d6c 100644
--- a/lib/modules/manager/cargo/artifacts.ts
+++ b/lib/modules/manager/cargo/artifacts.ts
@@ -14,10 +14,10 @@ import type { UpdateArtifact, UpdateArtifactsResult } from '../types';
 async function cargoUpdate(
   manifestPath: string,
   isLockFileMaintenance: boolean,
-  constraint: string | undefined
+  constraint: string | undefined,
 ): Promise<void> {
   let cmd = `cargo update --config net.git-fetch-with-cli=true --manifest-path ${quote(
-    manifestPath
+    manifestPath,
   )}`;
   // If we're updating a specific crate, `cargo-update` requires `--workspace`
   // for more information, see: https://github.com/renovatebot/renovate/issues/12332
@@ -56,7 +56,7 @@ export async function updateArtifacts({
   // will be further up.
   const lockFileName = await findLocalSiblingOrParent(
     packageFileName,
-    'Cargo.lock'
+    'Cargo.lock',
   );
   const existingLockFileContent = lockFileName
     ? await readLocalFile(lockFileName)
@@ -71,7 +71,7 @@ export async function updateArtifacts({
     await cargoUpdate(
       packageFileName,
       isLockFileMaintenance,
-      config.constraints?.rust
+      config.constraints?.rust,
     );
     logger.debug('Returning updated Cargo.lock');
     const newCargoLockContent = await readLocalFile(lockFileName);
diff --git a/lib/modules/manager/cargo/extract.spec.ts b/lib/modules/manager/cargo/extract.spec.ts
index bbf4be8410eb75a156a105f28f3c7b2edab7efa0..aca495cb86d6cc17b917928bb788d35162a883d5 100644
--- a/lib/modules/manager/cargo/extract.spec.ts
+++ b/lib/modules/manager/cargo/extract.spec.ts
@@ -42,28 +42,28 @@ describe('modules/manager/cargo/extract', () => {
 
     it('returns null for invalid toml', async () => {
       expect(
-        await extractPackageFile('invalid toml', 'Cargo.toml', config)
+        await extractPackageFile('invalid toml', 'Cargo.toml', config),
       ).toBeNull();
     });
 
     it('returns null for empty dependencies', async () => {
       const cargotoml = '[dependencies]\n';
       expect(
-        await extractPackageFile(cargotoml, 'Cargo.toml', config)
+        await extractPackageFile(cargotoml, 'Cargo.toml', config),
       ).toBeNull();
     });
 
     it('returns null for empty dev-dependencies', async () => {
       const cargotoml = '[dev-dependencies]\n';
       expect(
-        await extractPackageFile(cargotoml, 'Cargo.toml', config)
+        await extractPackageFile(cargotoml, 'Cargo.toml', config),
       ).toBeNull();
     });
 
     it('returns null for empty custom target', async () => {
       const cargotoml = '[target."foo".dependencies]\n';
       expect(
-        await extractPackageFile(cargotoml, 'Cargo.toml', config)
+        await extractPackageFile(cargotoml, 'Cargo.toml', config),
       ).toBeNull();
     });
 
@@ -128,7 +128,7 @@ index = "https://github.com/mcorbin/testregistry"
 replace-with = "mcorbin"
 
 [source.mcorbin]
-replace-with = "private-crates"`
+replace-with = "private-crates"`,
       );
       const res = await extractPackageFile(cargo6toml, 'Cargo.toml', {
         ...config,
@@ -180,7 +180,7 @@ replace-with = "private-crates"`
 replace-with = "crates-io"
 
 [source.private-crates]
-replace-with = "mcorbin"`
+replace-with = "mcorbin"`,
       );
       const res = await extractPackageFile(cargo6toml, 'Cargo.toml', {
         ...config,
@@ -407,7 +407,7 @@ tokio = { version = "1.21.1" }`;
 foo = "bar"
 
 [source.crates-io]
-replace-with = "mine"`
+replace-with = "mine"`,
       );
 
       const res = await extractPackageFile(cargo6toml, 'Cargo.toml', {
@@ -464,7 +464,7 @@ replace-with = "private-crates"
 
 [source.private-crates]
 replace-with = "mcorbin"
-`
+`,
       );
 
       const res = await extractPackageFile(cargo6toml, 'Cargo.toml', {
diff --git a/lib/modules/manager/cargo/extract.ts b/lib/modules/manager/cargo/extract.ts
index d1c748ab79c4e2947ec0513b4f96bb67434225f5..b70bf3a7202c44bc4371106beeaabf6ef7a8eada 100644
--- a/lib/modules/manager/cargo/extract.ts
+++ b/lib/modules/manager/cargo/extract.ts
@@ -29,7 +29,7 @@ function extractFromSection(
   section: keyof CargoSection,
   cargoRegistries: CargoRegistries,
   target?: string,
-  depTypeOverride?: string
+  depTypeOverride?: string,
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
   const sectionContent = parsedContent[section];
@@ -151,7 +151,7 @@ function extractCargoRegistries(config: CargoConfig): CargoRegistries {
   // check if we're overriding our default registry index
   result[DEFAULT_REGISTRY_ID] = resolveRegistryIndex(
     DEFAULT_REGISTRY_ID,
-    config
+    config,
   );
 
   const registryNames = new Set([
@@ -168,14 +168,14 @@ function extractCargoRegistries(config: CargoConfig): CargoRegistries {
 function resolveRegistryIndex(
   registryName: string,
   config: CargoConfig,
-  originalNames: Set<string> = new Set()
+  originalNames: Set<string> = new Set(),
 ): CargoRegistryUrl {
   // if we have a source replacement, follow that.
   // https://doc.rust-lang.org/cargo/reference/source-replacement.html
   const replacementName = config.source?.[registryName]?.['replace-with'];
   if (replacementName) {
     logger.debug(
-      `Replacing index of cargo registry ${registryName} with ${replacementName}`
+      `Replacing index of cargo registry ${registryName} with ${replacementName}`,
     );
     if (originalNames.has(replacementName)) {
       logger.warn(`${registryName} cargo registry resolves to itself`);
@@ -184,7 +184,7 @@ function resolveRegistryIndex(
     return resolveRegistryIndex(
       replacementName,
       config,
-      originalNames.add(replacementName)
+      originalNames.add(replacementName),
     );
   }
 
@@ -205,7 +205,7 @@ function resolveRegistryIndex(
 export async function extractPackageFile(
   content: string,
   packageFile: string,
-  _config?: ExtractConfig
+  _config?: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   logger.trace(`cargo.extractPackageFile(${packageFile})`);
 
@@ -240,19 +240,19 @@ export async function extractPackageFile(
           targetContent,
           'dependencies',
           cargoRegistries,
-          target
+          target,
         ),
         ...extractFromSection(
           targetContent,
           'dev-dependencies',
           cargoRegistries,
-          target
+          target,
         ),
         ...extractFromSection(
           targetContent,
           'build-dependencies',
           cargoRegistries,
-          target
+          target,
         ),
       ];
       targetDeps = targetDeps.concat(deps);
@@ -267,7 +267,7 @@ export async function extractPackageFile(
       'dependencies',
       cargoRegistries,
       undefined,
-      'workspace.dependencies'
+      'workspace.dependencies',
     );
   }
 
@@ -283,7 +283,7 @@ export async function extractPackageFile(
   }
   const lockFileName = await findLocalSiblingOrParent(
     packageFile,
-    'Cargo.lock'
+    'Cargo.lock',
   );
   const res: PackageFileContent = { deps };
   // istanbul ignore if
diff --git a/lib/modules/manager/cdnurl/extract.ts b/lib/modules/manager/cdnurl/extract.ts
index 45cb2dea7a8f4263906bf9564886f73e72b9d620..5af79ad59fe57eee7a139ad2f547a7d73e09a015 100644
--- a/lib/modules/manager/cdnurl/extract.ts
+++ b/lib/modules/manager/cdnurl/extract.ts
@@ -3,7 +3,7 @@ import { CdnJsDatasource } from '../../datasource/cdnjs';
 import type { PackageDependency, PackageFileContent } from '../types';
 
 export const cloudflareUrlRegex = regEx(
-  /\/\/cdnjs\.cloudflare\.com\/ajax\/libs\/(?<depName>[^/]+?)\/(?<currentValue>[^/]+?)\/(?<asset>[-/_.a-zA-Z0-9]+)/
+  /\/\/cdnjs\.cloudflare\.com\/ajax\/libs\/(?<depName>[^/]+?)\/(?<currentValue>[^/]+?)\/(?<asset>[-/_.a-zA-Z0-9]+)/,
 );
 
 export function extractPackageFile(content: string): PackageFileContent {
diff --git a/lib/modules/manager/circleci/extract.spec.ts b/lib/modules/manager/circleci/extract.spec.ts
index dab382be61e966a5e2aa79e1c09c49fb16d9f35c..4222a22a5423ec54d339e96195ae18dd3c099fde 100644
--- a/lib/modules/manager/circleci/extract.spec.ts
+++ b/lib/modules/manager/circleci/extract.spec.ts
@@ -60,8 +60,8 @@ describe('modules/manager/circleci/extract', () => {
           'jobs:\n' +
             '  build:\n' +
             '    machine:\n' +
-            '      image: android:202102-01'
-        )
+            '      image: android:202102-01',
+        ),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/manager/circleci/extract.ts b/lib/modules/manager/circleci/extract.ts
index 30f59084967f5a510c86925d7e388ce12e0eca3f..fe1e6adf445e226fe95001d7964aaeec6da5a187 100644
--- a/lib/modules/manager/circleci/extract.ts
+++ b/lib/modules/manager/circleci/extract.ts
@@ -7,7 +7,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   try {
@@ -59,7 +59,7 @@ export function extractPackageFile(
             currentValue: dep.currentValue,
             currentDigest: dep.currentDigest,
           },
-          'CircleCI docker image'
+          'CircleCI docker image',
         );
         dep.depType = 'docker';
         dep.versioning = 'docker';
diff --git a/lib/modules/manager/cloudbuild/extract.ts b/lib/modules/manager/cloudbuild/extract.ts
index 84f6e76401736596d91e254f39af09dd9e6344c2..b00c00e8122cabf5fb9005eef39a7a4cc9e55328 100644
--- a/lib/modules/manager/cloudbuild/extract.ts
+++ b/lib/modules/manager/cloudbuild/extract.ts
@@ -6,7 +6,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   try {
@@ -22,7 +22,7 @@ export function extractPackageFile(
               currentValue: dep.currentValue,
               currentDigest: dep.currentDigest,
             },
-            'Cloud Build docker image'
+            'Cloud Build docker image',
           );
 
           deps.push(dep);
@@ -33,12 +33,12 @@ export function extractPackageFile(
     if (err.stack?.startsWith('YAMLException:')) {
       logger.debug(
         { err, packageFile },
-        'YAML exception extracting Docker images from a Cloud Build configuration file.'
+        'YAML exception extracting Docker images from a Cloud Build configuration file.',
       );
     } else {
       logger.debug(
         { err, packageFile },
-        'Error extracting Docker images from a Cloud Build configuration file.'
+        'Error extracting Docker images from a Cloud Build configuration file.',
       );
     }
   }
diff --git a/lib/modules/manager/cocoapods/artifacts.spec.ts b/lib/modules/manager/cocoapods/artifacts.spec.ts
index fcffceaccf586d7e813d96d4ece9b7aa384788ce..11f40812ba8360ed0055cb9054c704bea8c8ac0f 100644
--- a/lib/modules/manager/cocoapods/artifacts.spec.ts
+++ b/lib/modules/manager/cocoapods/artifacts.spec.ts
@@ -60,7 +60,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -73,7 +73,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -90,7 +90,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: '',
         config: {},
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -103,7 +103,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -115,7 +115,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: [],
-      })
+      }),
     );
     fs.findLocalSiblingOrParent.mockResolvedValueOnce('Podfile.lock');
     fs.readLocalFile.mockResolvedValueOnce('Current Podfile');
@@ -125,7 +125,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -139,7 +139,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['Podfile.lock'],
-      })
+      }),
     );
     fs.findLocalSiblingOrParent.mockResolvedValueOnce('Podfile');
     fs.readLocalFile.mockResolvedValueOnce('New Podfile');
@@ -149,7 +149,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: 'plugin "cocoapods-acknowledgements"',
         config,
-      })
+      }),
     ).toMatchObject([{ file: { contents: 'New Podfile' } }]);
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -169,7 +169,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         not_added: ['Pods/New'],
         modified: ['Podfile.lock', 'Pods/Manifest.lock'],
         deleted: ['Pods/Deleted'],
-      })
+      }),
     );
     expect(
       await updateArtifacts({
@@ -177,7 +177,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toMatchObject([
       { file: { type: 'addition', path: 'Podfile.lock' } },
       { file: { type: 'addition', path: 'Pods/Manifest.lock' } },
@@ -201,7 +201,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'Podfile.lock', stderr: 'not found' } },
     ]);
@@ -222,7 +222,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'Podfile.lock', stderr: 'exec exception' } },
     ]);
@@ -242,7 +242,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['Podfile.lock'],
-      })
+      }),
     );
 
     await updateArtifacts({
@@ -285,7 +285,7 @@ describe('modules/manager/cocoapods/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['Podfile.lock'],
-      })
+      }),
     );
 
     await updateArtifacts({
diff --git a/lib/modules/manager/cocoapods/artifacts.ts b/lib/modules/manager/cocoapods/artifacts.ts
index fc9dcb70941ff6397119917b477f4459b89d22df..21636344aa1624a1a9bb633ffedc5d641f97b5a5 100644
--- a/lib/modules/manager/cocoapods/artifacts.ts
+++ b/lib/modules/manager/cocoapods/artifacts.ts
@@ -66,7 +66,7 @@ export async function updateArtifacts({
   }
 
   const match = regEx(/^COCOAPODS: (?<cocoapodsVersion>.*)$/m).exec(
-    existingLockFileContent
+    existingLockFileContent,
   );
   const cocoapods = match?.groups?.cocoapodsVersion ?? null;
 
diff --git a/lib/modules/manager/cocoapods/extract.ts b/lib/modules/manager/cocoapods/extract.ts
index f873584432565e0af1c8369de26d79b0e1a09d25..041c2a8a0aace5e0c0c552164313c5b04372f180 100644
--- a/lib/modules/manager/cocoapods/extract.ts
+++ b/lib/modules/manager/cocoapods/extract.ts
@@ -12,7 +12,7 @@ import type { ParsedLine } from './types';
 const regexMappings = [
   regEx(`^\\s*pod\\s+(['"])(?<spec>[^'"/]+)(/(?<subspec>[^'"]+))?(['"])`),
   regEx(
-    `^\\s*pod\\s+(['"])[^'"]+(['"])\\s*,\\s*(['"])(?<currentValue>[^'"]+)(['"])\\s*$`
+    `^\\s*pod\\s+(['"])[^'"]+(['"])\\s*,\\s*(['"])(?<currentValue>[^'"]+)(['"])\\s*$`,
   ),
   regEx(`,\\s*:git\\s*=>\\s*(['"])(?<git>[^'"]+)(['"])`),
   regEx(`,\\s*:tag\\s*=>\\s*(['"])(?<tag>[^'"]+)(['"])`),
@@ -54,7 +54,7 @@ export function gitDep(parsedLine: ParsedLine): PackageDependency | null {
   const { depName, git, tag } = parsedLine;
 
   const platformMatch = regEx(
-    /[@/](?<platform>github|gitlab)\.com[:/](?<account>[^/]+)\/(?<repo>[^/]+)/
+    /[@/](?<platform>github|gitlab)\.com[:/](?<account>[^/]+)\/(?<repo>[^/]+)/,
   ).exec(coerceString(git));
 
   if (platformMatch?.groups) {
@@ -83,7 +83,7 @@ export function gitDep(parsedLine: ParsedLine): PackageDependency | null {
 
 export async function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   logger.trace(`cocoapods.extractPackageFile(${packageFile})`);
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/composer/artifacts.spec.ts b/lib/modules/manager/composer/artifacts.spec.ts
index 29fa33a66f631a0c7edbc7be77b9d79e0aec9240..4229040a6c11a1de57590fc87748dbad0bef4eed 100644
--- a/lib/modules/manager/composer/artifacts.spec.ts
+++ b/lib/modules/manager/composer/artifacts.spec.ts
@@ -74,7 +74,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -97,7 +97,7 @@ describe('modules/manager/composer/artifacts', () => {
         ],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -165,7 +165,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -209,7 +209,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -248,7 +248,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -281,7 +281,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -321,7 +321,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -360,7 +360,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -400,7 +400,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots[0].options?.env).not.toContainKey('COMPOSER_AUTH');
   });
@@ -432,7 +432,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -493,7 +493,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -534,7 +534,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -598,7 +598,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: authConfig,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -632,7 +632,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -728,7 +728,7 @@ describe('modules/manager/composer/artifacts', () => {
           ...config,
           isLockFileMaintenance: true,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -775,7 +775,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: { ...config, constraints: { composer: '^1.10.0', php: '7.3' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -853,7 +853,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: { ...config, constraints: { composer: '^1.10.0', php: '7.3' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -897,7 +897,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -927,7 +927,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         artifactError: {
@@ -953,7 +953,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([{ artifactError: { lockFile: 'composer.lock', stderr } }]);
     expect(execSnapshots).toBeEmptyArray();
   });
@@ -963,7 +963,7 @@ describe('modules/manager/composer/artifacts', () => {
     fs.readLocalFile.mockResolvedValueOnce('{}');
     fs.writeLocalFile.mockImplementationOnce(() => {
       throw new Error(
-        'vendor/composer/07fe2366/sebastianbergmann-php-code-coverage-c896779/src/Report/Html/Renderer/Template/js/d3.min.js:  write error (disk full?).  Continue? (y/n/^C) '
+        'vendor/composer/07fe2366/sebastianbergmann-php-code-coverage-c896779/src/Report/Html/Renderer/Template/js/d3.min.js:  write error (disk full?).  Continue? (y/n/^C) ',
       );
     });
     await expect(
@@ -972,7 +972,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).rejects.toThrow();
     expect(execSnapshots).toBeEmptyArray();
   });
@@ -994,7 +994,7 @@ describe('modules/manager/composer/artifacts', () => {
           ...config,
           composerIgnorePlatformReqs: undefined,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -1029,7 +1029,7 @@ describe('modules/manager/composer/artifacts', () => {
           ...config,
           composerIgnorePlatformReqs: ['ext-posix', 'ext-sodium'],
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -1049,7 +1049,7 @@ describe('modules/manager/composer/artifacts', () => {
 
   it('installs before running the update when symfony flex is installed', async () => {
     fs.readLocalFile.mockResolvedValueOnce(
-      '{"packages":[{"name":"symfony/flex","version":"1.17.1"}]}'
+      '{"packages":[{"name":"symfony/flex","version":"1.17.1"}]}',
     );
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce('{ }');
@@ -1065,7 +1065,7 @@ describe('modules/manager/composer/artifacts', () => {
         config: {
           ...config,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -1095,7 +1095,7 @@ describe('modules/manager/composer/artifacts', () => {
 
   it('installs before running the update when symfony flex is installed as dev', async () => {
     fs.readLocalFile.mockResolvedValueOnce(
-      '{"packages-dev":[{"name":"symfony/flex","version":"1.17.1"}]}'
+      '{"packages-dev":[{"name":"symfony/flex","version":"1.17.1"}]}',
     );
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce('{ }');
@@ -1111,7 +1111,7 @@ describe('modules/manager/composer/artifacts', () => {
         config: {
           ...config,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -1154,7 +1154,7 @@ describe('modules/manager/composer/artifacts', () => {
         ],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -1179,7 +1179,7 @@ describe('modules/manager/composer/artifacts', () => {
           ...config,
           ignorePlugins: true,
         },
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -1201,7 +1201,7 @@ describe('modules/manager/composer/artifacts', () => {
         updatedDeps: [{ depName: 'foo', newVersion: '1.1.0' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
diff --git a/lib/modules/manager/composer/artifacts.ts b/lib/modules/manager/composer/artifacts.ts
index 56c1179b92c0d64d8f87d8a66d4939550e025312..44cbfca896fa4d4cb8a6e00d857ec5743c2f3590 100644
--- a/lib/modules/manager/composer/artifacts.ts
+++ b/lib/modules/manager/composer/artifacts.ts
@@ -57,7 +57,7 @@ function getAuthJson(): string | null {
       : undefined,
     isArtifactAuthEnabled(gitTagsHostRule)
       ? findGithubToken(gitTagsHostRule)
-      : undefined
+      : undefined,
   );
 
   if (selectedGithubToken) {
@@ -181,7 +181,7 @@ export async function updateArtifacts({
           'update ' +
           updatedDeps
             .map((dep) =>
-              dep.newVersion ? `${dep.depName}:${dep.newVersion}` : dep.depName
+              dep.newVersion ? `${dep.depName}:${dep.newVersion}` : dep.depName,
             )
             .filter(is.string)
             .map((dep) => quote(dep))
@@ -241,7 +241,7 @@ export async function updateArtifacts({
     }
     if (
       err.message?.includes(
-        'Your requirements could not be resolved to an installable set of packages.'
+        'Your requirements could not be resolved to an installable set of packages.',
       )
     ) {
       logger.info('Composer requirements cannot be resolved');
diff --git a/lib/modules/manager/composer/extract.spec.ts b/lib/modules/manager/composer/extract.spec.ts
index 9cccbb73dea575f4c5c9d48564e1fbadb78b2936..c55d8857d4b0563663fda00d8dfa15909cf21da0 100644
--- a/lib/modules/manager/composer/extract.spec.ts
+++ b/lib/modules/manager/composer/extract.spec.ts
@@ -295,7 +295,7 @@ describe('modules/manager/composer/extract', () => {
             }
           }
         `,
-        packageFile
+        packageFile,
       );
       expect(res?.deps).toEqual([
         {
diff --git a/lib/modules/manager/composer/extract.ts b/lib/modules/manager/composer/extract.ts
index 816540e8b0a55b5f01824c22cfe6a75db7c60821..7acaf29a66f0ce028e9efcf44f8ed72ad356f93e 100644
--- a/lib/modules/manager/composer/extract.ts
+++ b/lib/modules/manager/composer/extract.ts
@@ -4,7 +4,7 @@ import { ComposerExtract } from './schema';
 
 export async function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   const res = await ComposerExtract.safeParseAsync({
     content,
diff --git a/lib/modules/manager/composer/range.ts b/lib/modules/manager/composer/range.ts
index 98741ad6211d0cc09ebdb5408f6b9d4740fd169f..36524f4f7a1c418ba250fa656394ce181baac18b 100644
--- a/lib/modules/manager/composer/range.ts
+++ b/lib/modules/manager/composer/range.ts
@@ -10,7 +10,7 @@ export function getRangeStrategy(config: RangeConfig): RangeStrategy {
   if (rangeStrategy === 'bump' && isComplexRange) {
     logger.debug(
       { currentValue },
-      'Replacing bump strategy for complex range with widen'
+      'Replacing bump strategy for complex range with widen',
     );
     return 'widen';
   }
diff --git a/lib/modules/manager/composer/schema.spec.ts b/lib/modules/manager/composer/schema.spec.ts
index d105b70690e9263e501ed16dae41300d42c28209..d8d78707a0dec5f41ac950e67cd474794dafa10b 100644
--- a/lib/modules/manager/composer/schema.spec.ts
+++ b/lib/modules/manager/composer/schema.spec.ts
@@ -15,7 +15,7 @@ describe('modules/manager/composer/schema', () => {
           packagist: false,
           'packagist.org': false,
           foo: 'bar',
-        })
+        }),
       ).toEqual([
         { type: 'composer', url: 'https://wpackagist.org' },
         { name: 'someGit', type: 'git', url: 'https://some-vcs.com' },
@@ -43,7 +43,7 @@ describe('modules/manager/composer/schema', () => {
           { packagist: false },
           { 'packagist.org': false },
           { foo: 'bar' },
-        ])
+        ]),
       ).toEqual([
         { type: 'composer', url: 'https://wpackagist.org' },
         { name: 'someGit', type: 'git', url: 'https://some-vcs.com' },
@@ -73,7 +73,7 @@ describe('modules/manager/composer/schema', () => {
           },
           { name: 'someGit', type: 'vcs', url: 'https://some-vcs.com' },
           { name: 'somePath', type: 'path', url: '/some/path' },
-        ])
+        ]),
       ).toEqual({
         pathRepos: {
           somePath: { name: 'somePath', type: 'path', url: '/some/path' },
@@ -96,7 +96,7 @@ describe('modules/manager/composer/schema', () => {
           someGit: { type: 'vcs', url: 'https://some-vcs.com' },
           somePath: { type: 'path', url: '/some/path' },
           packagist: false,
-        })
+        }),
       ).toEqual({
         pathRepos: {
           somePath: { name: 'somePath', type: 'path', url: '/some/path' },
diff --git a/lib/modules/manager/composer/schema.ts b/lib/modules/manager/composer/schema.ts
index e8e12e3f9c7e7c3331b06242eafe4ebdcdf0dbba..e1d9ffa47fd45553607691aa67bfa7dacaca56f1 100644
--- a/lib/modules/manager/composer/schema.ts
+++ b/lib/modules/manager/composer/schema.ts
@@ -62,7 +62,7 @@ const DisablePackagist = z.object({ type: z.literal('disable-packagist') });
 export type DisablePackagist = z.infer<typeof DisablePackagist>;
 
 const bitbucketUrlRegex = regEx(
-  /^(?:https:\/\/|git@)bitbucket\.org[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)bitbucket\.org[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 
 export const ReposRecord = LooseRecord(z.union([Repo, z.literal(false)]), {
@@ -108,7 +108,7 @@ export const ReposArray = LooseArray(
     onError: ({ error: err }) => {
       logger.debug({ err }, 'Composer: error parsing repositories array');
     },
-  }
+  },
 ).transform((repos) => {
   const result: (NamedRepo | DisablePackagist)[] = [];
   for (let idx = 0; idx < repos.length; idx++) {
@@ -158,7 +158,7 @@ export const Repos = z
 export type Repos = z.infer<typeof Repos>;
 
 const RequireDefs = LooseRecord(z.string().transform((x) => x.trim())).catch(
-  {}
+  {},
 );
 
 export const PackageFile = z
@@ -189,7 +189,7 @@ export const PackageFile = z
       repositories,
       require,
       requireDev,
-    })
+    }),
   );
 export type PackageFile = z.infer<typeof PackageFile>;
 
@@ -210,7 +210,7 @@ export const Lockfile = z
       'plugin-api-version': pluginApiVersion,
       packages,
       'packages-dev': packagesDev,
-    }) => ({ pluginApiVersion, packages, packagesDev })
+    }) => ({ pluginApiVersion, packages, packagesDev }),
   );
 export type Lockfile = z.infer<typeof Lockfile>;
 
@@ -246,9 +246,9 @@ export const ComposerExtract = z
                 logger.debug({ err }, 'Composer: lockfile parsing error');
                 return null;
               }),
-          ])
+          ]),
         ),
-    })
+    }),
   )
   .transform(({ file, lockfile, lockfileName }) => {
     const { composerJsonType, require, requireDev } = file;
@@ -310,7 +310,7 @@ export const ComposerExtract = z
         const gitRepo = gitRepos[depName];
         if (gitRepo) {
           const bitbucketMatchGroups = bitbucketUrlRegex.exec(
-            gitRepo.url
+            gitRepo.url,
           )?.groups;
 
           if (bitbucketMatchGroups) {
diff --git a/lib/modules/manager/composer/update-locked.ts b/lib/modules/manager/composer/update-locked.ts
index 469ac1a81df64b811c00a20873814b73b05c6436..598570fbb7cd8fa726d0f96dd4a4271617c4857c 100644
--- a/lib/modules/manager/composer/update-locked.ts
+++ b/lib/modules/manager/composer/update-locked.ts
@@ -5,19 +5,19 @@ import type { UpdateLockedConfig, UpdateLockedResult } from '../types';
 import { Lockfile } from './schema';
 
 export function updateLockedDependency(
-  config: UpdateLockedConfig
+  config: UpdateLockedConfig,
 ): UpdateLockedResult {
   const { depName, currentVersion, newVersion, lockFile, lockFileContent } =
     config;
   logger.debug(
-    `composer.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+    `composer.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`,
   );
   try {
     const lockfile = Json.pipe(Lockfile).parse(lockFileContent);
     if (
       lockfile?.packages.find(
         ({ name, version }) =>
-          name === depName && composer.equals(version, newVersion)
+          name === depName && composer.equals(version, newVersion),
       )
     ) {
       return { status: 'already-updated' };
diff --git a/lib/modules/manager/composer/utils.spec.ts b/lib/modules/manager/composer/utils.spec.ts
index 46abbf0fcc75fbca068f3937546abeb480e8a57b..22fb07fd2981e3cb35184496d746c02468ec936a 100644
--- a/lib/modules/manager/composer/utils.spec.ts
+++ b/lib/modules/manager/composer/utils.spec.ts
@@ -142,9 +142,9 @@ describe('modules/manager/composer/utils', () => {
 
     it('disables scripts and plugins by default', () => {
       expect(
-        getComposerArguments({}, { toolName: 'composer', constraint: '1.*' })
+        getComposerArguments({}, { toolName: 'composer', constraint: '1.*' }),
       ).toBe(
-        ' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
 
@@ -154,10 +154,10 @@ describe('modules/manager/composer/utils', () => {
           {
             composerIgnorePlatformReqs: [],
           },
-          { toolName: 'composer', constraint: '1.*' }
-        )
+          { toolName: 'composer', constraint: '1.*' },
+        ),
       ).toBe(
-        ' --ignore-platform-reqs --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --ignore-platform-reqs --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
 
@@ -167,10 +167,10 @@ describe('modules/manager/composer/utils', () => {
           {
             composerIgnorePlatformReqs: [],
           },
-          { toolName: 'composer', constraint: '2.1.0' }
-        )
+          { toolName: 'composer', constraint: '2.1.0' },
+        ),
       ).toBe(
-        ' --ignore-platform-reqs --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --ignore-platform-reqs --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
 
@@ -180,10 +180,10 @@ describe('modules/manager/composer/utils', () => {
           {
             composerIgnorePlatformReqs: [],
           },
-          { toolName: 'composer', constraint: '2.2.0' }
-        )
+          { toolName: 'composer', constraint: '2.2.0' },
+        ),
       ).toBe(
-        " --ignore-platform-req='ext-*' --ignore-platform-req='lib-*' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins"
+        " --ignore-platform-req='ext-*' --ignore-platform-req='lib-*' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins",
       );
     });
 
@@ -193,10 +193,10 @@ describe('modules/manager/composer/utils', () => {
           {
             composerIgnorePlatformReqs: [],
           },
-          { toolName: 'composer', constraint: '^2.2' }
-        )
+          { toolName: 'composer', constraint: '^2.2' },
+        ),
       ).toBe(
-        " --ignore-platform-req='ext-*' --ignore-platform-req='lib-*' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins"
+        " --ignore-platform-req='ext-*' --ignore-platform-req='lib-*' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins",
       );
     });
 
@@ -206,10 +206,10 @@ describe('modules/manager/composer/utils', () => {
           {
             composerIgnorePlatformReqs: ['ext-intl'],
           },
-          { toolName: 'composer', constraint: '1.*' }
-        )
+          { toolName: 'composer', constraint: '1.*' },
+        ),
       ).toBe(
-        ' --ignore-platform-req ext-intl --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --ignore-platform-req ext-intl --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
 
@@ -219,10 +219,10 @@ describe('modules/manager/composer/utils', () => {
           {
             composerIgnorePlatformReqs: ['ext-intl', 'ext-icu'],
           },
-          { toolName: 'composer', constraint: '1.*' }
-        )
+          { toolName: 'composer', constraint: '1.*' },
+        ),
       ).toBe(
-        ' --ignore-platform-req ext-intl --ignore-platform-req ext-icu --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --ignore-platform-req ext-intl --ignore-platform-req ext-icu --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
 
@@ -231,7 +231,7 @@ describe('modules/manager/composer/utils', () => {
         allowScripts: true,
       });
       expect(
-        getComposerArguments({}, { toolName: 'composer', constraint: '1.*' })
+        getComposerArguments({}, { toolName: 'composer', constraint: '1.*' }),
       ).toBe(' --no-ansi --no-interaction --no-plugins');
     });
 
@@ -244,10 +244,10 @@ describe('modules/manager/composer/utils', () => {
           {
             ignoreScripts: true,
           },
-          { toolName: 'composer', constraint: '1.*' }
-        )
+          { toolName: 'composer', constraint: '1.*' },
+        ),
       ).toBe(
-        ' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
 
@@ -256,7 +256,7 @@ describe('modules/manager/composer/utils', () => {
         allowPlugins: true,
       });
       expect(
-        getComposerArguments({}, { toolName: 'composer', constraint: '1.*' })
+        getComposerArguments({}, { toolName: 'composer', constraint: '1.*' }),
       ).toBe(' --no-ansi --no-interaction --no-scripts --no-autoloader');
     });
 
@@ -269,10 +269,10 @@ describe('modules/manager/composer/utils', () => {
           {
             ignorePlugins: true,
           },
-          { toolName: 'composer', constraint: '1.*' }
-        )
+          { toolName: 'composer', constraint: '1.*' },
+        ),
       ).toBe(
-        ' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins'
+        ' --no-ansi --no-interaction --no-scripts --no-autoloader --no-plugins',
       );
     });
   });
diff --git a/lib/modules/manager/composer/utils.ts b/lib/modules/manager/composer/utils.ts
index 974ab2678252a7c3a4549ed557bcae5afe1c0ad1..353f2b7d0365c2de6e346832f04be109d809dca9 100644
--- a/lib/modules/manager/composer/utils.ts
+++ b/lib/modules/manager/composer/utils.ts
@@ -15,7 +15,7 @@ const depRequireInstall = new Set(['symfony/flex']);
 
 export function getComposerArguments(
   config: UpdateArtifactsConfig,
-  toolConstraint: ToolConstraint
+  toolConstraint: ToolConstraint,
 ): string {
   let args = '';
 
@@ -47,7 +47,7 @@ export function getComposerArguments(
 }
 
 export function getPhpConstraint(
-  constraints: Record<string, string>
+  constraints: Record<string, string>,
 ): string | null {
   const { php } = constraints;
 
@@ -71,7 +71,7 @@ export function requireComposerDependencyInstallation({
 
 export function extractConstraints(
   { config, require, requireDev }: PackageFile,
-  { pluginApiVersion }: Lockfile
+  { pluginApiVersion }: Lockfile,
 ): Record<string, string> {
   const res: Record<string, string> = { composer: '1.*' };
 
diff --git a/lib/modules/manager/conan/extract.ts b/lib/modules/manager/conan/extract.ts
index 2c8d274525995d589d548352d6bc0447aa5c3d81..98143b9b28b3f520575a3bde236882b30cc735b0 100644
--- a/lib/modules/manager/conan/extract.ts
+++ b/lib/modules/manager/conan/extract.ts
@@ -4,7 +4,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 import { isComment } from './common';
 
 const regex = regEx(
-  `(?<name>[-_a-z0-9]+)/(?<version>[^@\n{*"']+)(?<userChannel>@[-_a-zA-Z0-9]+/[^#\n.{*"' ]+)?#?(?<revision>[-_a-f0-9]+[^\n{*"'])?`
+  `(?<name>[-_a-z0-9]+)/(?<version>[^@\n{*"']+)(?<userChannel>@[-_a-zA-Z0-9]+/[^#\n.{*"' ]+)?#?(?<revision>[-_a-f0-9]+[^\n{*"'])?`,
 );
 
 function setDepType(content: string, originalType: string): string {
@@ -25,7 +25,7 @@ export function extractPackageFile(content: string): PackageFileContent | null {
     (part) =>
       part.includes('python_requires') || // only matches python_requires
       part.includes('build_require') || // matches [build_requires], build_requirements(), and build_requires
-      part.includes('require') // matches [requires], requirements(), and requires
+      part.includes('require'), // matches [requires], requirements(), and requires
   );
 
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/cpanfile/extract.spec.ts b/lib/modules/manager/cpanfile/extract.spec.ts
index e5ebdc5ce1e3fb3290a2e8298918a9a6ae6b3a45..6bdc1809a731714b0876fafbfdd85afa73a168f5 100644
--- a/lib/modules/manager/cpanfile/extract.spec.ts
+++ b/lib/modules/manager/cpanfile/extract.spec.ts
@@ -18,8 +18,8 @@ describe('modules/manager/cpanfile/extract', () => {
         expect(
           extractPackageFile(
             `requires 'perl', ${version as string};`,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
@@ -52,8 +52,8 @@ describe('modules/manager/cpanfile/extract', () => {
             requires 'C', '> 1.3';
             requires 'CC', '> v1.3';
           `,
-          'cpanfile'
-        )
+          'cpanfile',
+        ),
       ).toEqual({
         deps: [
           {
@@ -117,8 +117,8 @@ describe('modules/manager/cpanfile/extract', () => {
             recommends 'Crypt::URandom';
             recommends 'HTTP::XSCookies', '0.000015';
           `,
-          'cpanfile'
-        )
+          'cpanfile',
+        ),
       ).toEqual({
         deps: [
           {
@@ -142,8 +142,8 @@ describe('modules/manager/cpanfile/extract', () => {
             suggests 'Test::MockTime::HiRes', '0.06';
             suggests 'Authen::Simple::Passwd';
           `,
-          'cpanfile'
-        )
+          'cpanfile',
+        ),
       ).toEqual({
         deps: [
           {
@@ -169,8 +169,8 @@ describe('modules/manager/cpanfile/extract', () => {
                 requires "ExtUtils::MakeMaker" => "0";
               };
             `,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
@@ -191,8 +191,8 @@ describe('modules/manager/cpanfile/extract', () => {
                 requires 'Test::More', '0.98';
               };
             `,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
@@ -214,8 +214,8 @@ describe('modules/manager/cpanfile/extract', () => {
                 requires 'Test::Requires';
               };
             `,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
@@ -243,8 +243,8 @@ describe('modules/manager/cpanfile/extract', () => {
                 suggests 'FCGI::ProcManager';
               };
             `,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
@@ -272,8 +272,8 @@ describe('modules/manager/cpanfile/extract', () => {
                 requires "Term::Table" => "0.013";
               };
             `,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
@@ -304,8 +304,8 @@ describe('modules/manager/cpanfile/extract', () => {
         expect(
           extractPackageFile(
             `${shortcut as string} 'Capture::Tiny', '0.12';`,
-            'cpanfile'
-          )
+            'cpanfile',
+          ),
         ).toEqual({
           deps: [
             {
diff --git a/lib/modules/manager/cpanfile/extract.ts b/lib/modules/manager/cpanfile/extract.ts
index 7c45e14973e9202c63a81dc9d5d9b06f7ba1c405..b89253ee9f7287cceec121b0b763a76823eb9748 100644
--- a/lib/modules/manager/cpanfile/extract.ts
+++ b/lib/modules/manager/cpanfile/extract.ts
@@ -3,7 +3,7 @@ import { parse } from './parser';
 
 export function extractPackageFile(
   content: string,
-  _packageFile?: string
+  _packageFile?: string,
 ): PackageFileContent | null {
   const result = parse(content);
   if (!result?.deps.length) {
diff --git a/lib/modules/manager/cpanfile/parser.ts b/lib/modules/manager/cpanfile/parser.ts
index a90275e8f77e5aa563dc49594e9123dd6703d602..2ab216003d41724a9629578e65d2b8ca4dd83304 100644
--- a/lib/modules/manager/cpanfile/parser.ts
+++ b/lib/modules/manager/cpanfile/parser.ts
@@ -25,7 +25,7 @@ const perlVersionMatch = q
   .alt(q.op(','), q.op('=>'))
   .alt(
     q.num<Ctx>((ctx, { value: perlVersion }) => ({ ...ctx, perlVersion })),
-    q.str<Ctx>((ctx, { value: perlVersion }) => ({ ...ctx, perlVersion }))
+    q.str<Ctx>((ctx, { value: perlVersion }) => ({ ...ctx, perlVersion })),
   )
   .op(';')
   .handler((ctx) => {
@@ -49,7 +49,7 @@ const phasedRequiresMatch = q.sym<Ctx>(
   (ctx, { value: phase }) => {
     ctx.tempPhase = phase.replace(/_requires/, '').replace(/author/, 'develop');
     return ctx;
-  }
+  },
 );
 
 // requires 'Foo::Bar';
@@ -71,8 +71,8 @@ const moduleMatch = q
       q.str<Ctx>((ctx, { value }) => {
         const currentValue = value.replace(/^(?:\s*(?:==|>=|>))?\s*v?/, '');
         return { ...ctx, currentValue };
-      })
-    )
+      }),
+    ),
   )
   .op(';')
   .handler((ctx) => {
@@ -108,7 +108,7 @@ const phaseRegex = /^(?:configure|build|test|runtime|develop)/;
 
 const phaseMatch = q.alt<Ctx>(
   q.sym(phaseRegex, (ctx, { value: phase }) => ({ ...ctx, phase })),
-  q.str(phaseRegex, (ctx, { value: phase }) => ({ ...ctx, phase }))
+  q.str(phaseRegex, (ctx, { value: phase }) => ({ ...ctx, phase })),
 );
 
 // on 'configure' => sub {
@@ -138,7 +138,7 @@ const query = q.tree<Ctx>({
 });
 
 export function parse(
-  content: string
+  content: string,
 ): Pick<Ctx, 'deps' | 'perlVersion'> | null {
   return cpanfile.query(content, query, {
     deps: [],
diff --git a/lib/modules/manager/custom/regex/index.spec.ts b/lib/modules/manager/custom/regex/index.spec.ts
index 608ba97fadb830e490ec731094d891b22fb58c7d..1859c097011c87a0f50eb7cd5dbbbb436a69d04b 100644
--- a/lib/modules/manager/custom/regex/index.spec.ts
+++ b/lib/modules/manager/custom/regex/index.spec.ts
@@ -32,15 +32,15 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       dockerfileContent,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(8);
     expect(res?.deps.find((dep) => dep.depName === 'yarn')?.versioning).toBe(
-      'semver'
+      'semver',
     );
     expect(res?.deps.find((dep) => dep.depName === 'gradle')?.versioning).toBe(
-      'maven'
+      'maven',
     );
     expect(res?.deps.filter((dep) => dep.depType === 'final')).toHaveLength(8);
   });
@@ -67,7 +67,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       dockerfileContent,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toBeNull();
   });
@@ -81,14 +81,14 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       dockerfileContent,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
     expect(
       res?.deps.find(
-        (dep) => dep.depName === 'openresty/headers-more-nginx-module'
-      )?.extractVersion
+        (dep) => dep.depName === 'openresty/headers-more-nginx-module',
+      )?.extractVersion,
     ).toBe('^v(?<version>.*)$');
   });
 
@@ -114,7 +114,7 @@ describe('modules/manager/custom/regex/index', () => {
           version: 8.12.13
       `,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toMatchSnapshot({
       deps: [
@@ -138,12 +138,12 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       dockerfileContent,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
     expect(
-      res?.deps.find((dep) => dep.depName === 'gradle')?.registryUrls
+      res?.deps.find((dep) => dep.depName === 'gradle')?.registryUrls,
     ).toEqual(['http://registry.gradle.com/']);
   });
 
@@ -157,7 +157,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       dockerfileContent,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toMatchSnapshot({
       deps: [
@@ -173,7 +173,7 @@ describe('modules/manager/custom/regex/index', () => {
     });
     expect(logger.warn).toHaveBeenCalledWith(
       { value: 'this-is-not-a-valid-url-gradle' },
-      'Invalid regex manager registryUrl'
+      'Invalid regex manager registryUrl',
     );
   });
 
@@ -189,15 +189,15 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       dockerfileContent,
       'Dockerfile',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(2);
     expect(
-      res?.deps.find((dep) => dep.depName === 'nodejs/node')?.versioning
+      res?.deps.find((dep) => dep.depName === 'nodejs/node')?.versioning,
     ).toBe('node');
     expect(res?.deps.find((dep) => dep.depName === 'gradle')?.versioning).toBe(
-      'maven'
+      'maven',
     );
   });
 
@@ -213,7 +213,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       'image: my.old.registry/aRepository/andImage:1.18-alpine',
       'values.yaml',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -231,7 +231,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       '     image: eclipse-temurin:17.0.0-alpine',
       'bitbucket-pipelines.yml',
-      config
+      config,
     );
     expect(res).toMatchObject({
       deps: [
@@ -258,7 +258,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       'name: image: eclipse-temurin:17.0.0-alpine',
       'bitbucket-pipelines.yml',
-      config
+      config,
     );
     expect(res).toMatchObject({
       deps: [
@@ -285,7 +285,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       ansibleYamlContent,
       'ansible.yml',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -306,7 +306,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       ansibleYamlContent,
       'ansible.yml',
-      config
+      config,
     );
     expect(res?.deps).toHaveLength(1);
     expect(res?.deps[0].depName).toBe('docker.io/prom/prometheus');
@@ -325,7 +325,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       ansibleYamlContent,
       'ansible.yml',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -344,7 +344,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleGitlabCiYml,
       '.gitlab-ci.yml',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -362,7 +362,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       'image: eclipse-temurin:17.0.0-alpine',
       'bitbucket-pipelines.yml',
-      config
+      config,
     );
     expect(res).toMatchObject({
       deps: [
@@ -388,7 +388,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       'image: eclipse-temurin@sha256:1234567890abcdef',
       'bitbucket-pipelines.yml',
-      config
+      config,
     );
     expect(res).toMatchObject({
       deps: [
@@ -415,7 +415,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleGitlabCiYml,
       '.gitlab-ci.yml',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -446,7 +446,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleJsonContent,
       'example.json',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -463,7 +463,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleJsonContent,
       'example.json',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(2);
@@ -481,7 +481,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleJsonContent,
       'example.json',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(1);
@@ -495,7 +495,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleJsonContent,
       'example.json',
-      config
+      config,
     );
     expect(res).toBeNull();
   });
@@ -508,7 +508,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleJsonContent,
       'example.json',
-      config
+      config,
     );
     expect(res).toBeNull();
   });
@@ -526,7 +526,7 @@ describe('modules/manager/custom/regex/index', () => {
     const res = await extractPackageFile(
       exampleJsonContent,
       'example.json',
-      config
+      config,
     );
     expect(res).toMatchSnapshot();
     expect(res?.deps).toHaveLength(4);
@@ -549,7 +549,7 @@ describe('modules/manager/custom/regex/index', () => {
     }
     `,
       'build.gradle.kts',
-      config
+      config,
     );
     expect(res).toMatchObject({
       deps: [
@@ -604,7 +604,7 @@ describe('modules/manager/custom/regex/index', () => {
       packageFile,
       newDatasource,
       packageName,
-      depName
+      depName,
     ) => {
       const config: CustomExtractConfig = {
         matchStrings: [
@@ -624,6 +624,6 @@ describe('modules/manager/custom/regex/index', () => {
           },
         ],
       });
-    }
+    },
   );
 });
diff --git a/lib/modules/manager/custom/regex/index.ts b/lib/modules/manager/custom/regex/index.ts
index 2860ba4bc3887fb27ea8a0562beda54ce2eb32a0..603591bf79e9a189951259a754d0fca60281e0fe 100644
--- a/lib/modules/manager/custom/regex/index.ts
+++ b/lib/modules/manager/custom/regex/index.ts
@@ -18,7 +18,7 @@ export const displayName = 'Regex';
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): Result<PackageFileContent | null> {
   let deps: PackageDependency[];
   switch (config.matchStringsStrategy) {
@@ -30,14 +30,14 @@ export function extractPackageFile(
       deps = handleCombination(
         content,
         packageFile,
-        config as RegexManagerConfig
+        config as RegexManagerConfig,
       );
       break;
     case 'recursive':
       deps = handleRecursive(
         content,
         packageFile,
-        config as RegexManagerConfig
+        config as RegexManagerConfig,
       );
       break;
   }
@@ -54,7 +54,7 @@ export function extractPackageFile(
     }
     // copy over templates for autoreplace
     for (const field of validMatchFields.map(
-      (f) => `${f}Template` as keyof RegexManagerTemplates
+      (f) => `${f}Template` as keyof RegexManagerTemplates,
     )) {
       if (config[field]) {
         res[field] = config[field];
diff --git a/lib/modules/manager/custom/regex/strategies.ts b/lib/modules/manager/custom/regex/strategies.ts
index a77c072be4748fade1a3b4e6a6056f7ffdad6f6c..b2b1a3f3f9f60963fee004c66dffbd974e285460 100644
--- a/lib/modules/manager/custom/regex/strategies.ts
+++ b/lib/modules/manager/custom/regex/strategies.ts
@@ -13,7 +13,7 @@ import {
 export function handleAny(
   content: string,
   _packageFile: string,
-  config: RegexManagerConfig
+  config: RegexManagerConfig,
 ): PackageDependency[] {
   return config.matchStrings
     .map((matchString) => regEx(matchString, 'g'))
@@ -26,8 +26,8 @@ export function handleAny(
             /* istanbul ignore next: can this happen? */ {},
           replaceString: matchResult[0],
         },
-        config
-      )
+        config,
+      ),
     )
     .filter(is.truthy)
     .filter(isValidDependency);
@@ -36,7 +36,7 @@ export function handleAny(
 export function handleCombination(
   content: string,
   _packageFile: string,
-  config: RegexManagerConfig
+  config: RegexManagerConfig,
 ): PackageDependency[] {
   const matches = config.matchStrings
     .map((matchString) => regEx(matchString, 'g'))
@@ -63,10 +63,10 @@ export function handleCombination(
 export function handleRecursive(
   content: string,
   packageFile: string,
-  config: RegexManagerConfig
+  config: RegexManagerConfig,
 ): PackageDependency[] {
   const regexes = config.matchStrings.map((matchString) =>
-    regEx(matchString, 'g')
+    regEx(matchString, 'g'),
   );
 
   return processRecursive({
@@ -96,7 +96,7 @@ function processRecursive(parameters: RecursionParameter): PackageDependency[] {
         groups: combinedGroups,
         replaceString: content,
       },
-      config
+      config,
     );
     return result ? [result] : /* istanbul ignore next: can this happen? */ [];
   }
diff --git a/lib/modules/manager/custom/regex/utils.spec.ts b/lib/modules/manager/custom/regex/utils.spec.ts
index a983f127c1a912148f75fb09aac0dd2b10b4127a..6b997f9c783ef1112e4a8eec13b0924dd258a1a4 100644
--- a/lib/modules/manager/custom/regex/utils.spec.ts
+++ b/lib/modules/manager/custom/regex/utils.spec.ts
@@ -7,8 +7,8 @@ describe('modules/manager/custom/regex/utils', () => {
     expect(
       utils.regexMatchAll(
         lazyMatch,
-        '1f699d2bfc99bbbe4c1ed5bb8fc21e6911d69c6e\n'
-      )
+        '1f699d2bfc99bbbe4c1ed5bb8fc21e6911d69c6e\n',
+      ),
     ).toBeArray();
   });
 });
diff --git a/lib/modules/manager/custom/regex/utils.ts b/lib/modules/manager/custom/regex/utils.ts
index 37759af74f8a82ca3eb43e2a063cb538efe7a1f7..48a0dbfe82a7c3d1c6844f72754fb7f356776109 100644
--- a/lib/modules/manager/custom/regex/utils.ts
+++ b/lib/modules/manager/custom/regex/utils.ts
@@ -28,7 +28,7 @@ type ValidMatchFields = (typeof validMatchFields)[number];
 function updateDependency(
   dependency: PackageDependency,
   field: ValidMatchFields,
-  value: string
+  value: string,
 ): void {
   switch (field) {
     case 'registryUrl':
@@ -55,7 +55,7 @@ function updateDependency(
 export function createDependency(
   extractionTemplate: ExtractionTemplate,
   config: RegexManagerConfig,
-  dep?: PackageDependency
+  dep?: PackageDependency,
 ): PackageDependency | null {
   const dependency = dep ?? {};
   const { groups, replaceString } = extractionTemplate;
@@ -70,7 +70,7 @@ export function createDependency(
       } catch (err) {
         logger.warn(
           { template: tmpl },
-          'Error compiling template for custom manager'
+          'Error compiling template for custom manager',
         );
         return null;
       }
@@ -84,7 +84,7 @@ export function createDependency(
 
 export function regexMatchAll(
   regex: RegExp,
-  content: string
+  content: string,
 ): RegExpMatchArray[] {
   const matches: RegExpMatchArray[] = [];
   let matchResult: RegExpMatchArray | null;
@@ -105,14 +105,14 @@ export function regexMatchAll(
 
 export function mergeGroups(
   mergedGroup: Record<string, string>,
-  secondGroup: Record<string, string>
+  secondGroup: Record<string, string>,
 ): Record<string, string> {
   return { ...mergedGroup, ...secondGroup };
 }
 
 export function mergeExtractionTemplate(
   base: ExtractionTemplate,
-  addition: ExtractionTemplate
+  addition: ExtractionTemplate,
 ): ExtractionTemplate {
   return {
     groups: mergeGroups(base.groups, addition.groups),
diff --git a/lib/modules/manager/deps-edn/extract.ts b/lib/modules/manager/deps-edn/extract.ts
index 1684aba2acf2bf45fc717071ac2b7b3f3b04b8b4..d31d9fd9f994a525ca3b87d362f3ee8eeff73e0d 100644
--- a/lib/modules/manager/deps-edn/extract.ts
+++ b/lib/modules/manager/deps-edn/extract.ts
@@ -16,7 +16,7 @@ import type {
 } from './types';
 
 const dependencyRegex = regEx(
-  /^(?<groupId>[a-zA-Z][-_a-zA-Z0-9]*(?:\.[a-zA-Z0-9][-_a-zA-Z0-9]*)*)(?:\/(?<artifactId>[a-zA-Z][-_a-zA-Z0-9]*(?:\.[a-zA-Z0-9][-_a-zA-Z0-9]*)*))?$/
+  /^(?<groupId>[a-zA-Z][-_a-zA-Z0-9]*(?:\.[a-zA-Z0-9][-_a-zA-Z0-9]*)*)(?:\/(?<artifactId>[a-zA-Z][-_a-zA-Z0-9]*(?:\.[a-zA-Z0-9][-_a-zA-Z0-9]*)*))?$/,
 );
 
 function getPackageName(depName: string): string | null {
@@ -33,18 +33,18 @@ function getPackageName(depName: string): string | null {
 }
 
 const githubDependencyRegex = regEx(
-  /^(?:com|io)\.github\.(?<packageName>[^/]+\/[^/]+)$/
+  /^(?:com|io)\.github\.(?<packageName>[^/]+\/[^/]+)$/,
 );
 const gitlabDependencyRegex = regEx(
-  /^(?:com|io)\.gitlab\.(?<packageName>[^/]+\/[^/]+)$/
+  /^(?:com|io)\.gitlab\.(?<packageName>[^/]+\/[^/]+)$/,
 );
 const bitbucketDependencyRegex = regEx(
-  /^(?:org|io)\.bitbucket\.(?<packageName>[^/]+\/[^/]+)$/
+  /^(?:org|io)\.bitbucket\.(?<packageName>[^/]+\/[^/]+)$/,
 );
 
 function resolveGitPackageFromEdnKey(
   dep: PackageDependency,
-  key: string
+  key: string,
 ): void {
   if (dep.datasource) {
     return;
@@ -73,18 +73,18 @@ function resolveGitPackageFromEdnKey(
 }
 
 const githubUrlRegex = regEx(
-  /^(?:https:\/\/|git@)github\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)github\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 const gitlabUrlRegex = regEx(
-  /^(?:https:\/\/|git@)gitlab\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)gitlab\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 const bitbucketUrlRegex = regEx(
-  /^(?:https:\/\/|git@)bitbucket\.org[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)bitbucket\.org[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 
 function resolveGitPackageFromEdnVal(
   dep: PackageDependency,
-  val: ParsedEdnRecord
+  val: ParsedEdnRecord,
 ): void {
   const gitUrl = val['git/url'];
   if (!is.string(gitUrl)) {
@@ -128,7 +128,7 @@ function extractDependency(
   val: ParsedEdnData,
   metadata: ParsedEdnMetadata,
   mavenRegistries: string[],
-  depType?: string
+  depType?: string,
 ): PackageDependency | null {
   if (!is.plainObject(val)) {
     return null;
@@ -185,7 +185,7 @@ function extractSection(
   section: ParsedEdnData,
   metadata: ParsedEdnMetadata,
   mavenRegistries: string[],
-  depType?: string
+  depType?: string,
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
   if (is.plainObject(section)) {
@@ -195,7 +195,7 @@ function extractSection(
         val,
         metadata,
         mavenRegistries,
-        depType
+        depType,
       );
       if (dep) {
         deps.push(dep);
@@ -244,16 +244,16 @@ export function extractPackageFile(content: string): PackageFileContent | null {
             aliasSection['extra-deps'],
             metadata,
             mavenRegistries,
-            depType
-          )
+            depType,
+          ),
         );
         deps.push(
           ...extractSection(
             aliasSection['override-deps'],
             metadata,
             mavenRegistries,
-            depType
-          )
+            depType,
+          ),
         );
       }
     }
diff --git a/lib/modules/manager/docker-compose/extract.spec.ts b/lib/modules/manager/docker-compose/extract.spec.ts
index b84f0b169d79be7b9185fefbf6a16b082d27032e..01a4caf31c9d80fbec0c77a8d5e4713fcd5534bd 100644
--- a/lib/modules/manager/docker-compose/extract.spec.ts
+++ b/lib/modules/manager/docker-compose/extract.spec.ts
@@ -68,7 +68,7 @@ describe('modules/manager/docker-compose/extract', () => {
           registryAliases: {
             'quay.io': 'my-quay-mirror.registry.com',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -98,7 +98,7 @@ describe('modules/manager/docker-compose/extract', () => {
           registryAliases: {
             'index.docker.io': 'my-docker-mirror.registry.com',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -129,7 +129,7 @@ describe('modules/manager/docker-compose/extract', () => {
             'quay.io': 'my-quay-mirror.registry.com',
             'my-quay-mirror.registry.com': 'quay.io',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
diff --git a/lib/modules/manager/docker-compose/extract.ts b/lib/modules/manager/docker-compose/extract.ts
index a61843992d883ac7d6dd07f7a691b852392170f9..67ffe6db2356df9949e4574c8d2c1c9a4f3e8dd0 100644
--- a/lib/modules/manager/docker-compose/extract.ts
+++ b/lib/modules/manager/docker-compose/extract.ts
@@ -17,7 +17,7 @@ class LineMapper {
 
   pluckLineNumber(imageName: string | undefined): number | null {
     const lineMeta = this.imageLines.find(
-      ({ line, used }) => !used && imageName && line.includes(imageName)
+      ({ line, used }) => !used && imageName && line.includes(imageName),
     );
     // istanbul ignore if
     if (!lineMeta) {
@@ -31,7 +31,7 @@ class LineMapper {
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  extractConfig: ExtractConfig
+  extractConfig: ExtractConfig,
 ): PackageFileContent | null {
   logger.debug(`docker-compose.extractPackageFile(${packageFile})`);
   let config: DockerComposeConfig;
@@ -41,21 +41,21 @@ export function extractPackageFile(
     if (!config) {
       logger.debug(
         { packageFile },
-        'Null config when parsing Docker Compose content'
+        'Null config when parsing Docker Compose content',
       );
       return null;
     }
     if (typeof config !== 'object') {
       logger.debug(
         { packageFile, type: typeof config },
-        'Unexpected type for Docker Compose content'
+        'Unexpected type for Docker Compose content',
       );
       return null;
     }
   } catch (err) {
     logger.debug(
       { err, packageFile },
-      `Parsing Docker Compose config YAML failed`
+      `Parsing Docker Compose config YAML failed`,
     );
     return null;
   }
@@ -72,7 +72,7 @@ export function extractPackageFile(
     // Image name/tags for services are only eligible for update if they don't
     // use variables and if the image is not built locally
     const deps = Object.values(
-      services || /* istanbul ignore next: can never happen */ {}
+      services || /* istanbul ignore next: can never happen */ {},
     )
       .filter((service) => is.string(service?.image) && !service?.build)
       .map((service) => {
diff --git a/lib/modules/manager/dockerfile/extract.spec.ts b/lib/modules/manager/dockerfile/extract.spec.ts
index 440ff682129930ad9bb840ef64747a7bb9510595..606ca371acdea5c34798b2585f6581b5c4d31b2d 100644
--- a/lib/modules/manager/dockerfile/extract.spec.ts
+++ b/lib/modules/manager/dockerfile/extract.spec.ts
@@ -67,7 +67,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node@sha256:eb85fc5b1198f5e1ec025ea07586bdbbf397e7d82df66c90d7511f533517e063\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -89,7 +89,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:8.9.0@sha256:eb85fc5b1198f5e1ec025ea07586bdbbf397e7d82df66c90d7511f533517e063\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -111,7 +111,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:8.9.0-alpine as base\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -131,7 +131,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         '# some comment\n# another\n\nFROM node\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -151,7 +151,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM registry2.something.info/node:8\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -171,7 +171,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM registry2.something.info/node:8-alpine\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -191,7 +191,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM registry2.something.info:5005/node:8\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -211,7 +211,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM registry2.something.info:5005/node\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -264,7 +264,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM registry2.something.info/someaccount/node:8\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -284,7 +284,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM    registry.allmine.info:5005/node:8.7.0\n\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -304,7 +304,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:6.12.3 as frontend\n\n# comment\nENV foo=bar\nFROM python:3.6-slim\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -339,7 +339,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:6.12.3 as frontend\n\n# comment\nENV foo=bar\nFROM frontend\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -359,7 +359,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM scratch\nCOPY --from=gcr.io/k8s-skaffold/skaffold:v0.11.0 /usr/bin/skaffold /usr/bin/skaffold\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -379,7 +379,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:6.12.3 as frontend\n\n# comment\nENV foo=bar\nCOPY --from=frontend /usr/bin/node /usr/bin/node\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -399,7 +399,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:6.12.3 as frontend\n\n# comment\nENV foo=bar\nCOPY --from=0 /usr/bin/node /usr/bin/node\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -419,7 +419,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM node:8.15.1-alpine as skippedfrom\nFROM golang:1.7.3 as builder\n\n# comment\nWORKDIR /go/src/github.com/alexellis/href-counter/\nRUN go get -d -v golang.org/x/net/html  \nCOPY app.go    .\nRUN CGO_ENABLED=0 GOOS=linux go build -a -installsuffix cgo -o app .\n\nFROM alpine:latest  \nRUN apk --no-cache add ca-certificates\nWORKDIR /root/\nCOPY --from=builder /go/src/github.com/alexellis/href-counter/app .\nCMD ["./app"]\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -691,7 +691,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'FROM alpine:3.5\n\nRUN something \\',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -727,7 +727,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG img_base\nFROM $img_base\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -745,7 +745,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG patch1=""\nARG patch2=\nFROM nginx:1.20${patch1}$patch2\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -765,7 +765,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG\tVARIANT="1.60.0-bullseye" \nFROM\trust:${VARIANT}\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -785,7 +785,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG IMAGE_VERSION=${IMAGE_VERSION:-ubuntu:xenial}\nfrom ${IMAGE_VERSION} as base\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -806,7 +806,7 @@ describe('modules/manager/dockerfile/extract', () => {
         'ARG sha_digest=sha256:ab37242e81cbc031b2600eef4440fe87055a05c14b40686df85078cc5086c98f\n' +
           '      FROM gcr.io/distroless/java17@$sha_digest',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -828,7 +828,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG base=nginx:1.19\nFROM $base as stage1\nARG base=nginx:1.20\nFROM --platform=amd64 $base as stage2\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -858,7 +858,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG CUDA=9.2\nARG LINUX_VERSION ubuntu16.04\nFROM nvidia/cuda:${CUDA}-devel-${LINUX_VERSION}\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -878,7 +878,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG img="scratch"\nFROM $img as base\n',
         '',
-        {}
+        {},
       );
       expect(res).toBeNull();
     });
@@ -928,7 +928,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         '# dummy\n# escape = `\n\nFROM\\\nnginx:1.20',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -1008,7 +1008,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG REF_NAME=${REF_NAME:-"gcr.io/distroless/static-debian11:nonroot@sha256:abc"}\nfrom ${REF_NAME}',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -1029,7 +1029,7 @@ describe('modules/manager/dockerfile/extract', () => {
       const res = extractPackageFile(
         'ARG IMAGE_TAG=14.04\r\n#something unrelated\r\nFROM ubuntu:$IMAGE_TAG@sha256:abc\r\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -1057,7 +1057,7 @@ describe('modules/manager/dockerfile/extract', () => {
           'ARG DUMMY_PREFIX=\n' +
           'FROM ${DUMMY_PREFIX}${NODE_IMAGE_HOST}${NODE_IMAGE_NAME}:${NODE_IMAGE_TAG}${NODE_IMAGE_HASH} as yarn\n',
         '',
-        {}
+        {},
       )?.deps;
       expect(res).toEqual([
         {
@@ -1086,7 +1086,7 @@ describe('modules/manager/dockerfile/extract', () => {
     const res = extractPackageFile(
       'FROM quay.io/myName/myPackage:0.6.2\n',
       '',
-      {}
+      {},
     );
     expect(res).toEqual({
       deps: [
@@ -1113,7 +1113,7 @@ describe('modules/manager/dockerfile/extract', () => {
           'quay.io': 'my-quay-mirror.registry.com',
           'index.docker.io': 'my-docker-mirror.registry.com',
         },
-      }
+      },
     );
     expect(res).toEqual({
       deps: [
@@ -1140,7 +1140,7 @@ describe('modules/manager/dockerfile/extract', () => {
           'quay.io': 'my-quay-mirror.registry.com',
           'index.docker.io': 'my-docker-mirror.registry.com',
         },
-      }
+      },
     );
     expect(res).toEqual({
       deps: [
@@ -1200,7 +1200,7 @@ describe('modules/manager/dockerfile/extract', () => {
       });
 
       const res4 = getDep(
-        '${REF_NAME:-"gcr.io/distroless/static-debian11:nonroot@sha256:abc"}'
+        '${REF_NAME:-"gcr.io/distroless/static-debian11:nonroot@sha256:abc"}',
       );
       expect(res4).toEqual({
         autoReplaceStringTemplate:
@@ -1213,7 +1213,7 @@ describe('modules/manager/dockerfile/extract', () => {
       });
 
       const res5 = getDep(
-        '${REF_NAME:+-gcr.io/distroless/static-debian11:nonroot@sha256:abc}'
+        '${REF_NAME:+-gcr.io/distroless/static-debian11:nonroot@sha256:abc}',
       );
       expect(res5).toEqual({
         autoReplaceStringTemplate:
@@ -1307,7 +1307,7 @@ describe('modules/manager/dockerfile/extract', () => {
           ...dep,
           replaceString: imageName,
         });
-      }
+      },
     );
   });
 
diff --git a/lib/modules/manager/dockerfile/extract.ts b/lib/modules/manager/dockerfile/extract.ts
index b6f0fe8a220d516cb70f9af34f29cf27c977eed7..2841052c8d6358e0e0871602f3e65ab038cf2b71 100644
--- a/lib/modules/manager/dockerfile/extract.ts
+++ b/lib/modules/manager/dockerfile/extract.ts
@@ -15,7 +15,7 @@ const variableMarker = '$';
 export function extractVariables(image: string): Record<string, string> {
   const variables: Record<string, string> = {};
   const variableRegex = regEx(
-    /(?<fullvariable>\\?\$(?<simplearg>\w+)|\\?\${(?<complexarg>\w+)(?::.+?)?}+)/gi
+    /(?<fullvariable>\\?\$(?<simplearg>\w+)|\\?\${(?<complexarg>\w+)(?::.+?)?}+)/gi,
   );
 
   let match: RegExpExecArray | null;
@@ -44,7 +44,7 @@ function getAutoReplaceTemplate(dep: PackageDependency): string | undefined {
   if (dep.currentDigest) {
     template = template?.replace(
       dep.currentDigest,
-      '{{#if newDigest}}{{newDigest}}{{/if}}'
+      '{{#if newDigest}}{{newDigest}}{{/if}}',
     );
   }
 
@@ -55,7 +55,7 @@ function processDepForAutoReplace(
   dep: PackageDependency,
   lineNumberRanges: number[][],
   lines: string[],
-  linefeed: string
+  linefeed: string,
 ): void {
   const lineNumberRangesToReplace: number[][] = [];
   for (const lineNumberRange of lineNumberRanges) {
@@ -88,7 +88,7 @@ function processDepForAutoReplace(
 
   const unfoldedLineNumbers = Array.from(
     { length: maxLine - minLine + 1 },
-    (_v, k) => k + minLine
+    (_v, k) => k + minLine,
   );
 
   dep.replaceString = unfoldedLineNumbers
@@ -165,7 +165,7 @@ const quayRegex = regEx(/^quay\.io(?::[1-9][0-9]{0,4})?/i);
 export function getDep(
   currentFrom: string | null | undefined,
   specifyReplaceString = true,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): PackageDependency {
   if (!is.string(currentFrom) || is.emptyStringOrWhitespace(currentFrom)) {
     return {
@@ -177,7 +177,7 @@ export function getDep(
   for (const [name, value] of Object.entries(registryAliases ?? {})) {
     const escapedName = escapeRegExp(name);
     const groups = regEx(`(?<prefix>${escapedName})/(?<depName>.+)`).exec(
-      currentFrom
+      currentFrom,
     )?.groups;
     if (groups) {
       const dep = {
@@ -242,7 +242,7 @@ export function getDep(
 export function extractPackageFile(
   content: string,
   _packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   const stageNames: string[] = [];
@@ -260,7 +260,7 @@ export function extractPackageFile(
 
     if (lookForEscapeChar) {
       const directivesMatch = regEx(
-        /^[ \t]*#[ \t]*(?<directive>syntax|escape)[ \t]*=[ \t]*(?<escapeChar>\S)/i
+        /^[ \t]*#[ \t]*(?<directive>syntax|escape)[ \t]*=[ \t]*(?<escapeChar>\S)/i,
       ).exec(instruction);
       if (!directivesMatch) {
         lookForEscapeChar = false;
@@ -287,7 +287,7 @@ export function extractPackageFile(
       '^[ \\t]*ARG(?:' +
         escapeChar +
         '[ \\t]*\\r?\\n| |\\t|#.*?\\r?\\n)+(?<name>\\w+)[ =](?<value>\\S*)',
-      'im'
+      'im',
     );
     const argMatch = argRegex.exec(instruction);
     if (argMatch?.groups?.name) {
@@ -310,7 +310,7 @@ export function extractPackageFile(
         '[ \\t]*\\r?\\n| |\\t|#.*?\\r?\\n|--platform=\\S+)+(?<image>\\S+)(?:(?:' +
         escapeChar +
         '[ \\t]*\\r?\\n| |\\t|#.*?\\r?\\n)+as[ \\t]+(?<name>\\S+))?',
-      'im'
+      'im',
     ); // TODO #12875 complex for re2 has too many not supported groups
     const fromMatch = instruction.match(fromRegex);
     if (fromMatch?.groups?.image) {
@@ -330,7 +330,7 @@ export function extractPackageFile(
 
       if (fromMatch.groups?.name) {
         logger.debug(
-          `Found a multistage build stage name: ${fromMatch.groups.name}`
+          `Found a multistage build stage name: ${fromMatch.groups.name}`,
         );
         stageNames.push(fromMatch.groups.name);
       }
@@ -347,7 +347,7 @@ export function extractPackageFile(
             currentValue: dep.currentValue,
             currentDigest: dep.currentDigest,
           },
-          'Dockerfile FROM'
+          'Dockerfile FROM',
         );
         deps.push(dep);
       }
@@ -357,20 +357,20 @@ export function extractPackageFile(
       '^[ \\t]*COPY(?:' +
         escapeChar +
         '[ \\t]*\\r?\\n| |\\t|#.*?\\r?\\n|--[a-z]+=[a-zA-Z0-9_.:-]+?)+--from=(?<image>\\S+)',
-      'im'
+      'im',
     ); // TODO #12875 complex for re2 has too many not supported groups
     const copyFromMatch = instruction.match(copyFromRegex);
     if (copyFromMatch?.groups?.image) {
       if (stageNames.includes(copyFromMatch.groups.image)) {
         logger.debug(
           { image: copyFromMatch.groups.image },
-          'Skipping alias COPY --from'
+          'Skipping alias COPY --from',
         );
       } else if (Number.isNaN(Number(copyFromMatch.groups.image))) {
         const dep = getDep(
           copyFromMatch.groups.image,
           true,
-          config.registryAliases
+          config.registryAliases,
         );
         const lineNumberRanges: number[][] = [
           [lineNumberInstrStart, lineNumber],
@@ -382,13 +382,13 @@ export function extractPackageFile(
             currentValue: dep.currentValue,
             currentDigest: dep.currentDigest,
           },
-          'Dockerfile COPY --from'
+          'Dockerfile COPY --from',
         );
         deps.push(dep);
       } else {
         logger.debug(
           { image: copyFromMatch.groups.image },
-          'Skipping index reference COPY --from'
+          'Skipping index reference COPY --from',
         );
       }
     }
diff --git a/lib/modules/manager/droneci/extract.ts b/lib/modules/manager/droneci/extract.ts
index 1ceead4c579ed0c48ad66338fda32cd426c7b067..3bd070dd173e050487098a9be0be500ed241f3f6 100644
--- a/lib/modules/manager/droneci/extract.ts
+++ b/lib/modules/manager/droneci/extract.ts
@@ -10,7 +10,7 @@ import type {
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   try {
@@ -19,7 +19,7 @@ export function extractPackageFile(
       const line = lines[lineNumber];
 
       const firstLineMatch = regEx(
-        /^(?<leading>\s* image:\s*)(?<replaceString>['"](?<currentFrom>[^\s'"]+)\\)$/
+        /^(?<leading>\s* image:\s*)(?<replaceString>['"](?<currentFrom>[^\s'"]+)\\)$/,
       ).exec(line);
 
       if (firstLineMatch?.groups) {
@@ -29,14 +29,14 @@ export function extractPackageFile(
         for (let i = lineNumber + 1; i < lines.length; i += 1) {
           const internalLine = lines[i];
           const middleLineMatch = regEx(
-            /^(?<replaceString>\s*(?<currentFrom>[^\s'"]+)\\)$/
+            /^(?<replaceString>\s*(?<currentFrom>[^\s'"]+)\\)$/,
           ).exec(internalLine);
           if (middleLineMatch?.groups) {
             currentFrom += middleLineMatch.groups.currentFrom;
             replaceString += '\n' + middleLineMatch.groups.replaceString;
           } else {
             const finalLineMatch = regEx(
-              /^(?<replaceString>\s*(?<currentFrom>[^\s'"]+)['"])$/
+              /^(?<replaceString>\s*(?<currentFrom>[^\s'"]+)['"])$/,
             ).exec(internalLine);
             if (finalLineMatch?.groups) {
               currentFrom += finalLineMatch.groups.currentFrom;
@@ -59,13 +59,13 @@ export function extractPackageFile(
         }
       } else {
         const match = regEx(
-          /^\s* image:\s*'?"?(?<currentFrom>[^\s'"]+)'?"?\s*$/
+          /^\s* image:\s*'?"?(?<currentFrom>[^\s'"]+)'?"?\s*$/,
         ).exec(line);
         if (match?.groups) {
           const dep = getDep(
             match.groups.currentFrom,
             true,
-            config.registryAliases
+            config.registryAliases,
           );
           dep.depType = 'docker';
           deps.push(dep);
diff --git a/lib/modules/manager/fleet/extract.spec.ts b/lib/modules/manager/fleet/extract.spec.ts
index 6f25008456644b395ee9e275299725f92c12524e..70229b856e17448e5f19a769e4b19f264839ec8f 100644
--- a/lib/modules/manager/fleet/extract.spec.ts
+++ b/lib/modules/manager/fleet/extract.spec.ts
@@ -3,7 +3,7 @@ import { extractPackageFile } from '.';
 
 const validFleetYaml = Fixtures.get('valid_fleet.yaml');
 const validFleetYamlWithCustom = Fixtures.get(
-  'valid_fleet_helm_target_customization.yaml'
+  'valid_fleet_helm_target_customization.yaml',
 );
 const inValidFleetYaml = Fixtures.get('invalid_fleet.yaml');
 
@@ -36,7 +36,7 @@ describe('modules/manager/fleet/extract', () => {
           `apiVersion: v1
 kind: Fleet
 < `,
-          'fleet.yaml'
+          'fleet.yaml',
         );
 
         expect(result).toBeNull();
@@ -67,7 +67,7 @@ kind: Fleet
       it('should parse valid configuration with target customization', () => {
         const result = extractPackageFile(
           validFleetYamlWithCustom,
-          'fleet.yaml'
+          'fleet.yaml',
         );
 
         expect(result).not.toBeNull();
@@ -162,7 +162,7 @@ kind: Fleet
           `apiVersion: v1
  kind: GitRepo
  < `,
-          'test.yaml'
+          'test.yaml',
         );
 
         expect(result).toBeNull();
diff --git a/lib/modules/manager/fleet/extract.ts b/lib/modules/manager/fleet/extract.ts
index 394d9185ff171ac851aadce748bc42f8bfcb7ff1..65fa42d8de2ea81856864ab65fcfa5925ee7418b 100644
--- a/lib/modules/manager/fleet/extract.ts
+++ b/lib/modules/manager/fleet/extract.ts
@@ -110,7 +110,7 @@ function extractFleetFile(doc: FleetFile): PackageDependency[] {
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   if (!content) {
     return null;
diff --git a/lib/modules/manager/flux/extract.spec.ts b/lib/modules/manager/flux/extract.spec.ts
index 6a2291b8973c844d6952dcf90db66fddbe32bc0d..958ab562466ad7afb1a0c1c5c31473f1a6a4c718 100644
--- a/lib/modules/manager/flux/extract.spec.ts
+++ b/lib/modules/manager/flux/extract.spec.ts
@@ -24,7 +24,7 @@ describe('modules/manager/flux/extract', () => {
     it('extracts multiple resources', () => {
       const result = extractPackageFile(
         Fixtures.get('multidoc.yaml'),
-        'multidoc.yaml'
+        'multidoc.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -66,7 +66,7 @@ describe('modules/manager/flux/extract', () => {
       ({ filepath }) => {
         const result = extractPackageFile(
           Fixtures.get('flux-system/gotk-components.yaml'),
-          filepath
+          filepath,
         );
         expect(result).toEqual({
           deps: [
@@ -81,13 +81,13 @@ describe('modules/manager/flux/extract', () => {
             },
           ],
         });
-      }
+      },
     );
 
     it('considers components optional in system manifests', () => {
       const result = extractPackageFile(
         `# Flux Version: v0.27.0`,
-        'clusters/my-cluster/flux-system/gotk-components.yaml'
+        'clusters/my-cluster/flux-system/gotk-components.yaml',
       );
       expect(result).not.toBeNull();
       expect(result?.deps[0].managerData?.components).toBeUndefined();
@@ -96,7 +96,7 @@ describe('modules/manager/flux/extract', () => {
     it('ignores system manifests without a version', () => {
       const result = extractPackageFile(
         'not actually a system manifest!',
-        'clusters/my-cluster/flux-system/gotk-components.yaml'
+        'clusters/my-cluster/flux-system/gotk-components.yaml',
       );
       expect(result).toBeNull();
     });
@@ -104,7 +104,7 @@ describe('modules/manager/flux/extract', () => {
     it('extracts releases without repositories', () => {
       const result = extractPackageFile(
         Fixtures.get('helmRelease.yaml'),
-        'helmRelease.yaml'
+        'helmRelease.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -136,7 +136,7 @@ describe('modules/manager/flux/extract', () => {
           apiVersion: source.toolkit.fluxcd.io/v1beta1
           kind: HelmRepository
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -166,7 +166,7 @@ describe('modules/manager/flux/extract', () => {
                   name: sealed-secrets
                 version: "2.0.2"
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toBeNull();
     });
@@ -192,7 +192,7 @@ describe('modules/manager/flux/extract', () => {
                   name: sealed-secrets
                 version: "2.0.2"
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -221,7 +221,7 @@ describe('modules/manager/flux/extract', () => {
                 chart: sealed-secrets
                 version: "2.0.2"
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -251,7 +251,7 @@ describe('modules/manager/flux/extract', () => {
                   name: sealed-secrets
                 version: "2.0.2"
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -275,7 +275,7 @@ describe('modules/manager/flux/extract', () => {
           metadata:
             name: test
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -300,7 +300,7 @@ describe('modules/manager/flux/extract', () => {
             name: sealed-secrets
             namespace: kube-system
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -325,7 +325,7 @@ describe('modules/manager/flux/extract', () => {
           spec:
             url: https://github.com/renovatebot/renovate
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -347,7 +347,7 @@ describe('modules/manager/flux/extract', () => {
               commit: c93154b
             url: https://github.com/renovatebot/renovate
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -376,7 +376,7 @@ describe('modules/manager/flux/extract', () => {
               tag: v11.35.9
             url: git@github.com:renovatebot/renovate.git
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -404,7 +404,7 @@ describe('modules/manager/flux/extract', () => {
               tag: v11.35.9
             url: https://github.com/renovatebot/renovate
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -432,7 +432,7 @@ describe('modules/manager/flux/extract', () => {
               tag: 1.2.3
             url: https://gitlab.com/renovatebot/renovate
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -460,7 +460,7 @@ describe('modules/manager/flux/extract', () => {
               tag: 2020.5.6+staging.ze
             url: https://bitbucket.org/renovatebot/renovate
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -488,7 +488,7 @@ describe('modules/manager/flux/extract', () => {
               tag: "7.56.4_p1"
             url: https://example.com/renovatebot/renovate
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -514,7 +514,7 @@ describe('modules/manager/flux/extract', () => {
         spec:
           url: oci://ghcr.io/kyverno/manifests/kyverno
       `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -539,7 +539,7 @@ describe('modules/manager/flux/extract', () => {
             tag: v1.8.2
           url: oci://ghcr.io/kyverno/manifests/kyverno
       `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -569,7 +569,7 @@ describe('modules/manager/flux/extract', () => {
             digest: sha256:761c3189c482d0f1f0ad3735ca05c4c398cae201d2169f6645280c7b7b2ce6fc
           url: oci://ghcr.io/kyverno/manifests/kyverno
       `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -596,7 +596,7 @@ describe('modules/manager/flux/extract', () => {
             tag: v1.8.2@sha256:761c3189c482d0f1f0ad3735ca05c4c398cae201d2169f6645280c7b7b2ce6fc
           url: oci://ghcr.io/kyverno/manifests/kyverno
       `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -629,7 +629,7 @@ describe('modules/manager/flux/extract', () => {
             tag: v1.8.2
           url: oci://ghcr.io/kyverno/manifests/kyverno
       `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toEqual({
         deps: [
@@ -649,7 +649,7 @@ describe('modules/manager/flux/extract', () => {
           kind: SomethingElse
           apiVersion: helm.toolkit.fluxcd.io/v2beta1
         `,
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toBeNull();
     });
@@ -657,7 +657,7 @@ describe('modules/manager/flux/extract', () => {
     it('ignores resources without a kind', () => {
       const result = extractPackageFile(
         'apiVersion: helm.toolkit.fluxcd.io/v2beta1',
-        'test.yaml'
+        'test.yaml',
       );
       expect(result).toBeNull();
     });
diff --git a/lib/modules/manager/flux/extract.ts b/lib/modules/manager/flux/extract.ts
index bb7067ab9af97ea00740edf547305e2173164f1d..89c260319a705aaa86035ea67de7452443d98d08 100644
--- a/lib/modules/manager/flux/extract.ts
+++ b/lib/modules/manager/flux/extract.ts
@@ -30,7 +30,7 @@ import type {
 
 function readManifest(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): FluxManifest | null {
   if (isSystemManifest(packageFile)) {
     const versionMatch = regEx(systemManifestHeaderRegex).exec(content);
@@ -102,18 +102,18 @@ function readManifest(
 }
 
 const githubUrlRegex = regEx(
-  /^(?:https:\/\/|git@)github\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)github\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 const gitlabUrlRegex = regEx(
-  /^(?:https:\/\/|git@)gitlab\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)gitlab\.com[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 const bitbucketUrlRegex = regEx(
-  /^(?:https:\/\/|git@)bitbucket\.org[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/
+  /^(?:https:\/\/|git@)bitbucket\.org[/:](?<packageName>[^/]+\/[^/]+?)(?:\.git)?$/,
 );
 
 function resolveGitRepositoryPerSourceTag(
   dep: PackageDependency,
-  gitUrl: string
+  gitUrl: string,
 ): void {
   const githubMatchGroups = githubUrlRegex.exec(gitUrl)?.groups;
   if (githubMatchGroups) {
@@ -147,7 +147,7 @@ function resolveGitRepositoryPerSourceTag(
 }
 
 function resolveSystemManifest(
-  manifest: SystemFluxManifest
+  manifest: SystemFluxManifest,
 ): PackageDependency<FluxManagerData>[] {
   return [
     {
@@ -163,7 +163,7 @@ function resolveSystemManifest(
 
 function resolveResourceManifest(
   manifest: ResourceFluxManifest,
-  helmRepositories: HelmRepository[]
+  helmRepositories: HelmRepository[],
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
   for (const resource of manifest.resources) {
@@ -181,7 +181,7 @@ function resolveResourceManifest(
             rep.metadata.name === resource.spec.chart.spec.sourceRef.name &&
             rep.metadata.namespace ===
               (resource.spec.chart.spec.sourceRef.namespace ??
-                resource.metadata?.namespace)
+                resource.metadata?.namespace),
         );
         if (matchingRepositories.length) {
           dep.registryUrls = matchingRepositories
@@ -264,7 +264,7 @@ function resolveResourceManifest(
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent<FluxManagerData> | null {
   const manifest = readManifest(content, packageFile);
   if (!manifest) {
@@ -293,7 +293,7 @@ export function extractPackageFile(
 
 export async function extractAllPackageFiles(
   _config: ExtractConfig,
-  packageFiles: string[]
+  packageFiles: string[],
 ): Promise<PackageFile<FluxManagerData>[] | null> {
   const manifests: FluxManifest[] = [];
   const results: PackageFile<FluxManagerData>[] = [];
diff --git a/lib/modules/manager/fvm/extract.spec.ts b/lib/modules/manager/fvm/extract.spec.ts
index a078fef294d6464ff773e9eb0c05d3a176eb6ba7..2211d90eb7c14a7fe876f9fc93931be6973e5921 100644
--- a/lib/modules/manager/fvm/extract.spec.ts
+++ b/lib/modules/manager/fvm/extract.spec.ts
@@ -6,7 +6,7 @@ describe('modules/manager/fvm/extract', () => {
   describe('extractPackageFile()', () => {
     it('returns null for invalid json', () => {
       expect(
-        extractPackageFile('clearly invalid json', packageFile)
+        extractPackageFile('clearly invalid json', packageFile),
       ).toBeNull();
     });
 
@@ -18,15 +18,15 @@ describe('modules/manager/fvm/extract', () => {
       expect(
         extractPackageFile(
           '{"flutterSdkVersion": 2.1, "flavors": {}}',
-          packageFile
-        )
+          packageFile,
+        ),
       ).toBeNull();
     });
 
     it('returns a result', () => {
       const res = extractPackageFile(
         '{"flutterSdkVersion": "2.10.1", "flavors": {}}',
-        packageFile
+        packageFile,
       );
       expect(res?.deps).toEqual([
         {
@@ -41,7 +41,7 @@ describe('modules/manager/fvm/extract', () => {
     it('supports non range', () => {
       const res = extractPackageFile(
         '{"flutterSdkVersion": "stable", "flavors": {}}',
-        packageFile
+        packageFile,
       );
       expect(res?.deps).toEqual([
         {
diff --git a/lib/modules/manager/fvm/extract.ts b/lib/modules/manager/fvm/extract.ts
index 8d79aab8b28a29a1c3e3a7adeb766c8540067fa4..eb050391f85d461b69fbab6ddbf11aec442298a7 100644
--- a/lib/modules/manager/fvm/extract.ts
+++ b/lib/modules/manager/fvm/extract.ts
@@ -9,7 +9,7 @@ interface FvmConfig {
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   let fvmConfig: FvmConfig;
   try {
@@ -22,7 +22,7 @@ export function extractPackageFile(
   if (!fvmConfig.flutterSdkVersion) {
     logger.debug(
       { contents: fvmConfig },
-      'FVM config does not have flutterSdkVersion specified'
+      'FVM config does not have flutterSdkVersion specified',
     );
     return null;
   } else if (!is.string(fvmConfig.flutterSdkVersion)) {
diff --git a/lib/modules/manager/git-submodules/artifact.spec.ts b/lib/modules/manager/git-submodules/artifact.spec.ts
index a776a7a1ebcd44b59a9bc58382b12db806675547..a58eeca374a969bdc19a57c9327181c1f687a960 100644
--- a/lib/modules/manager/git-submodules/artifact.spec.ts
+++ b/lib/modules/manager/git-submodules/artifact.spec.ts
@@ -9,7 +9,7 @@ describe('modules/manager/git-submodules/artifact', () => {
           updatedDeps: [{ depName: '' }],
           newPackageFileContent: '',
           config: {},
-        })
+        }),
       ).toMatchObject([{ file: { type: 'addition', path: '', contents: '' } }]);
     });
 
@@ -20,7 +20,7 @@ describe('modules/manager/git-submodules/artifact', () => {
           updatedDeps: [{ depName: 'renovate' }, { depName: 'renovate-pro' }],
           newPackageFileContent: '',
           config: {},
-        })
+        }),
       ).toMatchObject([
         { file: { type: 'addition', path: 'renovate', contents: '' } },
         { file: { type: 'addition', path: 'renovate-pro', contents: '' } },
diff --git a/lib/modules/manager/git-submodules/extract.spec.ts b/lib/modules/manager/git-submodules/extract.spec.ts
index 45c117e24b19d01ef9f4ec471a205c0e2fed1dc2..b74cdebad572c3834117fe7f76f577e3cb0f0a04 100644
--- a/lib/modules/manager/git-submodules/extract.spec.ts
+++ b/lib/modules/manager/git-submodules/extract.spec.ts
@@ -24,7 +24,7 @@ describe('modules/manager/git-submodules/extract', () => {
 
       gitMock.env.mockImplementation(() => gitMock);
       gitMock.subModule.mockResolvedValue(
-        '4b825dc642cb6eb9a060e54bf8d69288fbee4904'
+        '4b825dc642cb6eb9a060e54bf8d69288fbee4904',
       );
 
       gitMock.raw.mockImplementation((options) => {
@@ -33,7 +33,7 @@ describe('modules/manager/git-submodules/extract', () => {
           options.includes('remote.origin.url')
         ) {
           return Promise.resolve(
-            'https://github.com/renovatebot/renovate.git'
+            'https://github.com/renovatebot/renovate.git',
           ) as Response<string>;
         }
         return git.raw(options);
@@ -49,7 +49,7 @@ describe('modules/manager/git-submodules/extract', () => {
 
     it('default branch is detected when no branch is specified', async () => {
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       const res = await extractPackageFile('', '.gitmodules.2', {});
       expect(res?.deps).toHaveLength(1);
@@ -58,7 +58,7 @@ describe('modules/manager/git-submodules/extract', () => {
 
     it('default branch is detected with using git environment variables when no branch is specified', async () => {
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       hostRules.add({
         hostType: 'github',
@@ -86,7 +86,7 @@ describe('modules/manager/git-submodules/extract', () => {
 
     it('combined token from host rule is used to detect branch', async () => {
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       hostRules.add({
         hostType: 'github',
@@ -131,13 +131,13 @@ describe('modules/manager/git-submodules/extract', () => {
       const res = await extractPackageFile('', '.gitmodules.4', {});
       expect(res?.deps).toHaveLength(1);
       expect(res?.deps[0].packageName).toBe(
-        'https://github.com/PowerShell/PowerShell-Docs'
+        'https://github.com/PowerShell/PowerShell-Docs',
       );
     });
 
     it('combined username+pwd from host rule is used to detect branch for gitlab', async () => {
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       hostRules.add({
         hostType: 'gitlab',
@@ -166,7 +166,7 @@ describe('modules/manager/git-submodules/extract', () => {
 
     it('combined username+pwd from host rule is used to detect branch for git-refs and git-tags', async () => {
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       hostRules.add({
         hostType: 'git-refs',
@@ -224,19 +224,19 @@ describe('modules/manager/git-submodules/extract', () => {
         hostType: 'gitlab',
       });
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/master  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/master  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/dev  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/dev  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       const res = await extractPackageFile('', '.gitmodules.5', {});
       expect(res).toEqual({
@@ -283,7 +283,7 @@ describe('modules/manager/git-submodules/extract', () => {
         hostType: 'azure',
       });
       gitMock.listRemote.mockResolvedValueOnce(
-        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD'
+        'ref: refs/heads/main  HEAD\n5701164b9f5edba1f6ca114c491a564ffb55a964        HEAD',
       );
       const res = await extractPackageFile('', '.gitmodules.6', {});
       expect(res).toEqual({
diff --git a/lib/modules/manager/git-submodules/extract.ts b/lib/modules/manager/git-submodules/extract.ts
index b870183b9ac173ffd94e36ecf5da4696677ef6c1..6e0ac14ec4b9f863cfe9719aa7866c63848dc7d4 100644
--- a/lib/modules/manager/git-submodules/extract.ts
+++ b/lib/modules/manager/git-submodules/extract.ts
@@ -14,7 +14,7 @@ import type { GitModule } from './types';
 async function getUrl(
   git: SimpleGit,
   gitModulesPath: string,
-  submoduleName: string
+  submoduleName: string,
 ): Promise<string> {
   const path = (
     await Git(simpleGitConfig()).raw([
@@ -57,7 +57,7 @@ async function getBranch(
   git: SimpleGit,
   gitModulesPath: string,
   submoduleName: string,
-  subModuleUrl: string
+  subModuleUrl: string,
 ): Promise<string> {
   const branchFromConfig = (
     await Git(simpleGitConfig()).raw([
@@ -76,7 +76,7 @@ async function getBranch(
 
 async function getModules(
   git: SimpleGit,
-  gitModulesPath: string
+  gitModulesPath: string,
 ): Promise<GitModule[]> {
   const res: GitModule[] = [];
   try {
@@ -106,7 +106,7 @@ async function getModules(
 export default async function extractPackageFile(
   _content: string,
   packageFile: string,
-  _config: ExtractConfig
+  _config: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   const localDir = GlobalConfig.get('localDir');
   const git = Git(localDir, simpleGitConfig());
@@ -131,7 +131,7 @@ export default async function extractPackageFile(
         git,
         gitModulesPath,
         name,
-        httpSubModuleUrl
+        httpSubModuleUrl,
       );
       deps.push({
         depName: path,
@@ -142,7 +142,7 @@ export default async function extractPackageFile(
     } catch (err) /* istanbul ignore next */ {
       logger.warn(
         { err, packageFile },
-        'Error mapping git submodules during extraction'
+        'Error mapping git submodules during extraction',
       );
     }
   }
diff --git a/lib/modules/manager/github-actions/extract.spec.ts b/lib/modules/manager/github-actions/extract.spec.ts
index 7c2014611390bff030da185dfcf39c3c55756904..4f96931d875c1c69d96e2ceb38433a3950e7034e 100644
--- a/lib/modules/manager/github-actions/extract.spec.ts
+++ b/lib/modules/manager/github-actions/extract.spec.ts
@@ -39,42 +39,42 @@ describe('modules/manager/github-actions/extract', () => {
   describe('extractPackageFile()', () => {
     it('returns null for empty', () => {
       expect(
-        extractPackageFile('nothing here', 'empty-workflow.yml')
+        extractPackageFile('nothing here', 'empty-workflow.yml'),
       ).toBeNull();
     });
 
     it('returns null for invalid yaml', () => {
       expect(
-        extractPackageFile('nothing here: [', 'invalid-workflow.yml')
+        extractPackageFile('nothing here: [', 'invalid-workflow.yml'),
       ).toBeNull();
     });
 
     it('extracts multiple docker image lines from yaml configuration file', () => {
       const res = extractPackageFile(
         Fixtures.get('workflow_1.yml'),
-        'workflow_1.yml'
+        'workflow_1.yml',
       );
       expect(res?.deps).toMatchSnapshot();
       expect(res?.deps.filter((d) => d.datasource === 'docker')).toHaveLength(
-        6
+        6,
       );
     });
 
     it('extracts multiple action tag lines from yaml configuration file', () => {
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps).toMatchSnapshot();
       expect(
-        res?.deps.filter((d) => d.datasource === 'github-tags')
+        res?.deps.filter((d) => d.datasource === 'github-tags'),
       ).toHaveLength(8);
     });
 
     it('use github.com as registry when no settings provided', () => {
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps[0].registryUrls).toBeUndefined();
     });
@@ -86,7 +86,7 @@ describe('modules/manager/github-actions/extract', () => {
       });
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps[0].registryUrls).toEqual([
         'https://github.enterprise.com',
@@ -101,7 +101,7 @@ describe('modules/manager/github-actions/extract', () => {
       });
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps[0].registryUrls).toEqual([
         'https://github.enterprise.com',
@@ -116,7 +116,7 @@ describe('modules/manager/github-actions/extract', () => {
       });
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps[0].registryUrls).toBeUndefined();
     });
@@ -128,7 +128,7 @@ describe('modules/manager/github-actions/extract', () => {
       });
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps[0].registryUrls).toBeUndefined();
     });
@@ -140,7 +140,7 @@ describe('modules/manager/github-actions/extract', () => {
       });
       const res = extractPackageFile(
         Fixtures.get('workflow_2.yml'),
-        'workflow_2.yml'
+        'workflow_2.yml',
       );
       expect(res?.deps[0].registryUrls).toBeUndefined();
     });
@@ -148,7 +148,7 @@ describe('modules/manager/github-actions/extract', () => {
     it('extracts multiple action tag lines with double quotes and comments', () => {
       const res = extractPackageFile(
         Fixtures.get('workflow_3.yml'),
-        'workflow_3.yml'
+        'workflow_3.yml',
       );
 
       expect(res?.deps).toMatchObject([
@@ -287,7 +287,7 @@ describe('modules/manager/github-actions/extract', () => {
     it('extracts tags in different formats', () => {
       const res = extractPackageFile(
         Fixtures.get('workflow_4.yml'),
-        'workflow_4.yml'
+        'workflow_4.yml',
       );
       expect(res?.deps).toMatchObject([
         {
@@ -451,7 +451,7 @@ describe('modules/manager/github-actions/extract', () => {
         },
       ]);
       expect(
-        res?.deps.filter((d) => d.datasource === 'github-runners')
+        res?.deps.filter((d) => d.datasource === 'github-runners'),
       ).toHaveLength(7);
     });
   });
diff --git a/lib/modules/manager/github-actions/extract.ts b/lib/modules/manager/github-actions/extract.ts
index 4b2916c9fc03225799733107773dae2e076104b7..4f0e59b5ae79a4d1093d42ec5e65ae0dde490dbc 100644
--- a/lib/modules/manager/github-actions/extract.ts
+++ b/lib/modules/manager/github-actions/extract.ts
@@ -13,7 +13,7 @@ import type { Workflow } from './types';
 
 const dockerActionRe = regEx(/^\s+uses: ['"]?docker:\/\/([^'"]+)\s*$/);
 const actionRe = regEx(
-  /^\s+-?\s+?uses: (?<replaceString>['"]?(?<depName>[\w-]+\/[.\w-]+)(?<path>\/.*)?@(?<currentValue>[^\s'"]+)['"]?(?:\s+#\s*(?:renovate\s*:\s*)?(?:pin\s+|tag\s*=\s*)?@?(?<tag>v?\d+(?:\.\d+(?:\.\d+)?)?))?)/
+  /^\s+-?\s+?uses: (?<replaceString>['"]?(?<depName>[\w-]+\/[.\w-]+)(?<path>\/.*)?@(?<currentValue>[^\s'"]+)['"]?(?:\s+#\s*(?:renovate\s*:\s*)?(?:pin\s+|tag\s*=\s*)?@?(?<tag>v?\d+(?:\.\d+(?:\.\d+)?)?))?)/,
 );
 
 // SHA1 or SHA256, see https://github.blog/2020-10-19-git-2-29-released/
@@ -34,7 +34,7 @@ function detectCustomGitHubRegistryUrlsForActions(): PackageDependency {
       parsedEndpoint.host !== 'api.github.com'
     ) {
       registryUrls.unshift(
-        `${parsedEndpoint.protocol}//${parsedEndpoint.host}`
+        `${parsedEndpoint.protocol}//${parsedEndpoint.host}`,
       );
       return { registryUrls };
     }
@@ -115,7 +115,7 @@ function extractContainer(container: unknown): PackageDependency | undefined {
 }
 
 const runnerVersionRegex = regEx(
-  /^\s*(?<depName>[a-zA-Z]+)-(?<currentValue>[^\s]+)/
+  /^\s*(?<depName>[a-zA-Z]+)-(?<currentValue>[^\s]+)/,
 );
 
 function extractRunner(runner: string): PackageDependency | null {
@@ -159,7 +159,7 @@ function extractRunners(runner: unknown): PackageDependency[] {
 
 function extractWithYAMLParser(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageDependency[] {
   logger.trace('github-actions.extractWithYAMLParser()');
   const deps: PackageDependency[] = [];
@@ -170,7 +170,7 @@ function extractWithYAMLParser(
   } catch (err) {
     logger.debug(
       { packageFile, err },
-      'Failed to parse GitHub Actions Workflow YAML'
+      'Failed to parse GitHub Actions Workflow YAML',
     );
     return [];
   }
@@ -198,7 +198,7 @@ function extractWithYAMLParser(
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   logger.trace(`github-actions.extractPackageFile(${packageFile})`);
   const deps = [
diff --git a/lib/modules/manager/gitlabci-include/common.spec.ts b/lib/modules/manager/gitlabci-include/common.spec.ts
index a1c2dcd02491209bb364703a754437089ee30252..7484698925386af35226eef68ff5d2b1e93a05b0 100644
--- a/lib/modules/manager/gitlabci-include/common.spec.ts
+++ b/lib/modules/manager/gitlabci-include/common.spec.ts
@@ -11,7 +11,7 @@ import {
 
 const yamlFileMultiConfig = Fixtures.get('gitlab-ci.1.yaml');
 const pipeline = load(
-  replaceReferenceTags(yamlFileMultiConfig)
+  replaceReferenceTags(yamlFileMultiConfig),
 ) as GitlabPipeline;
 const includeLocal = { local: 'something' };
 const includeProject = { project: 'something' };
diff --git a/lib/modules/manager/gitlabci-include/common.ts b/lib/modules/manager/gitlabci-include/common.ts
index 3c0720d019a2543e43fe28ba9962898fb0197453..7aefb628e62d00320ab6a13382cb764e292ed396 100644
--- a/lib/modules/manager/gitlabci-include/common.ts
+++ b/lib/modules/manager/gitlabci-include/common.ts
@@ -11,7 +11,7 @@ export function isNonEmptyObject(obj: any): boolean {
 }
 
 export function filterIncludeFromGitlabPipeline(
-  pipeline: GitlabPipeline
+  pipeline: GitlabPipeline,
 ): GitlabPipeline {
   const pipeline_without_include: GitlabPipeline = {};
   for (const key of Object.keys(pipeline).filter((key) => key !== 'include')) {
@@ -22,13 +22,13 @@ export function filterIncludeFromGitlabPipeline(
 }
 
 export function isGitlabIncludeProject(
-  include: GitlabInclude
+  include: GitlabInclude,
 ): include is GitlabIncludeProject {
   return !is.undefined((include as GitlabIncludeProject).project);
 }
 
 export function isGitlabIncludeLocal(
-  include: GitlabInclude
+  include: GitlabInclude,
 ): include is GitlabIncludeLocal {
   return !is.undefined((include as GitlabIncludeLocal).local);
 }
diff --git a/lib/modules/manager/gitlabci-include/extract.ts b/lib/modules/manager/gitlabci-include/extract.ts
index 7d2011fca0bf5120439d804f6d975302dcec6f1c..9c4a2892042396b0e4d7191a9b4f126961bcd605 100644
--- a/lib/modules/manager/gitlabci-include/extract.ts
+++ b/lib/modules/manager/gitlabci-include/extract.ts
@@ -18,7 +18,7 @@ import {
 } from './common';
 
 function extractDepFromIncludeFile(
-  includeObj: GitlabIncludeProject
+  includeObj: GitlabIncludeProject,
 ): PackageDependency {
   const dep: PackageDependency = {
     datasource: GitlabTagsDatasource.id,
@@ -34,7 +34,7 @@ function extractDepFromIncludeFile(
 }
 
 function getIncludeProjectsFromInclude(
-  includeValue: GitlabInclude[] | GitlabInclude
+  includeValue: GitlabInclude[] | GitlabInclude,
 ): GitlabIncludeProject[] {
   const includes = is.array(includeValue) ? includeValue : [includeValue];
 
@@ -65,7 +65,7 @@ function getAllIncludeProjects(data: GitlabPipeline): GitlabIncludeProject[] {
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   const platform = GlobalConfig.get('platform');
@@ -86,7 +86,7 @@ export function extractPackageFile(
     if (err.stack?.startsWith('YAMLException:')) {
       logger.debug(
         { err, packageFile },
-        'YAML exception extracting GitLab CI includes'
+        'YAML exception extracting GitLab CI includes',
       );
     } else {
       logger.debug({ err, packageFile }, 'Error extracting GitLab CI includes');
diff --git a/lib/modules/manager/gitlabci/common.ts b/lib/modules/manager/gitlabci/common.ts
index baa89d32ddd8bff45734be11bb8d843150297f0b..4c7b4c4b92368d4fd637684be7df19cb6a11e91a 100644
--- a/lib/modules/manager/gitlabci/common.ts
+++ b/lib/modules/manager/gitlabci/common.ts
@@ -2,7 +2,7 @@ import is from '@sindresorhus/is';
 import type { GitlabInclude, GitlabIncludeLocal } from '../gitlabci/types';
 
 export function isGitlabIncludeLocal(
-  include: GitlabInclude
+  include: GitlabInclude,
 ): include is GitlabIncludeLocal {
   return !is.undefined((include as GitlabIncludeLocal).local);
 }
diff --git a/lib/modules/manager/gitlabci/extract.spec.ts b/lib/modules/manager/gitlabci/extract.spec.ts
index b9f3b2196d05d1201ce8d9d519a3dd6980a7688f..db03a11829205e7e78df7cc1621e94273fa73df2 100644
--- a/lib/modules/manager/gitlabci/extract.spec.ts
+++ b/lib/modules/manager/gitlabci/extract.spec.ts
@@ -33,7 +33,7 @@ describe('modules/manager/gitlabci/extract', () => {
       expect(
         await extractAllPackageFiles(config, [
           'lib/modules/manager/gitlabci/__fixtures__/gitlab-ci.2.yaml',
-        ])
+        ]),
       ).toBeNull();
     });
 
@@ -180,7 +180,7 @@ describe('modules/manager/gitlabci/extract', () => {
             alias: imagealias1
       `,
         '',
-        {}
+        {},
       );
       expect(res?.deps).toEqual([
         {
@@ -240,7 +240,7 @@ describe('modules/manager/gitlabci/extract', () => {
         '',
         {
           registryAliases,
-        }
+        },
       );
       expect(res?.deps).toEqual([
         {
@@ -294,7 +294,7 @@ describe('modules/manager/gitlabci/extract', () => {
       expect(
         extractFromImage({
           name: 'image:test',
-        })
+        }),
       ).toEqual(expectedRes);
 
       expect(extractFromImage(undefined)).toBeNull();
@@ -327,7 +327,7 @@ describe('modules/manager/gitlabci/extract', () => {
       expect(extractFromServices(undefined)).toBeEmptyArray();
       expect(extractFromServices(services)).toEqual(expectedRes);
       expect(
-        extractFromServices([{ name: 'image:test' }, { name: 'image2:test2' }])
+        extractFromServices([{ name: 'image:test' }, { name: 'image2:test2' }]),
       ).toEqual(expectedRes);
     });
 
diff --git a/lib/modules/manager/gitlabci/extract.ts b/lib/modules/manager/gitlabci/extract.ts
index ce1c7bd71f2fb4fc3a663e8d4c1a452ed0c09a9d..00da59a140dad44a80c56a9fc7a91496e003c12f 100644
--- a/lib/modules/manager/gitlabci/extract.ts
+++ b/lib/modules/manager/gitlabci/extract.ts
@@ -15,7 +15,7 @@ import { getGitlabDep, replaceReferenceTags } from './utils';
 
 export function extractFromImage(
   image: Image | undefined,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): PackageDependency | null {
   if (is.undefined(image)) {
     return null;
@@ -33,7 +33,7 @@ export function extractFromImage(
 
 export function extractFromServices(
   services: Services | undefined,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): PackageDependency[] {
   if (is.undefined(services)) {
     return [];
@@ -55,7 +55,7 @@ export function extractFromServices(
 
 export function extractFromJob(
   job: Job | undefined,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): PackageDependency[] {
   if (is.undefined(job)) {
     return [];
@@ -79,7 +79,7 @@ export function extractFromJob(
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   let deps: PackageDependency[] = [];
   try {
@@ -93,7 +93,7 @@ export function extractPackageFile(
             {
               const dep = extractFromImage(
                 value as Image,
-                config.registryAliases
+                config.registryAliases,
               );
               if (dep) {
                 deps.push(dep);
@@ -103,7 +103,7 @@ export function extractPackageFile(
 
           case 'services':
             deps.push(
-              ...extractFromServices(value as Services, config.registryAliases)
+              ...extractFromServices(value as Services, config.registryAliases),
             );
             break;
 
@@ -117,7 +117,7 @@ export function extractPackageFile(
   } catch (err) /* istanbul ignore next */ {
     logger.debug(
       { err, packageFile },
-      'Error extracting GitLab CI dependencies'
+      'Error extracting GitLab CI dependencies',
     );
   }
 
@@ -126,7 +126,7 @@ export function extractPackageFile(
 
 export async function extractAllPackageFiles(
   config: ExtractConfig,
-  packageFiles: string[]
+  packageFiles: string[],
 ): Promise<PackageFile[] | null> {
   const filesToExamine = [...packageFiles];
   const seen = new Set<string>(packageFiles);
@@ -140,7 +140,7 @@ export async function extractAllPackageFiles(
     if (!content) {
       logger.debug(
         { packageFile: file },
-        `Empty or non existent gitlabci file`
+        `Empty or non existent gitlabci file`,
       );
       continue;
     }
@@ -152,7 +152,7 @@ export async function extractAllPackageFiles(
     } catch (err) {
       logger.debug(
         { err, packageFile: file },
-        'Error extracting GitLab CI dependencies'
+        'Error extracting GitLab CI dependencies',
       );
       continue;
     }
@@ -184,7 +184,7 @@ export async function extractAllPackageFiles(
 
   logger.trace(
     { packageFiles, files: filesToExamine.entries() },
-    'extracted all GitLab CI files'
+    'extracted all GitLab CI files',
   );
 
   if (!results.length) {
diff --git a/lib/modules/manager/gitlabci/utils.spec.ts b/lib/modules/manager/gitlabci/utils.spec.ts
index a52b8927c4789706abfa2346a0d7258f8b7dcd2c..3c563ce93cbb93298a619dea146bbae26ba27f81 100644
--- a/lib/modules/manager/gitlabci/utils.spec.ts
+++ b/lib/modules/manager/gitlabci/utils.spec.ts
@@ -43,7 +43,7 @@ describe('modules/manager/gitlabci/utils', () => {
           depName: 'renovate/renovate',
           currentValue: '19.70.8-slim',
         });
-      }
+      },
     );
 
     it.each`
@@ -68,12 +68,12 @@ describe('modules/manager/gitlabci/utils', () => {
           ...dep,
           replaceString: imageName,
         });
-      }
+      },
     );
 
     it('no Docker hub', () => {
       expect(
-        getGitlabDep('quay.io/prometheus/node-exporter:v1.3.1')
+        getGitlabDep('quay.io/prometheus/node-exporter:v1.3.1'),
       ).toMatchObject({
         autoReplaceStringTemplate: defaultAutoReplaceStringTemplate,
         replaceString: 'quay.io/prometheus/node-exporter:v1.3.1',
diff --git a/lib/modules/manager/gitlabci/utils.ts b/lib/modules/manager/gitlabci/utils.ts
index e09825c09569eb4a601c9956024af4ac4646f3d0..e69552b2a61f69aae03ee85ac37d6cdf398cd706 100644
--- a/lib/modules/manager/gitlabci/utils.ts
+++ b/lib/modules/manager/gitlabci/utils.ts
@@ -16,7 +16,7 @@ export function replaceReferenceTags(content: string): string {
 }
 
 const depProxyRe = regEx(
-  `(?<prefix>\\$\\{?CI_DEPENDENCY_PROXY_(?:DIRECT_)?GROUP_IMAGE_PREFIX\\}?/)(?<depName>.+)`
+  `(?<prefix>\\$\\{?CI_DEPENDENCY_PROXY_(?:DIRECT_)?GROUP_IMAGE_PREFIX\\}?/)(?<depName>.+)`,
 );
 
 /**
@@ -26,7 +26,7 @@ const depProxyRe = regEx(
  */
 export function getGitlabDep(
   imageName: string,
-  registryAliases?: Record<string, string>
+  registryAliases?: Record<string, string>,
 ): PackageDependency {
   const match = depProxyRe.exec(imageName);
   if (match?.groups) {
diff --git a/lib/modules/manager/gomod/artifacts.spec.ts b/lib/modules/manager/gomod/artifacts.spec.ts
index 9e09f9c6abfb18b1e64f6c8ee9f0e9514f4c6c15..b4bdd0e21b7efd63b77859be0ab2ae579a53c50d 100644
--- a/lib/modules/manager/gomod/artifacts.spec.ts
+++ b/lib/modules/manager/gomod/artifacts.spec.ts
@@ -98,7 +98,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toBeEmptyArray();
   });
@@ -110,7 +110,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: [],
-      })
+      }),
     );
 
     expect(
@@ -119,7 +119,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -147,7 +147,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -157,7 +157,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -199,7 +199,7 @@ describe('modules/manager/gomod/artifacts', () => {
         modified: ['go.sum', foo],
         not_added: [bar],
         deleted: [baz],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce('Foo go.sum');
@@ -283,7 +283,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -296,7 +296,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -345,7 +345,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -358,7 +358,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -388,7 +388,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -398,7 +398,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -435,7 +435,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -448,7 +448,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -544,7 +544,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -557,7 +557,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -612,7 +612,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -625,7 +625,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -653,7 +653,7 @@ describe('modules/manager/gomod/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -677,7 +677,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -690,7 +690,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -727,7 +727,7 @@ describe('modules/manager/gomod/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -751,7 +751,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -764,7 +764,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -792,7 +792,7 @@ describe('modules/manager/gomod/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -824,7 +824,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -837,7 +837,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -882,7 +882,7 @@ describe('modules/manager/gomod/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -905,7 +905,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -918,7 +918,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -945,7 +945,7 @@ describe('modules/manager/gomod/artifacts', () => {
             }),
           }),
         }),
-      ])
+      ]),
     );
   });
 
@@ -958,7 +958,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 1');
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 2');
@@ -976,7 +976,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           postUpdateOptions: ['gomodTidy'],
         },
-      })
+      }),
     ).toEqual([
       { file: { contents: 'New go.sum 1', path: 'go.sum', type: 'addition' } },
       { file: { contents: 'New go.sum 2', path: 'go.mod', type: 'addition' } },
@@ -1022,7 +1022,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 1');
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 2');
@@ -1040,7 +1040,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           postUpdateOptions: ['gomodTidy1.17'],
         },
-      })
+      }),
     ).toEqual([
       { file: { contents: 'New go.sum 1', path: 'go.sum', type: 'addition' } },
       { file: { contents: 'New go.sum 2', path: 'go.mod', type: 'addition' } },
@@ -1086,7 +1086,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 1');
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 2');
@@ -1104,7 +1104,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           postUpdateOptions: ['gomodTidyE', 'gomodTidy1.17'],
         },
-      })
+      }),
     ).toEqual([
       { file: { contents: 'New go.sum 1', path: 'go.sum', type: 'addition' } },
       { file: { contents: 'New go.sum 2', path: 'go.mod', type: 'addition' } },
@@ -1150,7 +1150,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 1');
     fs.readLocalFile.mockResolvedValueOnce('New go.sum 2');
@@ -1168,7 +1168,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           postUpdateOptions: ['gomodTidyE'],
         },
-      })
+      }),
     ).toEqual([
       { file: { contents: 'New go.sum 1', path: 'go.sum', type: 'addition' } },
       { file: { contents: 'New go.sum 2', path: 'go.mod', type: 'addition' } },
@@ -1219,7 +1219,7 @@ describe('modules/manager/gomod/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: gomod1,
         config,
-      })
+      }),
     ).toEqual([
       {
         artifactError: {
@@ -1238,7 +1238,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1256,7 +1256,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           postUpdateOptions: ['gomodUpdateImportPaths'],
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'main.go', contents: 'New main.go' } },
@@ -1293,7 +1293,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1312,7 +1312,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           postUpdateOptions: ['gomodUpdateImportPaths'],
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'main.go', contents: 'New main.go' } },
@@ -1353,7 +1353,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1370,7 +1370,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           postUpdateOptions: ['gomodUpdateImportPaths'],
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'go.mod', contents: 'New go.mod' } },
@@ -1398,7 +1398,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1415,7 +1415,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           postUpdateOptions: ['gomodUpdateImportPaths'],
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'go.mod', contents: 'New go.mod' } },
@@ -1444,7 +1444,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1464,7 +1464,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           postUpdateOptions: ['gomodUpdateImportPaths'],
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'go.mod', contents: 'New go.mod' } },
@@ -1492,7 +1492,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1509,7 +1509,7 @@ describe('modules/manager/gomod/artifacts', () => {
           newMajor: 28,
           postUpdateOptions: ['gomodTidy'],
         },
-      })
+      }),
     ).toEqual([
       { file: { contents: 'New go.sum', path: 'go.sum', type: 'addition' } },
       { file: { contents: 'New main.go', path: 'go.mod', type: 'addition' } },
@@ -1529,7 +1529,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1545,7 +1545,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           newMajor: 28,
         },
-      })
+      }),
     ).toEqual([
       { file: { contents: 'New go.sum', path: 'go.sum', type: 'addition' } },
       { file: { contents: 'New main.go', path: 'go.mod', type: 'addition' } },
@@ -1566,7 +1566,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1587,7 +1587,7 @@ describe('modules/manager/gomod/artifacts', () => {
             gomodMod: 'v1.2.3',
           },
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'main.go', contents: 'New main.go' } },
@@ -1625,7 +1625,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1646,7 +1646,7 @@ describe('modules/manager/gomod/artifacts', () => {
             gomodMod: 'a.b.c',
           },
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'main.go', contents: 'New main.go' } },
@@ -1683,7 +1683,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1701,7 +1701,7 @@ describe('modules/manager/gomod/artifacts', () => {
           updateType: 'major',
           postUpdateOptions: ['gomodUpdateImportPaths'],
         },
-      })
+      }),
     ).toEqual([
       { file: { type: 'addition', path: 'go.sum', contents: 'New go.sum' } },
       { file: { type: 'addition', path: 'go.mod', contents: 'New go.mod' } },
@@ -1743,7 +1743,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1802,7 +1802,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1874,7 +1874,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum', 'main.go'],
-      })
+      }),
     );
     fs.readLocalFile
       .mockResolvedValueOnce('New go.sum')
@@ -1947,7 +1947,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: [],
-      })
+      }),
     );
 
     expect(
@@ -1959,7 +1959,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           goGetDirs: ['.', 'foo', '.bar/...', '&&', 'cat', '/etc/passwd'],
         },
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -1978,7 +1978,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -1991,7 +1991,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           goGetDirs: ['.'],
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -2018,7 +2018,7 @@ describe('modules/manager/gomod/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['go.sum'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New go.sum');
     fs.readLocalFile.mockResolvedValueOnce(gomod1);
@@ -2031,7 +2031,7 @@ describe('modules/manager/gomod/artifacts', () => {
           ...config,
           goGetDirs: ['/etc', '../../../'],
         },
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'go.sum', stderr: 'Invalid goGetDirs' } },
     ]);
diff --git a/lib/modules/manager/gomod/artifacts.ts b/lib/modules/manager/gomod/artifacts.ts
index df9c4bacb0d252bb5c1afea0cfd1fb6c0ebc4b22..e7855c3aee98726e910f688e7402a1e40cdd3dbf 100644
--- a/lib/modules/manager/gomod/artifacts.ts
+++ b/lib/modules/manager/gomod/artifacts.ts
@@ -29,25 +29,25 @@ const { major, valid } = semver;
 
 function getUpdateImportPathCmds(
   updatedDeps: PackageDependency[],
-  { constraints }: UpdateArtifactsConfig
+  { constraints }: UpdateArtifactsConfig,
 ): string[] {
   // Check if we fail to parse any major versions and log that they're skipped
   const invalidMajorDeps = updatedDeps.filter(
-    ({ newVersion }) => !valid(newVersion)
+    ({ newVersion }) => !valid(newVersion),
   );
   if (invalidMajorDeps.length > 0) {
     invalidMajorDeps.forEach(({ depName }) =>
       logger.warn(
         { depName },
-        'Ignoring dependency: Could not get major version'
-      )
+        'Ignoring dependency: Could not get major version',
+      ),
     );
   }
 
   const updateImportCommands = updatedDeps
     .filter(
       ({ newVersion }) =>
-        valid(newVersion) && !newVersion!.endsWith('+incompatible')
+        valid(newVersion) && !newVersion!.endsWith('+incompatible'),
     )
     .map(({ depName, newVersion }) => ({
       depName: depName!,
@@ -55,12 +55,13 @@ function getUpdateImportPathCmds(
     }))
     // Skip path updates going from v0 to v1
     .filter(
-      ({ depName, newMajor }) => depName.startsWith('gopkg.in/') || newMajor > 1
+      ({ depName, newMajor }) =>
+        depName.startsWith('gopkg.in/') || newMajor > 1,
     )
 
     .map(
       ({ depName, newMajor }) =>
-        `mod upgrade --mod-name=${depName} -t=${newMajor}`
+        `mod upgrade --mod-name=${depName} -t=${newMajor}`,
     );
 
   if (updateImportCommands.length > 0) {
@@ -74,17 +75,17 @@ function getUpdateImportPathCmds(
       ) {
         installMarwanModArgs = installMarwanModArgs.replace(
           regEx(/@latest$/),
-          `@${gomodModCompatibility}`
+          `@${gomodModCompatibility}`,
         );
       } else {
         logger.debug(
           { gomodModCompatibility },
-          'marwan-at-work/mod compatibility range is not valid - skipping'
+          'marwan-at-work/mod compatibility range is not valid - skipping',
         );
       }
     } else {
       logger.debug(
-        'No marwan-at-work/mod compatibility range found - installing marwan-at-work/mod latest'
+        'No marwan-at-work/mod compatibility range found - installing marwan-at-work/mod latest',
       );
     }
     updateImportCommands.unshift(`go ${installMarwanModArgs}`);
@@ -99,7 +100,7 @@ function useModcacherw(goVersion: string | undefined): boolean {
   }
 
   const [, majorPart, minorPart] = coerceArray(
-    regEx(/(\d+)\.(\d+)/).exec(goVersion)
+    regEx(/(\d+)\.(\d+)/).exec(goVersion),
   );
   const [major, minor] = [majorPart, minorPart].map((x) => parseInt(x, 10));
 
@@ -149,7 +150,7 @@ export async function updateArtifacts({
       .join('\n');
 
     const inlineReplaceRegEx = regEx(
-      /(\r?\n)(replace\s+[^\s]+\s+=>\s+\.\.\/.*)/g
+      /(\r?\n)(replace\s+[^\s]+\s+=>\s+\.\.\/.*)/g,
     );
 
     // $1 will be matched with the (\r?n) group
@@ -178,7 +179,7 @@ export async function updateArtifacts({
 
     if (massagedGoMod !== newGoModContent) {
       logger.debug(
-        'Removed some relative replace statements and comments from go.mod'
+        'Removed some relative replace statements and comments from go.mod',
       );
     }
   }
@@ -390,7 +391,7 @@ export async function updateArtifacts({
 }
 
 async function getGoConstraints(
-  goModFileName: string
+  goModFileName: string,
 ): Promise<string | undefined> {
   const content = (await readLocalFile(goModFileName, 'utf8')) ?? null;
   if (!content) {
diff --git a/lib/modules/manager/gomod/extract.ts b/lib/modules/manager/gomod/extract.ts
index aa59b01a784cf4fa7e29ca8aaa369dda2ebb4473..d5e2d62fb348722efff6c031ace52e94f9b99a1a 100644
--- a/lib/modules/manager/gomod/extract.ts
+++ b/lib/modules/manager/gomod/extract.ts
@@ -10,7 +10,7 @@ import type { MultiLineParseResult } from './types';
 function getDep(
   lineNumber: number,
   match: RegExpMatchArray,
-  type: string
+  type: string,
 ): PackageDependency {
   const [, , currentValue] = match;
   let [, depName] = match;
@@ -51,7 +51,7 @@ function getGoDep(lineNumber: number, goVer: string): PackageDependency {
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   logger.trace({ content }, 'gomod.extractPackageFile()');
   const deps: PackageDependency[] = [];
@@ -65,7 +65,7 @@ export function extractPackageFile(
         deps.push(dep);
       }
       const replaceMatch = regEx(
-        /^replace\s+[^\s]+[\s]+[=][>]\s+([^\s]+)\s+([^\s]+)/
+        /^replace\s+[^\s]+[\s]+[=][>]\s+([^\s]+)\s+([^\s]+)/,
       ).exec(line);
       if (replaceMatch) {
         const dep = getDep(lineNumber, replaceMatch, 'replace');
@@ -91,7 +91,7 @@ export function extractPackageFile(
           lineNumber,
           lines,
           matcher,
-          'require'
+          'require',
         );
         lineNumber = reachedLine;
         deps.push(...detectedDeps);
@@ -102,7 +102,7 @@ export function extractPackageFile(
           lineNumber,
           lines,
           matcher,
-          'replace'
+          'replace',
         );
         lineNumber = reachedLine;
         deps.push(...detectedDeps);
@@ -121,7 +121,7 @@ function parseMultiLine(
   startingLine: number,
   lines: string[],
   matchRegex: RegExp,
-  blockType: 'require' | 'replace'
+  blockType: 'require' | 'replace',
 ): MultiLineParseResult {
   const deps: PackageDependency[] = [];
   let lineNumber = startingLine;
diff --git a/lib/modules/manager/gomod/update.spec.ts b/lib/modules/manager/gomod/update.spec.ts
index d57e267d66946da18075e1ed2053357d919ef18b..06eff90aa8b80e41c43a7d8cdcd0b8fc428fa1f4 100644
--- a/lib/modules/manager/gomod/update.spec.ts
+++ b/lib/modules/manager/gomod/update.spec.ts
@@ -129,7 +129,7 @@ describe('modules/manager/gomod/update', () => {
       const res = updateDependency({ fileContent: gomod1, upgrade });
       expect(res).not.toEqual(gomod1);
       expect(res).toContain(
-        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible'
+        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible',
       );
     });
 
@@ -272,7 +272,7 @@ describe('modules/manager/gomod/update', () => {
       expect(res).not.toEqual(gomod1);
       // Assert that the version still contains +incompatible tag.
       expect(res).toContain(
-        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible'
+        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible',
       );
     });
 
@@ -286,10 +286,10 @@ describe('modules/manager/gomod/update', () => {
       const res = updateDependency({ fileContent: gomod1, upgrade });
       expect(res).not.toEqual(gomod1);
       expect(res).not.toContain(
-        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible+incompatible'
+        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible+incompatible',
       );
       expect(res).toContain(
-        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible'
+        'github.com/Azure/azure-sdk-for-go v26.0.0+incompatible',
       );
     });
 
diff --git a/lib/modules/manager/gomod/update.ts b/lib/modules/manager/gomod/update.ts
index 2dd8f8f6af490545471371ea2da1e12f255310e1..2a446db9edd1a22a59eef3b076cd58979ab76503 100644
--- a/lib/modules/manager/gomod/update.ts
+++ b/lib/modules/manager/gomod/update.ts
@@ -40,7 +40,7 @@ export function updateDependency({
     ) {
       logger.debug(
         { lineToChange, depName },
-        "go.mod current line doesn't contain dependency"
+        "go.mod current line doesn't contain dependency",
       );
       return null;
     }
@@ -52,11 +52,11 @@ export function updateDependency({
     if (depType === 'replace') {
       if (upgrade.managerData.multiLine) {
         updateLineExp = regEx(
-          /^(?<depPart>\s+[^\s]+[\s]+[=][>]+\s+)(?<divider>[^\s]+\s+)[^\s]+/
+          /^(?<depPart>\s+[^\s]+[\s]+[=][>]+\s+)(?<divider>[^\s]+\s+)[^\s]+/,
         );
       } else {
         updateLineExp = regEx(
-          /^(?<depPart>replace\s+[^\s]+[\s]+[=][>]+\s+)(?<divider>[^\s]+\s+)[^\s]+/
+          /^(?<depPart>replace\s+[^\s]+[\s]+[=][>]+\s+)(?<divider>[^\s]+\s+)[^\s]+/,
         );
       }
     } else if (depType === 'require' || depType === 'indirect') {
@@ -64,7 +64,7 @@ export function updateDependency({
         updateLineExp = regEx(/^(?<depPart>\s+[^\s]+)(?<divider>\s+)[^\s]+/);
       } else {
         updateLineExp = regEx(
-          /^(?<depPart>require\s+[^\s]+)(?<divider>\s+)[^\s]+/
+          /^(?<depPart>require\s+[^\s]+)(?<divider>\s+)[^\s]+/,
         );
       }
     }
@@ -76,25 +76,25 @@ export function updateDependency({
     if (upgrade.updateType === 'digest') {
       const newDigestRightSized = upgrade.newDigest!.substring(
         0,
-        upgrade.currentDigest!.length
+        upgrade.currentDigest!.length,
       );
       if (lineToChange.includes(newDigestRightSized)) {
         return fileContent;
       }
       logger.debug(
         { depName, lineToChange, newDigestRightSized },
-        'gomod: need to update digest'
+        'gomod: need to update digest',
       );
       newLine = lineToChange.replace(
         // TODO: can be undefined? (#22198)
         updateLineExp!,
-        `$<depPart>$<divider>${newDigestRightSized}`
+        `$<depPart>$<divider>${newDigestRightSized}`,
       );
     } else {
       newLine = lineToChange.replace(
         // TODO: can be undefined? (#22198)
         updateLineExp!,
-        `$<depPart>$<divider>${upgrade.newValue}`
+        `$<depPart>$<divider>${upgrade.newValue}`,
       );
     }
     if (upgrade.updateType === 'major') {
@@ -105,7 +105,7 @@ export function updateDependency({
         // Package renames - I couldn't think of a better place to do this
         newLine = newLine.replace(
           'gorethink/gorethink.v5',
-          'rethinkdb/rethinkdb-go.v5'
+          'rethinkdb/rethinkdb-go.v5',
         );
       } else if (
         upgrade.newMajor! > 1 &&
@@ -120,7 +120,7 @@ export function updateDependency({
           const [oldV] = upgrade.currentValue!.split('.');
           newLine = newLine.replace(
             regEx(`/${oldV}(\\s+)`, undefined, false),
-            `/v${upgrade.newMajor}$1`
+            `/v${upgrade.newMajor}$1`,
           );
         }
       }
@@ -144,7 +144,7 @@ export function updateDependency({
     if (depType === 'indirect') {
       newLine = newLine.replace(
         regEx(/\s*(?:\/\/\s*indirect(?:\s*;)?\s*)*$/),
-        ' // indirect'
+        ' // indirect',
       );
     }
 
diff --git a/lib/modules/manager/gradle-wrapper/artifacts.spec.ts b/lib/modules/manager/gradle-wrapper/artifacts.spec.ts
index 039dcb7fb8a353e92dd3c525792ba8956ff4279f..87e28bde8084bd44c44c66898315daba14117924 100644
--- a/lib/modules/manager/gradle-wrapper/artifacts.spec.ts
+++ b/lib/modules/manager/gradle-wrapper/artifacts.spec.ts
@@ -61,7 +61,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
       partial<Stats>({
         isFile: () => true,
         mode: 0o555,
-      })
+      }),
     );
 
     // java
@@ -85,14 +85,14 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
             'gradlew',
             'gradlew.bat',
           ],
-        })
+        }),
       );
 
       const res = await updateArtifacts({
         packageFileName: 'gradle/wrapper/gradle-wrapper.properties',
         updatedDeps: [],
         newPackageFileContent: Fixtures.get(
-          'expectedFiles/gradle/wrapper/gradle-wrapper.properties'
+          'expectedFiles/gradle/wrapper/gradle-wrapper.properties',
         ),
         config: { ...config, newValue: '6.3' },
       });
@@ -108,7 +108,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
             path: fileProjectPath,
             contents: 'test',
           },
-        }))
+        })),
       );
       expect(execSnapshots).toMatchObject([
         {
@@ -131,7 +131,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
         partial<Stats>({
           isFile: () => false,
           mode: 0o555,
-        })
+        }),
       );
 
       const result = await updateArtifacts({
@@ -150,7 +150,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: [],
-        })
+        }),
       );
       const result = await updateArtifacts({
         packageFileName: 'gradle/wrapper/gradle-wrapper.properties',
@@ -175,12 +175,12 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
         .get('/distributions/gradle-6.3-bin.zip.sha256')
         .reply(
           200,
-          '038794feef1f4745c6347107b6726279d1c824f3fc634b60f86ace1e9fbd1768'
+          '038794feef1f4745c6347107b6726279d1c824f3fc634b60f86ace1e9fbd1768',
         );
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['gradle/wrapper/gradle-wrapper.properties'],
-        })
+        }),
       );
       GlobalConfig.set({
         ...adminConfig,
@@ -233,12 +233,12 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
         .get('/distributions/gradle-6.3-bin.zip.sha256')
         .reply(
           200,
-          '038794feef1f4745c6347107b6726279d1c824f3fc634b60f86ace1e9fbd1768'
+          '038794feef1f4745c6347107b6726279d1c824f3fc634b60f86ace1e9fbd1768',
         );
       git.getRepoStatus.mockResolvedValueOnce(
         partial<StatusResult>({
           modified: ['gradle/wrapper/gradle-wrapper.properties'],
-        })
+        }),
       );
       GlobalConfig.set({ ...adminConfig, binarySource: 'install' });
 
@@ -301,14 +301,14 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
             'sub/gradlew',
             'sub/gradlew.bat',
           ],
-        })
+        }),
       );
 
       const res = await updateArtifacts({
         packageFileName: 'sub/gradle/wrapper/gradle-wrapper.properties',
         updatedDeps: [],
         newPackageFileContent: Fixtures.get(
-          'expectedFiles/gradle/wrapper/gradle-wrapper.properties'
+          'expectedFiles/gradle/wrapper/gradle-wrapper.properties',
         ),
         config: { ...config, newValue: '6.3' },
       });
@@ -324,7 +324,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
             path: fileProjectPath,
             contents: 'test',
           },
-        }))
+        })),
       );
       expect(execSnapshots).toMatchObject([
         {
@@ -360,7 +360,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
               distributionUrl = "https://services.gradle.org/distributions/gradle-6.3-bin.zip"
             }`);
           return Promise.resolve();
-        }
+        },
       );
 
       const res = await updateBuildFile('', {
@@ -389,7 +389,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
               distributionUrl = "https://services.gradle.org/distributions/gradle-$gradleVersion-all.zip"
             }`);
           return Promise.resolve();
-        }
+        },
       );
 
       const res = await updateBuildFile('', {
@@ -412,7 +412,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
           'https://services.gradle.org/distributions/gradle-6.3-bin.zip',
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'build.gradle or build.gradle.kts not found'
+        'build.gradle or build.gradle.kts not found',
       );
       expect(res).toBe('build.gradle.kts');
     });
@@ -424,7 +424,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
 
       const res = await updateLockFiles('', {});
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'build.gradle or build.gradle.kts not found'
+        'build.gradle or build.gradle.kts not found',
       );
       expect(res).toBeNull();
     });
@@ -445,7 +445,7 @@ describe('modules/manager/gradle-wrapper/artifacts', () => {
       git.getRepoStatus.mockResolvedValue(
         partial<StatusResult>({
           modified: ['gradle.lockfile'],
-        })
+        }),
       );
 
       const res = await updateArtifacts({
diff --git a/lib/modules/manager/gradle-wrapper/artifacts.ts b/lib/modules/manager/gradle-wrapper/artifacts.ts
index 02798bf6f1bdf9eb2f8888915aebe8196977f6fe..170940bf89545c1cb528943ed3d530f227933d4f 100644
--- a/lib/modules/manager/gradle-wrapper/artifacts.ts
+++ b/lib/modules/manager/gradle-wrapper/artifacts.ts
@@ -36,7 +36,7 @@ type Ctx = string[];
 
 async function addIfUpdated(
   status: StatusResult,
-  fileProjectPath: string
+  fileProjectPath: string,
 ): Promise<UpdateArtifactsResult | null> {
   if (status.modified.includes(fileProjectPath)) {
     return {
@@ -69,7 +69,7 @@ async function getDistributionChecksum(url: string): Promise<string> {
 
 export async function updateBuildFile(
   localGradleDir: string,
-  wrapperProperties: Record<string, string | undefined | null>
+  wrapperProperties: Record<string, string | undefined | null>,
 ): Promise<string> {
   let buildFileName = join(localGradleDir, 'build.gradle');
   if (!(await localPathExists(buildFileName))) {
@@ -99,7 +99,7 @@ export async function updateBuildFile(
             buildFileUpdated,
             offset,
             value,
-            newValue
+            newValue,
           );
           return ctx;
         }),
@@ -114,7 +114,7 @@ export async function updateBuildFile(
 
 export async function updateLockFiles(
   buildFileName: string,
-  config: UpdateArtifactsConfig
+  config: UpdateArtifactsConfig,
 ): Promise<UpdateArtifactsResult[] | null> {
   const buildFileContent = await readLocalFile(buildFileName, 'utf8');
   if (!buildFileContent) {
@@ -159,8 +159,8 @@ export async function updateArtifacts({
           packageFileName,
           newPackageFileContent.replace(
             /distributionSha256Sum=.*/,
-            `distributionSha256Sum=${checksum}`
-          )
+            `distributionSha256Sum=${checksum}`,
+          ),
         );
         cmd += ` --gradle-distribution-sha256-sum ${quote(checksum)}`;
       }
@@ -189,7 +189,7 @@ export async function updateArtifacts({
       }
       logger.warn(
         { err },
-        'Error executing gradle wrapper update command. It can be not a critical one though.'
+        'Error executing gradle wrapper update command. It can be not a critical one though.',
       );
     }
 
@@ -205,14 +205,14 @@ export async function updateArtifacts({
       packageFileName,
       buildFileName,
       ...['gradle/wrapper/gradle-wrapper.jar', 'gradlew', 'gradlew.bat'].map(
-        (filename) => join(localGradleDir, filename)
+        (filename) => join(localGradleDir, filename),
       ),
     ];
     const updateArtifactsResult = (
       await Promise.all(
         artifactFileNames.map((fileProjectPath) =>
-          addIfUpdated(status, fileProjectPath)
-        )
+          addIfUpdated(status, fileProjectPath),
+        ),
       )
     ).filter(is.truthy);
     if (lockFiles) {
@@ -221,7 +221,7 @@ export async function updateArtifacts({
 
     logger.debug(
       { files: updateArtifactsResult.map((r) => r.file?.path) },
-      `Returning updated gradle-wrapper files`
+      `Returning updated gradle-wrapper files`,
     );
     return updateArtifactsResult;
   } catch (err) {
diff --git a/lib/modules/manager/gradle-wrapper/extract.spec.ts b/lib/modules/manager/gradle-wrapper/extract.spec.ts
index 7746bc170a75819ef55b3932a4a320f778ecb6bf..f83afd2a87a9cb8afa69136b9f557a09933b90ed 100644
--- a/lib/modules/manager/gradle-wrapper/extract.spec.ts
+++ b/lib/modules/manager/gradle-wrapper/extract.spec.ts
@@ -4,19 +4,19 @@ import { extractPackageFile } from '.';
 const typeBinFileContent = Fixtures.get('gradle-wrapper-bin.properties');
 const typeAllFileContent = Fixtures.get('gradle-wrapper-all.properties');
 const prereleaseVersionFileContent = Fixtures.get(
-  'gradle-wrapper-prerelease.properties'
+  'gradle-wrapper-prerelease.properties',
 );
 const unknownFormatFileContent = Fixtures.get(
-  'gradle-wrapper-unknown-format.properties'
+  'gradle-wrapper-unknown-format.properties',
 );
 const whitespacePropertiesFile = Fixtures.get(
-  'gradle-wrapper-whitespace.properties'
+  'gradle-wrapper-whitespace.properties',
 );
 const customTypeBinFileContent = Fixtures.get(
-  'custom-gradle-wrapper-bin.properties'
+  'custom-gradle-wrapper-bin.properties',
 );
 const customTypeAllFileContent = Fixtures.get(
-  'custom-gradle-wrapper-all.properties'
+  'custom-gradle-wrapper-all.properties',
 );
 
 describe('modules/manager/gradle-wrapper/extract', () => {
diff --git a/lib/modules/manager/gradle-wrapper/extract.ts b/lib/modules/manager/gradle-wrapper/extract.ts
index 98fa96ab0db3798d1de25f43b4f4a9be8940a207..147f9ce9f7807fc649f2748619e04583caaf232c 100644
--- a/lib/modules/manager/gradle-wrapper/extract.ts
+++ b/lib/modules/manager/gradle-wrapper/extract.ts
@@ -5,7 +5,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 import { extractGradleVersion } from './utils';
 
 export function extractPackageFile(
-  fileContent: string
+  fileContent: string,
 ): PackageFileContent | null {
   logger.trace('gradle-wrapper.extractPackageFile()');
   const extractResult = extractGradleVersion(fileContent);
diff --git a/lib/modules/manager/gradle-wrapper/util.spec.ts b/lib/modules/manager/gradle-wrapper/util.spec.ts
index 09234235e99086e9a28404de05b12eead0fbca1a..9670f9de7b421280cffb5d869114d6524fa24f02 100644
--- a/lib/modules/manager/gradle-wrapper/util.spec.ts
+++ b/lib/modules/manager/gradle-wrapper/util.spec.ts
@@ -73,7 +73,7 @@ describe('modules/manager/gradle-wrapper/util', () => {
         partial<Stats>({
           isFile: () => true,
           mode: 0o550,
-        })
+        }),
       );
       expect(await prepareGradleCommand('./gradlew')).toBe('./gradlew');
     });
@@ -82,7 +82,7 @@ describe('modules/manager/gradle-wrapper/util', () => {
       fs.statLocalFile.mockResolvedValue(
         partial<Stats>({
           isFile: () => false,
-        })
+        }),
       );
       expect(await prepareGradleCommand('./gradlew')).toBeNull();
     });
diff --git a/lib/modules/manager/gradle-wrapper/utils.ts b/lib/modules/manager/gradle-wrapper/utils.ts
index e410efa3c173b9db7f4c0e8f266cafbb10da1da9..5e75e3c2e14b7721a99cc8f38f4d7400db591dde 100644
--- a/lib/modules/manager/gradle-wrapper/utils.ts
+++ b/lib/modules/manager/gradle-wrapper/utils.ts
@@ -22,7 +22,7 @@ export function gradleWrapperFileName(): string {
 }
 
 export async function prepareGradleCommand(
-  gradlewFile: string
+  gradlewFile: string,
 ): Promise<string | null> {
   const gradlewStat = await statLocalFile(gradlewFile);
   if (gradlewStat?.isFile() === true) {
@@ -44,7 +44,7 @@ export async function prepareGradleCommand(
  * @returns A Java semver range
  */
 export function getJavaConstraint(
-  gradleVersion: string | null | undefined
+  gradleVersion: string | null | undefined,
 ): string {
   const major = gradleVersion ? gradleVersioning.getMajor(gradleVersion) : null;
   const minor = gradleVersion ? gradleVersioning.getMinor(gradleVersion) : null;
@@ -63,11 +63,11 @@ export function getJavaConstraint(
 
 // https://regex101.com/r/IcOs7P/1
 const DISTRIBUTION_URL_REGEX = regEx(
-  '^(?:distributionUrl\\s*=\\s*)(?<url>\\S*-(?<version>\\d+\\.\\d+(?:\\.\\d+)?(?:-\\w+)*)-(?<type>bin|all)\\.zip)\\s*$'
+  '^(?:distributionUrl\\s*=\\s*)(?<url>\\S*-(?<version>\\d+\\.\\d+(?:\\.\\d+)?(?:-\\w+)*)-(?<type>bin|all)\\.zip)\\s*$',
 );
 
 export function extractGradleVersion(
-  fileContent: string
+  fileContent: string,
 ): GradleVersionExtract | null {
   const lines = fileContent?.split(newlineRegex) ?? [];
 
@@ -82,7 +82,7 @@ export function extractGradleVersion(
     }
   }
   logger.debug(
-    'Gradle wrapper version and url could not be extracted from properties - skipping update'
+    'Gradle wrapper version and url could not be extracted from properties - skipping update',
   );
 
   return null;
diff --git a/lib/modules/manager/gradle/artifacts.spec.ts b/lib/modules/manager/gradle/artifacts.spec.ts
index 2a994cb878d829ee9688fc0fb9b061ec170c373d..6c0afa81ad9cbb82eb4a7af3781fabafda10f832 100644
--- a/lib/modules/manager/gradle/artifacts.spec.ts
+++ b/lib/modules/manager/gradle/artifacts.spec.ts
@@ -76,7 +76,7 @@ describe('modules/manager/gradle/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['build.gradle', 'gradle.lockfile'],
-      })
+      }),
     );
 
     // TODO: fix types, jest is using wrong overload (#22198)
@@ -107,11 +107,11 @@ describe('modules/manager/gradle/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '',
           config: {},
-        })
+        }),
       ).toBeNull();
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'No Gradle dependency lockfiles or verification metadata found - skipping update'
+        'No Gradle dependency lockfiles or verification metadata found - skipping update',
       );
       expect(execSnapshots).toBeEmptyArray();
     });
@@ -126,11 +126,11 @@ describe('modules/manager/gradle/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '',
           config: {},
-        })
+        }),
       ).toBeNull();
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Found Gradle dependency lockfiles but no gradlew - aborting update'
+        'Found Gradle dependency lockfiles but no gradlew - aborting update',
       );
       expect(execSnapshots).toBeEmptyArray();
     });
@@ -267,11 +267,11 @@ describe('modules/manager/gradle/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '',
           config: { isLockFileMaintenance: true },
-        })
+        }),
       ).toBeNull();
 
       expect(logger.logger.trace).toHaveBeenCalledWith(
-        'No build.gradle(.kts) file or not in root project - skipping lock file maintenance'
+        'No build.gradle(.kts) file or not in root project - skipping lock file maintenance',
       );
     });
 
@@ -462,7 +462,7 @@ describe('modules/manager/gradle/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '',
           config: { isLockFileMaintenance: true },
-        })
+        }),
       ).toBeNull();
     });
 
@@ -475,7 +475,7 @@ describe('modules/manager/gradle/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '',
           config: { isLockFileMaintenance: true },
-        })
+        }),
       ).toEqual([
         {
           artifactError: {
@@ -510,7 +510,7 @@ describe('modules/manager/gradle/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '{}',
           config: {},
-        })
+        }),
       ).rejects.toThrow(TEMPORARY_ERROR);
     });
 
@@ -567,7 +567,7 @@ describe('modules/manager/gradle/artifacts', () => {
       git.getRepoStatus.mockResolvedValue(
         partial<StatusResult>({
           modified: ['build.gradle', 'gradle/verification-metadata.xml'],
-        })
+        }),
       );
 
       const res = await updateArtifacts({
@@ -617,7 +617,7 @@ describe('modules/manager/gradle/artifacts', () => {
             'gradle.lockfile',
             'gradle/verification-metadata.xml',
           ],
-        })
+        }),
       );
 
       const res = await updateArtifacts({
@@ -682,7 +682,7 @@ describe('modules/manager/gradle/artifacts', () => {
       git.getRepoStatus.mockResolvedValue(
         partial<StatusResult>({
           modified: ['build.gradle', 'gradle/verification-metadata.xml'],
-        })
+        }),
       );
       fs.readLocalFile.mockImplementation((fileName: string): Promise<any> => {
         let content = '';
@@ -725,7 +725,7 @@ describe('modules/manager/gradle/artifacts', () => {
       git.getRepoStatus.mockResolvedValue(
         partial<StatusResult>({
           modified: ['build.gradle', 'gradle/verification-metadata.xml'],
-        })
+        }),
       );
       fs.readLocalFile.mockImplementation((fileName: string): Promise<any> => {
         let content = '';
@@ -767,7 +767,7 @@ describe('modules/manager/gradle/artifacts', () => {
       git.getRepoStatus.mockResolvedValue(
         partial<StatusResult>({
           modified: ['build.gradle', 'gradle/verification-metadata.xml'],
-        })
+        }),
       );
       fs.readLocalFile.mockImplementation((fileName: string): Promise<any> => {
         let content = '';
diff --git a/lib/modules/manager/gradle/artifacts.ts b/lib/modules/manager/gradle/artifacts.ts
index 8a9a0ab81d98a3d0d599bd2f3e09cbb9406ebecb..0a45657b5c276ceb9b0604585398aa06c40bd3f3 100644
--- a/lib/modules/manager/gradle/artifacts.ts
+++ b/lib/modules/manager/gradle/artifacts.ts
@@ -29,7 +29,7 @@ function isLockFile(fileName: string): boolean {
 }
 
 async function getUpdatedLockfiles(
-  oldLockFileContentMap: Record<string, string | null>
+  oldLockFileContentMap: Record<string, string | null>,
 ): Promise<UpdateArtifactsResult[]> {
   const res: UpdateArtifactsResult[] = [];
 
@@ -58,7 +58,7 @@ async function getUpdatedLockfiles(
 
 async function getSubProjectList(
   cmd: string,
-  execOptions: ExecOptions
+  execOptions: ExecOptions,
 ): Promise<string[]> {
   const subprojects = ['']; // = root project
   const subprojectsRegex = regEx(/^[ \t]*subprojects: \[(?<subprojects>.+)\]/m);
@@ -81,7 +81,7 @@ async function getSubProjectList(
 async function getGradleVersion(gradlewFile: string): Promise<string | null> {
   const propertiesFile = join(
     dirname(gradlewFile),
-    'gradle/wrapper/gradle-wrapper.properties'
+    'gradle/wrapper/gradle-wrapper.properties',
   );
   const properties = await readLocalFile(propertiesFile, 'utf8');
   const extractResult = extractGradleVersion(properties ?? '');
@@ -91,7 +91,7 @@ async function getGradleVersion(gradlewFile: string): Promise<string | null> {
 
 async function buildUpdateVerificationMetadataCmd(
   verificationMetadataFile: string | undefined,
-  baseCmd: string
+  baseCmd: string,
 ): Promise<string | null> {
   if (!verificationMetadataFile) {
     return null;
@@ -113,11 +113,11 @@ async function buildUpdateVerificationMetadataCmd(
   }
   if (
     verificationMetadata?.includes(
-      '<verify-signatures>true</verify-signatures>'
+      '<verify-signatures>true</verify-signatures>',
     )
   ) {
     logger.debug(
-      'Dependency signature verification enabled - generating PGP signatures'
+      'Dependency signature verification enabled - generating PGP signatures',
     );
     // signature verification requires at least one checksum type as fallback.
     if (!hashTypes.length) {
@@ -142,11 +142,11 @@ export async function updateArtifacts({
   const fileList = await scm.getFileList();
   const lockFiles = fileList.filter((file) => isLockFile(file));
   const verificationMetadataFile = fileList.find((fileName) =>
-    fileName.endsWith('gradle/verification-metadata.xml')
+    fileName.endsWith('gradle/verification-metadata.xml'),
   );
   if (!lockFiles.length && !verificationMetadataFile) {
     logger.debug(
-      'No Gradle dependency lockfiles or verification metadata found - skipping update'
+      'No Gradle dependency lockfiles or verification metadata found - skipping update',
     );
     return null;
   }
@@ -155,7 +155,7 @@ export async function updateArtifacts({
   const gradlewFile = await findUpLocal(gradlewName, dirname(packageFileName));
   if (!gradlewFile) {
     logger.debug(
-      'Found Gradle dependency lockfiles but no gradlew - aborting update'
+      'Found Gradle dependency lockfiles but no gradlew - aborting update',
     );
     return null;
   }
@@ -166,7 +166,7 @@ export async function updateArtifacts({
       dirname(packageFileName) !== dirname(gradlewFile))
   ) {
     logger.trace(
-      'No build.gradle(.kts) file or not in root project - skipping lock file maintenance'
+      'No build.gradle(.kts) file or not in root project - skipping lock file maintenance',
     );
     return null;
   }
@@ -221,7 +221,7 @@ export async function updateArtifacts({
     const updateVerificationMetadataCmd =
       await buildUpdateVerificationMetadataCmd(
         verificationMetadataFile,
-        baseCmd
+        baseCmd,
       );
     if (updateVerificationMetadataCmd) {
       cmds.push(updateVerificationMetadataCmd);
diff --git a/lib/modules/manager/gradle/extract.spec.ts b/lib/modules/manager/gradle/extract.spec.ts
index 9f703ff5739189e070eae75973e066c494b74530..7a0900ddfc2b3c8383e695d54eaaad061c2e3d90 100644
--- a/lib/modules/manager/gradle/extract.spec.ts
+++ b/lib/modules/manager/gradle/extract.spec.ts
@@ -23,7 +23,7 @@ function mockFs(files: Record<string, string>): void {
       return existingFileNameWithPath
         .slice(0, existingFileNameWithPath.lastIndexOf('/') + 1)
         .concat(otherFileName);
-    }
+    },
   );
 }
 
@@ -42,8 +42,8 @@ describe('modules/manager/gradle/extract', () => {
     expect(
       await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
-      )
+        Object.keys(fsMock),
+      ),
     ).toBeNull();
   });
 
@@ -62,7 +62,7 @@ describe('modules/manager/gradle/extract', () => {
 
     expect(logger.logger.debug).toHaveBeenCalledWith(
       { err, config: {}, packageFile: filename },
-      `Failed to process Gradle file`
+      `Failed to process Gradle file`,
     );
   });
 
@@ -75,7 +75,7 @@ describe('modules/manager/gradle/extract', () => {
 
     const res = await extractAllPackageFiles(
       partial<ExtractConfig>(),
-      Object.keys(fsMock)
+      Object.keys(fsMock),
     );
 
     expect(res).toMatchObject([
@@ -102,7 +102,7 @@ describe('modules/manager/gradle/extract', () => {
 
     const res = await extractAllPackageFiles(
       partial<ExtractConfig>(),
-      Object.keys(fsMock)
+      Object.keys(fsMock),
     );
 
     expect(res).toMatchObject([
@@ -178,7 +178,7 @@ describe('modules/manager/gradle/extract', () => {
 
     const res = await extractAllPackageFiles(
       partial<ExtractConfig>(),
-      Object.keys(fsMock)
+      Object.keys(fsMock),
     );
 
     expect(res).toMatchObject([
@@ -252,7 +252,7 @@ describe('modules/manager/gradle/extract', () => {
 
     const res = await extractAllPackageFiles(
       partial<ExtractConfig>(),
-      Object.keys(fsMock)
+      Object.keys(fsMock),
     );
 
     expect(res).toMatchObject([
@@ -299,7 +299,7 @@ describe('modules/manager/gradle/extract', () => {
 
     const res = await extractAllPackageFiles(
       partial<ExtractConfig>(),
-      Object.keys(fsMock)
+      Object.keys(fsMock),
     );
 
     expect(res).toMatchObject([
@@ -324,7 +324,7 @@ describe('modules/manager/gradle/extract', () => {
 
     const res = await extractAllPackageFiles(
       partial<ExtractConfig>(),
-      Object.keys(fsMock)
+      Object.keys(fsMock),
     );
 
     expect(res).toMatchObject([
@@ -358,7 +358,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -406,7 +406,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -467,7 +467,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -505,7 +505,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
       expect(res).toMatchObject([
         {
@@ -622,7 +622,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -735,8 +735,8 @@ describe('modules/manager/gradle/extract', () => {
       expect(
         await extractAllPackageFiles(
           partial<ExtractConfig>(),
-          Object.keys(fsMock)
-        )
+          Object.keys(fsMock),
+        ),
       ).toBeNull();
     });
 
@@ -757,7 +757,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
       expect(res).toMatchObject([
         {
@@ -800,7 +800,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
       expect(res).toMatchObject([
         {
@@ -870,7 +870,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -950,7 +950,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -972,8 +972,8 @@ describe('modules/manager/gradle/extract', () => {
       expect(
         await extractAllPackageFiles(
           partial<ExtractConfig>(),
-          Object.keys(fsMock)
-        )
+          Object.keys(fsMock),
+        ),
       ).toBeNull();
     });
 
@@ -986,8 +986,8 @@ describe('modules/manager/gradle/extract', () => {
       expect(
         await extractAllPackageFiles(
           partial<ExtractConfig>(),
-          Object.keys(fsMock)
-        )
+          Object.keys(fsMock),
+        ),
       ).toBeNull();
     });
   });
@@ -1006,7 +1006,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       expect(res).toMatchObject([
@@ -1054,7 +1054,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
       expect(res).toBeNull();
     });
@@ -1068,7 +1068,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
       expect(res).toBeNull();
     });
@@ -1097,7 +1097,7 @@ describe('modules/manager/gradle/extract', () => {
 
       const res = await extractAllPackageFiles(
         partial<ExtractConfig>(),
-        Object.keys(fsMock)
+        Object.keys(fsMock),
       );
 
       // Each lock dep is only present once, with highest prio for exact prop match, then globs from longest to shortest
diff --git a/lib/modules/manager/gradle/extract.ts b/lib/modules/manager/gradle/extract.ts
index 010a24de8105095ee35f2e1f06551792fb32b0b3..d70a5722dbb6e2b575b37dd4ef7210d107f4803c 100644
--- a/lib/modules/manager/gradle/extract.ts
+++ b/lib/modules/manager/gradle/extract.ts
@@ -31,11 +31,12 @@ const mavenDatasource = MavenDatasource.id;
 
 function updatePackageRegistries(
   packageRegistries: PackageRegistry[],
-  urls: PackageRegistry[]
+  urls: PackageRegistry[],
 ): void {
   for (const url of urls) {
     const registryAlreadyKnown = packageRegistries.some(
-      (item) => item.registryUrl === url.registryUrl && item.scope === url.scope
+      (item) =>
+        item.registryUrl === url.registryUrl && item.scope === url.scope,
     );
     if (!registryAlreadyKnown) {
       packageRegistries.push(url);
@@ -45,7 +46,7 @@ function updatePackageRegistries(
 
 function getRegistryUrlsForDep(
   packageRegistries: PackageRegistry[],
-  dep: PackageDependency<GradleManagerData>
+  dep: PackageDependency<GradleManagerData>,
 ): string[] {
   const scope = dep.depType === 'plugin' ? 'plugin' : 'dep';
 
@@ -65,7 +66,7 @@ async function parsePackageFiles(
   packageFiles: string[],
   extractedDeps: PackageDependency<GradleManagerData>[],
   packageFilesByName: Record<string, PackageFile>,
-  packageRegistries: PackageRegistry[]
+  packageRegistries: PackageRegistry[],
 ): Promise<PackageDependency<GradleManagerData>[]> {
   const varRegistry: VariableRegistry = {};
   const fileContents = await getLocalFiles(packageFiles);
@@ -100,7 +101,7 @@ async function parsePackageFiles(
         const { vars: gradleVars, deps } = parseKotlinSource(
           content,
           vars,
-          packageFile
+          packageFile,
         );
         updateVars(varRegistry, '/', gradleVars);
         extractedDeps.push(...deps);
@@ -118,7 +119,7 @@ async function parsePackageFiles(
     } catch (err) {
       logger.debug(
         { err, config, packageFile },
-        `Failed to process Gradle file`
+        `Failed to process Gradle file`,
       );
     }
   }
@@ -128,14 +129,14 @@ async function parsePackageFiles(
 
 export async function extractAllPackageFiles(
   config: ExtractConfig,
-  packageFiles: string[]
+  packageFiles: string[],
 ): Promise<PackageFile[] | null> {
   const packageFilesByName: Record<string, PackageFile> = {};
   const packageRegistries: PackageRegistry[] = [];
   const extractedDeps: PackageDependency<GradleManagerData>[] = [];
   const kotlinSourceFiles = packageFiles.filter(isKotlinSourceFile);
   const gradleFiles = reorderFiles(
-    packageFiles.filter((e) => !kotlinSourceFiles.includes(e))
+    packageFiles.filter((e) => !kotlinSourceFiles.includes(e)),
   );
 
   await parsePackageFiles(
@@ -143,7 +144,7 @@ export async function extractAllPackageFiles(
     [...kotlinSourceFiles, ...kotlinSourceFiles, ...gradleFiles],
     extractedDeps,
     packageFilesByName,
-    packageRegistries
+    packageRegistries,
   );
 
   if (!extractedDeps.length) {
@@ -185,7 +186,7 @@ export async function extractAllPackageFiles(
         (item) =>
           item.depName === dep.depName &&
           item.managerData?.fileReplacePosition ===
-            dep.managerData?.fileReplacePosition
+            dep.managerData?.fileReplacePosition,
       );
       if (!depAlreadyInPkgFile) {
         pkgFile.deps.push(dep);
diff --git a/lib/modules/manager/gradle/extract/catalog.ts b/lib/modules/manager/gradle/extract/catalog.ts
index 8bed620e1c7a6a87c62890d664f625d3bf2591b5..e4ed8d634266a685f0625eaee6a589e3f7b41126 100644
--- a/lib/modules/manager/gradle/extract/catalog.ts
+++ b/lib/modules/manager/gradle/extract/catalog.ts
@@ -18,12 +18,12 @@ import type {
 function findVersionIndex(
   content: string,
   depName: string,
-  version: string
+  version: string,
 ): number {
   const eDn = escapeRegExp(depName);
   const eVer = escapeRegExp(version);
   const re = regEx(
-    `(?:id\\s*=\\s*)?['"]?${eDn}["']?(?:(?:\\s*=\\s*)|:|,\\s*)(?:.*version(?:\\.ref)?(?:\\s*\\=\\s*))?["']?${eVer}['"]?`
+    `(?:id\\s*=\\s*)?['"]?${eDn}["']?(?:(?:\\s*=\\s*)|:|,\\s*)(?:.*version(?:\\.ref)?(?:\\s*\\=\\s*))?["']?${eVer}['"]?`,
   );
   const match = re.exec(content);
   if (match) {
@@ -37,20 +37,20 @@ function findVersionIndex(
 function findIndexAfter(
   content: string,
   sliceAfter: string,
-  find: string
+  find: string,
 ): number {
   const slicePoint = content.indexOf(sliceAfter) + sliceAfter.length;
   return slicePoint + content.slice(slicePoint).indexOf(find);
 }
 
 function isArtifactDescriptor(
-  obj: GradleCatalogArtifactDescriptor | GradleCatalogModuleDescriptor
+  obj: GradleCatalogArtifactDescriptor | GradleCatalogModuleDescriptor,
 ): obj is GradleCatalogArtifactDescriptor {
   return hasKey('group', obj);
 }
 
 function isVersionPointer(
-  obj: GradleVersionCatalogVersion | undefined
+  obj: GradleVersionCatalogVersion | undefined,
 ): obj is VersionPointer {
   return hasKey('ref', obj);
 }
@@ -61,7 +61,7 @@ function normalizeAlias(alias: string): string {
 
 function findOriginalAlias(
   versions: Record<string, GradleVersionPointerTarget>,
-  alias: string
+  alias: string,
 ): string {
   const normalizedAlias = normalizeAlias(alias);
   for (const sectionKey of Object.keys(versions)) {
@@ -244,7 +244,7 @@ function extractDependency({
 
 export function parseCatalog(
   packageFile: string,
-  content: string
+  content: string,
 ): PackageDependency<GradleManagerData>[] {
   const tomlContent = parseToml(content) as GradleCatalog;
   const versions = tomlContent.versions ?? {};
diff --git a/lib/modules/manager/gradle/extract/consistent-versions-plugin.ts b/lib/modules/manager/gradle/extract/consistent-versions-plugin.ts
index ea78d1de4358d7a5e2408cecbe62a5d1a638f543..8a1d602a2d1ff168d9fc64022a9d748e28bb8bd0 100644
--- a/lib/modules/manager/gradle/extract/consistent-versions-plugin.ts
+++ b/lib/modules/manager/gradle/extract/consistent-versions-plugin.ts
@@ -20,11 +20,11 @@ const LOCKFILE_HEADER_TEXT =
  */
 export function usesGcv(
   versionsPropsFilename: string,
-  fileContents: Record<string, string | null>
+  fileContents: Record<string, string | null>,
 ): boolean {
   const versionsLockFile: string = fs.getSiblingFileName(
     versionsPropsFilename,
-    VERSIONS_LOCK
+    VERSIONS_LOCK,
   );
   return (
     fileContents[versionsLockFile]?.startsWith(LOCKFILE_HEADER_TEXT) ?? false
@@ -58,7 +58,7 @@ export function isGcvLockFile(fileName: string): boolean {
  */
 export function parseGcv(
   propsFileName: string,
-  fileContents: Record<string, string | null>
+  fileContents: Record<string, string | null>,
 ): PackageDependency<GradleManagerData>[] {
   const propsFileContent = coerceString(fileContents[propsFileName]);
   const lockFileName = fs.getSiblingFileName(propsFileName, VERSIONS_LOCK);
@@ -120,7 +120,7 @@ function globToRegex(depName: string): RegExp {
     depName
       .replace(/\*/g, '_WC_CHAR_')
       .replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
-      .replace(/_WC_CHAR_/g, '.*?')
+      .replace(/_WC_CHAR_/g, '.*?'),
   );
 }
 
@@ -139,7 +139,7 @@ interface VersionWithDepType {
  */
 export function parseLockFile(input: string): Map<string, VersionWithDepType> {
   const lockLineRegex = regEx(
-    `^(?<depName>[^:]+:[^:]+):(?<lockVersion>[^ ]+) \\(\\d+ constraints: [0-9a-f]+\\)$`
+    `^(?<depName>[^:]+:[^:]+):(?<lockVersion>[^ ]+) \\(\\d+ constraints: [0-9a-f]+\\)$`,
   );
 
   const depVerMap = new Map<string, VersionWithDepType>();
@@ -159,7 +159,7 @@ export function parseLockFile(input: string): Map<string, VersionWithDepType> {
     }
   }
   logger.trace(
-    `Found ${depVerMap.size} locked dependencies in ${VERSIONS_LOCK}.`
+    `Found ${depVerMap.size} locked dependencies in ${VERSIONS_LOCK}.`,
   );
   return depVerMap;
 }
@@ -170,10 +170,10 @@ export function parseLockFile(input: string): Map<string, VersionWithDepType> {
  * @return two maps, first being exact matches, second regex matches
  */
 export function parsePropsFile(
-  input: string
+  input: string,
 ): [Map<string, VersionWithPosition>, Map<string, VersionWithPosition>] {
   const propsLineRegex = regEx(
-    `^(?<depName>[^:]+:[^=]+?) *= *(?<propsVersion>.*)$`
+    `^(?<depName>[^:]+:[^=]+?) *= *(?<propsVersion>.*)$`,
   );
   const depVerExactMap = new Map<string, VersionWithPosition>();
   const depVerRegexMap = new Map<string, VersionWithPosition>();
@@ -207,7 +207,7 @@ export function parsePropsFile(
     startOfLineIdx += line.length + (isCrLf ? 2 : 1);
   }
   logger.trace(
-    `Found ${depVerExactMap.size} dependencies and ${depVerRegexMap.size} wildcard dependencies in ${VERSIONS_PROPS}.`
+    `Found ${depVerExactMap.size} dependencies and ${depVerRegexMap.size} wildcard dependencies in ${VERSIONS_PROPS}.`,
   );
   return [depVerExactMap, new Map([...depVerRegexMap].sort().reverse())];
 }
diff --git a/lib/modules/manager/gradle/parser.spec.ts b/lib/modules/manager/gradle/parser.spec.ts
index 6f778f563fe97b5db502064a823cfd801b144cf7..2fb70034c1abc12f948651ed39a463d258d25ce4 100644
--- a/lib/modules/manager/gradle/parser.spec.ts
+++ b/lib/modules/manager/gradle/parser.spec.ts
@@ -13,7 +13,7 @@ function mockFs(files: Record<string, string>): void {
       return existingFileNameWithPath
         .slice(0, existingFileNameWithPath.lastIndexOf('/') + 1)
         .concat(otherFileName);
-    }
+    },
   );
 }
 
@@ -767,7 +767,7 @@ describe('modules/manager/gradle/parser', () => {
       const { deps } = parseGradle(content, {}, 'build.gradle');
       const replacementIndices = deps.map(({ managerData, currentValue }) =>
         // TODO #22198
-        content.slice(managerData!.fileReplacePosition).indexOf(currentValue!)
+        content.slice(managerData!.fileReplacePosition).indexOf(currentValue!),
       );
       expect(replacementIndices.every((idx) => idx === 0)).toBeTrue();
       expect(deps).toMatchSnapshot();
@@ -800,7 +800,7 @@ describe('modules/manager/gradle/parser', () => {
 
     it('attaches packageFile', () => {
       expect(
-        parseProps('foo = bar', 'foo/bar/gradle.properties')
+        parseProps('foo = bar', 'foo/bar/gradle.properties'),
       ).toMatchObject({
         vars: { foo: { packageFile: 'foo/bar/gradle.properties' } },
       });
@@ -878,7 +878,7 @@ describe('modules/manager/gradle/parser', () => {
         [def, input].join('\n'),
         {},
         '',
-        fileContents
+        fileContents,
       );
       expect(vars).toMatchObject(output);
     });
@@ -889,10 +889,10 @@ describe('modules/manager/gradle/parser', () => {
         {},
         '',
         fileContents,
-        3
+        3,
       );
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Max recursion depth reached in script file: foo/bar.gradle'
+        'Max recursion depth reached in script file: foo/bar.gradle',
       );
       expect(vars).toBeEmpty();
     });
diff --git a/lib/modules/manager/gradle/parser.ts b/lib/modules/manager/gradle/parser.ts
index 5d5367baf56179e9592bfc56820f801a7150ab56..abf2361eaca859c8335e1d513ea7153b2b89d74b 100644
--- a/lib/modules/manager/gradle/parser.ts
+++ b/lib/modules/manager/gradle/parser.ts
@@ -43,7 +43,7 @@ export function parseGradle(
   initVars: PackageVariables = {},
   packageFile = '',
   fileContents: Record<string, string | null> = {},
-  recursionDepth = 0
+  recursionDepth = 0,
 ): ParseGradleResult {
   let vars: PackageVariables = { ...initVars };
   const deps: PackageDependency<GradleManagerData>[] = [];
@@ -60,7 +60,7 @@ export function parseGradle(
       qRegistryUrls,
       qVersionCatalogs,
       qLongFormDep,
-      qApplyFrom
+      qApplyFrom,
     ),
   });
 
@@ -84,7 +84,7 @@ export function parseGradle(
 export function parseKotlinSource(
   input: string,
   initVars: PackageVariables = {},
-  packageFile = ''
+  packageFile = '',
 ): { vars: PackageVariables; deps: PackageDependency<GradleManagerData>[] } {
   let vars: PackageVariables = { ...initVars };
   const deps: PackageDependency<GradleManagerData>[] = [];
@@ -111,12 +111,12 @@ export function parseKotlinSource(
 
 const propWord = '[a-zA-Z_][a-zA-Z0-9_]*(?:\\.[a-zA-Z_][a-zA-Z0-9_]*)*';
 const propRegex = regEx(
-  `^(?<leftPart>\\s*(?<key>${propWord})\\s*[= :]\\s*['"]?)(?<value>[^\\s'"]+)['"]?\\s*$`
+  `^(?<leftPart>\\s*(?<key>${propWord})\\s*[= :]\\s*['"]?)(?<value>[^\\s'"]+)['"]?\\s*$`,
 );
 
 export function parseProps(
   input: string,
-  packageFile?: string
+  packageFile?: string,
 ): { vars: PackageVariables; deps: PackageDependency<GradleManagerData>[] } {
   let offset = 0;
   const vars: PackageVariables = {};
diff --git a/lib/modules/manager/gradle/parser/apply-from.ts b/lib/modules/manager/gradle/parser/apply-from.ts
index dfab77b3d95444139a98d57351c35bf648fdd956..7c0bf1488d0c381ed4762d402d7f84ebc91faedc 100644
--- a/lib/modules/manager/gradle/parser/apply-from.ts
+++ b/lib/modules/manager/gradle/parser/apply-from.ts
@@ -13,7 +13,7 @@ const qApplyFromFile = q
         q
           .opt(q.sym<Ctx>(regEx(/^(?:rootProject|project)$/)).op('.'))
           .sym('file'),
-        q.opt<Ctx>(q.sym('new')).sym('File')
+        q.opt<Ctx>(q.sym('new')).sym('File'),
       )
       .tree({
         maxDepth: 1,
@@ -24,12 +24,12 @@ const qApplyFromFile = q
           .opt(
             q
               .join(qValueMatcher, q.op(','))
-              .handler((ctx) => storeInTokenMap(ctx, 'parentPath'))
+              .handler((ctx) => storeInTokenMap(ctx, 'parentPath')),
           )
           .join(qValueMatcher)
           .end(),
       }),
-    qValueMatcher
+    qValueMatcher,
   )
   .handler((ctx) => storeInTokenMap(ctx, 'scriptFile'));
 
@@ -47,7 +47,7 @@ export const qApplyFrom = q
         startsWith: '(',
         endsWith: ')',
         search: q.begin<Ctx>().sym('from').op('=').join(qApplyFromFile).end(),
-      })
+      }),
   )
   .handler(handleApplyFrom)
   .handler(cleanupTempVars);
diff --git a/lib/modules/manager/gradle/parser/assignments.ts b/lib/modules/manager/gradle/parser/assignments.ts
index 424be4895e8c80d7ebfe6e8f96cc796dba581e17..2f6e9260305e23dd9a6f35c63a68ffea2d9cba1e 100644
--- a/lib/modules/manager/gradle/parser/assignments.ts
+++ b/lib/modules/manager/gradle/parser/assignments.ts
@@ -78,11 +78,11 @@ const qGroovySingleMapOfVarAssignment = q.alt(
     .op(':')
     .join(qValueMatcher)
     .handler((ctx) => storeInTokenMap(ctx, 'valToken'))
-    .handler(handleAssignment)
+    .handler(handleAssignment),
 );
 
 const qGroovyMapOfExpr = (
-  search: q.QueryBuilder<Ctx, parser.Node>
+  search: q.QueryBuilder<Ctx, parser.Node>,
 ): q.QueryBuilder<Ctx, parser.Node> =>
   q.alt(
     q.sym(storeVarToken).op(':').tree({
@@ -94,7 +94,7 @@ const qGroovyMapOfExpr = (
       search,
       postHandler: reduceNestingDepth,
     }),
-    qGroovySingleMapOfVarAssignment
+    qGroovySingleMapOfVarAssignment,
   );
 
 // versions = [ android: [ buildTools: '30.0.3' ], kotlin: '1.4.30' ]
@@ -122,7 +122,7 @@ const qKotlinSingleMapOfVarAssignment = qStringValue
   .handler(handleAssignment);
 
 const qKotlinMapOfExpr = (
-  search: q.QueryBuilder<Ctx, parser.Node>
+  search: q.QueryBuilder<Ctx, parser.Node>,
 ): q.QueryBuilder<Ctx, parser.Node> =>
   q.alt(
     qStringValue.sym('to').sym('mapOf').tree({
@@ -134,7 +134,7 @@ const qKotlinMapOfExpr = (
       search,
       postHandler: reduceNestingDepth,
     }),
-    qKotlinSingleMapOfVarAssignment
+    qKotlinSingleMapOfVarAssignment,
   );
 
 // val versions = mapOf("foo1" to "bar1", "foo2" to "bar2", "foo3" to "bar3")
@@ -157,5 +157,5 @@ export const qAssignments = q.alt(
   qGroovyMultiVarAssignment,
   qKotlinSingleVarAssignment,
   qKotlinSingleExtraVarAssignment,
-  qKotlinMultiMapOfVarAssignment
+  qKotlinMultiMapOfVarAssignment,
 );
diff --git a/lib/modules/manager/gradle/parser/common.spec.ts b/lib/modules/manager/gradle/parser/common.spec.ts
index 589130e4983b5c0223e52c9ebdad7c9a44b4d555..e9625d2c2dbbd161fbfbd2c6cff2baa0cf0cfdeb 100644
--- a/lib/modules/manager/gradle/parser/common.spec.ts
+++ b/lib/modules/manager/gradle/parser/common.spec.ts
@@ -77,7 +77,7 @@ describe('modules/manager/gradle/parser/common', () => {
 
   it('loadFromTokenMap', () => {
     expect(() => loadFromTokenMap(ctx, 'foo')).toThrow(
-      'Expected token foo not found'
+      'Expected token foo not found',
     );
 
     ctx.varTokens = [token];
@@ -105,7 +105,7 @@ describe('modules/manager/gradle/parser/common', () => {
     ];
 
     ctx.varTokens.push(
-      ...tokenValues.map((value) => partial<lexer.Token>({ value }))
+      ...tokenValues.map((value) => partial<lexer.Token>({ value })),
     );
     stripReservedPrefixFromKeyTokens(ctx);
     expect(ctx.varTokens).toStrictEqual([{ value: 'foo' }]);
@@ -115,7 +115,7 @@ describe('modules/manager/gradle/parser/common', () => {
     const tokenValues = ['foo', 'bar', 'baz', 'qux'];
 
     ctx.varTokens.push(
-      ...tokenValues.map((value) => partial<lexer.Token>({ value }))
+      ...tokenValues.map((value) => partial<lexer.Token>({ value })),
     );
     coalesceVariable(ctx);
     expect(ctx.varTokens).toStrictEqual([{ value: 'foo.bar.baz.qux' }]);
@@ -131,10 +131,10 @@ describe('modules/manager/gradle/parser/common', () => {
 
     expect(findVariable('unknown-global-var', ctx)).toBeUndefined();
     expect(findVariable('foo3', ctx)).toStrictEqual(
-      ctx.globalVars['test.test.foo3']
+      ctx.globalVars['test.test.foo3'],
     );
     expect(findVariable('test.foo', ctx)).toStrictEqual(
-      ctx.globalVars['test.foo']
+      ctx.globalVars['test.foo'],
     );
     expect(findVariable('foo', ctx)).toStrictEqual(ctx.globalVars['test.foo']);
 
@@ -143,7 +143,7 @@ describe('modules/manager/gradle/parser/common', () => {
 
     ctx.tmpKotlinImportStore = [[token, token]];
     expect(findVariable('test.foo3', ctx)).toStrictEqual(
-      ctx.globalVars['test.test.foo3']
+      ctx.globalVars['test.test.foo3'],
     );
   });
 
@@ -159,20 +159,20 @@ describe('modules/manager/gradle/parser/common', () => {
         ctx,
         {
           bar: { key: '', value: 'BAR' },
-        }
-      )
+        },
+      ),
     ).toBe('fooBARbaz');
     expect(
       interpolateString(
         partial<lexer.Token>([{ type: 'symbol', value: 'foo' }]),
-        ctx
-      )
+        ctx,
+      ),
     ).toBeNull();
     expect(
       interpolateString(
         partial<lexer.Token>([{ type: '_', value: 'foo' }]),
-        ctx
-      )
+        ctx,
+      ),
     ).toBeNull();
   });
 });
diff --git a/lib/modules/manager/gradle/parser/common.ts b/lib/modules/manager/gradle/parser/common.ts
index da60daa174f2841564355fc424839bdf0aa2d035..b564fd30c86e61cee0035aab30b50fd967f79ac0 100644
--- a/lib/modules/manager/gradle/parser/common.ts
+++ b/lib/modules/manager/gradle/parser/common.ts
@@ -65,7 +65,7 @@ export function storeInTokenMap(ctx: Ctx, tokenMapKey: string): Ctx {
 
 export function loadFromTokenMap(
   ctx: Ctx,
-  tokenMapKey: string
+  tokenMapKey: string,
 ): NonEmptyArray<lexer.Token> {
   const tokens = ctx.tokenMap[tokenMapKey];
   if (!tokens) {
@@ -115,7 +115,7 @@ export function coalesceVariable(ctx: Ctx): Ctx {
 export function findVariableInKotlinImport(
   name: string,
   ctx: Ctx,
-  variables: PackageVariables
+  variables: PackageVariables,
 ): VariableData | undefined {
   if (ctx.tmpKotlinImportStore.length && name.includes('.')) {
     for (const tokens of ctx.tmpKotlinImportStore) {
@@ -140,7 +140,7 @@ export function findVariableInKotlinImport(
 export function findVariable(
   name: string,
   ctx: Ctx,
-  variables: PackageVariables = ctx.globalVars
+  variables: PackageVariables = ctx.globalVars,
 ): VariableData | undefined {
   if (ctx.tmpNestingDepth.length) {
     const prefixParts = ctx.tmpNestingDepth.map((token) => token.value);
@@ -164,7 +164,7 @@ export function findVariable(
 export function interpolateString(
   childTokens: lexer.Token[],
   ctx: Ctx,
-  variables: PackageVariables = ctx.globalVars
+  variables: PackageVariables = ctx.globalVars,
 ): string | null {
   const resolvedSubstrings: string[] = [];
   for (const childToken of childTokens) {
@@ -212,10 +212,10 @@ export const qVariableAssignmentIdentifier = q
         startsWith: '[',
         endsWith: ']',
         search: q.begin<Ctx>().join(qStringValueAsSymbol).end(),
-      })
+      }),
     ),
     0,
-    32
+    32,
   )
   .handler(stripReservedPrefixFromKeyTokens);
 
@@ -246,7 +246,7 @@ export const qPropertyAccessIdentifier = q
     q
       .sym<Ctx>(regEx(/^(?:extra|ext)$/))
       .op('.')
-      .sym('get')
+      .sym('get'),
   )
   .tree({
     maxDepth: 1,
@@ -280,7 +280,7 @@ export const qTemplateString = q
         ctx.tmpTokenStore.templateTokens?.push(...ctx.varTokens);
         ctx.varTokens = [];
         return ctx;
-      })
+      }),
     ),
   })
   .handler((ctx) => {
@@ -298,7 +298,7 @@ export const qConcatExpr = (
 export const qValueMatcher = qConcatExpr(
   qTemplateString,
   qPropertyAccessIdentifier,
-  qVariableAccessIdentifier
+  qVariableAccessIdentifier,
 );
 
 // import foo.bar
diff --git a/lib/modules/manager/gradle/parser/dependencies.ts b/lib/modules/manager/gradle/parser/dependencies.ts
index cb66a92273f9ec5c92fe64a80d4e8556b9ed2987..d8778737edaaf80be080e01c0a09004efae21e24 100644
--- a/lib/modules/manager/gradle/parser/dependencies.ts
+++ b/lib/modules/manager/gradle/parser/dependencies.ts
@@ -17,15 +17,15 @@ import {
 } from './handlers';
 
 const qGroupId = qValueMatcher.handler((ctx) =>
-  storeInTokenMap(ctx, 'groupId')
+  storeInTokenMap(ctx, 'groupId'),
 );
 
 const qArtifactId = qValueMatcher.handler((ctx) =>
-  storeInTokenMap(ctx, 'artifactId')
+  storeInTokenMap(ctx, 'artifactId'),
 );
 
 const qVersion = qValueMatcher.handler((ctx) =>
-  storeInTokenMap(ctx, 'version')
+  storeInTokenMap(ctx, 'version'),
 );
 
 // "foo:bar:1.2.3"
@@ -72,7 +72,7 @@ const qDependencySet = q
           startsWith: '(',
           endsWith: ')',
           search: q.begin<Ctx>().join(qArtifactId).end(),
-        })
+        }),
       )
       .handler(handleLongFormDep),
   })
@@ -140,7 +140,7 @@ const qKotlinMapNotationDependencies = q
 // someMethod("foo", "bar", "1.2.3")
 export const qLongFormDep = q
   .opt<Ctx>(
-    q.sym(storeVarToken).handler((ctx) => storeInTokenMap(ctx, 'methodName'))
+    q.sym(storeVarToken).handler((ctx) => storeInTokenMap(ctx, 'methodName')),
   )
   .tree({
     type: 'wrapped-tree',
@@ -175,7 +175,7 @@ const qImplicitGradlePlugin = q
           endsWith: '}',
           search: q
             .sym<Ctx>(
-              GRADLE_PLUGINS[pluginName as keyof typeof GRADLE_PLUGINS][0]
+              GRADLE_PLUGINS[pluginName as keyof typeof GRADLE_PLUGINS][0],
             )
             .alt(
               // toolVersion = "1.2.3"
@@ -189,10 +189,10 @@ const qImplicitGradlePlugin = q
                   startsWith: '(',
                   endsWith: ')',
                   search: q.begin<Ctx>().join(qVersion).end(),
-                })
+                }),
             ),
-        })
-    )
+        }),
+    ),
   )
   .handler(handleImplicitGradlePlugin)
   .handler(cleanupTempVars);
@@ -203,5 +203,5 @@ export const qDependencies = q.alt(
   qGroovyMapNotationDependencies,
   qKotlinShortNotationDependencies,
   qKotlinMapNotationDependencies,
-  qImplicitGradlePlugin
+  qImplicitGradlePlugin,
 );
diff --git a/lib/modules/manager/gradle/parser/handlers.ts b/lib/modules/manager/gradle/parser/handlers.ts
index b4d68412a62a459eaea0dbd1744c1772f6c588c4..be12911f0eed9d4e879bc574140d9e374ce61465 100644
--- a/lib/modules/manager/gradle/parser/handlers.ts
+++ b/lib/modules/manager/gradle/parser/handlers.ts
@@ -287,7 +287,7 @@ export function handleCustomRegistryUrl(ctx: Ctx): Ctx {
   let registryUrl = interpolateString(
     loadFromTokenMap(ctx, 'registryUrl'),
     ctx,
-    localVariables
+    localVariables,
   );
   if (registryUrl) {
     registryUrl = registryUrl.replace(regEx(/\\/g), '');
@@ -346,7 +346,7 @@ export function handleApplyFrom(ctx: Ctx): Ctx {
   if (ctx.tokenMap.parentPath) {
     const parentPath = interpolateString(
       loadFromTokenMap(ctx, 'parentPath'),
-      ctx
+      ctx,
     );
     if (parentPath && scriptFile) {
       scriptFile = upath.join(parentPath, scriptFile);
@@ -375,7 +375,7 @@ export function handleApplyFrom(ctx: Ctx): Ctx {
     ctx.globalVars,
     scriptFilePath,
     ctx.fileContents,
-    ctx.recursionDepth + 1
+    ctx.recursionDepth + 1,
   );
 
   ctx.deps.push(...matchResult.deps);
diff --git a/lib/modules/manager/gradle/parser/objects.ts b/lib/modules/manager/gradle/parser/objects.ts
index a4cabc5b02c967054db688f71251e37d0b356a2f..7334b7186b4b000e728038ec37054cb399df48da 100644
--- a/lib/modules/manager/gradle/parser/objects.ts
+++ b/lib/modules/manager/gradle/parser/objects.ts
@@ -37,14 +37,14 @@ const qKotlinSingleObjectVarAssignment = q.alt(
       // val dep: String = "foo:bar:" + Versions.baz
       qValueMatcher
         .handler((ctx) => storeInTokenMap(ctx, 'valToken'))
-        .handler(handleAssignment)
+        .handler(handleAssignment),
     )
-    .handler(cleanupTempVars)
+    .handler(cleanupTempVars),
 );
 
 // object foo { ... }
 const qKotlinMultiObjectExpr = (
-  search: q.QueryBuilder<Ctx, parser.Node>
+  search: q.QueryBuilder<Ctx, parser.Node>,
 ): q.QueryBuilder<Ctx, parser.Node> =>
   q.alt(
     q.sym<Ctx>('object').sym(storeVarToken).tree({
@@ -56,13 +56,13 @@ const qKotlinMultiObjectExpr = (
       search,
       postHandler: reduceNestingDepth,
     }),
-    qKotlinSingleObjectVarAssignment
+    qKotlinSingleObjectVarAssignment,
   );
 
 export const qKotlinMultiObjectVarAssignment = qKotlinMultiObjectExpr(
   qKotlinMultiObjectExpr(
     qKotlinMultiObjectExpr(
-      qKotlinMultiObjectExpr(qKotlinSingleObjectVarAssignment)
-    )
-  )
+      qKotlinMultiObjectExpr(qKotlinSingleObjectVarAssignment),
+    ),
+  ),
 ).handler(cleanupTempVars);
diff --git a/lib/modules/manager/gradle/parser/plugins.ts b/lib/modules/manager/gradle/parser/plugins.ts
index a656c2e5e4e790a142984e25446da7b98ce562f8..5cf27b58c118915ae2fcb85922a45386b6c44851 100644
--- a/lib/modules/manager/gradle/parser/plugins.ts
+++ b/lib/modules/manager/gradle/parser/plugins.ts
@@ -11,7 +11,7 @@ import {
 import { handlePlugin } from './handlers';
 
 const qVersion = qValueMatcher.handler((ctx) =>
-  storeInTokenMap(ctx, 'version')
+  storeInTokenMap(ctx, 'version'),
 );
 
 export const qPlugins = q
@@ -46,8 +46,8 @@ export const qPlugins = q
             startsWith: '(',
             endsWith: ')',
             search: q.begin<Ctx>().join(qVersion).end(),
-          })
-      )
+          }),
+      ),
   )
   .handler(handlePlugin)
   .handler(cleanupTempVars);
diff --git a/lib/modules/manager/gradle/parser/registry-urls.ts b/lib/modules/manager/gradle/parser/registry-urls.ts
index da827272ed424b536cb7c69ea214fba737c1f161..341dc37d547ec20a5cc04f64e32cd3ca3c89a969 100644
--- a/lib/modules/manager/gradle/parser/registry-urls.ts
+++ b/lib/modules/manager/gradle/parser/registry-urls.ts
@@ -24,7 +24,7 @@ const qUri = q
       maxDepth: 1,
       search: qValueMatcher,
     }),
-    qValueMatcher
+    qValueMatcher,
   )
   .handler((ctx) => storeInTokenMap(ctx, 'registryUrl'));
 
@@ -43,7 +43,7 @@ const qPredefinedRegistries = q
       type: 'wrapped-tree',
       startsWith: '{',
       endsWith: '}',
-    })
+    }),
   )
   .handler((ctx) => storeInTokenMap(ctx, 'registryUrl'))
   .handler(handlePredefinedRegistryUrl)
@@ -78,9 +78,9 @@ const qCustomRegistryUrl = q
           startsWith: '(',
           endsWith: ')',
           search: q.begin<Ctx>().join(qUri).end(),
-        })
+        }),
       ),
-    })
+    }),
   )
   .handler(handleCustomRegistryUrl)
   .handler(cleanupTempVars);
@@ -106,7 +106,7 @@ const qPluginManagement = q.sym<Ctx>('pluginManagement', storeVarToken).tree({
       qApplyFrom,
       qPlugins,
       qPredefinedRegistries,
-      qCustomRegistryUrl
+      qCustomRegistryUrl,
     ),
   postHandler: (ctx) => {
     delete ctx.tmpTokenStore.registryScope;
@@ -118,5 +118,5 @@ export const qRegistryUrls = q.alt<Ctx>(
   q.sym<Ctx>('publishing').tree(),
   qPluginManagement,
   qPredefinedRegistries,
-  qCustomRegistryUrl
+  qCustomRegistryUrl,
 );
diff --git a/lib/modules/manager/gradle/parser/version-catalogs.ts b/lib/modules/manager/gradle/parser/version-catalogs.ts
index f08b32767e38e4d0deb4d0a6085afb5d54b7fe07..781183e99a70b28a59c6e565ae2e384ca1a03f2f 100644
--- a/lib/modules/manager/gradle/parser/version-catalogs.ts
+++ b/lib/modules/manager/gradle/parser/version-catalogs.ts
@@ -11,11 +11,11 @@ import {
 import { handleLibraryDep, handlePlugin } from './handlers';
 
 const qGroupId = qValueMatcher.handler((ctx) =>
-  storeInTokenMap(ctx, 'groupId')
+  storeInTokenMap(ctx, 'groupId'),
 );
 
 const qArtifactId = qValueMatcher.handler((ctx) =>
-  storeInTokenMap(ctx, 'artifactId')
+  storeInTokenMap(ctx, 'artifactId'),
 );
 
 const qVersionCatalogVersion = q
@@ -34,7 +34,7 @@ const qVersionCatalogVersion = q
       startsWith: '(',
       endsWith: ')',
       search: q.begin<Ctx>().join(qValueMatcher).end(),
-    })
+    }),
   )
   .handler((ctx) => storeInTokenMap(ctx, 'version'));
 
@@ -113,5 +113,5 @@ const qVersionCatalogAliasDependencies = q
 export const qVersionCatalogs = q.alt(
   qVersionCatalogDependencies,
   qVersionCatalogPlugins,
-  qVersionCatalogAliasDependencies
+  qVersionCatalogAliasDependencies,
 );
diff --git a/lib/modules/manager/gradle/update.spec.ts b/lib/modules/manager/gradle/update.spec.ts
index fa147b1b5ca294433287d82c30ac02af4ed56f8d..4d0a77623d58108fcd77d8468037ed3a4c17b338 100644
--- a/lib/modules/manager/gradle/update.spec.ts
+++ b/lib/modules/manager/gradle/update.spec.ts
@@ -12,7 +12,7 @@ describe('modules/manager/gradle/update', () => {
             fileReplacePosition: 3,
           },
         },
-      })
+      }),
     ).toBe('###1.2.4###');
   });
 
@@ -28,7 +28,7 @@ describe('modules/manager/gradle/update', () => {
             fileReplacePosition: 3,
           },
         },
-      })
+      }),
     ).toBe('###1.2.5###');
   });
 
@@ -44,7 +44,7 @@ describe('modules/manager/gradle/update', () => {
             fileReplacePosition: 3,
           },
         },
-      })
+      }),
     ).toBe(fileContent);
   });
 
@@ -59,7 +59,7 @@ describe('modules/manager/gradle/update', () => {
             fileReplacePosition: 3,
           },
         },
-      })
+      }),
     ).toBeNull();
 
     expect(
@@ -72,7 +72,7 @@ describe('modules/manager/gradle/update', () => {
             fileReplacePosition: 3,
           },
         },
-      })
+      }),
     ).toBeNull();
   });
 
diff --git a/lib/modules/manager/gradle/utils.spec.ts b/lib/modules/manager/gradle/utils.spec.ts
index bad712dfd8b194e73c402ecdb18dc7bd87bfb944..1de059b86c62b95004db6a1551d62bbb6f5e65fb 100644
--- a/lib/modules/manager/gradle/utils.spec.ts
+++ b/lib/modules/manager/gradle/utils.spec.ts
@@ -80,7 +80,7 @@ describe('modules/manager/gradle/utils', () => {
         'b.gradle',
         'a.gradle',
         'versions.gradle',
-      ])
+      ]),
     ).toStrictEqual([
       'versions.gradle',
       'a.gradle',
@@ -99,7 +99,7 @@ describe('modules/manager/gradle/utils', () => {
         'a/versions.gradle',
         'build.gradle',
         'a/b/c/versions.gradle',
-      ])
+      ]),
     ).toStrictEqual([
       'versions.gradle',
       'build.gradle',
@@ -118,7 +118,7 @@ describe('modules/manager/gradle/utils', () => {
     ]);
 
     expect(
-      reorderFiles(['b.gradle', 'c.gradle', 'a.gradle', 'gradle.properties'])
+      reorderFiles(['b.gradle', 'c.gradle', 'a.gradle', 'gradle.properties']),
     ).toStrictEqual(['gradle.properties', 'a.gradle', 'b.gradle', 'c.gradle']);
 
     expect(
@@ -134,7 +134,7 @@ describe('modules/manager/gradle/utils', () => {
         'b.gradle',
         'c.gradle',
         'a.gradle',
-      ])
+      ]),
     ).toStrictEqual([
       'gradle.properties',
       'a.gradle',
diff --git a/lib/modules/manager/gradle/utils.ts b/lib/modules/manager/gradle/utils.ts
index 06b8476d8d5572d5e54dd93a87d828f8c5e14b0c..84de600b478a0fdf65c35e8366c698a59946a39f 100644
--- a/lib/modules/manager/gradle/utils.ts
+++ b/lib/modules/manager/gradle/utils.ts
@@ -8,7 +8,7 @@ import type {
 } from './types';
 
 const artifactRegex = regEx(
-  '^[a-zA-Z][-_a-zA-Z0-9]*(?:\\.[a-zA-Z0-9][-_a-zA-Z0-9]*?)*$'
+  '^[a-zA-Z][-_a-zA-Z0-9]*(?:\\.[a-zA-Z0-9][-_a-zA-Z0-9]*?)*$',
 );
 
 const versionLikeRegex = regEx('^(?<version>[-_.\\[\\](),a-zA-Z0-9+]+)');
@@ -16,7 +16,7 @@ const versionLikeRegex = regEx('^(?<version>[-_.\\[\\](),a-zA-Z0-9+]+)');
 // Extracts version-like and range-like strings
 // from the beginning of input
 export function versionLikeSubstring(
-  input: string | null | undefined
+  input: string | null | undefined,
 ): string | null {
   if (!input) {
     return null;
@@ -70,7 +70,7 @@ export function isDependencyString(input: string): boolean {
 }
 
 export function parseDependencyString(
-  input: string
+  input: string,
 ): PackageDependency<GradleManagerData> | null {
   if (!isDependencyString(input)) {
     return null;
@@ -176,7 +176,7 @@ export function reorderFiles(packageFiles: string[]): string[] {
 export function getVars(
   registry: VariableRegistry,
   dir: string,
-  vars: PackageVariables = registry[dir] || {}
+  vars: PackageVariables = registry[dir] || {},
 ): PackageVariables {
   const dirAbs = toAbsolutePath(dir);
   const parentDir = upath.dirname(dirAbs);
@@ -190,7 +190,7 @@ export function getVars(
 export function updateVars(
   registry: VariableRegistry,
   dir: string,
-  newVars: PackageVariables
+  newVars: PackageVariables,
 ): void {
   const oldVars = registry[dir] ?? {};
   registry[dir] = { ...oldVars, ...newVars };
diff --git a/lib/modules/manager/helm-requirements/extract.ts b/lib/modules/manager/helm-requirements/extract.ts
index 805ad944b0fc06f07cb76bcb8689a282de415dc3..5a6a8567aa84ad42154f3bae1f7adfeaafda9865 100644
--- a/lib/modules/manager/helm-requirements/extract.ts
+++ b/lib/modules/manager/helm-requirements/extract.ts
@@ -11,7 +11,7 @@ import type {
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   let deps = [];
   // TODO: fix type
@@ -59,7 +59,7 @@ export function extractPackageFile(
     res.registryUrls = [dep.repository];
     if (dep.repository.startsWith('@') || dep.repository.startsWith('alias:')) {
       const repoWithPrefixRemoved = dep.repository.slice(
-        dep.repository[0] === '@' ? 1 : 6
+        dep.repository[0] === '@' ? 1 : 6,
       );
       const alias = config.registryAliases?.[repoWithPrefixRemoved];
       if (alias) {
@@ -77,7 +77,7 @@ export function extractPackageFile(
       } catch (err) {
         logger.debug(
           { err, packageFile, url: dep.repository },
-          'Error parsing url'
+          'Error parsing url',
         );
         res.skipReason = 'invalid-url';
       }
diff --git a/lib/modules/manager/helm-values/extract.spec.ts b/lib/modules/manager/helm-values/extract.spec.ts
index 3666b8441349bbeb448577a061f5ac87b48848c2..d8eb805db86f7ad6917490c1aa3649fbe33ef9f0 100644
--- a/lib/modules/manager/helm-values/extract.spec.ts
+++ b/lib/modules/manager/helm-values/extract.spec.ts
@@ -2,11 +2,11 @@ import { Fixtures } from '../../../../test/fixtures';
 import { extractPackageFile } from '.';
 
 const helmDefaultChartInitValues = Fixtures.get(
-  'default_chart_init_values.yaml'
+  'default_chart_init_values.yaml',
 );
 
 const helmMultiAndNestedImageValues = Fixtures.get(
-  'multi_and_nested_image_values.yaml'
+  'multi_and_nested_image_values.yaml',
 );
 
 describe('modules/manager/helm-values/extract', () => {
@@ -41,7 +41,7 @@ describe('modules/manager/helm-values/extract', () => {
 
     it('extract data from file with multiple documents', () => {
       const multiDocumentFile = Fixtures.get(
-        'single_file_with_multiple_documents.yaml'
+        'single_file_with_multiple_documents.yaml',
       );
       const result = extractPackageFile(multiDocumentFile);
       expect(result).toMatchObject({
diff --git a/lib/modules/manager/helm-values/extract.ts b/lib/modules/manager/helm-values/extract.ts
index ce0f929000a082d8f94b55ff8192d43f8c2ca15d..1f9ea32f61b571b05ee559df95d14682b641a622 100644
--- a/lib/modules/manager/helm-values/extract.ts
+++ b/lib/modules/manager/helm-values/extract.ts
@@ -33,7 +33,7 @@ function getHelmDep({
  */
 function findDependencies(
   parsedContent: Record<string, unknown> | HelmDockerImageDependency,
-  packageDependencies: Array<PackageDependency>
+  packageDependencies: Array<PackageDependency>,
 ): Array<PackageDependency> {
   if (!parsedContent || typeof parsedContent !== 'object') {
     return packageDependencies;
@@ -59,7 +59,7 @@ function findDependencies(
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   let parsedContent: Record<string, unknown>[] | HelmDockerImageDependency[];
   try {
@@ -84,7 +84,7 @@ export function extractPackageFile(
   } catch (err) /* istanbul ignore next */ {
     logger.debug(
       { err, packageFile },
-      'Error parsing helm-values parsed content'
+      'Error parsing helm-values parsed content',
     );
   }
   return null;
diff --git a/lib/modules/manager/helm-values/util.ts b/lib/modules/manager/helm-values/util.ts
index 58322e176f7aa85524967cb49664c9aebf1e2a0b..34b6c2dabc86ab94d03d9fad0a16f62c73b7f7ce 100644
--- a/lib/modules/manager/helm-values/util.ts
+++ b/lib/modules/manager/helm-values/util.ts
@@ -24,7 +24,7 @@ const parentKeyRe = regEx(/image$/i);
  */
 export function matchesHelmValuesDockerHeuristic(
   parentKey: string,
-  data: unknown
+  data: unknown,
 ): data is HelmDockerImageDependency {
   return !!(
     parentKeyRe.test(parentKey) &&
@@ -37,7 +37,7 @@ export function matchesHelmValuesDockerHeuristic(
 
 export function matchesHelmValuesInlineImage(
   parentKey: string,
-  data: unknown
+  data: unknown,
 ): data is string {
   return !!(parentKeyRe.test(parentKey) && data && typeof data === 'string');
 }
diff --git a/lib/modules/manager/helmfile/artifacts.spec.ts b/lib/modules/manager/helmfile/artifacts.spec.ts
index a64c46e68fc42f232921279be370a4bb637ed377..03ddc3dddc7dcd528facb608fcebaee94acaf636 100644
--- a/lib/modules/manager/helmfile/artifacts.spec.ts
+++ b/lib/modules/manager/helmfile/artifacts.spec.ts
@@ -86,7 +86,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -97,7 +97,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -107,7 +107,7 @@ describe('modules/manager/helmfile/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(lockFile as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     expect(
@@ -116,7 +116,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps: [{ depName: 'dep1' }],
         newPackageFileContent: helmfileYaml,
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       { cmd: 'helmfile deps -f helmfile.yaml' },
@@ -129,7 +129,7 @@ describe('modules/manager/helmfile/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(lockFileTwo as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     const updatedDeps = [{ depName: 'dep1' }, { depName: 'dep2' }];
@@ -139,7 +139,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps,
         newPackageFileContent: helmfileYaml,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -186,10 +186,10 @@ describe('modules/manager/helmfile/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('helmfile.lock');
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(
-      lockFileTwoWithoutRepositories as never
+      lockFileTwoWithoutRepositories as never,
     );
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     const updatedDeps = [{ depName: 'dep1' }, { depName: 'dep2' }];
@@ -199,7 +199,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps,
         newPackageFileContent: helmfileYamlWithoutRepositories,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -255,7 +255,7 @@ describe('modules/manager/helmfile/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(lockFileOCIPrivateRepoTwo as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     const updatedDeps = [{ depName: 'dep1' }, { depName: 'dep2' }];
@@ -265,7 +265,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps,
         newPackageFileContent: helmfileYamlOCIPrivateRepo,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -353,7 +353,7 @@ describe('modules/manager/helmfile/artifacts', () => {
       const execSnapshots = mockExecAll();
       fs.readLocalFile.mockResolvedValueOnce(lockFileTwo);
       fs.privateCacheDir.mockReturnValue(
-        '/tmp/renovate/cache/__renovate-private-cache'
+        '/tmp/renovate/cache/__renovate-private-cache',
       );
       fs.getParentDir.mockReturnValue('');
       // helm
@@ -372,7 +372,7 @@ describe('modules/manager/helmfile/artifacts', () => {
           updatedDeps,
           newPackageFileContent: helmfileYaml,
           config,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -383,7 +383,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         },
       ]);
       expect(execSnapshots).toMatchObject(expectedCommands);
-    }
+    },
   );
 
   it.each([
@@ -393,7 +393,7 @@ describe('modules/manager/helmfile/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('helmfile.lock');
     git.getFile.mockResolvedValueOnce(lockFile);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.writeLocalFile.mockImplementationOnce(() => {
       throw new Error(errorMessage);
@@ -405,7 +405,7 @@ describe('modules/manager/helmfile/artifacts', () => {
         updatedDeps,
         newPackageFileContent: helmfileYaml,
         config,
-      })
+      }),
     ).toEqual([
       {
         artifactError: {
diff --git a/lib/modules/manager/helmfile/artifacts.ts b/lib/modules/manager/helmfile/artifacts.ts
index d8ecb9134882186c707992087515f8b4eec9a974..7d677f3823678c3b362d545927cd21c3edb20f6f 100644
--- a/lib/modules/manager/helmfile/artifacts.ts
+++ b/lib/modules/manager/helmfile/artifacts.ts
@@ -60,7 +60,7 @@ export async function updateArtifacts({
       },
     ];
     const needKustomize = updatedDeps.some(
-      (dep) => dep.managerData?.needKustomize
+      (dep) => dep.managerData?.needKustomize,
     );
     if (needKustomize) {
       toolConstraints.push({
@@ -72,7 +72,7 @@ export async function updateArtifacts({
     const cmd: string[] = [];
     const doc = Result.parse(
       newPackageFileContent,
-      Yaml.pipe(Doc)
+      Yaml.pipe(Doc),
     ).unwrapOrThrow();
 
     for (const value of coerceArray(doc.repositories).filter(isOCIRegistry)) {
@@ -80,7 +80,7 @@ export async function updateArtifacts({
         value.name,
         `https://${value.url}`,
         // this extracts the hostname from url like format ghcr.ip/helm-charts
-        value.url.replace(regEx(/\/.*/), '')
+        value.url.replace(regEx(/\/.*/), ''),
       );
 
       if (loginCmd) {
diff --git a/lib/modules/manager/helmfile/extract.spec.ts b/lib/modules/manager/helmfile/extract.spec.ts
index 891ad097aa646ca3e775257639cfda5558fe60e2..279c57cd6e051a0a6dccfdee03ab7a0f638e80da 100644
--- a/lib/modules/manager/helmfile/extract.spec.ts
+++ b/lib/modules/manager/helmfile/extract.spec.ts
@@ -196,7 +196,7 @@ describe('modules/manager/helmfile/extract', () => {
           registryAliases: {
             stable: 'https://charts.helm.sh/stable',
           },
-        }
+        },
       );
       expect(result).toMatchSnapshot({
         datasource: 'helm',
@@ -339,7 +339,7 @@ describe('modules/manager/helmfile/extract', () => {
           registryAliases: {
             stable: 'https://charts.helm.sh/stable',
           },
-        }
+        },
       );
       expect(result).toMatchObject({
         datasource: 'helm',
@@ -405,7 +405,7 @@ describe('modules/manager/helmfile/extract', () => {
           registryAliases: {
             stable: 'https://charts.helm.sh/stable',
           },
-        }
+        },
       );
       expect(result).toMatchObject({
         datasource: 'helm',
diff --git a/lib/modules/manager/helmfile/extract.ts b/lib/modules/manager/helmfile/extract.ts
index 8726c90e7da0ed9174042f731c3f1c7fb9cefa91..0a29612900046597f0d122a1730a161f490a883b 100644
--- a/lib/modules/manager/helmfile/extract.ts
+++ b/lib/modules/manager/helmfile/extract.ts
@@ -27,14 +27,14 @@ function extractYaml(content: string): string {
 
 function isLocalPath(possiblePath: string): boolean {
   return ['./', '../', '/'].some((localPrefix) =>
-    possiblePath.startsWith(localPrefix)
+    possiblePath.startsWith(localPrefix),
   );
 }
 
 export async function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   const deps: PackageDependency[] = [];
   let docs: Doc[];
@@ -46,7 +46,7 @@ export async function extractPackageFile(
   } catch (err) {
     logger.debug(
       { err, packageFile },
-      'Failed to parse helmfile helmfile.yaml'
+      'Failed to parse helmfile helmfile.yaml',
     );
     return null;
   }
@@ -121,7 +121,7 @@ export async function extractPackageFile(
       }
       // in case of OCI repository, we need a PackageDependency with a DockerDatasource and a packageName
       const repository = doc.repositories?.find(
-        (repo) => repo.name === repoName
+        (repo) => repo.name === repoName,
       );
       if (repository?.oci) {
         res.datasource = DockerDatasource.id;
diff --git a/lib/modules/manager/helmfile/schema.ts b/lib/modules/manager/helmfile/schema.ts
index 1ff9e67b80f7148f4ad42cf7e9c64833e006ac53..a885e6265e7d94648b272e85f5d2d658554e9f2e 100644
--- a/lib/modules/manager/helmfile/schema.ts
+++ b/lib/modules/manager/helmfile/schema.ts
@@ -25,6 +25,6 @@ export const Doc = z.object({
 export type Doc = z.infer<typeof Doc>;
 
 export const LockVersion = Yaml.pipe(
-  z.object({ version: z.string() }).transform(({ version }) => version)
+  z.object({ version: z.string() }).transform(({ version }) => version),
 );
 export type LockVersion = z.infer<typeof LockVersion>;
diff --git a/lib/modules/manager/helmfile/utils.ts b/lib/modules/manager/helmfile/utils.ts
index db427892ba182c6250feccfd4f73e12bd74168c0..f765edbf0b3c1e94f7c76acd6f1f7cdd8704a4bd 100644
--- a/lib/modules/manager/helmfile/utils.ts
+++ b/lib/modules/manager/helmfile/utils.ts
@@ -21,11 +21,11 @@ export function kustomizationsKeysUsed(release: HelmRelease): boolean {
 // eslint-disable-next-line require-await
 export async function localChartHasKustomizationsYaml(
   release: HelmRelease,
-  helmFileYamlFileName: string
+  helmFileYamlFileName: string,
 ): Promise<boolean> {
   const helmfileYamlParentDir = getParentDir(helmFileYamlFileName) || '';
   return localPathExists(
-    upath.join(helmfileYamlParentDir, release.chart, 'kustomization.yaml')
+    upath.join(helmfileYamlParentDir, release.chart, 'kustomization.yaml'),
   );
 }
 
@@ -36,7 +36,7 @@ export function isOCIRegistry(repository: HelmRepository): boolean {
 export async function generateRegistryLoginCmd(
   repositoryName: string,
   repositoryBaseURL: string,
-  repositoryHost: string
+  repositoryHost: string,
 ): Promise<string | null> {
   const repositoryRule: RepositoryRule = {
     name: repositoryName,
diff --git a/lib/modules/manager/helmsman/extract.ts b/lib/modules/manager/helmsman/extract.ts
index c6419ab49bb65958e28aa905fec4c6b8a7837b4c..504b2f43af57867eb2d4b11a8c03edf9bb7f69ca 100644
--- a/lib/modules/manager/helmsman/extract.ts
+++ b/lib/modules/manager/helmsman/extract.ts
@@ -15,7 +15,7 @@ const chartRegex = regEx('^(?<registryRef>[^/]*)/(?<packageName>[^/]*)$');
 
 function createDep(
   key: string,
-  doc: HelmsmanDocument
+  doc: HelmsmanDocument,
 ): PackageDependency | null {
   const dep: PackageDependency = {
     depName: key,
@@ -65,7 +65,7 @@ function createDep(
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  _config: ExtractConfig
+  _config: ExtractConfig,
 ): PackageFileContent | null {
   try {
     // TODO: fix me (#9610)
diff --git a/lib/modules/manager/helmv3/artifacts.spec.ts b/lib/modules/manager/helmv3/artifacts.spec.ts
index 3029b358f35c65411944f8f13f9348f2f1142904..dcc9351d49a27fe67917de83a1a09fa19bc938c2 100644
--- a/lib/modules/manager/helmv3/artifacts.spec.ts
+++ b/lib/modules/manager/helmv3/artifacts.spec.ts
@@ -48,7 +48,7 @@ const chartFileECR = Fixtures.get('ChartECR.yaml');
 const ecrMock = mockClient(ECRClient);
 
 function mockEcrAuthResolve(
-  res: Partial<GetAuthorizationTokenCommandOutput> = {}
+  res: Partial<GetAuthorizationTokenCommandOutput> = {},
 ) {
   ecrMock.on(GetAuthorizationTokenCommand).resolvesOnce(res);
 }
@@ -78,7 +78,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps,
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -89,7 +89,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -99,7 +99,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile1 as any);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     const updatedDeps = [{ depName: 'dep1' }];
@@ -109,7 +109,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps,
         newPackageFileContent: chartFile,
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -120,7 +120,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     const updatedDeps = [{ depName: 'dep1' }];
@@ -130,7 +130,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps,
         newPackageFileContent: chartFile,
         config,
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -150,7 +150,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     expect(
@@ -159,7 +159,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: chartFile,
         config: { ...config, updateType: 'lockFileMaintenance' },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -184,7 +184,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     datasource.getPkgReleases.mockResolvedValueOnce({
@@ -197,7 +197,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps,
         newPackageFileContent: chartFile,
         config,
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -215,7 +215,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('Chart.lock');
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile1 as any);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.writeLocalFile.mockImplementationOnce(() => {
       throw new Error('not found');
@@ -227,7 +227,7 @@ describe('modules/manager/helmv3/artifacts', () => {
         updatedDeps,
         newPackageFileContent: chartFile,
         config,
-      })
+      }),
     ).toMatchObject([
       {
         artifactError: {
@@ -244,7 +244,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -254,7 +254,7 @@ describe('modules/manager/helmv3/artifacts', () => {
       partial<StatusResult>({
         not_added: ['charts/example-1.9.2.tgz'],
         deleted: ['charts/example-1.6.2.tgz'],
-      })
+      }),
     );
     const updatedDeps = [{ depName: 'dep1' }];
     const test = await helmv3.updateArtifacts({
@@ -303,7 +303,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('Chart.lock');
     const execSnapshots = mockExecAll();
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -313,7 +313,7 @@ describe('modules/manager/helmv3/artifacts', () => {
       partial<StatusResult>({
         not_added: ['charts/example-1.9.2.tgz'],
         deleted: ['charts/example-1.6.2.tgz'],
-      })
+      }),
     );
     const updatedDeps = [{ depName: 'dep1' }];
     expect(
@@ -325,7 +325,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           postUpdateOptions: ['helmUpdateSubChartArchives'],
           ...config,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -378,7 +378,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('Chart.lock');
     const execSnapshots = mockExecAll();
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -387,7 +387,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         not_added: ['charts/example-1.9.2.tgz'],
-      })
+      }),
     );
     const updatedDeps = [{ depName: 'dep1' }];
     expect(
@@ -399,7 +399,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           postUpdateOptions: ['helmUpdateSubChartArchives'],
           ...config,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -446,7 +446,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('Chart.lock');
     const execSnapshots = mockExecAll();
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -456,7 +456,7 @@ describe('modules/manager/helmv3/artifacts', () => {
       partial<StatusResult>({
         not_added: ['charts/example-1.9.2.tgz', 'exampleFile'],
         deleted: ['charts/example-1.6.2.tgz', 'aFolder/otherFile'],
-      })
+      }),
     );
     const updatedDeps = [{ depName: 'dep1' }];
     expect(
@@ -468,7 +468,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           postUpdateOptions: ['helmUpdateSubChartArchives'],
           ...config,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -521,7 +521,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     fs.getSiblingFileName.mockReturnValueOnce('Chart.lock');
     const execSnapshots = mockExecAll();
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -530,7 +530,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     git.getRepoStatus.mockResolvedValueOnce(
       partial<StatusResult>({
         modified: ['example/example.tgz'],
-      })
+      }),
     );
     const updatedDeps = [{ depName: 'dep1' }];
     expect(
@@ -542,7 +542,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           postUpdateOptions: ['helmUpdateSubChartArchives'],
           ...config,
         },
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -578,7 +578,7 @@ describe('modules/manager/helmv3/artifacts', () => {
 
   it('sets repositories from registryAliases', async () => {
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getSiblingFileName.mockReturnValueOnce('Chart.lock');
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile1 as never);
@@ -595,7 +595,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: { stable: 'the_stable_url', repo1: 'the_repo1_url' },
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -620,7 +620,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     datasource.getPkgReleases.mockResolvedValueOnce({
@@ -636,7 +636,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: { stable: 'the_stable_url', repo1: 'the_repo1_url' },
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -669,7 +669,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     expect(
@@ -686,7 +686,7 @@ describe('modules/manager/helmv3/artifacts', () => {
             repo1: 'https://the_repo1_url',
           },
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -719,7 +719,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     expect(
@@ -732,7 +732,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: {},
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -766,7 +766,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2ECR as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -780,7 +780,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: {},
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -829,7 +829,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2ECR as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -843,7 +843,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: {},
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -884,7 +884,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2ECR as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -898,7 +898,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: {},
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -940,7 +940,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2ECR as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
 
@@ -954,7 +954,7 @@ describe('modules/manager/helmv3/artifacts', () => {
           updateType: 'lockFileMaintenance',
           registryAliases: {},
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -993,7 +993,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2 as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     expect(
@@ -1009,7 +1009,7 @@ describe('modules/manager/helmv3/artifacts', () => {
               'https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable',
           },
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -1022,15 +1022,15 @@ describe('modules/manager/helmv3/artifacts', () => {
     expect(execSnapshots).toBeArrayOfSize(2);
     expect(
       execSnapshots.filter((value) =>
-        value.cmd.startsWith('helm repo add repo1')
-      )
+        value.cmd.startsWith('helm repo add repo1'),
+      ),
     ).toBeArrayOfSize(1);
     expect(
       execSnapshots.filter((value) =>
         value.cmd.includes(
-          'https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable'
-        )
-      )
+          'https://gitlab.com/api/v4/projects/xxxxxxx/packages/helm/stable',
+        ),
+      ),
     ).toBeArrayOfSize(1);
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -1041,7 +1041,7 @@ describe('modules/manager/helmv3/artifacts', () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(ociLockFile2Alias as never);
     fs.privateCacheDir.mockReturnValue(
-      '/tmp/renovate/cache/__renovate-private-cache'
+      '/tmp/renovate/cache/__renovate-private-cache',
     );
     fs.getParentDir.mockReturnValue('');
     expect(
@@ -1056,7 +1056,7 @@ describe('modules/manager/helmv3/artifacts', () => {
             jetstack: 'https://charts.jetstack.io',
           },
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -1071,15 +1071,15 @@ describe('modules/manager/helmv3/artifacts', () => {
       execSnapshots.filter(
         (value) =>
           value.cmd.startsWith('helm repo add jetstack') && // alias
-          value.cmd.includes('https://charts.jetstack.io')
-      )
+          value.cmd.includes('https://charts.jetstack.io'),
+      ),
     ).toBeArrayOfSize(1);
     expect(
       execSnapshots.filter(
         (value) =>
           value.cmd.startsWith('helm repo add nginx') && // falling back to name
-          value.cmd.includes('https://kubernetes.github.io/ingress-nginx')
-      )
+          value.cmd.includes('https://kubernetes.github.io/ingress-nginx'),
+      ),
     ).toBeArrayOfSize(1);
     expect(execSnapshots).toMatchSnapshot();
   });
diff --git a/lib/modules/manager/helmv3/artifacts.ts b/lib/modules/manager/helmv3/artifacts.ts
index 16677538e5a5a552472b26aaca5cf1adf31fcb1d..93616df86e557bdd3947a5705597099886665097 100644
--- a/lib/modules/manager/helmv3/artifacts.ts
+++ b/lib/modules/manager/helmv3/artifacts.ts
@@ -29,7 +29,7 @@ import {
 async function helmCommands(
   execOptions: ExecOptions,
   manifestPath: string,
-  repositories: Repository[]
+  repositories: Repository[],
 ): Promise<void> {
   const cmd: string[] = [];
   // get OCI registries and detect host rules
@@ -95,7 +95,7 @@ export async function updateArtifacts({
 
   const isLockFileMaintenance = config.updateType === 'lockFileMaintenance';
   const isUpdateOptionAddChartArchives = config.postUpdateOptions?.includes(
-    'helmUpdateSubChartArchives'
+    'helmUpdateSubChartArchives',
   );
 
   if (
diff --git a/lib/modules/manager/helmv3/common.ts b/lib/modules/manager/helmv3/common.ts
index 50c61003216c0f81f1249a5fcc406bcddf2bdc59..6c2cb5408cfc9ba052dded4ae2f652b5afcaeefc 100644
--- a/lib/modules/manager/helmv3/common.ts
+++ b/lib/modules/manager/helmv3/common.ts
@@ -12,7 +12,7 @@ import type { RepositoryRule } from './types';
 
 export async function generateLoginCmd(
   repositoryRule: RepositoryRule,
-  loginCMD: string
+  loginCMD: string,
 ): Promise<string | null> {
   const { hostRule, repository } = repositoryRule;
   const { username, password } = hostRule;
@@ -30,13 +30,13 @@ export async function generateLoginCmd(
     addSecretForSanitizing(username);
     addSecretForSanitizing(password);
     return `${loginCMD} --username ${quote(username)} --password ${quote(
-      password
+      password,
     )} ${repository}`;
   }
   if (username && password) {
     logger.trace({ repository }, `Using basic auth for Helm registry`);
     return `${loginCMD} --username ${quote(username)} --password ${quote(
-      password
+      password,
     )} ${repository}`;
   }
   return null;
@@ -49,7 +49,7 @@ export function generateHelmEnvs(): ExtraEnv {
     HELM_REGISTRY_CONFIG: `${upath.join(privateCacheDir(), 'registry.json')}`,
     HELM_REPOSITORY_CONFIG: `${upath.join(
       privateCacheDir(),
-      'repositories.yaml'
+      'repositories.yaml',
     )}`,
     HELM_REPOSITORY_CACHE: `${upath.join(privateCacheDir(), 'repositories')}`,
   };
diff --git a/lib/modules/manager/helmv3/extract.ts b/lib/modules/manager/helmv3/extract.ts
index 16fd078844128671e03e861bddfc95881a841008..d7fc59518c0e169173c7f18c2ffff3b28aa9301c 100644
--- a/lib/modules/manager/helmv3/extract.ts
+++ b/lib/modules/manager/helmv3/extract.ts
@@ -13,7 +13,7 @@ import { parseRepository, resolveAlias } from './utils';
 export async function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   let chart: {
     apiVersion: string;
@@ -27,14 +27,14 @@ export async function extractPackageFile(
     if (!(chart?.apiVersion && chart.name && chart.version)) {
       logger.debug(
         { packageFile },
-        'Failed to find required fields in Chart.yaml'
+        'Failed to find required fields in Chart.yaml',
       );
       return null;
     }
     if (chart.apiVersion !== 'v2') {
       logger.debug(
         { packageFile },
-        'Unsupported Chart apiVersion. Only v2 is supported.'
+        'Unsupported Chart apiVersion. Only v2 is supported.',
       );
       return null;
     }
@@ -49,7 +49,7 @@ export async function extractPackageFile(
     return null;
   }
   const validDependencies = chart.dependencies.filter(
-    (dep) => is.nonEmptyString(dep.name) && is.nonEmptyString(dep.version)
+    (dep) => is.nonEmptyString(dep.name) && is.nonEmptyString(dep.version),
   );
   if (!is.nonEmptyArray(validDependencies)) {
     logger.debug('Name and/or version missing for all dependencies');
diff --git a/lib/modules/manager/helmv3/update.spec.ts b/lib/modules/manager/helmv3/update.spec.ts
index c768f853de1fc6a2ba40921ff7c2fcbaaafb5b16..43062508b6da9323a2ea78519358b4076692c102 100644
--- a/lib/modules/manager/helmv3/update.spec.ts
+++ b/lib/modules/manager/helmv3/update.spec.ts
@@ -13,7 +13,7 @@ describe('modules/manager/helmv3/update', () => {
       const { bumpedContent } = helmv3Updater.bumpPackageVersion(
         content,
         '0.0.2',
-        'patch'
+        'patch',
       );
       const expected = content.replace('0.0.2', '0.0.3');
       expect(bumpedContent).toEqual(expected);
@@ -23,7 +23,7 @@ describe('modules/manager/helmv3/update', () => {
       const { bumpedContent } = helmv3Updater.bumpPackageVersion(
         content,
         '0.0.1',
-        'patch'
+        'patch',
       );
       expect(bumpedContent).toEqual(content);
     });
@@ -32,7 +32,7 @@ describe('modules/manager/helmv3/update', () => {
       const { bumpedContent } = helmv3Updater.bumpPackageVersion(
         content,
         '0.0.1',
-        'minor'
+        'minor',
       );
       const expected = content.replace('0.0.2', '0.1.0');
       expect(bumpedContent).toEqual(expected);
@@ -42,7 +42,7 @@ describe('modules/manager/helmv3/update', () => {
       const { bumpedContent } = helmv3Updater.bumpPackageVersion(
         content,
         '0.0.2',
-        true as any
+        true as any,
       );
       expect(bumpedContent).toEqual(content);
     });
diff --git a/lib/modules/manager/helmv3/update.ts b/lib/modules/manager/helmv3/update.ts
index be7ac97206c27c9c3bc703f8ff9376efed8c07f6..912476152e4e7b023f7bf87ebcc0d871bd441b37 100644
--- a/lib/modules/manager/helmv3/update.ts
+++ b/lib/modules/manager/helmv3/update.ts
@@ -6,11 +6,11 @@ import type { BumpPackageVersionResult } from '../types';
 export function bumpPackageVersion(
   content: string,
   currentValue: string,
-  bumpVersion: ReleaseType
+  bumpVersion: ReleaseType,
 ): BumpPackageVersionResult {
   logger.debug(
     { bumpVersion, currentValue },
-    'Checking if we should bump Chart.yaml version'
+    'Checking if we should bump Chart.yaml version',
   );
   let newChartVersion: string | null;
   let bumpedContent = content;
@@ -23,7 +23,7 @@ export function bumpPackageVersion(
     logger.debug(`newChartVersion: ${newChartVersion}`);
     bumpedContent = content.replace(
       regEx(`^(?<version>version:\\s*).*$`, 'm'),
-      `$<version>${newChartVersion}`
+      `$<version>${newChartVersion}`,
     );
     if (bumpedContent === content) {
       logger.debug('Version was already bumped');
@@ -37,7 +37,7 @@ export function bumpPackageVersion(
         currentValue,
         bumpVersion,
       },
-      'Failed to bumpVersion'
+      'Failed to bumpVersion',
     );
   }
   return { bumpedContent };
diff --git a/lib/modules/manager/helmv3/utils.ts b/lib/modules/manager/helmv3/utils.ts
index 3e29cb0940d84a1fe912159fdeaa6a417a55f80a..20636a6ea6f181a662351e8918f439bcb6a7846b 100644
--- a/lib/modules/manager/helmv3/utils.ts
+++ b/lib/modules/manager/helmv3/utils.ts
@@ -7,7 +7,7 @@ import type { ChartDefinition, Repository } from './types';
 
 export function parseRepository(
   depName: string,
-  repositoryURL: string
+  repositoryURL: string,
 ): PackageDependency {
   const res: PackageDependency = {};
 
@@ -41,7 +41,7 @@ export function parseRepository(
  */
 export function resolveAlias(
   repository: string,
-  registryAliases: Record<string, string>
+  registryAliases: Record<string, string>,
 ): string | null {
   if (!isAlias(repository)) {
     return repository;
@@ -84,7 +84,7 @@ export function isAlias(repository: string): boolean {
 }
 
 export function isOCIRegistry(
-  repository: Repository | string | null | undefined
+  repository: Repository | string | null | undefined,
 ): boolean {
   if (is.nullOrUndefined(repository)) {
     return false;
@@ -94,7 +94,7 @@ export function isOCIRegistry(
 }
 
 export function aliasRecordToRepositories(
-  registryAliases: Record<string, string>
+  registryAliases: Record<string, string>,
 ): Repository[] {
   return Object.entries(registryAliases).map(([alias, url]) => {
     return {
diff --git a/lib/modules/manager/hermit/artifacts.spec.ts b/lib/modules/manager/hermit/artifacts.spec.ts
index bd84fb03c88ca37a09ed5b8e04609ada59accef8..f76e605367cffea74fc2e76c1698ba8219e8d9bd 100644
--- a/lib/modules/manager/hermit/artifacts.spec.ts
+++ b/lib/modules/manager/hermit/artifacts.spec.ts
@@ -37,7 +37,7 @@ describe('modules/manager/hermit/artifacts', () => {
               to: 'bin/jq-1.6',
             },
           ],
-        })
+        }),
       );
 
       const res = await updateArtifacts(
@@ -55,7 +55,7 @@ describe('modules/manager/hermit/artifacts', () => {
             },
           ],
           packageFileName: 'go/bin/hermit',
-        })
+        }),
       );
 
       expect(execSnapshots).toMatchObject([
@@ -155,7 +155,7 @@ describe('modules/manager/hermit/artifacts', () => {
               to: 'bin/jq-1.6',
             },
           ],
-        })
+        }),
       );
 
       const res = await updateArtifacts(
@@ -173,7 +173,7 @@ describe('modules/manager/hermit/artifacts', () => {
             },
           ],
           packageFileName: 'go/bin/hermit',
-        })
+        }),
       );
 
       expect(res).toEqual([
@@ -194,7 +194,7 @@ describe('modules/manager/hermit/artifacts', () => {
           options: {
             encoding: 'utf-8',
           },
-        })
+        }),
       );
 
       const res = await updateArtifacts(
@@ -212,7 +212,7 @@ describe('modules/manager/hermit/artifacts', () => {
             },
           ],
           packageFileName: 'go/bin/hermit',
-        })
+        }),
       );
 
       expect(res).toStrictEqual([
@@ -235,7 +235,7 @@ describe('modules/manager/hermit/artifacts', () => {
             },
           ],
           packageFileName: 'go/bin/hermit',
-        })
+        }),
       );
 
       expect(res).toStrictEqual([
@@ -256,7 +256,7 @@ describe('modules/manager/hermit/artifacts', () => {
             },
           ],
           packageFileName: 'go/bin/hermit',
-        })
+        }),
       );
 
       expect(res).toStrictEqual([
@@ -277,7 +277,7 @@ describe('modules/manager/hermit/artifacts', () => {
             },
           ],
           packageFileName: 'go/bin/hermit',
-        })
+        }),
       );
 
       expect(res).toStrictEqual([
diff --git a/lib/modules/manager/hermit/artifacts.ts b/lib/modules/manager/hermit/artifacts.ts
index 0eaffa4b1930faabdce0dd5ce9eb341e8b815648..052520ba53604c99671d93cf93061d8666a365da 100644
--- a/lib/modules/manager/hermit/artifacts.ts
+++ b/lib/modules/manager/hermit/artifacts.ts
@@ -12,7 +12,7 @@ import type { ReadContentResult } from './types';
  * updateArtifacts runs hermit install for each updated dependencies
  */
 export async function updateArtifacts(
-  update: UpdateArtifact
+  update: UpdateArtifact,
 ): Promise<UpdateArtifactsResult[] | null> {
   const { packageFileName } = update;
   try {
@@ -77,7 +77,7 @@ async function getContent(file: string): Promise<ReadContentResult> {
  */
 function getAddResult(
   path: string,
-  contentRes: ReadContentResult
+  contentRes: ReadContentResult,
 ): UpdateArtifactsResult {
   return {
     file: {
@@ -107,13 +107,13 @@ function getDeleteResult(path: string): UpdateArtifactsResult {
  * has been performed for all packages
  */
 async function getUpdateResult(
-  packageFileName: string
+  packageFileName: string,
 ): Promise<UpdateArtifactsResult[]> {
   const hermitFolder = `${upath.dirname(packageFileName)}/`;
   const hermitChanges = await getRepoStatus(hermitFolder);
   logger.debug(
     { hermitChanges, hermitFolder },
-    `hermit changes after package update`
+    `hermit changes after package update`,
   );
 
   // handle added files
@@ -123,7 +123,7 @@ async function getUpdateResult(
       const contents = await getContent(path);
 
       return getAddResult(path, contents);
-    }
+    },
   );
 
   const deleted = hermitChanges.deleted.map(getDeleteResult);
@@ -136,7 +136,7 @@ async function getUpdateResult(
         getDeleteResult(path), // delete existing link
         getAddResult(path, contents), // add a new link
       ];
-    }
+    },
   );
 
   const renamed = await p.map(
@@ -147,7 +147,7 @@ async function getUpdateResult(
       const toContents = await getContent(to);
 
       return [getDeleteResult(from), getAddResult(to, toContents)];
-    }
+    },
   );
 
   return [
@@ -185,13 +185,13 @@ async function updateHermitPackage(update: UpdateArtifact): Promise<void> {
           currentVersion: pkg.currentVersion,
           newValue: pkg.newValue,
         },
-        'missing package update information'
+        'missing package update information',
       );
 
       throw new UpdateHermitError(
         getHermitPackage(pkg.depName ?? '', pkg.currentVersion ?? ''),
         getHermitPackage(pkg.depName ?? '', pkg.newValue ?? ''),
-        'invalid package to update'
+        'invalid package to update',
       );
     }
 
@@ -218,7 +218,7 @@ async function updateHermitPackage(update: UpdateArtifact): Promise<void> {
       packageFile: update.packageFileName,
       packagesToInstall,
     },
-    `performing updates`
+    `performing updates`,
   );
 
   try {
@@ -230,7 +230,7 @@ async function updateHermitPackage(update: UpdateArtifact): Promise<void> {
       fromPackages,
       packagesToInstall,
       e.stderr,
-      e.stdout
+      e.stdout,
     );
   }
 }
diff --git a/lib/modules/manager/hermit/default-config.spec.ts b/lib/modules/manager/hermit/default-config.spec.ts
index fadc58b1c3af8e1f6e56febb2654192cd8bc81e0..b481a1e0226d29bc17f6d1951dcba6052f82da4a 100644
--- a/lib/modules/manager/hermit/default-config.spec.ts
+++ b/lib/modules/manager/hermit/default-config.spec.ts
@@ -21,7 +21,7 @@ describe('modules/manager/hermit/default-config', () => {
       ${'nested/module/other'}      | ${false}
     `('minimatches("$path") === $expected', ({ path, expected }) => {
       expect(miniMatches(path, defaultConfig.excludeCommitPaths)).toBe(
-        expected
+        expected,
       );
     });
   });
diff --git a/lib/modules/manager/hermit/extract.ts b/lib/modules/manager/hermit/extract.ts
index 206ffdb84b8be44df57d97fbcbe692060ef55929..a8483a332f49c841d5bd37c74044af644d050fa3 100644
--- a/lib/modules/manager/hermit/extract.ts
+++ b/lib/modules/manager/hermit/extract.ts
@@ -15,7 +15,7 @@ const pkgReferenceRegex = regEx(`(?<packageName>.*?)-(?<version>[0-9]{1}.*)`);
  */
 export async function extractPackageFile(
   _content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   logger.trace(`hermit.extractPackageFile(${packageFile})`);
   const dependencies = [] as PackageDependency[];
@@ -46,7 +46,7 @@ export async function extractPackageFile(
  * listHermitPackages will fetch all installed packages from the bin folder
  */
 async function listHermitPackages(
-  packageFile: string
+  packageFile: string,
 ): Promise<HermitListItem[] | null> {
   logger.trace('hermit.listHermitPackages()');
   const hermitFolder = upath.dirname(packageFile);
@@ -58,7 +58,7 @@ async function listHermitPackages(
   } catch (err) {
     logger.debug(
       { hermitFolder, err, packageFile },
-      'error listing hermit package references'
+      'error listing hermit package references',
     );
     return null;
   }
@@ -90,7 +90,7 @@ async function listHermitPackages(
     if (!groups) {
       logger.debug(
         { fileName },
-        'invalid hermit package reference file name found'
+        'invalid hermit package reference file name found',
       );
       continue;
     }
diff --git a/lib/modules/manager/homebrew/extract.ts b/lib/modules/manager/homebrew/extract.ts
index 9564de28ffa0bfe0c97eeefce5ee863e96e867fe..2aabaacdab5b0b808295233cbd3e038c4aa75764 100644
--- a/lib/modules/manager/homebrew/extract.ts
+++ b/lib/modules/manager/homebrew/extract.ts
@@ -57,7 +57,7 @@ function extractUrl(content: string): string | null {
 }
 
 export function parseUrlPath(
-  urlStr: string | null | undefined
+  urlStr: string | null | undefined,
 ): UrlPathParsedResult | null {
   if (!urlStr) {
     return null;
@@ -76,7 +76,7 @@ export function parseUrlPath(
       currentValue = s[3];
       const targz = currentValue.slice(
         currentValue.length - 7,
-        currentValue.length
+        currentValue.length,
       );
       if (targz === '.tar.gz') {
         currentValue = currentValue.substring(0, currentValue.length - 7);
diff --git a/lib/modules/manager/homebrew/update.spec.ts b/lib/modules/manager/homebrew/update.spec.ts
index c57a28858abbea5d441efa203a7aff41bca2ba18..f169e9977f08b118d60594bb20b9f153fa33255d 100644
--- a/lib/modules/manager/homebrew/update.spec.ts
+++ b/lib/modules/manager/homebrew/update.spec.ts
@@ -51,7 +51,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz'
+        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz',
       )
       .reply(200, Readable.from(['foo']));
     const newContent = await updateDependency({
@@ -79,7 +79,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz'
+        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz',
       )
       .replyWithError('')
       .get('/bazelbuild/bazel-watcher/archive/v0.9.3.tar.gz')
@@ -131,7 +131,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/invalid/repo/name/releases/download/v0.9.3/invalid/repo/name-0.9.3.tar.gz'
+        '/bazelbuild/invalid/repo/name/releases/download/v0.9.3/invalid/repo/name-0.9.3.tar.gz',
       )
       .replyWithError('')
       .get('/bazelbuild/invalid/repo/name/archive/v0.9.3.tar.gz')
@@ -161,11 +161,11 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/wrong-version/archive/v10.2.3.tar.gz/releases/download/v0.9.3/wrong-version/archive/v10.2.3.tar.gz-0.9.3.tar.gz'
+        '/bazelbuild/wrong-version/archive/v10.2.3.tar.gz/releases/download/v0.9.3/wrong-version/archive/v10.2.3.tar.gz-0.9.3.tar.gz',
       )
       .replyWithError('')
       .get(
-        '/bazelbuild/wrong-version/archive/v10.2.3.tar.gz/archive/v0.9.3.tar.gz'
+        '/bazelbuild/wrong-version/archive/v10.2.3.tar.gz/archive/v0.9.3.tar.gz',
       )
       .reply(200, Readable.from(['foo']));
     const newContent = await updateDependency({
@@ -200,7 +200,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz'
+        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz',
       )
       .reply(200, Readable.from(['foo']));
     const newContent = await updateDependency({
@@ -234,7 +234,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz'
+        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz',
       )
       .reply(200, Readable.from(['foo']));
     const newContent = await updateDependency({
@@ -269,7 +269,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz'
+        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz',
       )
       .reply(200, Readable.from(['foo']));
     const newContent = await updateDependency({
@@ -303,7 +303,7 @@ describe('modules/manager/homebrew/update', () => {
     httpMock
       .scope(baseUrl)
       .get(
-        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz'
+        '/bazelbuild/bazel-watcher/releases/download/v0.9.3/bazel-watcher-0.9.3.tar.gz',
       )
       .reply(200, Readable.from(['foo']));
     const newContent = await updateDependency({
diff --git a/lib/modules/manager/homebrew/update.ts b/lib/modules/manager/homebrew/update.ts
index bf9b55e4b6cfea8dcd52bdc03a1ad397913ed3d0..4d5530c8172cf9c79422010dcc7827b8f608ffd6 100644
--- a/lib/modules/manager/homebrew/update.ts
+++ b/lib/modules/manager/homebrew/update.ts
@@ -13,7 +13,7 @@ function replaceUrl(
   idx: number,
   content: string,
   oldUrl: string,
-  newUrl: string
+  newUrl: string,
 ): string | null {
   let i = idx;
   i += 'url'.length;
@@ -31,7 +31,7 @@ function replaceUrl(
 function getUrlTestContent(
   content: string,
   oldUrl: string,
-  newUrl: string
+  newUrl: string,
 ): string | null {
   const urlRegExp = /(^|\s)url(\s)/;
   const cleanContent = removeComments(content);
@@ -46,7 +46,7 @@ function getUrlTestContent(
 function updateUrl(
   content: string,
   oldUrl: string,
-  newUrl: string
+  newUrl: string,
 ): string | null {
   const urlRegExp = /(^|\s)url(\s)/;
   let i = content.search(urlRegExp);
@@ -76,7 +76,7 @@ function replaceSha256(
   idx: number,
   content: string,
   oldSha256: string,
-  newSha256: string
+  newSha256: string,
 ): string | null {
   let i = idx;
   i += 'sha256'.length;
@@ -95,7 +95,7 @@ function replaceSha256(
 function getSha256TestContent(
   content: string,
   oldSha256: string,
-  newSha256: string
+  newSha256: string,
 ): string | null {
   const sha256RegExp = /(^|\s)sha256(\s)/;
   const cleanContent = removeComments(content);
@@ -110,7 +110,7 @@ function getSha256TestContent(
 function updateSha256(
   content: string,
   oldSha256: string,
-  newSha256: string
+  newSha256: string,
 ): string | null {
   const sha256RegExp = /(^|\s)sha256(\s)/;
   let i = content.search(sha256RegExp);
@@ -152,7 +152,7 @@ export async function updateDependency({
   const oldParsedUrlPath = parseUrlPath(upgrade.managerData?.url);
   if (!oldParsedUrlPath || !upgrade.managerData) {
     logger.debug(
-      `Failed to update - upgrade.managerData.url is invalid ${upgrade.depName}`
+      `Failed to update - upgrade.managerData.url is invalid ${upgrade.depName}`,
     );
     return fileContent;
   }
@@ -166,7 +166,7 @@ export async function updateDependency({
     newSha256 = await hashStream(http.stream(newUrl), 'sha256');
   } catch (errOuter) {
     logger.debug(
-      `Failed to download release download for ${upgrade.depName} - trying archive instead`
+      `Failed to download release download for ${upgrade.depName} - trying archive instead`,
     );
     try {
       const ownerName = String(upgrade.managerData.ownerName);
@@ -175,7 +175,7 @@ export async function updateDependency({
       newSha256 = await hashStream(http.stream(newUrl), 'sha256');
     } catch (errInner) {
       logger.debug(
-        `Failed to download archive download for ${upgrade.depName} - update failed`
+        `Failed to download archive download for ${upgrade.depName} - update failed`,
       );
       return fileContent;
     }
@@ -183,7 +183,7 @@ export async function updateDependency({
   // istanbul ignore next
   if (!newSha256) {
     logger.debug(
-      `Failed to generate new sha256 for ${upgrade.depName} - update failed`
+      `Failed to generate new sha256 for ${upgrade.depName} - update failed`,
     );
     return fileContent;
   }
diff --git a/lib/modules/manager/homebrew/util.ts b/lib/modules/manager/homebrew/util.ts
index 132400e4472b0baf797ec3b67368dadec4881aab..89612e04323873dec2c634c3ec280cb718e98d11 100644
--- a/lib/modules/manager/homebrew/util.ts
+++ b/lib/modules/manager/homebrew/util.ts
@@ -1,7 +1,7 @@
 export function skip(
   idx: number,
   content: string,
-  cond: (s: string) => boolean
+  cond: (s: string) => boolean,
 ): number {
   let i = idx;
   while (i < content.length) {
diff --git a/lib/modules/manager/html/extract.ts b/lib/modules/manager/html/extract.ts
index e8581147c8df23757739b12802d9536a97a967bb..51b69c9fe1f43f91820122cfc9c7bb6f6f1bfa59 100644
--- a/lib/modules/manager/html/extract.ts
+++ b/lib/modules/manager/html/extract.ts
@@ -6,7 +6,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 const regex = regEx(/<\s*(script|link)\s+[^>]*?\/?>/i);
 
 const integrityRegex = regEx(
-  /\s+integrity\s*=\s*("|')(?<currentDigest>[^"']+)/
+  /\s+integrity\s*=\s*("|')(?<currentDigest>[^"']+)/,
 );
 
 export function extractDep(tag: string): PackageDependency | null {
diff --git a/lib/modules/manager/index.spec.ts b/lib/modules/manager/index.spec.ts
index a2f778421f5e0e97da473ae878a8171722221231..f297c5f738107f2701644ab0078b5631d9ee9cad 100644
--- a/lib/modules/manager/index.spec.ts
+++ b/lib/modules/manager/index.spec.ts
@@ -64,7 +64,7 @@ describe('modules/manager/index', () => {
     delete loadedMgr['custom'];
 
     expect(Array.from([...mgrs.keys(), ...customMgrs.keys()]).sort()).toEqual(
-      Object.keys(loadedMgr).sort()
+      Object.keys(loadedMgr).sort(),
     );
 
     for (const name of mgrs.keys()) {
@@ -86,10 +86,10 @@ describe('modules/manager/index', () => {
         supportedDatasources: [],
       });
       expect(
-        await manager.extractAllPackageFiles('unknown', {} as any, [])
+        await manager.extractAllPackageFiles('unknown', {} as any, []),
       ).toBeNull();
       expect(
-        await manager.extractAllPackageFiles('dummy', {} as any, [])
+        await manager.extractAllPackageFiles('dummy', {} as any, []),
       ).toBeNull();
     });
 
@@ -100,7 +100,7 @@ describe('modules/manager/index', () => {
         extractAllPackageFiles: () => Promise.resolve([]),
       });
       expect(
-        await manager.extractAllPackageFiles('dummy', {} as any, [])
+        await manager.extractAllPackageFiles('dummy', {} as any, []),
       ).not.toBeNull();
     });
 
@@ -116,10 +116,10 @@ describe('modules/manager/index', () => {
         supportedDatasources: [],
       });
       expect(
-        manager.extractPackageFile('unknown', '', 'filename', {})
+        manager.extractPackageFile('unknown', '', 'filename', {}),
       ).toBeNull();
       expect(
-        manager.extractPackageFile('dummy', '', 'filename', {})
+        manager.extractPackageFile('dummy', '', 'filename', {}),
       ).toBeNull();
     });
 
@@ -130,7 +130,7 @@ describe('modules/manager/index', () => {
         extractPackageFile: () => Promise.resolve({ deps: [] }),
       });
       expect(
-        manager.extractPackageFile('dummy', '', 'filename', {})
+        manager.extractPackageFile('dummy', '', 'filename', {}),
       ).not.toBeNull();
     });
 
@@ -142,7 +142,7 @@ describe('modules/manager/index', () => {
       });
 
       expect(
-        manager.extractPackageFile('dummy', '', 'filename', {})
+        manager.extractPackageFile('dummy', '', 'filename', {}),
       ).not.toBeNull();
     });
 
@@ -158,7 +158,7 @@ describe('modules/manager/index', () => {
         supportedDatasources: [],
       });
       expect(
-        manager.getRangeStrategy({ manager: 'unknown', rangeStrategy: 'auto' })
+        manager.getRangeStrategy({ manager: 'unknown', rangeStrategy: 'auto' }),
       ).toBeNull();
     });
 
@@ -169,7 +169,7 @@ describe('modules/manager/index', () => {
         getRangeStrategy: () => 'replace',
       });
       expect(
-        manager.getRangeStrategy({ manager: 'dummy', rangeStrategy: 'auto' })
+        manager.getRangeStrategy({ manager: 'dummy', rangeStrategy: 'auto' }),
       ).not.toBeNull();
 
       manager.getManagers().set('dummy', {
@@ -177,11 +177,11 @@ describe('modules/manager/index', () => {
         supportedDatasources: [],
       });
       expect(
-        manager.getRangeStrategy({ manager: 'dummy', rangeStrategy: 'auto' })
+        manager.getRangeStrategy({ manager: 'dummy', rangeStrategy: 'auto' }),
       ).not.toBeNull();
 
       expect(
-        manager.getRangeStrategy({ manager: 'dummy', rangeStrategy: 'bump' })
+        manager.getRangeStrategy({ manager: 'dummy', rangeStrategy: 'bump' }),
       ).not.toBeNull();
     });
 
@@ -194,7 +194,7 @@ describe('modules/manager/index', () => {
         manager.getRangeStrategy({
           manager: 'dummy',
           rangeStrategy: 'in-range-only',
-        })
+        }),
       ).toBe('update-lockfile');
     });
 
@@ -208,7 +208,7 @@ describe('modules/manager/index', () => {
         manager.getRangeStrategy({
           manager: 'dummy',
           rangeStrategy: 'in-range-only',
-        })
+        }),
       ).toBe('update-lockfile');
     });
 
diff --git a/lib/modules/manager/index.ts b/lib/modules/manager/index.ts
index 3e910f5899465148ce8c1cb4de4cb112adbe41ed..c5653ef9a140bf2368f9b0d31b40d683ecd85527 100644
--- a/lib/modules/manager/index.ts
+++ b/lib/modules/manager/index.ts
@@ -20,7 +20,7 @@ export const allManagersList = [...managerList, ...customManagerList];
 
 export function get<T extends keyof ManagerApi>(
   manager: string,
-  name: T
+  name: T,
 ): ManagerApi[T] | undefined {
   return isCustomManager(manager)
     ? customManagers.get(manager)?.[name]
@@ -43,7 +43,7 @@ export async function detectAllGlobalConfig(): Promise<GlobalManagerConfig> {
 export async function extractAllPackageFiles(
   manager: string,
   config: ExtractConfig,
-  files: string[]
+  files: string[],
 ): Promise<PackageFile[] | null> {
   if (!managers.has(manager)) {
     return null;
@@ -64,7 +64,7 @@ export function extractPackageFile(
   manager: string,
   content: string,
   fileName: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): Result<PackageFileContent | null> {
   const m = managers.get(manager)! ?? customManagers.get(manager)!;
   if (!m) {
diff --git a/lib/modules/manager/jenkins/extract.ts b/lib/modules/manager/jenkins/extract.ts
index 7038096cbf43d36db6a551f88cd9599b331d23bf..c8b1dad120d01d5bc29bfe6a5e5a364361f114d8 100644
--- a/lib/modules/manager/jenkins/extract.ts
+++ b/lib/modules/manager/jenkins/extract.ts
@@ -23,7 +23,7 @@ function getDependency(plugin: JenkinsPlugin): PackageDependency {
       dep.skipReason = 'invalid-version';
       logger.warn(
         { dep },
-        'Jenkins plugin dependency version is not a string and will be ignored'
+        'Jenkins plugin dependency version is not a string and will be ignored',
       );
     }
   } else {
@@ -52,7 +52,7 @@ function getDependency(plugin: JenkinsPlugin): PackageDependency {
 
 function extractYaml(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
 
@@ -75,7 +75,7 @@ function extractYaml(
 function extractText(content: string): PackageDependency[] {
   const deps: PackageDependency[] = [];
   const regex = regEx(
-    /^\s*(?<depName>[\d\w-]+):(?<currentValue>[^#\s]+)[#\s]*(?<comment>.*)$/
+    /^\s*(?<depName>[\d\w-]+):(?<currentValue>[^#\s]+)[#\s]*(?<comment>.*)$/,
   );
 
   for (const line of content.split(newlineRegex)) {
@@ -100,7 +100,7 @@ function extractText(content: string): PackageDependency[] {
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   logger.trace(`jenkins.extractPackageFile(${packageFile})`);
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/jsonnet-bundler/artifacts.spec.ts b/lib/modules/manager/jsonnet-bundler/artifacts.spec.ts
index 490681c8d3481243c784da0d65095e4d7e0fe67a..ea1ebe433ecefa94966a447cda34a46bd0aefae2 100644
--- a/lib/modules/manager/jsonnet-bundler/artifacts.spec.ts
+++ b/lib/modules/manager/jsonnet-bundler/artifacts.spec.ts
@@ -34,7 +34,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -49,7 +49,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         isClean(): boolean {
           return true;
         },
-      })
+      }),
     );
     expect(
       await updateArtifacts({
@@ -57,7 +57,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -73,7 +73,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         isClean(): boolean {
           return false;
         },
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('Updated jsonnetfile.json');
     fs.readLocalFile.mockResolvedValueOnce('Updated jsonnetfile.lock.json');
@@ -97,7 +97,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         ],
         newPackageFileContent: 'Updated jsonnetfile.json',
         config,
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -146,7 +146,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         isClean(): boolean {
           return false;
         },
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('Updated jsonnetfile.lock.json');
     expect(
@@ -158,7 +158,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
           ...config,
           isLockFileMaintenance: true,
         },
-      })
+      }),
     ).toMatchObject([
       {
         file: {
@@ -183,7 +183,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
         isClean(): boolean {
           return false;
         },
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('Updated jsonnetfile.lock.json');
     expect(
@@ -195,7 +195,7 @@ describe('modules/manager/jsonnet-bundler/artifacts', () => {
           ...config,
           isLockFileMaintenance: true,
         },
-      })
+      }),
     ).toMatchObject([
       {
         artifactError: {
diff --git a/lib/modules/manager/jsonnet-bundler/artifacts.ts b/lib/modules/manager/jsonnet-bundler/artifacts.ts
index 19027353bd3521906dc773b31ab501d8aa8fd33b..00e3e9b6c97edec1cfdb7aaa18741d18754d3cf8 100644
--- a/lib/modules/manager/jsonnet-bundler/artifacts.ts
+++ b/lib/modules/manager/jsonnet-bundler/artifacts.ts
@@ -22,7 +22,7 @@ function dependencyUrl(dep: PackageDependency): string {
 }
 
 export async function updateArtifacts(
-  updateArtifact: UpdateArtifact
+  updateArtifact: UpdateArtifact,
 ): Promise<UpdateArtifactsResult[] | null> {
   const { packageFileName, updatedDeps, config } = updateArtifact;
   logger.trace({ packageFileName }, 'jsonnet-bundler.updateArtifacts()');
@@ -54,7 +54,7 @@ export async function updateArtifacts(
       if (dependencyUrls.length > 0) {
         await exec(
           `jb update ${dependencyUrls.map(quote).join(' ')}`,
-          execOptions
+          execOptions,
         );
       }
     }
diff --git a/lib/modules/manager/jsonnet-bundler/extract.spec.ts b/lib/modules/manager/jsonnet-bundler/extract.spec.ts
index 9e130501bc248b92dbfc2ac45e05106b208055d7..0a7b472c9d08c27d20903dcd7c97b1c1f347edc0 100644
--- a/lib/modules/manager/jsonnet-bundler/extract.spec.ts
+++ b/lib/modules/manager/jsonnet-bundler/extract.spec.ts
@@ -4,10 +4,10 @@ import { extractPackageFile } from '.';
 const jsonnetfile = Fixtures.get('jsonnetfile.json');
 const jsonnetfileWithName = Fixtures.get('jsonnetfile-with-name.json');
 const jsonnetfileNoDependencies = Fixtures.get(
-  'jsonnetfile-no-dependencies.json'
+  'jsonnetfile-no-dependencies.json',
 );
 const jsonnetfileLocalDependencies = Fixtures.get(
-  'jsonnetfile-local-dependencies.json'
+  'jsonnetfile-local-dependencies.json',
 );
 const jsonnetfileEmptyGitSource = JSON.stringify({
   version: 1,
@@ -23,25 +23,25 @@ describe('modules/manager/jsonnet-bundler/extract', () => {
   describe('extractPackageFile()', () => {
     it('returns null for invalid jsonnetfile', () => {
       expect(
-        extractPackageFile('this is not a jsonnetfile', 'jsonnetfile.json')
+        extractPackageFile('this is not a jsonnetfile', 'jsonnetfile.json'),
       ).toBeNull();
     });
 
     it('returns null for jsonnetfile with no dependencies', () => {
       expect(
-        extractPackageFile(jsonnetfileNoDependencies, 'jsonnetfile.json')
+        extractPackageFile(jsonnetfileNoDependencies, 'jsonnetfile.json'),
       ).toBeNull();
     });
 
     it('returns null for local dependencies', () => {
       expect(
-        extractPackageFile(jsonnetfileLocalDependencies, 'jsonnetfile.json')
+        extractPackageFile(jsonnetfileLocalDependencies, 'jsonnetfile.json'),
       ).toBeNull();
     });
 
     it('returns null for vendored dependencies', () => {
       expect(
-        extractPackageFile(jsonnetfile, 'vendor/jsonnetfile.json')
+        extractPackageFile(jsonnetfile, 'vendor/jsonnetfile.json'),
       ).toBeNull();
     });
 
@@ -49,8 +49,8 @@ describe('modules/manager/jsonnet-bundler/extract', () => {
       expect(
         extractPackageFile(
           jsonnetfileEmptyGitSource,
-          'jsonnetfile-empty-git-source.json'
-        )
+          'jsonnetfile-empty-git-source.json',
+        ),
       ).toBeNull();
     });
 
diff --git a/lib/modules/manager/jsonnet-bundler/extract.ts b/lib/modules/manager/jsonnet-bundler/extract.ts
index f2565b3c5e4fa25a798cf52a757fda0bf9f013b0..00eb18d66a878e37a45473e8b4cbdd7ba0188982 100644
--- a/lib/modules/manager/jsonnet-bundler/extract.ts
+++ b/lib/modules/manager/jsonnet-bundler/extract.ts
@@ -8,7 +8,7 @@ import type { Dependency, JsonnetFile } from './types';
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   logger.trace({ packageFile }, 'jsonnet-bundler.extractPackageFile()');
 
@@ -53,7 +53,7 @@ function extractDependency(dependency: Dependency): PackageDependency | null {
   const depName = join(
     gitRemote.host,
     gitRemote.pathname.replace(/\.git$/, ''),
-    coerceString(dependency.source.git.subdir)
+    coerceString(dependency.source.git.subdir),
   );
 
   return {
diff --git a/lib/modules/manager/kotlin-script/extract.spec.ts b/lib/modules/manager/kotlin-script/extract.spec.ts
index 9272c601065416684369886b29fe3b7e4ae49483..bd4f8d3591fe1c6899fc596d0970eafb1e9c8b8c 100644
--- a/lib/modules/manager/kotlin-script/extract.spec.ts
+++ b/lib/modules/manager/kotlin-script/extract.spec.ts
@@ -3,7 +3,7 @@ import { extractPackageFile } from '.';
 
 const genericCaseFileContent = Fixtures.get('generic-case.main.kts');
 const customRepositoriesFileContent = Fixtures.get(
-  'custom-repositories.main.kts'
+  'custom-repositories.main.kts',
 );
 const missingPartsFileContent = Fixtures.get('missing-parts.main.kts');
 
diff --git a/lib/modules/manager/kotlin-script/extract.ts b/lib/modules/manager/kotlin-script/extract.ts
index 80753900fe57782d0d76677f3f8062b6a04e72a5..1872f2765ebcf5c5af731f35ae98337210dab09e 100644
--- a/lib/modules/manager/kotlin-script/extract.ts
+++ b/lib/modules/manager/kotlin-script/extract.ts
@@ -4,14 +4,14 @@ import { MavenDatasource } from '../../datasource/maven';
 import type { PackageDependency, PackageFileContent } from '../types';
 
 const dependsOnRegex = regEx(
-  /@file\s*:\s*DependsOn\s*\(\s*(?<replaceString>"(?<groupId>.+):(?<artifactId>.+):(?<version>.+)")\s*\)/g
+  /@file\s*:\s*DependsOn\s*\(\s*(?<replaceString>"(?<groupId>.+):(?<artifactId>.+):(?<version>.+)")\s*\)/g,
 );
 const repositoryRegex = regEx(
-  /@file\s*:\s*Repository\s*\(\s*"(?<repositoryName>.+)"\s*\)/g
+  /@file\s*:\s*Repository\s*\(\s*"(?<repositoryName>.+)"\s*\)/g,
 );
 
 export function extractPackageFile(
-  fileContent: string
+  fileContent: string,
 ): PackageFileContent | null {
   const registryUrls: string[] = [...fileContent.matchAll(repositoryRegex)]
     .map((match) => match.groups?.repositoryName)
diff --git a/lib/modules/manager/kubernetes/extract.spec.ts b/lib/modules/manager/kubernetes/extract.spec.ts
index 16fbd38e10cbb280460795786f4c3430a0b44a67..f1a2d02e83635c9b2b34d09dc45de1d6d27cf24f 100644
--- a/lib/modules/manager/kubernetes/extract.spec.ts
+++ b/lib/modules/manager/kubernetes/extract.spec.ts
@@ -67,7 +67,7 @@ describe('modules/manager/kubernetes/extract', () => {
       const res = extractPackageFile(
         kubernetesArraySyntaxFile,
         'file.yaml',
-        {}
+        {},
       );
       expect(res?.deps).toStrictEqual([
         {
diff --git a/lib/modules/manager/kubernetes/extract.ts b/lib/modules/manager/kubernetes/extract.ts
index 9ed7cfc0fea5e6372d0646f230c7f8be17d900e7..4588f63d1f07db1055287d1a99c29daed570794c 100644
--- a/lib/modules/manager/kubernetes/extract.ts
+++ b/lib/modules/manager/kubernetes/extract.ts
@@ -18,7 +18,7 @@ import type { KubernetesConfiguration } from './types';
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageFileContent | null {
   logger.trace('kubernetes.extractPackageFile()');
 
@@ -39,7 +39,7 @@ export function extractPackageFile(
 
 function extractImages(
   content: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
 
@@ -54,7 +54,7 @@ function extractImages(
           currentValue: dep.currentValue,
           currentDigest: dep.currentDigest,
         },
-        'Kubernetes image'
+        'Kubernetes image',
       );
       deps.push(dep);
     }
@@ -65,7 +65,7 @@ function extractImages(
 
 function extractApis(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageDependency[] {
   let doc: KubernetesConfiguration[];
 
@@ -81,7 +81,7 @@ function extractApis(
     .filter(
       (m) =>
         is.nonEmptyStringAndNotWhitespace(m.kind) &&
-        is.nonEmptyStringAndNotWhitespace(m.apiVersion)
+        is.nonEmptyStringAndNotWhitespace(m.apiVersion),
     )
     .filter((m) => supportedApis.has(m.kind))
     .map((configuration) => ({
diff --git a/lib/modules/manager/kustomize/extract.spec.ts b/lib/modules/manager/kustomize/extract.spec.ts
index 36632d807ed3c1fbd378a677c97f72518bbe3aa3..75c2cb52cd9408dbea9c42fcc4cd51c86b22cfff 100644
--- a/lib/modules/manager/kustomize/extract.spec.ts
+++ b/lib/modules/manager/kustomize/extract.spec.ts
@@ -76,7 +76,7 @@ describe('modules/manager/kustomize/extract', () => {
 
     it('should extract the version of a non http base', () => {
       const pkg = extractResource(
-        'ssh://git@bitbucket.com/user/test-repo?ref=v1.2.3'
+        'ssh://git@bitbucket.com/user/test-repo?ref=v1.2.3',
       );
       expect(pkg).toEqual({
         currentValue: 'v1.2.3',
@@ -88,7 +88,7 @@ describe('modules/manager/kustomize/extract', () => {
 
     it('should extract the depName if the URL includes a port number', () => {
       const pkg = extractResource(
-        'ssh://git@bitbucket.com:7999/user/test-repo?ref=v1.2.3'
+        'ssh://git@bitbucket.com:7999/user/test-repo?ref=v1.2.3',
       );
       expect(pkg).toEqual({
         currentValue: 'v1.2.3',
@@ -100,7 +100,7 @@ describe('modules/manager/kustomize/extract', () => {
 
     it('should extract the version of a non http base with subdir', () => {
       const pkg = extractResource(
-        'ssh://git@bitbucket.com/user/test-repo/subdir?ref=v1.2.3'
+        'ssh://git@bitbucket.com/user/test-repo/subdir?ref=v1.2.3',
       );
       expect(pkg).toEqual({
         currentValue: 'v1.2.3',
@@ -733,7 +733,7 @@ describe('modules/manager/kustomize/extract', () => {
 
         const pkg = extractResource(`${url}?ref=${version}`);
         expect(pkg).toEqual(sample);
-      }
+      },
     );
   });
 });
diff --git a/lib/modules/manager/kustomize/extract.ts b/lib/modules/manager/kustomize/extract.ts
index f930f576a9e274e784719edecce0e4e25a6e9b77..8af061713a609e0cb730ccaa12bbcb414c9bde93 100644
--- a/lib/modules/manager/kustomize/extract.ts
+++ b/lib/modules/manager/kustomize/extract.ts
@@ -14,19 +14,19 @@ import type { HelmChart, Image, Kustomize } from './types';
 // URL specifications should follow the hashicorp URL format
 // https://github.com/hashicorp/go-getter#url-format
 const gitUrl = regEx(
-  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^/\s]+\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/
+  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^/\s]+\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/,
 );
 // regex to match URLs with ".git" delimiter
 const dotGitRegex = regEx(
-  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^?\s]*(\.git))))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/
+  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^?\s]*(\.git))))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/,
 );
 // regex to match URLs with "_git" delimiter
 const underscoreGitRegex = regEx(
-  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^?\s]*)(_git\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/
+  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])?(?<project>[^?\s]*)(_git\/[^/\s]+)))(?<subdir>[^?\s]*)\?ref=(?<currentValue>.+)$/,
 );
 // regex to match URLs having an extra "//"
 const gitUrlWithPath = regEx(
-  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])(?<project>[^?\s]+)))(?:\/\/)(?<subdir>[^?\s]+)\?ref=(?<currentValue>.+)$/
+  /^(?:git::)?(?<url>(?:(?:(?:http|https|ssh):\/\/)?(?:.*@)?)?(?<path>(?:[^:/\s]+(?::[0-9]+)?[:/])(?<project>[^?\s]+)))(?:\/\/)(?<subdir>[^?\s]+)\?ref=(?<currentValue>.+)$/,
 );
 
 export function extractResource(base: string): PackageDependency | null {
@@ -78,7 +78,7 @@ export function extractImage(image: Image): PackageDependency | null {
   if (digest && newTag) {
     logger.debug(
       { newTag, digest },
-      'Kustomize ignores newTag when digest is provided. Pick one, or use `newTag: tag@digest`'
+      'Kustomize ignores newTag when digest is provided. Pick one, or use `newTag: tag@digest`',
     );
     return {
       depName,
@@ -138,7 +138,7 @@ export function extractImage(image: Image): PackageDependency | null {
 }
 
 export function extractHelmChart(
-  helmChart: HelmChart
+  helmChart: HelmChart,
 ): PackageDependency | null {
   if (!helmChart.name) {
     return null;
@@ -154,7 +154,7 @@ export function extractHelmChart(
 
 export function parseKustomize(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): Kustomize | null {
   let pkg: Kustomize | null = null;
   try {
@@ -179,7 +179,7 @@ export function parseKustomize(
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string // TODO: fix tests
+  packageFile?: string, // TODO: fix tests
 ): PackageFileContent | null {
   logger.trace(`kustomize.extractPackageFile(${packageFile!})`);
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/leiningen/extract.spec.ts b/lib/modules/manager/leiningen/extract.spec.ts
index e6f741e7d15366327f0087f4241deac6eef7437c..a5e92389bac9db248de986f9fb00aee9222eefbe 100644
--- a/lib/modules/manager/leiningen/extract.spec.ts
+++ b/lib/modules/manager/leiningen/extract.spec.ts
@@ -25,7 +25,7 @@ describe('modules/manager/leiningen/extract', () => {
       },
     ]);
     expect(
-      extractFromVectors('[[foo/bar ~baz]]', {}, { baz: '1.2.3' })
+      extractFromVectors('[[foo/bar ~baz]]', {}, { baz: '1.2.3' }),
     ).toEqual([
       {
         datasource: ClojureDatasource.id,
@@ -35,7 +35,7 @@ describe('modules/manager/leiningen/extract', () => {
       },
     ]);
     expect(
-      extractFromVectors('[\t[foo/bar "1.2.3"]\n["foo/baz"  "4.5.6"] ]')
+      extractFromVectors('[\t[foo/bar "1.2.3"]\n["foo/baz"  "4.5.6"] ]'),
     ).toEqual([
       {
         datasource: ClojureDatasource.id,
diff --git a/lib/modules/manager/leiningen/extract.ts b/lib/modules/manager/leiningen/extract.ts
index 7e936a9b52d9b771b41e776927e76db0fe2557a3..a6df1128927f8c3cf3692a4ffa5d64b76d44011b 100644
--- a/lib/modules/manager/leiningen/extract.ts
+++ b/lib/modules/manager/leiningen/extract.ts
@@ -25,7 +25,7 @@ export function expandDepName(name: string): string {
 export function extractFromVectors(
   str: string,
   ctx: ExtractContext = {},
-  vars: ExtractedVariables = {}
+  vars: ExtractedVariables = {},
 ): PackageDependency[] {
   if (!str.startsWith('[')) {
     return [];
@@ -121,7 +121,7 @@ function extractLeinRepos(content: string): string[] {
 
   const repoContent = trimAtKey(
     content.replace(/;;.*(?=[\r\n])/g, ''), // get rid of comments // TODO #12872 lookahead
-    'repositories'
+    'repositories',
   );
 
   if (repoContent) {
@@ -141,10 +141,10 @@ function extractLeinRepos(content: string): string[] {
     }
     const repoSectionContent = repoContent.slice(0, endIdx);
     const matches = coerceArray(
-      repoSectionContent.match(regEx(/"https?:\/\/[^"]*"/g))
+      repoSectionContent.match(regEx(/"https?:\/\/[^"]*"/g)),
     );
     const urls = matches.map((x) =>
-      x.replace(regEx(/^"/), '').replace(regEx(/"$/), '')
+      x.replace(regEx(/^"/), '').replace(regEx(/"$/), ''),
     );
     urls.forEach((url) => result.push(url));
   }
@@ -153,7 +153,7 @@ function extractLeinRepos(content: string): string[] {
 }
 
 const defRegex = regEx(
-  /^[\s,]*\([\s,]*def[\s,]+(?<varName>[-+*=<>.!?#$%&_|a-zA-Z][-+*=<>.!?#$%&_|a-zA-Z0-9']+)[\s,]*"(?<stringValue>[^"]*)"[\s,]*\)[\s,]*$/
+  /^[\s,]*\([\s,]*def[\s,]+(?<varName>[-+*=<>.!?#$%&_|a-zA-Z][-+*=<>.!?#$%&_|a-zA-Z0-9']+)[\s,]*"(?<stringValue>[^"]*)"[\s,]*\)[\s,]*$/,
 );
 
 export function extractVariables(content: string): ExtractedVariables {
@@ -174,7 +174,7 @@ function collectDeps(
   content: string,
   key: string,
   registryUrls: string[],
-  vars: ExtractedVariables
+  vars: ExtractedVariables,
 ): PackageDependency[] {
   const ctx = {
     depType: key,
diff --git a/lib/modules/manager/maven-wrapper/artifacts.spec.ts b/lib/modules/manager/maven-wrapper/artifacts.spec.ts
index 06fa35f79254bb5e885ed95ccdcb19bc36c84518..82932d24e12991045c538e7761590b61354df675 100644
--- a/lib/modules/manager/maven-wrapper/artifacts.spec.ts
+++ b/lib/modules/manager/maven-wrapper/artifacts.spec.ts
@@ -23,7 +23,7 @@ function mockMavenFileChangedInGit(fileName = 'maven-wrapper.properties') {
   git.getRepoStatus.mockResolvedValueOnce(
     partial<StatusResult>({
       modified: [`maven.mvn/wrapper/${fileName}`],
-    })
+    }),
   );
 }
 
@@ -35,7 +35,7 @@ describe('modules/manager/maven-wrapper/artifacts', () => {
       partial<Stats>({
         isFile: () => true,
         mode: 0o555,
-      })
+      }),
     );
 
     resetPrefetchedImages();
diff --git a/lib/modules/manager/maven-wrapper/artifacts.ts b/lib/modules/manager/maven-wrapper/artifacts.ts
index 789c2658fe2f6d0c940f470fa430f4bd01c80833..cd9b21035f9a3c883021e2d14014fe6b65f2b111 100644
--- a/lib/modules/manager/maven-wrapper/artifacts.ts
+++ b/lib/modules/manager/maven-wrapper/artifacts.ts
@@ -27,7 +27,7 @@ interface MavenWrapperPaths {
 
 async function addIfUpdated(
   status: StatusResult,
-  fileProjectPath: string
+  fileProjectPath: string,
 ): Promise<UpdateArtifactsResult | null> {
   if (status.modified.includes(fileProjectPath)) {
     return {
@@ -52,7 +52,7 @@ export async function updateArtifacts({
 
     if (!updatedDeps.some((dep) => dep.depName === 'maven-wrapper')) {
       logger.info(
-        'Maven wrapper version not updated - skipping Artifacts update'
+        'Maven wrapper version not updated - skipping Artifacts update',
       );
       return null;
     }
@@ -77,7 +77,7 @@ export async function updateArtifacts({
     ].map(
       (filename) =>
         packageFileName.replace('.mvn/wrapper/maven-wrapper.properties', '') +
-        filename
+        filename,
     );
     const updateArtifactsResult = (
       await getUpdatedArtifacts(status, artifactFileNames)
@@ -85,7 +85,7 @@ export async function updateArtifacts({
 
     logger.debug(
       { files: updateArtifactsResult.map((r) => r.file?.path) },
-      `Returning updated maven-wrapper files`
+      `Returning updated maven-wrapper files`,
     );
     return updateArtifactsResult;
   } catch (err) {
@@ -103,7 +103,7 @@ export async function updateArtifacts({
 
 async function getUpdatedArtifacts(
   status: StatusResult,
-  artifactFileNames: string[]
+  artifactFileNames: string[],
 ): Promise<UpdateArtifactsResult[]> {
   const updatedResults: UpdateArtifactsResult[] = [];
   for (const artifactFileName of artifactFileNames) {
@@ -122,7 +122,7 @@ async function getUpdatedArtifacts(
  * @returns A Java semver range
  */
 export function getJavaConstraint(
-  mavenWrapperVersion: string | null | undefined
+  mavenWrapperVersion: string | null | undefined,
 ): string | null {
   const major = mavenWrapperVersion
     ? mavenVersioning.getMajor(mavenWrapperVersion)
@@ -139,7 +139,7 @@ async function executeWrapperCommand(
   cmd: string,
   config: UpdateArtifactsConfig,
   packageFileName: string,
-  extraEnv: ExtraEnv
+  extraEnv: ExtraEnv,
 ): Promise<void> {
   logger.debug(`Updating maven wrapper: "${cmd}"`);
   const { wrapperFullyQualifiedPath } = getMavenPaths(packageFileName);
@@ -174,10 +174,10 @@ function getExtraEnvOptions(deps: PackageDependency[]): ExtraEnv {
 }
 
 function getCustomMavenWrapperRepoUrl(
-  deps: PackageDependency[]
+  deps: PackageDependency[],
 ): string | null {
   const replaceString = deps.find(
-    (dep) => dep.depName === 'maven-wrapper'
+    (dep) => dep.depName === 'maven-wrapper',
   )?.replaceString;
 
   if (!replaceString) {
@@ -185,7 +185,7 @@ function getCustomMavenWrapperRepoUrl(
   }
 
   const match = regEx(/^(.*?)\/org\/apache\/maven\/wrapper\//).exec(
-    replaceString
+    replaceString,
   );
 
   if (!match) {
@@ -196,7 +196,7 @@ function getCustomMavenWrapperRepoUrl(
 }
 
 async function createWrapperCommand(
-  packageFileName: string
+  packageFileName: string,
 ): Promise<string | null> {
   const {
     wrapperExecutableFileName,
@@ -208,7 +208,7 @@ async function createWrapperCommand(
     wrapperExecutableFileName,
     localProjectDir,
     await statLocalFile(wrapperFullyQualifiedPath),
-    'wrapper:wrapper'
+    'wrapper:wrapper',
   );
 }
 
@@ -227,7 +227,7 @@ function getMavenPaths(packageFileName: string): MavenWrapperPaths {
   const localProjectDir = join(dirname(packageFileName), '../../');
   const wrapperFullyQualifiedPath = join(
     localProjectDir,
-    wrapperExecutableFileName
+    wrapperExecutableFileName,
   );
   return {
     wrapperExecutableFileName,
@@ -240,7 +240,7 @@ async function prepareCommand(
   fileName: string,
   cwd: string | undefined,
   pathFileStats: Stats | null,
-  args: string | null
+  args: string | null,
 ): Promise<string | null> {
   // istanbul ignore if
   if (pathFileStats?.isFile() === true) {
diff --git a/lib/modules/manager/maven-wrapper/extract.ts b/lib/modules/manager/maven-wrapper/extract.ts
index 92e834072ef6893f02d278c21a6d902c488e821f..27c7199b1d97e1a12242d49026e41aa8bdb70f04 100644
--- a/lib/modules/manager/maven-wrapper/extract.ts
+++ b/lib/modules/manager/maven-wrapper/extract.ts
@@ -8,11 +8,11 @@ import type { MavenVersionExtract, Version } from './types';
 
 // https://regex101.com/r/IcOs7P/1
 const DISTRIBUTION_URL_REGEX = regEx(
-  '^(?:distributionUrl\\s*=\\s*)(?<url>\\S*-(?<version>\\d+\\.\\d+(?:\\.\\d+)?(?:-\\w+)*)-(?<type>bin|all)\\.zip)\\s*$'
+  '^(?:distributionUrl\\s*=\\s*)(?<url>\\S*-(?<version>\\d+\\.\\d+(?:\\.\\d+)?(?:-\\w+)*)-(?<type>bin|all)\\.zip)\\s*$',
 );
 
 const WRAPPER_URL_REGEX = regEx(
-  '^(?:wrapperUrl\\s*=\\s*)(?<url>\\S*-(?<version>\\d+\\.\\d+(?:\\.\\d+)?(?:-\\w+)*)(?:.jar))'
+  '^(?:wrapperUrl\\s*=\\s*)(?<url>\\S*-(?<version>\\d+\\.\\d+(?:\\.\\d+)?(?:-\\w+)*)(?:.jar))',
 );
 
 function extractVersions(fileContent: string): MavenVersionExtract {
@@ -38,7 +38,7 @@ function extractLineInfo(lines: string[], regex: RegExp): Version | null {
 }
 
 export function extractPackageFile(
-  fileContent: string
+  fileContent: string,
 ): PackageFileContent | null {
   logger.trace('maven-wrapper.extractPackageFile()');
   const extractResult = extractVersions(fileContent);
diff --git a/lib/modules/manager/maven/extract.ts b/lib/modules/manager/maven/extract.ts
index 5ab8652717bca5a025231dca01cead49fc5ebf25..7cf5b98d8a140cb9c1ebb81d22c92b2bcc6adce1 100644
--- a/lib/modules/manager/maven/extract.ts
+++ b/lib/modules/manager/maven/extract.ts
@@ -39,7 +39,7 @@ function containsPlaceholder(str: string | null | undefined): boolean {
 
 function depFromNode(
   node: XmlElement,
-  underBuildSettingsElement: boolean
+  underBuildSettingsElement: boolean,
 ): PackageDependency | null {
   if (!('valueWithPath' in node)) {
     return null;
@@ -99,7 +99,7 @@ function deepExtract(
   node: XmlElement,
   result: PackageDependency[] = [],
   isRoot = true,
-  underBuildSettingsElement = false
+  underBuildSettingsElement = false,
 ): PackageDependency[] {
   const dep = depFromNode(node, underBuildSettingsElement);
   if (dep && !isRoot) {
@@ -113,7 +113,7 @@ function deepExtract(
         false,
         node.name === 'build' ||
           node.name === 'reporting' ||
-          underBuildSettingsElement
+          underBuildSettingsElement,
       );
     }
   }
@@ -123,7 +123,7 @@ function deepExtract(
 function applyProps(
   dep: PackageDependency<Record<string, any>>,
   depPackageFile: string,
-  props: MavenProp
+  props: MavenProp,
 ): PackageDependency<Record<string, any>> {
   let result = dep;
   let anyChange = false;
@@ -134,7 +134,7 @@ function applyProps(
       result,
       depPackageFile,
       props,
-      alreadySeenProps
+      alreadySeenProps,
     );
     if (fatal) {
       dep.skipReason = 'recursive-placeholder';
@@ -157,7 +157,7 @@ function applyPropsInternal(
   dep: PackageDependency<Record<string, any>>,
   depPackageFile: string,
   props: MavenProp,
-  previouslySeenProps: Set<string>
+  previouslySeenProps: Set<string>,
 ): [PackageDependency<Record<string, any>>, boolean, boolean] {
   let anyChange = false;
   let fatal = false;
@@ -211,7 +211,7 @@ function applyPropsInternal(
         return propValue.val;
       }
       return substr;
-    }
+    },
   );
 
   const result: PackageDependency = {
@@ -256,7 +256,7 @@ interface MavenInterimPackageFile extends PackageFile {
 
 export function extractPackage(
   rawContent: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFile | null {
   if (!rawContent) {
     return null;
@@ -471,7 +471,7 @@ function cleanResult(packageFiles: MavenInterimPackageFile[]): PackageFile[] {
 
 export async function extractAllPackageFiles(
   _config: ExtractConfig,
-  packageFiles: string[]
+  packageFiles: string[],
 ): Promise<PackageFile[]> {
   const packages: PackageFile[] = [];
   const additionalRegistryUrls: string[] = [];
@@ -487,7 +487,7 @@ export async function extractAllPackageFiles(
       if (registries) {
         logger.debug(
           { registries, packageFile },
-          'Found registryUrls in settings.xml'
+          'Found registryUrls in settings.xml',
         );
         additionalRegistryUrls.push(...registries);
       }
diff --git a/lib/modules/manager/maven/index.spec.ts b/lib/modules/manager/maven/index.spec.ts
index 06de022c0b9d1a90cb7def90ea275fb967d79672..410a8df1ff612a210b709a46dcf0ad614ee9e3cc 100644
--- a/lib/modules/manager/maven/index.spec.ts
+++ b/lib/modules/manager/maven/index.spec.ts
@@ -220,7 +220,7 @@ describe('modules/manager/maven/index', () => {
         upgrade,
       })!;
       const updatedDep = selectDep(
-        extractPackage(updatedContent, 'some-file')!.deps
+        extractPackage(updatedContent, 'some-file')!.deps,
       );
 
       expect(updatedDep?.currentValue).toEqual(newValue);
@@ -268,7 +268,7 @@ describe('modules/manager/maven/index', () => {
       const [{ deps }] = resolveParents([
         extractPackage(
           Fixtures.get('multiple_usages_props.pom.xml'),
-          'some-file'
+          'some-file',
         )!,
       ]);
       expect(deps).toMatchObject([
@@ -283,7 +283,7 @@ describe('modules/manager/maven/index', () => {
       const [{ deps }] = resolveParents([
         extractPackage(
           Fixtures.get('infinite_recursive_props.pom.xml'),
-          'some-file'
+          'some-file',
         )!,
       ]);
       expect(deps).toMatchObject([
@@ -349,13 +349,13 @@ describe('modules/manager/maven/index', () => {
       const updatedOutside = origContent.replace('1.0.0', '1.0.1');
 
       expect(
-        updateDependency({ fileContent: origContent, upgrade: upgrade1 })
+        updateDependency({ fileContent: origContent, upgrade: upgrade1 }),
       ).toEqual(origContent.replace('1.0.0', '1.0.2'));
       expect(
         updateDependency({
           fileContent: updatedOutside,
           upgrade: upgrade1,
-        })
+        }),
       ).toEqual(origContent.replace('1.0.0', '1.0.2'));
 
       const updatedByPrevious = updateDependency({
@@ -367,17 +367,17 @@ describe('modules/manager/maven/index', () => {
         updateDependency({
           fileContent: updatedByPrevious,
           upgrade: upgrade2,
-        })
+        }),
       ).toEqual(origContent.replace('1.0.0', '1.0.3'));
       expect(
         updateDependency({
           fileContent: updatedOutside,
           upgrade: upgrade2,
-        })
+        }),
       ).toEqual(origContent.replace('1.0.0', '1.0.3'));
 
       expect(
-        updateDependency({ fileContent: origContent, upgrade: upgrade2 })
+        updateDependency({ fileContent: origContent, upgrade: upgrade2 }),
       ).toEqual(origContent.replace('1.0.0', '1.0.3'));
     });
 
@@ -388,7 +388,7 @@ describe('modules/manager/maven/index', () => {
       const upgrade = { ...dep, newValue: '2.0.2' };
       const updatedOutside = origContent.replace('2.0.0', '2.0.1');
       expect(
-        updateDependency({ fileContent: updatedOutside, upgrade })
+        updateDependency({ fileContent: updatedOutside, upgrade }),
       ).toBeNull();
     });
 
@@ -416,7 +416,7 @@ describe('modules/manager/maven/index', () => {
       const upgrade = { ...dep, newValue };
       const newContent = extractPackage(
         updateDependency({ fileContent: pomContent, upgrade })!,
-        'some-file'
+        'some-file',
       );
       const newDep = select(newContent!);
       expect(newDep?.currentValue).toEqual(newValue);
@@ -431,7 +431,7 @@ describe('modules/manager/maven/index', () => {
       expect(dep).not.toBeNull();
       const upgrade = { ...dep, newValue };
       expect(updateDependency({ fileContent: pomContent, upgrade })).toEqual(
-        pomContent
+        pomContent,
       );
     });
 
diff --git a/lib/modules/manager/maven/update.spec.ts b/lib/modules/manager/maven/update.spec.ts
index cf23ec6d63651bb97f81adcb257c9376a05c2875..da7330e0e7e1630c6dd8769b983d7845bff329d8 100644
--- a/lib/modules/manager/maven/update.spec.ts
+++ b/lib/modules/manager/maven/update.spec.ts
@@ -13,7 +13,7 @@ describe('modules/manager/maven/update', () => {
       const { bumpedContent } = pomUpdater.bumpPackageVersion(
         simpleContent,
         '0.0.1',
-        'patch'
+        'patch',
       );
 
       const project = new XmlDocument(bumpedContent!);
@@ -24,12 +24,12 @@ describe('modules/manager/maven/update', () => {
       const { bumpedContent } = pomUpdater.bumpPackageVersion(
         simpleContent,
         '0.0.1',
-        'patch'
+        'patch',
       );
       const { bumpedContent: bumpedContent2 } = pomUpdater.bumpPackageVersion(
         bumpedContent!,
         '0.0.1',
-        'patch'
+        'patch',
       );
 
       expect(bumpedContent).toEqual(bumpedContent2);
@@ -39,7 +39,7 @@ describe('modules/manager/maven/update', () => {
       const { bumpedContent } = pomUpdater.bumpPackageVersion(
         minimumContent,
         '1',
-        'patch'
+        'patch',
       );
 
       const project = new XmlDocument(bumpedContent!);
@@ -50,7 +50,7 @@ describe('modules/manager/maven/update', () => {
       const { bumpedContent } = pomUpdater.bumpPackageVersion(
         minimumContent,
         '',
-        'patch'
+        'patch',
       );
 
       expect(bumpedContent).toEqual(minimumContent);
@@ -60,7 +60,7 @@ describe('modules/manager/maven/update', () => {
       const { bumpedContent } = pomUpdater.bumpPackageVersion(
         simpleContent,
         '0.0.1',
-        true as any
+        true as any,
       );
       expect(bumpedContent).toEqual(simpleContent);
     });
@@ -69,7 +69,7 @@ describe('modules/manager/maven/update', () => {
       const { bumpedContent } = pomUpdater.bumpPackageVersion(
         prereleaseContent,
         '1.0.0-1',
-        'prerelease'
+        'prerelease',
       );
 
       const project = new XmlDocument(bumpedContent!);
diff --git a/lib/modules/manager/maven/update.ts b/lib/modules/manager/maven/update.ts
index 23ebe518383b9b163c9b67de3e34995f19050a44..055d033cccb9ac9dd0dd30cebcc0b98515f19865 100644
--- a/lib/modules/manager/maven/update.ts
+++ b/lib/modules/manager/maven/update.ts
@@ -11,7 +11,7 @@ import type {
 export function updateAtPosition(
   fileContent: string,
   upgrade: Upgrade,
-  endingAnchor: string
+  endingAnchor: string,
 ): string | null {
   const { depName, currentValue, newValue, fileReplacePosition } = upgrade;
   const leftPart = fileContent.slice(0, fileReplacePosition);
@@ -56,18 +56,18 @@ export function updateDependency({
 export function bumpPackageVersion(
   content: string,
   currentValue: string,
-  bumpVersion: ReleaseType
+  bumpVersion: ReleaseType,
 ): BumpPackageVersionResult {
   logger.debug(
     { bumpVersion, currentValue },
-    'Checking if we should bump pom.xml version'
+    'Checking if we should bump pom.xml version',
   );
   let bumpedContent = content;
 
   if (!semver.valid(currentValue)) {
     logger.warn(
       { currentValue },
-      'Unable to bump pom.xml version, not a valid semver'
+      'Unable to bump pom.xml version, not a valid semver',
     );
     return { bumpedContent };
   }
@@ -88,7 +88,7 @@ export function bumpPackageVersion(
       content,
       versionPosition,
       currentValue,
-      newPomVersion
+      newPomVersion,
     );
 
     if (bumpedContent === content) {
@@ -103,7 +103,7 @@ export function bumpPackageVersion(
         currentValue,
         bumpVersion,
       },
-      'Failed to bumpVersion'
+      'Failed to bumpVersion',
     );
   }
   return { bumpedContent };
diff --git a/lib/modules/manager/metadata.spec.ts b/lib/modules/manager/metadata.spec.ts
index 320088f09891347f57180613fff68fbfec949aaa..1409333360ba4b32940dd2d3baf2d156b37fa464 100644
--- a/lib/modules/manager/metadata.spec.ts
+++ b/lib/modules/manager/metadata.spec.ts
@@ -43,8 +43,8 @@ describe('modules/manager/metadata', () => {
       }
 
       expect(
-        res.some((line) => line.startsWith('# ') || line.startsWith('## '))
+        res.some((line) => line.startsWith('# ') || line.startsWith('## ')),
       ).toBeFalse();
-    }
+    },
   );
 });
diff --git a/lib/modules/manager/meteor/extract.ts b/lib/modules/manager/meteor/extract.ts
index 05a85f7c02d3531c9226abb528da9deb84f9b481..9178415b3ad1c52b6d4b70793d3ae9a383874c92 100644
--- a/lib/modules/manager/meteor/extract.ts
+++ b/lib/modules/manager/meteor/extract.ts
@@ -5,7 +5,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   let deps: PackageDependency[] = [];
   const npmDepends = regEx(/\nNpm\.depends\({([\s\S]*?)}\);/).exec(content);
diff --git a/lib/modules/manager/mix/artifacts.spec.ts b/lib/modules/manager/mix/artifacts.spec.ts
index 4115d207382743c0c58734324eb52706f211f10b..742476a21ca54834ec811b8e456193b60e1438e1 100644
--- a/lib/modules/manager/mix/artifacts.spec.ts
+++ b/lib/modules/manager/mix/artifacts.spec.ts
@@ -47,7 +47,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -58,7 +58,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -69,7 +69,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -83,7 +83,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -125,7 +125,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         file: { type: 'addition', path: 'mix.lock', contents: 'New mix.lock' },
@@ -150,7 +150,7 @@ describe('modules/manager/mix/artifacts', () => {
           ...config,
           constraints: { erlang: '26.0.0', elixir: '1.14.5' },
         },
-      })
+      }),
     ).toEqual([
       {
         file: { type: 'addition', path: 'mix.lock', contents: 'New mix.lock' },
@@ -224,7 +224,7 @@ describe('modules/manager/mix/artifacts', () => {
     const [, packageUpdateCommand] = execSnapshots;
     expect(packageUpdateCommand.cmd).toInclude(
       'mix hex.organization auth renovate_test --key valid_test_token && ' +
-        'mix deps.update private_package other_package'
+        'mix deps.update private_package other_package',
     );
   });
 
@@ -305,7 +305,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(fs.readLocalFile).toHaveBeenCalledWith('subdir/mix.lock', 'utf8');
   });
@@ -322,7 +322,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'mix.lock', stderr: 'not found' } },
     ]);
@@ -338,7 +338,7 @@ describe('modules/manager/mix/artifacts', () => {
         updatedDeps: [{ depName: 'plug' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'mix.lock', stderr: 'exec-error' } },
     ]);
diff --git a/lib/modules/manager/mix/artifacts.ts b/lib/modules/manager/mix/artifacts.ts
index 6d53495aacff5fb31f9fc02e027b34be4b284d85..480207d63d30a5e04a18f084ed7f9fb0d3734cf0 100644
--- a/lib/modules/manager/mix/artifacts.ts
+++ b/lib/modules/manager/mix/artifacts.ts
@@ -16,7 +16,7 @@ import type { UpdateArtifact, UpdateArtifactsResult } from '../types';
 
 const hexRepoUrl = 'https://hex.pm/';
 const hexRepoOrgUrlRegex = regEx(
-  `^https://hex\\.pm/api/repos/(?<organization>[a-z0-9_]+)/$`
+  `^https://hex\\.pm/api/repos/(?<organization>[a-z0-9_]+)/$`,
 );
 
 export async function updateArtifacts({
@@ -59,7 +59,7 @@ export async function updateArtifacts({
     .getAll()
     .filter(
       (hostRule) =>
-        !!hostRule.matchHost && hexRepoOrgUrlRegex.test(hostRule.matchHost)
+        !!hostRule.matchHost && hexRepoOrgUrlRegex.test(hostRule.matchHost),
     );
 
   for (const { matchHost } of hexHostRulesWithMatchHost) {
@@ -131,7 +131,7 @@ export async function updateArtifacts({
 
     logger.debug(
       { err, message: err.message, command },
-      'Failed to update Mix lock file'
+      'Failed to update Mix lock file',
     );
 
     return [
diff --git a/lib/modules/manager/mix/extract.ts b/lib/modules/manager/mix/extract.ts
index 457c382ba1c188cb417fdbb0fc4a71ae65fc65e6..25389f4ed9b1d7fd126aebac2947ae6b2b9da39e 100644
--- a/lib/modules/manager/mix/extract.ts
+++ b/lib/modules/manager/mix/extract.ts
@@ -8,7 +8,7 @@ import type { PackageDependency, PackageFileContent } from '../types';
 
 const depSectionRegExp = regEx(/defp\s+deps.*do/g);
 const depMatchRegExp = regEx(
-  /{:(?<app>\w+)(\s*,\s*"(?<requirement>[^"]+)")?(\s*,\s*(?<opts>[^}]+))?}/gm
+  /{:(?<app>\w+)(\s*,\s*"(?<requirement>[^"]+)")?(\s*,\s*(?<opts>[^}]+))?}/gm,
 );
 const gitRegexp = regEx(/git:\s*"(?<value>[^"]+)"/);
 const githubRegexp = regEx(/github:\s*"(?<value>[^"]+)"/);
@@ -19,7 +19,7 @@ const commentMatchRegExp = regEx(/#.*$/);
 
 export async function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   logger.trace(`mix.extractPackageFile(${packageFile})`);
   const deps: PackageDependency[] = [];
diff --git a/lib/modules/manager/nix/artifacts.spec.ts b/lib/modules/manager/nix/artifacts.spec.ts
index 9b58b41cc0baed824308b0bf36b7783ec65f5d99..7b741ee82208f4f591bae9ae5a28f355d7812711 100644
--- a/lib/modules/manager/nix/artifacts.spec.ts
+++ b/lib/modules/manager/nix/artifacts.spec.ts
@@ -82,7 +82,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: [''],
-      })
+      }),
     );
 
     const res = await updateArtifacts({
@@ -102,7 +102,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new flake.lock');
 
@@ -131,7 +131,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new flake.lock');
     hostRules.find.mockReturnValueOnce({ token: 'token' });
@@ -161,7 +161,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new flake.lock');
     hostRules.find.mockReturnValueOnce({ token: 'x-access-token:token' });
@@ -191,7 +191,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new flake.lock');
 
@@ -236,7 +236,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new flake.lock');
 
@@ -289,7 +289,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new flake.lock');
 
@@ -318,7 +318,7 @@ describe('modules/manager/nix/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['flake.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
 
diff --git a/lib/modules/manager/nix/artifacts.ts b/lib/modules/manager/nix/artifacts.ts
index 236004dffcd6c60fc20fd86c7fd2363c563e5d79..bea247114e8df0b81b67b898dc87e5cabb3e722d 100644
--- a/lib/modules/manager/nix/artifacts.ts
+++ b/lib/modules/manager/nix/artifacts.ts
@@ -30,7 +30,7 @@ export async function updateArtifacts({
     hostRules.find({
       hostType: 'github',
       url: 'https://api.github.com/',
-    })
+    }),
   );
 
   if (token) {
diff --git a/lib/modules/manager/npm/detect.spec.ts b/lib/modules/manager/npm/detect.spec.ts
index 057cd892bdf119e5258c2bba6d3c78eca305448f..095e9ca3df56d35b18bbb349047665b5ab531912 100644
--- a/lib/modules/manager/npm/detect.spec.ts
+++ b/lib/modules/manager/npm/detect.spec.ts
@@ -7,7 +7,7 @@ describe('modules/manager/npm/detect', () => {
   describe('.detectGlobalConfig()', () => {
     it('detects .npmrc in home directory', async () => {
       fs.readSystemFile.mockResolvedValueOnce(
-        'registry=https://registry.npmjs.org\n'
+        'registry=https://registry.npmjs.org\n',
       );
       const res = await detectGlobalConfig();
       expect(res).toMatchInlineSnapshot(`
diff --git a/lib/modules/manager/npm/extract/common/dependency.ts b/lib/modules/manager/npm/extract/common/dependency.ts
index 47575f75c8e24b2694f0047b47d940b1fe00f202..1fc13de8407c4365a735ac2ba790e4fcfbdefd21 100644
--- a/lib/modules/manager/npm/extract/common/dependency.ts
+++ b/lib/modules/manager/npm/extract/common/dependency.ts
@@ -9,7 +9,7 @@ import { api, isValid, isVersion } from '../../../../versioning/npm';
 import type { PackageDependency } from '../../../types';
 
 const RE_REPOSITORY_GITHUB_SSH_FORMAT = regEx(
-  /(?:git@)github.com:([^/]+)\/([^/.]+)(?:\.git)?/
+  /(?:git@)github.com:([^/]+)\/([^/.]+)(?:\.git)?/,
 );
 
 export function parseDepName(depType: string, key: string): string {
@@ -24,7 +24,7 @@ export function parseDepName(depType: string, key: string): string {
 export function extractDependency(
   depType: string,
   depName: string,
-  input: string
+  input: string,
 ): PackageDependency {
   const dep: PackageDependency = {};
   if (!validateNpmPackageName(depName).validForOldPackages) {
@@ -109,7 +109,7 @@ export function extractDependency(
       dep.currentValue = valSplit[2];
     } else {
       logger.debug(
-        `Invalid npm package alias for dependency: "${depName}":"${dep.currentValue}"`
+        `Invalid npm package alias for dependency: "${depName}":"${dep.currentValue}"`,
       );
     }
   }
@@ -186,7 +186,7 @@ export function extractDependency(
 }
 
 export function getExtractedConstraints(
-  deps: PackageDependency[]
+  deps: PackageDependency[],
 ): Record<string, string> {
   const extractedConstraints: Record<string, string> = {};
   const constraints = ['node', 'yarn', 'npm', 'pnpm', 'vscode'];
diff --git a/lib/modules/manager/npm/extract/common/node.ts b/lib/modules/manager/npm/extract/common/node.ts
index 067bfb00de91c8ebb3d44202c17664ce5d96a776..6b578ad644be9d6135986ae7e54d506979094e54 100644
--- a/lib/modules/manager/npm/extract/common/node.ts
+++ b/lib/modules/manager/npm/extract/common/node.ts
@@ -2,7 +2,7 @@ import type { PackageDependency } from '../../../types';
 import type { NpmManagerData } from '../../types';
 
 export function setNodeCommitTopic(
-  dep: PackageDependency<NpmManagerData>
+  dep: PackageDependency<NpmManagerData>,
 ): void {
   // This is a special case for Node.js to group it together with other managers
   if (dep.depName === 'node') {
diff --git a/lib/modules/manager/npm/extract/common/overrides.ts b/lib/modules/manager/npm/extract/common/overrides.ts
index 1bed0e85c3a26112300fc144d3dc6e1890bc7bed..74f1ce2c0cf468f082df0a0a5182a282430130a3 100644
--- a/lib/modules/manager/npm/extract/common/overrides.ts
+++ b/lib/modules/manager/npm/extract/common/overrides.ts
@@ -12,7 +12,7 @@ import { setNodeCommitTopic } from './node';
  */
 export function extractOverrideDepsRec(
   parents: string[],
-  child: NpmManagerData
+  child: NpmManagerData,
 ): PackageDependency[] {
   const deps: PackageDependency[] = [];
   if (!child || is.emptyObject(child)) {
diff --git a/lib/modules/manager/npm/extract/common/package-file.ts b/lib/modules/manager/npm/extract/common/package-file.ts
index 0340070dd1ece83088e2206e5506a0565db5a732..31048ee922cdce85f8860398151d36bb84491525 100644
--- a/lib/modules/manager/npm/extract/common/package-file.ts
+++ b/lib/modules/manager/npm/extract/common/package-file.ts
@@ -15,7 +15,7 @@ import { extractOverrideDepsRec } from './overrides';
 
 export function extractPackageJson(
   packageJson: NpmPackage,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent<NpmManagerData> | null {
   logger.trace(`npm.extractPackageJson(${packageFile})`);
   const deps: PackageDependency[] = [];
@@ -33,7 +33,7 @@ export function extractPackageJson(
   }
   const packageJsonName = packageJson.name;
   logger.debug(
-    `npm file ${packageFile} has name ${JSON.stringify(packageJsonName)}`
+    `npm file ${packageFile} has name ${JSON.stringify(packageJsonName)}`,
   );
   const packageFileVersion = packageJson.version;
 
@@ -55,7 +55,7 @@ export function extractPackageJson(
       try {
         if (depType === 'packageManager') {
           const match = regEx('^(?<name>.+)@(?<range>.+)$').exec(
-            dependencies as string
+            dependencies as string,
           );
           // istanbul ignore next
           if (!match?.groups) {
@@ -64,7 +64,7 @@ export function extractPackageJson(
           dependencies = { [match.groups.name]: match.groups.range };
         }
         for (const [key, val] of Object.entries(
-          dependencies as NpmPackageDependency
+          dependencies as NpmPackageDependency,
         )) {
           const depName = parseDepName(depType, key);
           let dep: PackageDependency = {
@@ -79,8 +79,8 @@ export function extractPackageJson(
             deps.push(
               ...extractOverrideDepsRec(
                 [depName],
-                val as unknown as NpmManagerData
-              )
+                val as unknown as NpmManagerData,
+              ),
             );
           } else {
             // TODO: fix type #22198
@@ -93,7 +93,7 @@ export function extractPackageJson(
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { fileName: packageFile, depType, err },
-          'Error parsing package.json'
+          'Error parsing package.json',
         );
         return null;
       }
@@ -109,7 +109,7 @@ export function extractPackageJson(
     managerData: {
       packageJsonName,
       hasPackageManager: is.nonEmptyStringAndNotWhitespace(
-        packageJson.packageManager
+        packageJson.packageManager,
       ),
     },
   };
diff --git a/lib/modules/manager/npm/extract/index.spec.ts b/lib/modules/manager/npm/extract/index.spec.ts
index 56d6cd263f3310b91cc5a45d7727ffc87afbbd84..fbedd8a31f08514022afda717096cfe6f077250c 100644
--- a/lib/modules/manager/npm/extract/index.spec.ts
+++ b/lib/modules/manager/npm/extract/index.spec.ts
@@ -31,7 +31,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         'not json',
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toBeNull();
     });
@@ -40,7 +40,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         invalidNameContent,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         deps: [{ skipReason: 'invalid-name' }],
@@ -51,7 +51,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         vendorisedContent,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toBeNull();
     });
@@ -61,8 +61,8 @@ describe('modules/manager/npm/extract/index', () => {
         npmExtract.extractPackageFile(
           '{ "renovate": {} }',
           'backend/package.json',
-          defaultExtractConfig
-        )
+          defaultExtractConfig,
+        ),
       ).rejects.toThrow();
     });
 
@@ -70,7 +70,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         '{ "renovate": {} }',
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toBeNull();
     });
@@ -79,7 +79,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         '{"dependencies": true, "devDependencies": []}',
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toBeNull();
     });
@@ -88,7 +88,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         deps: [
@@ -115,7 +115,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01GlobContent,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res?.deps).toHaveLength(13);
       expect(res).toMatchSnapshot({
@@ -150,7 +150,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         managerData: {
@@ -172,10 +172,10 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(logger.warn).toHaveBeenCalledWith(
-        'Updating multiple npm lock files is deprecated and support will be removed in future versions.'
+        'Updating multiple npm lock files is deprecated and support will be removed in future versions.',
       );
       expect(res).toMatchObject({
         managerData: {
@@ -195,7 +195,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        {}
+        {},
       );
       expect(res?.npmrc).toBe('save-exact = true\n');
     });
@@ -205,7 +205,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        { ...defaultExtractConfig, npmrc: 'config-npmrc' }
+        { ...defaultExtractConfig, npmrc: 'config-npmrc' },
       );
       expect(res?.npmrc).toBe('config-npmrc');
     });
@@ -220,7 +220,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        { npmrc: 'config-npmrc' }
+        { npmrc: 'config-npmrc' },
       );
       expect(res?.npmrc).toBe('config-npmrc');
     });
@@ -235,7 +235,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        { npmrc: 'config-npmrc', npmrcMerge: true }
+        { npmrc: 'config-npmrc', npmrcMerge: true },
       );
       expect(res?.npmrc).toBe(`config-npmrc\nrepo-npmrc\n`);
     });
@@ -244,7 +244,7 @@ describe('modules/manager/npm/extract/index', () => {
       fs.readLocalFile.mockImplementation((fileName): Promise<any> => {
         if (fileName === '.npmrc') {
           return Promise.resolve(
-            'registry=https://registry.npmjs.org\n//registry.npmjs.org/:_authToken=${NPM_AUTH_TOKEN}\n'
+            'registry=https://registry.npmjs.org\n//registry.npmjs.org/:_authToken=${NPM_AUTH_TOKEN}\n',
           );
         }
         return Promise.resolve(null);
@@ -252,7 +252,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        {}
+        {},
       );
       expect(res?.npmrc).toBe('registry=https://registry.npmjs.org\n');
     });
@@ -261,7 +261,7 @@ describe('modules/manager/npm/extract/index', () => {
       fs.readLocalFile.mockImplementation((fileName): Promise<any> => {
         if (fileName === '.yarnrc.yml') {
           return Promise.resolve(
-            'npmRegistryServer: https://registry.example.com'
+            'npmRegistryServer: https://registry.example.com',
           );
         }
         return Promise.resolve(null);
@@ -269,10 +269,10 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input02Content,
         'package.json',
-        {}
+        {},
       );
       expect(
-        res?.deps.flatMap((dep) => dep.registryUrls)
+        res?.deps.flatMap((dep) => dep.registryUrls),
       ).toBeArrayIncludingOnly(['https://registry.example.com']);
     });
 
@@ -286,10 +286,10 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input02Content,
         'package.json',
-        {}
+        {},
       );
       expect(
-        res?.deps.flatMap((dep) => dep.registryUrls)
+        res?.deps.flatMap((dep) => dep.registryUrls),
       ).toBeArrayIncludingOnly(['https://registry.example.com']);
     });
 
@@ -300,7 +300,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         workspacesContent,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         managerData: { workspacesPackages: ['packages/*'] },
@@ -333,7 +333,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         extractedConstraints: {
@@ -417,7 +417,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         deps: [
@@ -454,7 +454,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         deps: [
@@ -497,7 +497,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
 
       expect(res).toMatchObject({
@@ -551,7 +551,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         deps: [
@@ -650,7 +650,7 @@ describe('modules/manager/npm/extract/index', () => {
       fs.readLocalFile.mockImplementation((fileName): Promise<any> => {
         if (fileName === '.yarnrc.yml') {
           return Promise.resolve(
-            'npmRegistryServer: https://registry.example.com'
+            'npmRegistryServer: https://registry.example.com',
           );
         }
         return Promise.resolve(null);
@@ -670,7 +670,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchObject({
         deps: [
@@ -725,10 +725,10 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(logger.debug).toHaveBeenCalledWith(
-        'Invalid npm package alias for dependency: "g":"npm:@foo/@bar/@1.2.3"'
+        'Invalid npm package alias for dependency: "g":"npm:@foo/@bar/@1.2.3"',
       );
       expect(res).toMatchSnapshot({
         deps: [
@@ -767,7 +767,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         input01Content,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot();
     });
@@ -780,7 +780,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         pJsonStr,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchSnapshot({
         extractedConstraints: { yarn: '3.0.0' },
@@ -822,7 +822,7 @@ describe('modules/manager/npm/extract/index', () => {
       const res = await npmExtract.extractPackageFile(
         content,
         'package.json',
-        defaultExtractConfig
+        defaultExtractConfig,
       );
       expect(res).toMatchObject({
         deps: [
@@ -883,7 +883,7 @@ describe('modules/manager/npm/extract/index', () => {
       fs.readLocalFile.mockResolvedValueOnce(input02Content);
       const res = await npmExtract.extractAllPackageFiles(
         defaultExtractConfig,
-        ['package.json']
+        ['package.json'],
       );
       expect(res).toEqual([
         {
diff --git a/lib/modules/manager/npm/extract/index.ts b/lib/modules/manager/npm/extract/index.ts
index 846f2d9fe8f1d759205e4ef8b6cbe668bc4accc8..1fc7c097b19c76909f9a6800998c211e2fa948bf 100644
--- a/lib/modules/manager/npm/extract/index.ts
+++ b/lib/modules/manager/npm/extract/index.ts
@@ -30,7 +30,7 @@ function hasMultipleLockFiles(lockFiles: NpmLockFiles): boolean {
 export async function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): Promise<PackageFileContent<NpmManagerData> | null> {
   logger.trace(`npm.extractPackageFile(${packageFile})`);
   logger.trace({ content });
@@ -63,7 +63,7 @@ export async function extractPackageFile(
 
   for (const [key, val] of Object.entries(lockFiles) as [
     'yarnLock' | 'packageLock' | 'shrinkwrapJson' | 'pnpmShrinkwrap',
-    string
+    string,
   ][]) {
     const filePath = getSiblingFileName(packageFile, val);
     if (await readLocalFile(filePath, 'utf8')) {
@@ -78,7 +78,7 @@ export async function extractPackageFile(
 
   if (hasMultipleLockFiles(lockFiles)) {
     logger.warn(
-      'Updating multiple npm lock files is deprecated and support will be removed in future versions.'
+      'Updating multiple npm lock files is deprecated and support will be removed in future versions.',
     );
   }
 
@@ -89,7 +89,7 @@ export async function extractPackageFile(
     if (is.string(config.npmrc) && !config.npmrcMerge) {
       logger.debug(
         { npmrcFileName },
-        'Repo .npmrc file is ignored due to config.npmrc with config.npmrcMerge=false'
+        'Repo .npmrc file is ignored due to config.npmrc with config.npmrcMerge=false',
       );
       npmrc = config.npmrc;
     } else {
@@ -103,13 +103,13 @@ export async function extractPackageFile(
         logger.debug('Stripping package-lock setting from .npmrc');
         repoNpmrc = repoNpmrc.replace(
           regEx(/(^|\n)package-lock.*?(\n|$)/g),
-          '\n'
+          '\n',
         );
       }
       if (repoNpmrc.includes('=${') && !GlobalConfig.get('exposeAllEnv')) {
         logger.debug(
           { npmrcFileName },
-          'Stripping .npmrc file of lines with variables'
+          'Stripping .npmrc file of lines with variables',
         );
         repoNpmrc = repoNpmrc
           .split(newlineRegex)
@@ -156,7 +156,7 @@ export async function extractPackageFile(
     const hasFancyRefs = !!res.deps.some(
       (dep) =>
         !!dep.currentValue?.startsWith('file:') ||
-        !!dep.currentValue?.startsWith('npm:')
+        !!dep.currentValue?.startsWith('npm:'),
     );
     if ((hasFancyRefs && !!lockFiles.npmLock) || yarnZeroInstall) {
       // https://github.com/npm/cli/issues/1432
@@ -178,7 +178,7 @@ export async function extractPackageFile(
       if (dep.depName) {
         const registryUrlFromYarnConfig = resolveRegistryUrl(
           dep.depName,
-          yarnConfig
+          yarnConfig,
         );
         if (registryUrlFromYarnConfig && dep.datasource === NpmDatasource.id) {
           dep.registryUrls = [registryUrlFromYarnConfig];
@@ -195,7 +195,7 @@ export async function extractPackageFile(
       ...lockFiles,
       yarnZeroInstall,
       hasPackageManager: is.nonEmptyStringAndNotWhitespace(
-        packageJson.packageManager
+        packageJson.packageManager,
       ),
       workspacesPackages,
     },
@@ -206,7 +206,7 @@ export async function extractPackageFile(
 
 export async function extractAllPackageFiles(
   config: ExtractConfig,
-  packageFiles: string[]
+  packageFiles: string[],
 ): Promise<PackageFile<NpmManagerData>[]> {
   const npmFiles: PackageFile<NpmManagerData>[] = [];
   for (const packageFile of packageFiles) {
diff --git a/lib/modules/manager/npm/extract/npm.ts b/lib/modules/manager/npm/extract/npm.ts
index db86b5288115eb91921b4de5bed431ca71d48c7e..a1fba7e2652ba5434767c4abc11f52e7f8d0574c 100644
--- a/lib/modules/manager/npm/extract/npm.ts
+++ b/lib/modules/manager/npm/extract/npm.ts
@@ -14,7 +14,7 @@ export async function getNpmLock(filePath: string): Promise<LockFile> {
   if (!parsedLockfile.success) {
     logger.debug(
       { filePath, err: parsedLockfile.error },
-      'Npm: unable to parse lockfile'
+      'Npm: unable to parse lockfile',
     );
     return { lockedVersions: {} };
   }
diff --git a/lib/modules/manager/npm/extract/pnpm.spec.ts b/lib/modules/manager/npm/extract/pnpm.spec.ts
index 26227db1b0b3bd81961ae502bd71d0b0fe20f429..9927655a776f07eef73e4d10c7055cd98b3045bc 100644
--- a/lib/modules/manager/npm/extract/pnpm.spec.ts
+++ b/lib/modules/manager/npm/extract/pnpm.spec.ts
@@ -29,7 +29,7 @@ describe('modules/manager/npm/extract/pnpm', () => {
 
       const workSpaceFilePath = getFixturePath(
         'pnpm-monorepo/pnpm-workspace.yml',
-        '..'
+        '..',
       );
       const res = await extractPnpmFilters(workSpaceFilePath);
       expect(res).toBeUndefined();
@@ -37,7 +37,7 @@ describe('modules/manager/npm/extract/pnpm', () => {
         {
           fileName: expect.any(String),
         },
-        'Failed to find required "packages" array in pnpm-workspace.yaml'
+        'Failed to find required "packages" array in pnpm-workspace.yaml',
       );
     });
 
@@ -53,7 +53,7 @@ describe('modules/manager/npm/extract/pnpm', () => {
           fileName: expect.any(String),
           err: expect.anything(),
         }),
-        'Failed to parse pnpm-workspace.yaml'
+        'Failed to parse pnpm-workspace.yaml',
       );
     });
   });
@@ -67,7 +67,7 @@ describe('modules/manager/npm/extract/pnpm', () => {
       expect(res).toBeNull();
       expect(logger.logger.trace).toHaveBeenCalledWith(
         expect.objectContaining({ packageFile }),
-        'Failed to locate pnpm-workspace.yaml in a parent directory.'
+        'Failed to locate pnpm-workspace.yaml in a parent directory.',
       );
     });
 
@@ -85,7 +85,7 @@ describe('modules/manager/npm/extract/pnpm', () => {
           workspaceYamlPath: 'pnpm-workspace.yaml',
           packageFile,
         }),
-        'Failed to find a pnpm-lock.yaml sibling for the workspace.'
+        'Failed to find a pnpm-lock.yaml sibling for the workspace.',
       );
     });
   });
@@ -173,8 +173,9 @@ describe('modules/manager/npm/extract/pnpm', () => {
       expect(packageFiles).toMatchSnapshot();
       expect(
         packageFiles.every(
-          (packageFile) => packageFile.managerData?.pnpmShrinkwrap !== undefined
-        )
+          (packageFile) =>
+            packageFile.managerData?.pnpmShrinkwrap !== undefined,
+        ),
       ).toBeTrue();
     });
 
@@ -233,8 +234,8 @@ describe('modules/manager/npm/extract/pnpm', () => {
       expect(
         packageFiles.find(
           (packageFile) =>
-            packageFile.packageFile === 'not-matching/b/package.json'
-        )?.managerData.pnpmShrinkwrap
+            packageFile.packageFile === 'not-matching/b/package.json',
+        )?.managerData.pnpmShrinkwrap,
       ).toBeUndefined();
     });
   });
@@ -256,7 +257,7 @@ describe('modules/manager/npm/extract/pnpm', () => {
     it('extracts version from normal repo', async () => {
       const plocktest1Lock = Fixtures.get(
         'lockfile-parsing/pnpm-lock.yaml',
-        '..'
+        '..',
       );
       jest.spyOn(fs, 'readLocalFile').mockResolvedValueOnce(plocktest1Lock);
       const res = await getPnpmLock('package.json');
diff --git a/lib/modules/manager/npm/extract/pnpm.ts b/lib/modules/manager/npm/extract/pnpm.ts
index fbfea660e96ef29cc34113291db51f6f70dfa4d6..3de992b46027baa9729390deff05ebaf2a417b7d 100644
--- a/lib/modules/manager/npm/extract/pnpm.ts
+++ b/lib/modules/manager/npm/extract/pnpm.ts
@@ -20,7 +20,7 @@ function isPnpmLockfile(obj: any): obj is PnpmLockFile {
 }
 
 export async function extractPnpmFilters(
-  fileName: string
+  fileName: string,
 ): Promise<string[] | undefined> {
   try {
     // TODO #22198
@@ -33,7 +33,7 @@ export async function extractPnpmFilters(
     ) {
       logger.trace(
         { fileName },
-        'Failed to find required "packages" array in pnpm-workspace.yaml'
+        'Failed to find required "packages" array in pnpm-workspace.yaml',
       );
       return undefined;
     }
@@ -45,17 +45,17 @@ export async function extractPnpmFilters(
 }
 
 export async function findPnpmWorkspace(
-  packageFile: string
+  packageFile: string,
 ): Promise<{ lockFilePath: string; workspaceYamlPath: string } | null> {
   // search for pnpm-workspace.yaml
   const workspaceYamlPath = await findLocalSiblingOrParent(
     packageFile,
-    'pnpm-workspace.yaml'
+    'pnpm-workspace.yaml',
   );
   if (!workspaceYamlPath) {
     logger.trace(
       { packageFile },
-      'Failed to locate pnpm-workspace.yaml in a parent directory.'
+      'Failed to locate pnpm-workspace.yaml in a parent directory.',
     );
     return null;
   }
@@ -63,12 +63,12 @@ export async function findPnpmWorkspace(
   // search for pnpm-lock.yaml next to pnpm-workspace.yaml
   const pnpmLockfilePath = getSiblingFileName(
     workspaceYamlPath,
-    'pnpm-lock.yaml'
+    'pnpm-lock.yaml',
   );
   if (!(await localPathExists(pnpmLockfilePath))) {
     logger.trace(
       { workspaceYamlPath, packageFile },
-      'Failed to find a pnpm-lock.yaml sibling for the workspace.'
+      'Failed to find a pnpm-lock.yaml sibling for the workspace.',
     );
     return null;
   }
@@ -80,7 +80,7 @@ export async function findPnpmWorkspace(
 }
 
 export async function detectPnpmWorkspaces(
-  packageFiles: Partial<PackageFile<NpmManagerData>>[]
+  packageFiles: Partial<PackageFile<NpmManagerData>>[],
 ): Promise<void> {
   logger.debug(`Detecting pnpm Workspaces`);
   const packagePathCache = new Map<string, string[] | null>();
@@ -93,7 +93,7 @@ export async function detectPnpmWorkspaces(
     if (pnpmShrinkwrap) {
       logger.trace(
         { packageFile, pnpmShrinkwrap },
-        'Found an existing pnpm shrinkwrap file; skipping pnpm monorepo check.'
+        'Found an existing pnpm shrinkwrap file; skipping pnpm monorepo check.',
       );
       continue;
     }
@@ -116,17 +116,17 @@ export async function detectPnpmWorkspaces(
           patterns: filters,
           // Match the ignores used in @pnpm/find-workspace-packages
           ignore: ['**/node_modules/**', '**/bower_components/**'],
-        }
+        },
       );
       const packagePaths = packages.map((pkg) =>
-        upath.join(pkg.dir, 'package.json')
+        upath.join(pkg.dir, 'package.json'),
       );
       packagePathCache.set(workspaceYamlPath, packagePaths);
     }
     const packagePaths = packagePathCache.get(workspaceYamlPath);
 
     const isPackageInWorkspace = packagePaths?.some((p) =>
-      p.endsWith(packageFile!)
+      p.endsWith(packageFile!),
     );
 
     if (isPackageInWorkspace) {
@@ -135,7 +135,7 @@ export async function detectPnpmWorkspaces(
     } else {
       logger.trace(
         { packageFile, workspaceYamlPath },
-        `Didn't find the package in the pnpm workspace`
+        `Didn't find the package in the pnpm workspace`,
       );
     }
   }
@@ -172,7 +172,7 @@ export async function getPnpmLock(filePath: string): Promise<LockFile> {
 }
 
 function getLockedVersions(
-  lockParsed: PnpmLockFile
+  lockParsed: PnpmLockFile,
 ): Record<string, Record<string, Record<string, string>>> {
   const lockedVersions: Record<
     string,
@@ -194,7 +194,7 @@ function getLockedVersions(
 }
 
 function getLockedDependencyVersions(
-  obj: PnpmLockFile | Record<string, PnpmDependencySchema>
+  obj: PnpmLockFile | Record<string, PnpmDependencySchema>,
 ): Record<string, Record<string, string>> {
   const dependencyTypes = [
     'dependencies',
@@ -206,7 +206,7 @@ function getLockedDependencyVersions(
   for (const depType of dependencyTypes) {
     res[depType] = {};
     for (const [pkgName, versionCarrier] of Object.entries(
-      obj[depType] ?? {}
+      obj[depType] ?? {},
     )) {
       let version: string;
       if (is.object(versionCarrier)) {
diff --git a/lib/modules/manager/npm/extract/post/index.ts b/lib/modules/manager/npm/extract/post/index.ts
index 6621a61a67b27c5a293c678d34b336e59f574e41..954b516ad79ca7f90d11b9cce8726a53b4067e9b 100644
--- a/lib/modules/manager/npm/extract/post/index.ts
+++ b/lib/modules/manager/npm/extract/post/index.ts
@@ -4,7 +4,7 @@ import { getLockedVersions } from './locked-versions';
 import { detectMonorepos } from './monorepo';
 
 export async function postExtract(
-  packageFiles: PackageFile<NpmManagerData>[]
+  packageFiles: PackageFile<NpmManagerData>[],
 ): Promise<void> {
   await detectMonorepos(packageFiles);
   await getLockedVersions(packageFiles);
diff --git a/lib/modules/manager/npm/extract/post/locked-versions.spec.ts b/lib/modules/manager/npm/extract/post/locked-versions.spec.ts
index ebd8b56b8696f59d3ddb8e347e1f76c8c2e93038..e141766d63f973174e78f9c551a81db4ef3f83bf 100644
--- a/lib/modules/manager/npm/extract/post/locked-versions.spec.ts
+++ b/lib/modules/manager/npm/extract/post/locked-versions.spec.ts
@@ -20,7 +20,7 @@ jest.mock('../pnpm');
 describe('modules/manager/npm/extract/post/locked-versions', () => {
   describe('.getLockedVersions()', () => {
     function getPackageFiles(
-      yarnVersion: string
+      yarnVersion: string,
     ): PackageFile<NpmManagerData>[] {
       return [
         {
@@ -750,7 +750,7 @@ describe('modules/manager/npm/extract/post/locked-versions', () => {
         lockfileVersion: 99,
         npmLock: 'package-lock.json',
       },
-      'Found unsupported npm lockfile version'
+      'Found unsupported npm lockfile version',
     );
   });
 
diff --git a/lib/modules/manager/npm/extract/post/locked-versions.ts b/lib/modules/manager/npm/extract/post/locked-versions.ts
index 8e6f4857488ab4d7c69c50e9dbd93ac264d0e282..0970c3faeca5098f276ac7e1db0cfcf3420f78b8 100644
--- a/lib/modules/manager/npm/extract/post/locked-versions.ts
+++ b/lib/modules/manager/npm/extract/post/locked-versions.ts
@@ -9,7 +9,7 @@ import { getPnpmLock } from '../pnpm';
 import type { LockFile } from '../types';
 import { getYarnLock, getYarnVersionFromLock } from '../yarn';
 export async function getLockedVersions(
-  packageFiles: PackageFile<NpmManagerData>[]
+  packageFiles: PackageFile<NpmManagerData>[],
 ): Promise<void> {
   const lockFileCache: Record<string, LockFile> = {};
   logger.debug('Finding locked versions');
@@ -92,7 +92,7 @@ export async function getLockedVersions(
       } else {
         logger.warn(
           { lockfileVersion, npmLock },
-          'Found unsupported npm lockfile version'
+          'Found unsupported npm lockfile version',
         );
         return;
       }
@@ -104,7 +104,7 @@ export async function getLockedVersions(
       for (const dep of packageFile.deps) {
         // TODO: types (#22198)
         dep.lockedVersion = semver.valid(
-          lockFileCache[npmLock].lockedVersions?.[dep.depName!]
+          lockFileCache[npmLock].lockedVersions?.[dep.depName!],
         )!;
       }
     } else if (pnpmShrinkwrap) {
@@ -125,7 +125,7 @@ export async function getLockedVersions(
         const lockedVersion = semver.valid(
           lockFileCache[pnpmShrinkwrap].lockedVersionsWithPath?.[relativeDir]?.[
             depType!
-          ]?.[depName!]
+          ]?.[depName!],
         );
         if (is.string(lockedVersion)) {
           dep.lockedVersion = lockedVersion;
diff --git a/lib/modules/manager/npm/extract/post/monorepo.spec.ts b/lib/modules/manager/npm/extract/post/monorepo.spec.ts
index ca861fbf817d2078ae7d5fb1bcd61f7324cca1e6..07c79f83c648aeaa141cb82b6618ca11c15f312c 100644
--- a/lib/modules/manager/npm/extract/post/monorepo.spec.ts
+++ b/lib/modules/manager/npm/extract/post/monorepo.spec.ts
@@ -66,8 +66,8 @@ describe('modules/manager/npm/extract/post/monorepo', () => {
       await detectMonorepos(packageFiles);
       expect(
         packageFiles.some((packageFile) =>
-          packageFile.deps?.some((dep) => dep.isInternal)
-        )
+          packageFile.deps?.some((dep) => dep.isInternal),
+        ),
       ).toBeTrue();
     });
 
diff --git a/lib/modules/manager/npm/extract/post/monorepo.ts b/lib/modules/manager/npm/extract/post/monorepo.ts
index 9e9eb18cf7011afd4bc001fccbcca8798d96302c..20efd742c6e921623a3524c1bf23d66957a1a6a9 100644
--- a/lib/modules/manager/npm/extract/post/monorepo.ts
+++ b/lib/modules/manager/npm/extract/post/monorepo.ts
@@ -7,7 +7,7 @@ import { detectPnpmWorkspaces } from '../pnpm';
 import { matchesAnyPattern } from '../utils';
 
 export async function detectMonorepos(
-  packageFiles: Partial<PackageFile<NpmManagerData>>[]
+  packageFiles: Partial<PackageFile<NpmManagerData>>[],
 ): Promise<void> {
   await detectPnpmWorkspaces(packageFiles);
   logger.debug('Detecting workspaces');
@@ -29,8 +29,8 @@ export async function detectMonorepos(
       const internalPackageFiles = packageFiles.filter((sp) =>
         matchesAnyPattern(
           getParentDir(sp.packageFile!),
-          internalPackagePatterns
-        )
+          internalPackagePatterns,
+        ),
       );
       const internalPackageNames = internalPackageFiles
         .map((sp) => sp.managerData?.packageJsonName)
diff --git a/lib/modules/manager/npm/extract/utils.spec.ts b/lib/modules/manager/npm/extract/utils.spec.ts
index 8a7777f6d8da4988fc166053bd944c21d3f51d39..9936a463849e2ba3308ae51f735e22975c155882 100644
--- a/lib/modules/manager/npm/extract/utils.spec.ts
+++ b/lib/modules/manager/npm/extract/utils.spec.ts
@@ -8,7 +8,7 @@ describe('modules/manager/npm/extract/utils', () => {
 
       const isPackageInWorkspace = matchesAnyPattern(
         packageFile,
-        packageFilters
+        packageFilters,
       );
 
       expect(isPackageInWorkspace).toBeTrue();
@@ -20,7 +20,7 @@ describe('modules/manager/npm/extract/utils', () => {
 
       const isPackageInWorkspace = matchesAnyPattern(
         packageFile,
-        packageFilters
+        packageFilters,
       );
 
       expect(isPackageInWorkspace).toBeTrue();
@@ -32,7 +32,7 @@ describe('modules/manager/npm/extract/utils', () => {
 
       const isPackageInWorkspace = matchesAnyPattern(
         packageFile,
-        packageFilters
+        packageFilters,
       );
 
       expect(isPackageInWorkspace).toBeTrue();
diff --git a/lib/modules/manager/npm/extract/utils.ts b/lib/modules/manager/npm/extract/utils.ts
index 14eb7117526b1504eb3564138f4793f3a4d7e012..20d7873382276c639fddbcc8d51e45a9370a67d6 100644
--- a/lib/modules/manager/npm/extract/utils.ts
+++ b/lib/modules/manager/npm/extract/utils.ts
@@ -4,7 +4,7 @@ import { minimatch } from '../../../../util/minimatch';
 export function matchesAnyPattern(val: string, patterns: string[]): boolean {
   const res = patterns.some(
     (pattern) =>
-      pattern === `${val}/` || minimatch(pattern, { dot: true }).match(val)
+      pattern === `${val}/` || minimatch(pattern, { dot: true }).match(val),
   );
   logger.trace({ val, patterns, res }, `matchesAnyPattern`);
   return res;
diff --git a/lib/modules/manager/npm/extract/yarn.spec.ts b/lib/modules/manager/npm/extract/yarn.spec.ts
index 12e572ddc1e47df44f74d9f7d7972fba8cdeaf63..548efc97d07a36ff85c1246ffd9e5103bcc9d09a 100644
--- a/lib/modules/manager/npm/extract/yarn.spec.ts
+++ b/lib/modules/manager/npm/extract/yarn.spec.ts
@@ -46,7 +46,7 @@ describe('modules/manager/npm/extract/yarn', () => {
     it('ignores individual invalid entries', async () => {
       const invalidNameLock = Fixtures.get(
         'yarn1-invalid-name/yarn.lock',
-        '..'
+        '..',
       );
       fs.readLocalFile.mockResolvedValueOnce(invalidNameLock);
       const res = await getYarnLock('package.json');
@@ -59,13 +59,13 @@ describe('modules/manager/npm/extract/yarn', () => {
   it('getYarnVersionFromLock', () => {
     expect(getYarnVersionFromLock({ isYarn1: true })).toBe('^1.22.18');
     expect(getYarnVersionFromLock({ isYarn1: false, lockfileVersion: 8 })).toBe(
-      '^3.0.0'
+      '^3.0.0',
     );
     expect(getYarnVersionFromLock({ isYarn1: false, lockfileVersion: 6 })).toBe(
-      '^2.2.0'
+      '^2.2.0',
     );
     expect(getYarnVersionFromLock({ isYarn1: false, lockfileVersion: 3 })).toBe(
-      '^2.0.0'
+      '^2.0.0',
     );
   });
 });
diff --git a/lib/modules/manager/npm/extract/yarn.ts b/lib/modules/manager/npm/extract/yarn.ts
index d61b53e51b29393d2672966353756a0beaeb04d9..0e13e39207ae42587da63b5400bc7d64d8e7df6b 100644
--- a/lib/modules/manager/npm/extract/yarn.ts
+++ b/lib/modules/manager/npm/extract/yarn.ts
@@ -33,7 +33,7 @@ export async function getYarnLock(filePath: string): Promise<LockFile> {
           } catch (err) {
             logger.debug(
               { entry, err },
-              'Invalid descriptor or range found in yarn.lock'
+              'Invalid descriptor or range found in yarn.lock',
             );
           }
         }
diff --git a/lib/modules/manager/npm/extract/yarnrc.ts b/lib/modules/manager/npm/extract/yarnrc.ts
index 034cc6ad00e2c8d70a55884b890e60fd08972eeb..b87f0c8fa6c5bbb68db5910de1a8d67f6ed9a03d 100644
--- a/lib/modules/manager/npm/extract/yarnrc.ts
+++ b/lib/modules/manager/npm/extract/yarnrc.ts
@@ -12,20 +12,20 @@ const YarnrcYmlSchema = Yaml.pipe(
       .record(
         z.object({
           npmRegistryServer: z.string().optional(),
-        })
+        }),
       )
       .optional(),
-  })
+  }),
 );
 
 export type YarnConfig = z.infer<typeof YarnrcYmlSchema>;
 
 const registryRegEx = regEx(
-  /^"?(@(?<scope>[^:]+):)?registry"? "?(?<registryUrl>[^"]+)"?$/gm
+  /^"?(@(?<scope>[^:]+):)?registry"? "?(?<registryUrl>[^"]+)"?$/gm,
 );
 
 export function loadConfigFromLegacyYarnrc(
-  legacyYarnrc: string
+  legacyYarnrc: string,
 ): YarnConfig | null {
   const registryMatches = [...legacyYarnrc.matchAll(registryRegEx)]
     .map((m) => m.groups)
@@ -55,7 +55,7 @@ export function loadConfigFromYarnrcYml(yarnrcYml: string): YarnConfig | null {
 
 export function resolveRegistryUrl(
   packageName: string,
-  yarnConfig: YarnConfig
+  yarnConfig: YarnConfig,
 ): string | null {
   if (yarnConfig.npmScopes) {
     for (const scope in yarnConfig.npmScopes) {
diff --git a/lib/modules/manager/npm/post-update/index.spec.ts b/lib/modules/manager/npm/post-update/index.spec.ts
index 2468fe61f73951639de2666d0fecdc364377043e..af67dc833f217c60a6879f8b56e5cd283f93a3b6 100644
--- a/lib/modules/manager/npm/post-update/index.spec.ts
+++ b/lib/modules/manager/npm/post-update/index.spec.ts
@@ -156,8 +156,8 @@ describe('modules/manager/npm/post-update/index', () => {
         determineLockFileDirs(
           updateConfig,
 
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         npmLockDirs: ['package-lock.json', 'randomFolder/package-lock.json'],
         pnpmShrinkwrapDirs: ['packages/pnpm/pnpm-lock.yaml'],
@@ -179,8 +179,8 @@ describe('modules/manager/npm/post-update/index', () => {
               },
             ],
           },
-          {}
-        )
+          {},
+        ),
       ).toStrictEqual({
         npmLockDirs: [],
         pnpmShrinkwrapDirs: [],
@@ -192,10 +192,10 @@ describe('modules/manager/npm/post-update/index', () => {
   describe('writeExistingFiles()', () => {
     it('works', async () => {
       git.getFile.mockResolvedValueOnce(
-        Fixtures.get('update-lockfile-massage-1/package-lock.json')
+        Fixtures.get('update-lockfile-massage-1/package-lock.json'),
       );
       await expect(
-        writeExistingFiles(updateConfig, additionalFiles)
+        writeExistingFiles(updateConfig, additionalFiles),
       ).resolves.toBeUndefined();
 
       expect(fs.writeLocalFile).toHaveBeenCalledTimes(2);
@@ -207,8 +207,8 @@ describe('modules/manager/npm/post-update/index', () => {
       await expect(
         writeExistingFiles(
           { ...updateConfig, reuseLockFiles: false },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).resolves.toBeUndefined();
 
       expect(fs.writeLocalFile).toHaveBeenCalledOnce();
@@ -237,7 +237,7 @@ describe('modules/manager/npm/post-update/index', () => {
       expect(fs.writeLocalFile).toHaveBeenCalledOnce();
       expect(fs.writeLocalFile).toHaveBeenCalledWith(
         'packages/core/.npmrc',
-        '#dummy\n'
+        '#dummy\n',
       );
     });
 
@@ -255,22 +255,22 @@ describe('modules/manager/npm/post-update/index', () => {
             // No npmrc content should be written for this package.
             { packageFile: 'packages/core/package.json', managerData: {} },
           ],
-        }
+        },
       );
 
       expect(fs.writeLocalFile).toHaveBeenCalledOnce();
       expect(fs.writeLocalFile).toHaveBeenCalledWith(
         'packages/core/.npmrc',
-        '#dummy\n'
+        '#dummy\n',
       );
     });
 
     it('works only on relevant folders', async () => {
       git.getFile.mockResolvedValueOnce(
-        Fixtures.get('update-lockfile-massage-1/package-lock.json')
+        Fixtures.get('update-lockfile-massage-1/package-lock.json'),
       );
       await expect(
-        writeExistingFiles(updateConfig, additionalFiles)
+        writeExistingFiles(updateConfig, additionalFiles),
       ).resolves.toBeUndefined();
 
       expect(fs.writeLocalFile).toHaveBeenCalledTimes(2);
@@ -294,7 +294,7 @@ describe('modules/manager/npm/post-update/index', () => {
 
     it('missing updated packages files', async () => {
       await expect(
-        writeUpdatedPackageFiles(baseConfig)
+        writeUpdatedPackageFiles(baseConfig),
       ).resolves.toBeUndefined();
       expect(fs.writeLocalFile).not.toHaveBeenCalled();
     });
@@ -314,7 +314,7 @@ describe('modules/manager/npm/post-update/index', () => {
       const yarnrcYmlContent = await updateYarnBinary(
         lockFileDir,
         updatedArtifacts,
-        undefined
+        undefined,
       );
       expect(yarnrcYmlContent).toBeUndefined();
       expect(updatedArtifacts).toMatchSnapshot();
@@ -327,7 +327,7 @@ describe('modules/manager/npm/post-update/index', () => {
       const existingYarnrcYmlContent = await updateYarnBinary(
         lockFileDir,
         updatedArtifacts,
-        oldYarnrcYml
+        oldYarnrcYml,
       );
       expect(git.getFile).not.toHaveBeenCalled();
       expect(existingYarnrcYmlContent).toMatchSnapshot();
@@ -341,7 +341,7 @@ describe('modules/manager/npm/post-update/index', () => {
       const yarnrcYmlContent = await updateYarnBinary(
         lockFileDir,
         updatedArtifacts,
-        undefined
+        undefined,
       );
       expect(yarnrcYmlContent).toBeUndefined();
       expect(updatedArtifacts).toBeEmpty();
@@ -354,7 +354,7 @@ describe('modules/manager/npm/post-update/index', () => {
       const yarnrcYmlContent = await updateYarnBinary(
         lockFileDir,
         updatedArtifacts,
-        undefined
+        undefined,
       );
       expect(yarnrcYmlContent).toBeUndefined();
       expect(updatedArtifacts).toBeEmpty();
@@ -366,7 +366,7 @@ describe('modules/manager/npm/post-update/index', () => {
       const existingYarnrcYmlContent = await updateYarnBinary(
         lockFileDir,
         updatedArtifacts,
-        oldYarnrcYml
+        oldYarnrcYml,
       );
       expect(existingYarnrcYmlContent).toMatch(oldYarnrcYml);
       expect(updatedArtifacts).toBeEmpty();
@@ -380,7 +380,7 @@ describe('modules/manager/npm/post-update/index', () => {
       const yarnrcYmlContent = await updateYarnBinary(
         lockFileDir,
         updatedArtifacts,
-        ''
+        '',
       );
       expect(yarnrcYmlContent).toBe('');
       expect(updatedArtifacts).toEqual([]);
@@ -404,8 +404,8 @@ describe('modules/manager/npm/post-update/index', () => {
       expect(
         await getAdditionalFiles(
           { ...updateConfig, updateLockFiles: true },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [],
@@ -424,8 +424,8 @@ describe('modules/manager/npm/post-update/index', () => {
       expect(
         await getAdditionalFiles(
           { ...updateConfig, updateLockFiles: true, reuseExistingBranch: true },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [
@@ -467,9 +467,9 @@ describe('modules/manager/npm/post-update/index', () => {
               updateLockFiles: true,
               reuseExistingBranch: true,
             },
-            additionalFiles
+            additionalFiles,
           )
-        ).updatedArtifacts.find((a) => a.path === 'package-lock.json')
+        ).updatedArtifacts.find((a) => a.path === 'package-lock.json'),
       ).toBeUndefined();
     });
 
@@ -497,9 +497,9 @@ describe('modules/manager/npm/post-update/index', () => {
               reuseExistingBranch: false,
               baseBranch: 'base',
             },
-            additionalFiles
+            additionalFiles,
           )
-        ).updatedArtifacts.find((a) => a.path === 'package-lock.json')
+        ).updatedArtifacts.find((a) => a.path === 'package-lock.json'),
       ).toBeUndefined();
     });
 
@@ -508,8 +508,8 @@ describe('modules/manager/npm/post-update/index', () => {
       expect(
         await getAdditionalFiles(
           { ...updateConfig, updateLockFiles: true, reuseExistingBranch: true },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [
@@ -541,8 +541,8 @@ describe('modules/manager/npm/post-update/index', () => {
               },
             ],
           },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [
@@ -565,7 +565,7 @@ describe('modules/manager/npm/post-update/index', () => {
 
     it('no lockfiles updates', async () => {
       expect(
-        await getAdditionalFiles(baseConfig, additionalFiles)
+        await getAdditionalFiles(baseConfig, additionalFiles),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [],
@@ -581,8 +581,8 @@ describe('modules/manager/npm/post-update/index', () => {
             upgrades: [{ isLockfileUpdate: true }],
             updateLockFiles: true,
           },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [],
@@ -601,8 +601,8 @@ describe('modules/manager/npm/post-update/index', () => {
             updateType: 'lockFileMaintenance',
             updateLockFiles: true,
           },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [],
@@ -618,8 +618,8 @@ describe('modules/manager/npm/post-update/index', () => {
             transitiveRemediation: true,
             updateLockFiles: true,
           },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [],
         updatedArtifacts: [],
@@ -631,8 +631,8 @@ describe('modules/manager/npm/post-update/index', () => {
       expect(
         await getAdditionalFiles(
           { ...updateConfig, updateLockFiles: true },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [
           { lockFile: 'package-lock.json', stderr: 'some-error' },
@@ -646,8 +646,8 @@ describe('modules/manager/npm/post-update/index', () => {
       expect(
         await getAdditionalFiles(
           { ...updateConfig, updateLockFiles: true, reuseExistingBranch: true },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [{ lockFile: 'yarn.lock', stderr: 'some-error' }],
         updatedArtifacts: [],
@@ -668,8 +668,8 @@ describe('modules/manager/npm/post-update/index', () => {
               },
             ],
           },
-          additionalFiles
-        )
+          additionalFiles,
+        ),
       ).toStrictEqual({
         artifactErrors: [
           { lockFile: 'packages/pnpm/pnpm-lock.yaml', stderr: 'some-error' },
diff --git a/lib/modules/manager/npm/post-update/index.ts b/lib/modules/manager/npm/post-update/index.ts
index 7be17f4548a53b97cfd2553d5c5bac2fde6371ea..5b8d92b4e1a4cba2abfc567c34361a8bf147633f 100644
--- a/lib/modules/manager/npm/post-update/index.ts
+++ b/lib/modules/manager/npm/post-update/index.ts
@@ -42,7 +42,7 @@ const getDirs = (arr: (string | null | undefined)[]): string[] =>
 
 export function determineLockFileDirs(
   config: PostUpdateConfig,
-  packageFiles: AdditionalPackageFiles
+  packageFiles: AdditionalPackageFiles,
 ): DetermineLockFileDirsResult {
   const npmLockDirs: (string | undefined)[] = [];
   const yarnLockDirs: (string | undefined)[] = [];
@@ -64,7 +64,8 @@ export function determineLockFileDirs(
   if (
     config.upgrades.every(
       (upgrade: Upgrade) =>
-        upgrade.updateType === 'lockFileMaintenance' || upgrade.isLockfileUpdate
+        upgrade.updateType === 'lockFileMaintenance' ||
+        upgrade.isLockfileUpdate,
     )
   ) {
     return {
@@ -75,7 +76,7 @@ export function determineLockFileDirs(
   }
 
   function getPackageFile(
-    fileName: string
+    fileName: string,
   ): Partial<PackageFile<NpmManagerData>> {
     logger.trace('Looking for packageFile: ' + fileName);
 
@@ -112,7 +113,7 @@ export function determineLockFileDirs(
 
 export async function writeExistingFiles(
   config: PostUpdateConfig,
-  packageFiles: AdditionalPackageFiles
+  packageFiles: AdditionalPackageFiles,
 ): Promise<void> {
   if (!packageFiles.npm) {
     return;
@@ -120,7 +121,7 @@ export async function writeExistingFiles(
   const npmFiles = packageFiles.npm;
   logger.debug(
     { packageFiles: npmFiles.map((n) => n.packageFile) },
-    'Writing package.json files'
+    'Writing package.json files',
   );
   for (const packageFile of npmFiles) {
     // istanbul ignore if
@@ -194,7 +195,7 @@ export async function writeExistingFiles(
           }
           if (widens.length) {
             logger.debug(
-              `Removing ${String(widens)} from ${npmLock} to force an update`
+              `Removing ${String(widens)} from ${npmLock} to force an update`,
             );
             lockFileChanged = true;
             try {
@@ -210,7 +211,7 @@ export async function writeExistingFiles(
             } catch (err) /* istanbul ignore next */ {
               logger.warn(
                 { npmLock },
-                'Error massaging package-lock.json for widen'
+                'Error massaging package-lock.json for widen',
               );
             }
           }
@@ -237,7 +238,7 @@ export async function writeExistingFiles(
 }
 
 export async function writeUpdatedPackageFiles(
-  config: PostUpdateConfig
+  config: PostUpdateConfig,
 ): Promise<void> {
   logger.trace({ config }, 'writeUpdatedPackageFiles');
   logger.debug('Writing any updated package files');
@@ -284,7 +285,7 @@ async function getNpmrcContent(dir: string): Promise<string | null> {
 async function updateNpmrcContent(
   dir: string,
   originalContent: string | null,
-  additionalLines: string[]
+  additionalLines: string[],
 ): Promise<void> {
   const npmrcFilePath = upath.join(dir, '.npmrc');
   const newNpmrc = originalContent
@@ -303,7 +304,7 @@ async function updateNpmrcContent(
 
 async function resetNpmrcContent(
   dir: string,
-  originalContent: string | null
+  originalContent: string | null,
 ): Promise<void> {
   const npmrcFilePath = upath.join(dir, '.npmrc');
   if (originalContent) {
@@ -324,7 +325,7 @@ async function resetNpmrcContent(
 // istanbul ignore next
 async function updateYarnOffline(
   lockFileDir: string,
-  updatedArtifacts: FileChange[]
+  updatedArtifacts: FileChange[],
 ): Promise<void> {
   try {
     const resolvedPaths: string[] = [];
@@ -344,7 +345,7 @@ async function updateYarnOffline(
         .find((line) => line.startsWith('yarn-offline-mirror '));
       if (mirrorLine) {
         const mirrorPath = ensureTrailingSlash(
-          mirrorLine.split(' ')[1].replace(regEx(/"/g), '')
+          mirrorLine.split(' ')[1].replace(regEx(/"/g), ''),
         );
         resolvedPaths.push(upath.join(lockFileDir, mirrorPath));
       }
@@ -378,7 +379,7 @@ async function updateYarnOffline(
 export async function updateYarnBinary(
   lockFileDir: string,
   updatedArtifacts: FileChange[],
-  existingYarnrcYmlContent: string | undefined | null
+  existingYarnrcYmlContent: string | undefined | null,
 ): Promise<string | undefined | null> {
   let yarnrcYml = existingYarnrcYmlContent;
   try {
@@ -418,7 +419,7 @@ export async function updateYarnBinary(
         path: newYarnFullPath,
         contents: await readLocalFile(newYarnFullPath, 'utf8'),
         isExecutable: true,
-      }
+      },
     );
   } catch (err) /* istanbul ignore next */ {
     logger.error({ err }, 'Error updating Yarn binary');
@@ -428,7 +429,7 @@ export async function updateYarnBinary(
 
 export async function getAdditionalFiles(
   config: PostUpdateConfig<NpmManagerData>,
-  packageFiles: AdditionalPackageFiles
+  packageFiles: AdditionalPackageFiles,
 ): Promise<WriteExistingFilesResult> {
   logger.trace({ config }, 'getAdditionalFiles');
   const artifactErrors: ArtifactError[] = [];
@@ -444,7 +445,7 @@ export async function getAdditionalFiles(
     !config.updatedPackageFiles?.length &&
     config.transitiveRemediation &&
     config.upgrades?.every(
-      (upgrade) => upgrade.isRemediation ?? upgrade.isVulnerabilityAlert
+      (upgrade) => upgrade.isRemediation ?? upgrade.isVulnerabilityAlert,
     )
   ) {
     logger.debug('Skipping lock file generation for remediations');
@@ -501,14 +502,14 @@ export async function getAdditionalFiles(
     const fileName = upath.basename(npmLock);
     logger.debug(`Generating ${fileName} for ${lockFileDir}`);
     const upgrades = config.upgrades.filter(
-      (upgrade) => upgrade.managerData?.npmLock === npmLock
+      (upgrade) => upgrade.managerData?.npmLock === npmLock,
     );
     const res = await npm.generateLockFile(
       lockFileDir,
       env,
       fileName,
       config,
-      upgrades
+      upgrades,
     );
     if (res.error) {
       // istanbul ignore if
@@ -516,15 +517,15 @@ export async function getAdditionalFiles(
         for (const upgrade of config.upgrades) {
           if (
             res.stderr.includes(
-              `No matching version found for ${upgrade.depName}`
+              `No matching version found for ${upgrade.depName}`,
             )
           ) {
             logger.debug(
               { dependency: upgrade.depName, type: 'npm' },
-              'lock file failed for the dependency being updated - skipping branch creation'
+              'lock file failed for the dependency being updated - skipping branch creation',
             );
             const err = new Error(
-              'lock file failed for the dependency being updated - skipping branch creation'
+              'lock file failed for the dependency being updated - skipping branch creation',
             );
             throw new ExternalHostError(err, NpmDatasource.id);
           }
@@ -537,7 +538,7 @@ export async function getAdditionalFiles(
     } else if (res.lockFile) {
       const existingContent = await getFile(
         npmLock,
-        config.reuseExistingBranch ? config.branchName : config.baseBranch
+        config.reuseExistingBranch ? config.branchName : config.baseBranch,
       );
       if (res.lockFile === existingContent) {
         logger.debug(`${npmLock} hasn't changed`);
@@ -573,7 +574,7 @@ export async function getAdditionalFiles(
           >;
           const updatedYarnYrcYml = deepmerge(
             existingYarnrRcYml,
-            additionalYarnRcYml
+            additionalYarnRcYml,
           );
           await writeLocalFile(yarnRcYmlFilename, dump(updatedYarnYrcYml));
           logger.debug('Added authentication to .yarnrc.yml');
@@ -585,7 +586,7 @@ export async function getAdditionalFiles(
     logger.debug(`Generating yarn.lock for ${lockFileDir}`);
     const lockFileName = upath.join(lockFileDir, 'yarn.lock');
     const upgrades = config.upgrades.filter(
-      (upgrade) => upgrade.managerData?.yarnLock === yarnLock
+      (upgrade) => upgrade.managerData?.yarnLock === yarnLock,
     );
     const res = await yarn.generateLockFile(lockFileDir, env, config, upgrades);
     if (res.error) {
@@ -595,18 +596,18 @@ export async function getAdditionalFiles(
           /* eslint-disable no-useless-escape */
           if (
             res.stderr.includes(
-              `Couldn't find any versions for \\\"${upgrade.depName}\\\"`
+              `Couldn't find any versions for \\\"${upgrade.depName}\\\"`,
             )
           ) {
             logger.debug(
               { dependency: upgrade.depName, type: 'yarn' },
-              'lock file failed for the dependency being updated - skipping branch creation'
+              'lock file failed for the dependency being updated - skipping branch creation',
             );
             throw new ExternalHostError(
               new Error(
-                'lock file failed for the dependency being updated - skipping branch creation'
+                'lock file failed for the dependency being updated - skipping branch creation',
               ),
-              NpmDatasource.id
+              NpmDatasource.id,
             );
           }
           /* eslint-enable no-useless-escape */
@@ -620,7 +621,7 @@ export async function getAdditionalFiles(
     } else {
       const existingContent = await getFile(
         lockFileName,
-        config.reuseExistingBranch ? config.branchName : config.baseBranch
+        config.reuseExistingBranch ? config.branchName : config.baseBranch,
       );
       if (res.lockFile === existingContent) {
         logger.debug("yarn.lock hasn't changed");
@@ -640,7 +641,7 @@ export async function getAdditionalFiles(
         existingYarnrcYmlContent = await updateYarnBinary(
           lockFileDir,
           updatedArtifacts,
-          existingYarnrcYmlContent
+          existingYarnrcYmlContent,
         );
       }
     }
@@ -658,7 +659,7 @@ export async function getAdditionalFiles(
     await updateNpmrcContent(lockFileDir, npmrcContent, additionalNpmrcContent);
     logger.debug(`Generating pnpm-lock.yaml for ${lockFileDir}`);
     const upgrades = config.upgrades.filter(
-      (upgrade) => upgrade.managerData?.pnpmShrinkwrap === pnpmShrinkwrap
+      (upgrade) => upgrade.managerData?.pnpmShrinkwrap === pnpmShrinkwrap,
     );
     const res = await pnpm.generateLockFile(lockFileDir, env, config, upgrades);
     if (res.error) {
@@ -667,18 +668,18 @@ export async function getAdditionalFiles(
         for (const upgrade of config.upgrades) {
           if (
             res.stdout.includes(
-              `No compatible version found: ${upgrade.depName}`
+              `No compatible version found: ${upgrade.depName}`,
             )
           ) {
             logger.debug(
               { dependency: upgrade.depName, type: 'pnpm' },
-              'lock file failed for the dependency being updated - skipping branch creation'
+              'lock file failed for the dependency being updated - skipping branch creation',
             );
             throw new ExternalHostError(
               Error(
-                'lock file failed for the dependency being updated - skipping branch creation'
+                'lock file failed for the dependency being updated - skipping branch creation',
               ),
-              NpmDatasource.id
+              NpmDatasource.id,
             );
           }
         }
@@ -691,7 +692,7 @@ export async function getAdditionalFiles(
     } else {
       const existingContent = await getFile(
         pnpmShrinkwrap,
-        config.reuseExistingBranch ? config.branchName : config.baseBranch
+        config.reuseExistingBranch ? config.branchName : config.baseBranch,
       );
       if (res.lockFile === existingContent) {
         logger.debug("pnpm-lock.yaml hasn't changed");
diff --git a/lib/modules/manager/npm/post-update/node-version.spec.ts b/lib/modules/manager/npm/post-update/node-version.spec.ts
index a951be3807fdd7dc94160711b997fc6f5bfbab94..64e3f6eefcb94a0c8d89fadd29ce87f0765d699b 100644
--- a/lib/modules/manager/npm/post-update/node-version.spec.ts
+++ b/lib/modules/manager/npm/post-update/node-version.spec.ts
@@ -20,7 +20,7 @@ describe('modules/manager/npm/post-update/node-version', () => {
         config,
         [],
         '',
-        new Lazy(() => Promise.resolve({}))
+        new Lazy(() => Promise.resolve({})),
       );
       expect(res).toBe('^12.16.0');
       expect(fs.readLocalFile).not.toHaveBeenCalled();
@@ -33,7 +33,7 @@ describe('modules/manager/npm/post-update/node-version', () => {
         {},
         [],
         '',
-        new Lazy(() => Promise.resolve({}))
+        new Lazy(() => Promise.resolve({})),
       );
       expect(res).toBe('12.16.1');
     });
@@ -44,7 +44,7 @@ describe('modules/manager/npm/post-update/node-version', () => {
         {},
         [],
         '',
-        new Lazy(() => Promise.resolve({}))
+        new Lazy(() => Promise.resolve({})),
       );
       expect(res).toBe('12.16.2');
     });
@@ -56,7 +56,7 @@ describe('modules/manager/npm/post-update/node-version', () => {
         {},
         [],
         '',
-        new Lazy(() => Promise.resolve({}))
+        new Lazy(() => Promise.resolve({})),
       );
       expect(res).toBeNull();
     });
@@ -66,7 +66,7 @@ describe('modules/manager/npm/post-update/node-version', () => {
         {},
         [],
         '',
-        new Lazy(() => Promise.resolve({ engines: { node: '^12.16.3' } }))
+        new Lazy(() => Promise.resolve({ engines: { node: '^12.16.3' } })),
       );
       expect(res).toBe('^12.16.3');
     });
@@ -75,7 +75,7 @@ describe('modules/manager/npm/post-update/node-version', () => {
   describe('getNodeUpdate()', () => {
     it('returns version', () => {
       expect(getNodeUpdate([{ depName: 'node', newValue: '16.15.0' }])).toBe(
-        '16.15.0'
+        '16.15.0',
       );
     });
 
@@ -91,8 +91,8 @@ describe('modules/manager/npm/post-update/node-version', () => {
           config,
           [{ depName: 'node', newValue: '16.15.0' }],
           '',
-          new Lazy(() => Promise.resolve({}))
-        )
+          new Lazy(() => Promise.resolve({})),
+        ),
       ).toEqual({
         toolName: 'node',
         constraint: '16.15.0',
@@ -105,8 +105,8 @@ describe('modules/manager/npm/post-update/node-version', () => {
           config,
           [],
           '',
-          new Lazy(() => Promise.resolve({}))
-        )
+          new Lazy(() => Promise.resolve({})),
+        ),
       ).toEqual({
         toolName: 'node',
         constraint: '^12.16.0',
diff --git a/lib/modules/manager/npm/post-update/node-version.ts b/lib/modules/manager/npm/post-update/node-version.ts
index 1852d2507d5fab9664d541323c12a971c0ac3062..1ef74862959e2d19e8204539f4c008ebd374abe3 100644
--- a/lib/modules/manager/npm/post-update/node-version.ts
+++ b/lib/modules/manager/npm/post-update/node-version.ts
@@ -24,7 +24,7 @@ async function getNodeFile(filename: string): Promise<string | null> {
 }
 
 async function getPackageJsonConstraint(
-  pkg: LazyPackageJson
+  pkg: LazyPackageJson,
 ): Promise<string | null> {
   const constraint = (await pkg.getValue()).engines?.node;
   if (constraint && semver.validRange(constraint)) {
@@ -39,7 +39,7 @@ export async function getNodeConstraint(
   config: Partial<PostUpdateConfig>,
   upgrades: Upgrade[],
   lockFileDir: string,
-  pkg: LazyPackageJson
+  pkg: LazyPackageJson,
 ): Promise<string | null> {
   const constraint =
     getNodeUpdate(upgrades) ??
@@ -61,13 +61,13 @@ export async function getNodeToolConstraint(
   config: Partial<PostUpdateConfig>,
   upgrades: Upgrade[],
   lockFileDir: string,
-  pkg: LazyPackageJson
+  pkg: LazyPackageJson,
 ): Promise<ToolConstraint> {
   const constraint = await getNodeConstraint(
     config,
     upgrades,
     lockFileDir,
-    pkg
+    pkg,
   );
 
   return {
diff --git a/lib/modules/manager/npm/post-update/npm.spec.ts b/lib/modules/manager/npm/post-update/npm.spec.ts
index 7bff57fb2d12b0826e275db8b7a12b44317daa5c..170ca700c57bde6b0a714cc7ccf8bba0d446d3eb 100644
--- a/lib/modules/manager/npm/post-update/npm.spec.ts
+++ b/lib/modules/manager/npm/post-update/npm.spec.ts
@@ -37,7 +37,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { skipInstalls, postUpdateOptions },
-      updates
+      updates,
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
     expect(res.error).toBeFalse();
@@ -57,7 +57,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { skipInstalls, constraints: { npm: '^6.0.0' } },
-      updates
+      updates,
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.error).toBeFalse();
@@ -68,7 +68,7 @@ describe('modules/manager/npm/post-update/npm', () => {
   it('performs lock file updates retaining the package.json counterparts', async () => {
     const execSnapshots = mockExecAll();
     fs.readLocalFile.mockResolvedValueOnce(
-      Fixtures.get('update-lockfile-massage-1/package-lock.json')
+      Fixtures.get('update-lockfile-massage-1/package-lock.json'),
     );
     const skipInstalls = true;
     const updates = [
@@ -86,7 +86,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { skipInstalls, constraints: { npm: '^6.0.0' } },
-      updates
+      updates,
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.error).toBeFalse();
@@ -103,17 +103,17 @@ describe('modules/manager/npm/post-update/npm', () => {
       'some-dir',
       {},
       'npm-shrinkwrap.json',
-      { skipInstalls, constraints: { npm: '^6.0.0' } }
+      { skipInstalls, constraints: { npm: '^6.0.0' } },
     );
     expect(fs.renameLocalFile).toHaveBeenCalledTimes(1);
     expect(fs.renameLocalFile).toHaveBeenCalledWith(
       upath.join('some-dir', 'package-lock.json'),
-      upath.join('some-dir', 'npm-shrinkwrap.json')
+      upath.join('some-dir', 'npm-shrinkwrap.json'),
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(fs.readLocalFile).toHaveBeenCalledWith(
       'some-dir/npm-shrinkwrap.json',
-      'utf8'
+      'utf8',
     );
     expect(res.error).toBeFalse();
     expect(res.lockFile).toBe('package-lock-contents');
@@ -130,13 +130,13 @@ describe('modules/manager/npm/post-update/npm', () => {
       'some-dir',
       {},
       'npm-shrinkwrap.json',
-      { skipInstalls, constraints: { npm: '^6.0.0' } }
+      { skipInstalls, constraints: { npm: '^6.0.0' } },
     );
     expect(fs.renameLocalFile).toHaveBeenCalledTimes(0);
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(fs.readLocalFile).toHaveBeenCalledWith(
       'some-dir/npm-shrinkwrap.json',
-      'utf8'
+      'utf8',
     );
     expect(res.error).toBeFalse();
     expect(res.lockFile).toBe('package-lock-contents');
@@ -153,7 +153,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       'some-dir',
       {},
       'package-lock.json',
-      { skipInstalls, binarySource, constraints: { npm: '^6.0.0' } }
+      { skipInstalls, binarySource, constraints: { npm: '^6.0.0' } },
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.error).toBeFalse();
@@ -176,7 +176,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { postUpdateOptions },
-      updates
+      updates,
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
     expect(res.error).toBeFalse();
@@ -202,7 +202,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { postUpdateOptions, constraints: { npm: '^6.0.0' } },
-      updates
+      updates,
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.error).toBeFalse();
@@ -227,7 +227,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { binarySource, constraints: { npm: '^6.0.0' } },
-      [{ isRemediation: true }]
+      [{ isRemediation: true }],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.error).toBeFalse();
@@ -243,7 +243,7 @@ describe('modules/manager/npm/post-update/npm', () => {
     const res = await npmHelper.generateLockFile(
       'some-dir',
       {},
-      'package-lock.json'
+      'package-lock.json',
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.error).toBeTrue();
@@ -259,7 +259,7 @@ describe('modules/manager/npm/post-update/npm', () => {
     const res = await npmHelper.generateLockFile(
       'some-dir',
       {},
-      'package-lock.json'
+      'package-lock.json',
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -274,7 +274,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       'some-dir',
       {},
       'package-lock.json',
-      { binarySource: 'docker', constraints: { npm: '^6.0.0' } }
+      { binarySource: 'docker', constraints: { npm: '^6.0.0' } },
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -292,7 +292,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       {},
-      [{ isLockFileMaintenance: true }]
+      [{ isLockFileMaintenance: true }],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
     expect(fs.deleteLocalFile).toHaveBeenCalledTimes(1);
@@ -315,7 +315,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { constraints: { npm: '6.0.0' } },
-      [{ isLockFileMaintenance: true }]
+      [{ isLockFileMaintenance: true }],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -355,7 +355,7 @@ describe('modules/manager/npm/post-update/npm', () => {
       {},
       'package-lock.json',
       { constraints: { npm: '6.0.0' } },
-      [{ isLockFileMaintenance: true }]
+      [{ isLockFileMaintenance: true }],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -472,7 +472,7 @@ describe('modules/manager/npm/post-update/npm', () => {
         {},
         'package-lock.json',
         { skipInstalls },
-        updates
+        updates,
       );
       expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
       expect(res.error).toBeFalse();
@@ -507,7 +507,7 @@ describe('modules/manager/npm/post-update/npm', () => {
         {},
         'package-lock.json',
         { skipInstalls },
-        modifiedUpdates
+        modifiedUpdates,
       );
       expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
       expect(res.error).toBeFalse();
@@ -524,7 +524,7 @@ describe('modules/manager/npm/post-update/npm', () => {
         },
       ]);
       expect(
-        npmHelper.divideWorkspaceAndRootDeps('.', modifiedUpdates)
+        npmHelper.divideWorkspaceAndRootDeps('.', modifiedUpdates),
       ).toMatchObject({
         lockRootUpdates: [
           {
diff --git a/lib/modules/manager/npm/post-update/npm.ts b/lib/modules/manager/npm/post-update/npm.ts
index fd2b7b3f7b1b7d56f16bffa3757d51290c7f6692..3540350264e0988281e1ff406723840a20490bf9 100644
--- a/lib/modules/manager/npm/post-update/npm.ts
+++ b/lib/modules/manager/npm/post-update/npm.ts
@@ -33,7 +33,7 @@ export async function generateLockFile(
   env: NodeJS.ProcessEnv,
   filename: string,
   config: Partial<PostUpdateConfig> = {},
-  upgrades: Upgrade[] = []
+  upgrades: Upgrade[] = [],
 ): Promise<GenerateLockFileResult> {
   // TODO: don't assume package-lock.json is in the same directory
   const lockFileName = upath.join(lockFileDir, filename);
@@ -117,7 +117,7 @@ export async function generateLockFile(
 
         if (currentWorkspaceUpdates.length) {
           const updateCmd = `npm install ${cmdOptions} --workspace=${workspace} ${currentWorkspaceUpdates.join(
-            ' '
+            ' ',
           )}`;
           commands.push(updateCmd);
         }
@@ -150,14 +150,14 @@ export async function generateLockFile(
 
     if (upgrades.find((upgrade) => upgrade.isLockFileMaintenance)) {
       logger.debug(
-        `Removing ${lockFileName} first due to lock file maintenance upgrade`
+        `Removing ${lockFileName} first due to lock file maintenance upgrade`,
       );
       try {
         await deleteLocalFile(lockFileName);
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { err, lockFileName },
-          'Error removing package-lock.json for lock file maintenance'
+          'Error removing package-lock.json for lock file maintenance',
         );
       }
     }
@@ -172,7 +172,7 @@ export async function generateLockFile(
     ) {
       await renameLocalFile(
         upath.join(lockFileDir, 'package-lock.json'),
-        upath.join(lockFileDir, 'npm-shrinkwrap.json')
+        upath.join(lockFileDir, 'npm-shrinkwrap.json'),
       );
     }
 
@@ -180,7 +180,7 @@ export async function generateLockFile(
     // TODO #22198
     lockFile = (await readLocalFile(
       upath.join(lockFileDir, filename),
-      'utf8'
+      'utf8',
     ))!;
 
     // Massage lockfile counterparts of package.json that were modified
@@ -216,7 +216,7 @@ export async function generateLockFile(
         err,
         type: 'npm',
       },
-      'lock file error'
+      'lock file error',
     );
     if (err.stderr?.includes('ENOSPC: no space left on device')) {
       throw new Error(SYSTEM_INSUFFICIENT_DISK_SPACE);
@@ -228,7 +228,7 @@ export async function generateLockFile(
 
 export function divideWorkspaceAndRootDeps(
   lockFileDir: string,
-  lockUpdates: Upgrade[]
+  lockUpdates: Upgrade[],
 ): {
   lockRootUpdates: Upgrade[];
   lockWorkspacesUpdates: Upgrade[];
@@ -245,7 +245,7 @@ export function divideWorkspaceAndRootDeps(
     upgrade.managerData ??= {};
     upgrade.managerData.packageKey = generatePackageKey(
       upgrade.packageName!,
-      upgrade.newVersion!
+      upgrade.newVersion!,
     );
     if (
       upgrade.managerData.workspacesPackages?.length &&
@@ -253,7 +253,7 @@ export function divideWorkspaceAndRootDeps(
     ) {
       const workspacePatterns = upgrade.managerData.workspacesPackages; // glob pattern or directory name/path
       const packageFileDir = trimSlashes(
-        upgrade.packageFile.replace('package.json', '')
+        upgrade.packageFile.replace('package.json', ''),
       );
 
       // workspaceDir = packageFileDir - lockFileDir
diff --git a/lib/modules/manager/npm/post-update/pnpm.spec.ts b/lib/modules/manager/npm/post-update/pnpm.spec.ts
index bc02236db2eb4155fef09260ed80348bcf786375..3fdcc0fd06d29d1a79e70e88213595bc6e50864f 100644
--- a/lib/modules/manager/npm/post-update/pnpm.spec.ts
+++ b/lib/modules/manager/npm/post-update/pnpm.spec.ts
@@ -75,7 +75,7 @@ describe('modules/manager/npm/post-update/pnpm', () => {
     const res = await pnpmHelper.generateLockFile(
       'some-dir',
       {},
-      { ...config, postUpdateOptions }
+      { ...config, postUpdateOptions },
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -122,7 +122,7 @@ describe('modules/manager/npm/post-update/pnpm', () => {
           depType: 'packageManager',
           depName: 'pnpm',
         },
-      ]
+      ],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -164,7 +164,7 @@ describe('modules/manager/npm/post-update/pnpm', () => {
           depType: 'packageManager',
           depName: 'pnpm',
         },
-      ]
+      ],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(2);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -200,7 +200,7 @@ describe('modules/manager/npm/post-update/pnpm', () => {
       'some-folder',
       {},
       configTemp,
-      []
+      [],
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(3);
     expect(res.lockFile).toBe('lockfileVersion: 5.3\n');
@@ -219,7 +219,7 @@ describe('modules/manager/npm/post-update/pnpm', () => {
     const res = await pnpmHelper.generateLockFile(
       'some-dir',
       {},
-      { ...config, constraints: { pnpm: '6.0.0' } }
+      { ...config, constraints: { pnpm: '6.0.0' } },
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.lockFile).toBe('package-lock-contents');
@@ -253,7 +253,7 @@ describe('modules/manager/npm/post-update/pnpm', () => {
     const res = await pnpmHelper.generateLockFile(
       'some-dir',
       {},
-      { ...config, constraints: { pnpm: '6.0.0' } }
+      { ...config, constraints: { pnpm: '6.0.0' } },
     );
     expect(fs.readLocalFile).toHaveBeenCalledTimes(1);
     expect(res.lockFile).toBe('package-lock-contents');
diff --git a/lib/modules/manager/npm/post-update/pnpm.ts b/lib/modules/manager/npm/post-update/pnpm.ts
index 6f0ea3ced50be045ee9dd348c45bcce185298d9a..c3db89f25a67945043cff1ae3ded1a1dc07747c6 100644
--- a/lib/modules/manager/npm/post-update/pnpm.ts
+++ b/lib/modules/manager/npm/post-update/pnpm.ts
@@ -29,7 +29,7 @@ export async function generateLockFile(
   lockFileDir: string,
   env: NodeJS.ProcessEnv,
   config: PostUpdateConfig,
-  upgrades: Upgrade[] = []
+  upgrades: Upgrade[] = [],
 ): Promise<GenerateLockFileResult> {
   const lockFileName = upath.join(lockFileDir, 'pnpm-lock.yaml');
   logger.debug(`Spawning pnpm install to create ${lockFileName}`);
@@ -84,14 +84,14 @@ export async function generateLockFile(
 
     if (upgrades.find((upgrade) => upgrade.isLockFileMaintenance)) {
       logger.debug(
-        `Removing ${lockFileName} first due to lock file maintenance upgrade`
+        `Removing ${lockFileName} first due to lock file maintenance upgrade`,
       );
       try {
         await deleteLocalFile(lockFileName);
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { err, lockFileName },
-          'Error removing yarn.lock for lock file maintenance'
+          'Error removing yarn.lock for lock file maintenance',
         );
       }
     }
@@ -110,7 +110,7 @@ export async function generateLockFile(
         stderr,
         type: 'pnpm',
       },
-      'lock file error'
+      'lock file error',
     );
     return { error: true, stderr: err.stderr, stdout: err.stdout };
   }
@@ -118,7 +118,7 @@ export async function generateLockFile(
 }
 
 export async function getConstraintFromLockFile(
-  lockFileName: string
+  lockFileName: string,
 ): Promise<string | null> {
   let constraint: string | null = null;
   try {
@@ -134,7 +134,7 @@ export async function getConstraintFromLockFile(
     // if no match found use lockfileVersion 5
     // lockfileVersion 5 is the minimum version required to generate the pnpm-lock.yaml file
     const { lowerConstraint, upperConstraint } = lockToPnpmVersionMapping.find(
-      (m) => m.lockfileVersion === pnpmLock.lockfileVersion
+      (m) => m.lockfileVersion === pnpmLock.lockfileVersion,
     ) ?? {
       lockfileVersion: 5.0,
       lowerConstraint: '>=3',
diff --git a/lib/modules/manager/npm/post-update/rules.spec.ts b/lib/modules/manager/npm/post-update/rules.spec.ts
index 71ce2a4e8753969505c85cd0a1c3e7219cbe70bc..ea17d6025b02e537f6168f8ee99caee0218b59a2 100644
--- a/lib/modules/manager/npm/post-update/rules.spec.ts
+++ b/lib/modules/manager/npm/post-update/rules.spec.ts
@@ -57,7 +57,7 @@ describe('modules/manager/npm/post-update/rules', () => {
             },
           },
         }
-      `
+      `,
       );
     });
 
@@ -127,7 +127,7 @@ describe('modules/manager/npm/post-update/rules', () => {
             },
           },
         }
-      `
+      `,
       );
     });
   });
diff --git a/lib/modules/manager/npm/post-update/utils.ts b/lib/modules/manager/npm/post-update/utils.ts
index a52c4658b915c0d203c33ed50e8565c71df1c540..a38da9ee45e054c99946efe53f4dada518f6dd3a 100644
--- a/lib/modules/manager/npm/post-update/utils.ts
+++ b/lib/modules/manager/npm/post-update/utils.ts
@@ -4,7 +4,7 @@ import { Lazy } from '../../../../util/lazy';
 import { PackageJson, PackageJsonSchema } from '../schema';
 
 export function lazyLoadPackageJson(
-  lockFileDir: string
+  lockFileDir: string,
 ): Lazy<Promise<PackageJsonSchema>> {
   return new Lazy(() => loadPackageJson(lockFileDir));
 }
@@ -12,11 +12,11 @@ export function lazyLoadPackageJson(
 export type LazyPackageJson = ReturnType<typeof lazyLoadPackageJson>;
 
 export async function loadPackageJson(
-  lockFileDir: string
+  lockFileDir: string,
 ): Promise<PackageJsonSchema> {
   const json = await readLocalFile(
     upath.join(lockFileDir, 'package.json'),
-    'utf8'
+    'utf8',
   );
   const res = PackageJson.safeParse(json);
   if (res.success) {
@@ -27,7 +27,7 @@ export async function loadPackageJson(
 
 export function getPackageManagerVersion(
   name: string,
-  pkg: PackageJsonSchema
+  pkg: PackageJsonSchema,
 ): string | null {
   if (pkg.packageManager?.name === name) {
     return pkg.packageManager.version;
diff --git a/lib/modules/manager/npm/post-update/yarn.spec.ts b/lib/modules/manager/npm/post-update/yarn.spec.ts
index fb96a054f4d03bb4ba409abcdfa63c7e85572e7d..b2203682409514b2c88ed05ae0c35cc003e132e9 100644
--- a/lib/modules/manager/npm/post-update/yarn.spec.ts
+++ b/lib/modules/manager/npm/post-update/yarn.spec.ts
@@ -19,9 +19,9 @@ import * as yarnHelper from './yarn';
 jest.mock('fs-extra', () =>
   jest
     .requireActual<typeof import('../../../../../test/fixtures')>(
-      '../../../../../test/fixtures'
+      '../../../../../test/fixtures',
     )
-    .fsExtra()
+    .fsExtra(),
 );
 jest.mock('../../../../util/exec/env');
 jest.mock('./node-version');
@@ -69,7 +69,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           '.yarnrc': 'yarn-path ./.yarn/cli.js\n',
           'yarn.lock': 'package-lock-contents',
         },
-        '/some-dir'
+        '/some-dir',
       );
       GlobalConfig.set({ localDir: '/', cacheDir: '/tmp/cache' });
       const execSnapshots = mockExecAll({
@@ -88,13 +88,13 @@ describe('modules/manager/npm/post-update/yarn', () => {
           YARN_CACHE_FOLDER: '/tmp/renovate/cache/yarn',
           YARN_GLOBAL_FOLDER: '/tmp/renovate/cache/berry',
         },
-        config
+        config,
       );
       expect(fs.readFile).toHaveBeenCalledTimes(expectedFsCalls);
       expect(fs.remove).toHaveBeenCalledTimes(0);
       expect(res.lockFile).toBe('package-lock-contents');
       expect(fixSnapshots(execSnapshots)).toMatchSnapshot();
-    }
+    },
   );
 
   it('only skips build if skipInstalls is false', async () => {
@@ -102,7 +102,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       {
         'yarn.lock': 'package-lock-contents',
       },
-      'some-dir'
+      'some-dir',
     );
     const execSnapshots = mockExecAll({
       stdout: '3.0.0',
@@ -130,7 +130,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       {
         'yarn.lock': 'package-lock-contents',
       },
-      'some-dir'
+      'some-dir',
     );
     const execSnapshots = mockExecAll({
       stdout: '3.0.0',
@@ -152,7 +152,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       {
         'yarn.lock': 'package-lock-contents',
       },
-      'some-dir'
+      'some-dir',
     );
     const execSnapshots = mockExecAll({
       stdout: '2.1.0',
@@ -181,7 +181,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         {
           'yarn.lock': 'package-lock-contents',
         },
-        'some-dir'
+        'some-dir',
       );
       const execSnapshots = mockExecAll({
         stdout: yarnVersion,
@@ -206,7 +206,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       ]);
       expect(res.lockFile).toBe('package-lock-contents');
       expect(fixSnapshots(execSnapshots)).toMatchSnapshot();
-    }
+    },
   );
 
   it.each([['1.22.0']])(
@@ -216,7 +216,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         {
           'yarn.lock': 'package-lock-contents',
         },
-        'some-dir'
+        'some-dir',
       );
       const execSnapshots = mockExecAll({
         stdout: yarnVersion,
@@ -230,7 +230,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       ]);
       expect(res.lockFile).toBe('package-lock-contents');
       expect(fixSnapshots(execSnapshots)).toMatchSnapshot();
-    }
+    },
   );
 
   it.each([
@@ -245,7 +245,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           '.yarnrc': null,
           'yarn.lock': 'package-lock-contents',
         },
-        'some-dir'
+        'some-dir',
       );
       const execSnapshots = mockExecAll({
         stdout: yarnVersion,
@@ -266,7 +266,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       // expected the lock file not to be deleted.
       expect(res.lockFile).toBe('');
       expect(fixSnapshots(execSnapshots)).toMatchSnapshot();
-    }
+    },
   );
 
   it.each([
@@ -287,7 +287,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           }),
           'sub_workspace/yarn.lock': 'sub-workspace-lock-contents',
         },
-        'some-dir'
+        'some-dir',
       );
       const execSnapshots = mockExecAll({
         stdout: yarnVersion,
@@ -303,7 +303,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         'some-dir/sub_workspace',
         {},
         config,
-        [{ isLockFileMaintenance: true }]
+        [{ isLockFileMaintenance: true }],
       );
       expect(fs.readFile).toHaveBeenCalledTimes(expectedFsReadCalls);
       expect(fs.remove).toHaveBeenCalledTimes(0);
@@ -314,10 +314,10 @@ describe('modules/manager/npm/post-update/yarn', () => {
       expect(res.lockFile).toBe('');
       expect(fs.outputFile).toHaveBeenCalledTimes(1);
       expect(mockedFunction(fs.outputFile).mock.calls[0][0]).toEndWith(
-        'some-dir/sub_workspace/yarn.lock'
+        'some-dir/sub_workspace/yarn.lock',
       );
       expect(fixSnapshots(execSnapshots)).toMatchSnapshot();
-    }
+    },
   );
 
   it.each([
@@ -330,7 +330,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         {
           'yarn.lock': 'package-lock-contents',
         },
-        'some-dir'
+        'some-dir',
       );
       const execSnapshots = mockExecAll({
         stdout: yarnVersion,
@@ -350,7 +350,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
       ]);
       expect(res.lockFile).toBe('package-lock-contents');
       expect(fixSnapshots(execSnapshots)).toMatchSnapshot();
-    }
+    },
   );
 
   it('catches errors', async () => {
@@ -375,7 +375,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         'package.json': '{ "packageManager": "yarn@3.0.0" }',
         'yarn.lock': 'package-lock-contents',
       },
-      'some-dir'
+      'some-dir',
     );
     mockedFunction(getPkgReleases).mockResolvedValueOnce({
       releases: [{ version: '0.10.0' }],
@@ -421,7 +421,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         'package.json': '{ "packageManager": "yarn@3.0.0" }',
         'yarn.lock': 'package-lock-contents',
       },
-      'some-dir'
+      'some-dir',
     );
     mockedFunction(getPkgReleases).mockResolvedValueOnce({
       releases: [{ version: '0.10.0' }],
@@ -472,7 +472,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         'package.json': '{ "packageManager": "yarn@3.0.0" }',
         'yarn.lock': 'package-lock-contents',
       },
-      'some-dir'
+      'some-dir',
     );
 
     mockedFunction(getPkgReleases).mockResolvedValueOnce({
@@ -532,7 +532,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           '{ "packageManager": "yarn@1.22.18", "dependencies": { "chalk": "^2.4.1" } }',
         'yarn.lock': plocktest1YarnLockV1,
       },
-      'some-dir'
+      'some-dir',
     );
     mockedFunction(getPkgReleases).mockResolvedValueOnce({
       releases: [{ version: '1.22.18' }, { version: '2.4.3' }],
@@ -568,7 +568,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         'yarn.lock': plocktest1YarnLockV1,
         '.yarnrc': 'yarn-path ./.yarn/cli.js\n',
       },
-      'some-dir'
+      'some-dir',
     );
     mockedFunction(getPkgReleases).mockResolvedValueOnce({
       releases: [{ version: '1.22.18' }],
@@ -611,7 +611,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         'yarn.lock': plocktest1YarnLockV1,
         '.yarnrc': 'yarn-path ./.yarn/cli.js\n',
       },
-      'some-dir'
+      'some-dir',
     );
     mockedFunction(getPkgReleases).mockResolvedValueOnce({
       releases: [{ version: '1.22.18' }],
@@ -648,7 +648,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           '/tmp/renovate/.yarnrc':
             'yarn-offline-mirror "./packages-cache"\nyarn-path "./.yarn/cli.js"\n',
         },
-        '/'
+        '/',
       );
       GlobalConfig.set({ localDir: '/tmp/renovate', cacheDir: '/tmp/cache' });
       expect(await yarnHelper.checkYarnrc('.')).toEqual({
@@ -663,7 +663,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           '.yarn/cli.js': '',
           '.yarnrc': 'yarn-path "./.yarn/cli.js"\n',
         },
-        'some-dir'
+        'some-dir',
       );
       expect(await yarnHelper.checkYarnrc('some-dir')).toEqual({
         offlineMirror: false,
@@ -676,7 +676,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         {
           '/tmp/renovate/.yarnrc': 'yarn-offline-mirror "./packages-cache"\n',
         },
-        '/'
+        '/',
       );
       GlobalConfig.set({ localDir: '/tmp/renovate', cacheDir: '/tmp/cache' });
       expect(await yarnHelper.checkYarnrc('.')).toEqual({
@@ -691,7 +691,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
           '.yarn/cli.js': '',
           '/tmp/renovate/.yarnrc': 'yarn-path /.yarn/cli.js\n',
         },
-        '/'
+        '/',
       );
       GlobalConfig.set({ localDir: '/tmp', cacheDir: '/tmp/cache' });
       expect(await yarnHelper.checkYarnrc('renovate')).toEqual({
@@ -705,7 +705,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         {
           '.yarnrc': 'yarn-path ./.yarn/cli.js\n',
         },
-        '/tmp/renovate'
+        '/tmp/renovate',
       );
       GlobalConfig.set({ localDir: '/tmp/renovate', cacheDir: '/tmp/cache' });
       const { offlineMirror, yarnPath } = await yarnHelper.checkYarnrc('.');
@@ -719,7 +719,7 @@ describe('modules/manager/npm/post-update/yarn', () => {
         {
           '.yarnrc': `--install.pure-lockfile true\n--install.frozen-lockfile true\n`,
         },
-        '/tmp/renovate'
+        '/tmp/renovate',
       );
       GlobalConfig.set({ localDir: '/tmp/renovate', cacheDir: '/tmp/cache' });
       await yarnHelper.checkYarnrc('/tmp/renovate');
diff --git a/lib/modules/manager/npm/post-update/yarn.ts b/lib/modules/manager/npm/post-update/yarn.ts
index 55e1a01bd88ade4a9f7d0005c08d8322b3be76f4..92488dd4563fc7d1871757dc523d25e6d5b27908 100644
--- a/lib/modules/manager/npm/post-update/yarn.ts
+++ b/lib/modules/manager/npm/post-update/yarn.ts
@@ -31,14 +31,14 @@ import type { GenerateLockFileResult } from './types';
 import { getPackageManagerVersion, lazyLoadPackageJson } from './utils';
 
 export async function checkYarnrc(
-  lockFileDir: string
+  lockFileDir: string,
 ): Promise<{ offlineMirror: boolean; yarnPath: string | null }> {
   let offlineMirror = false;
   let yarnPath: string | null = null;
   try {
     const yarnrc = await readLocalFile(
       upath.join(lockFileDir, '.yarnrc'),
-      'utf8'
+      'utf8',
     );
     if (is.string(yarnrc)) {
       const mirrorLine = yarnrc
@@ -64,7 +64,7 @@ export async function checkYarnrc(
       if (!yarnBinaryExists) {
         scrubbedYarnrc = scrubbedYarnrc.replace(
           regEx(/^yarn-path\s+"?.+?"?$/gm),
-          ''
+          '',
         );
         yarnPath = null;
       }
@@ -72,7 +72,7 @@ export async function checkYarnrc(
         logger.debug(`Writing scrubbed .yarnrc to ${lockFileDir}`);
         await writeLocalFile(
           upath.join(lockFileDir, '.yarnrc'),
-          scrubbedYarnrc
+          scrubbedYarnrc,
         );
       }
     }
@@ -94,7 +94,7 @@ export async function generateLockFile(
   lockFileDir: string,
   env: NodeJS.ProcessEnv,
   config: Partial<PostUpdateConfig<NpmManagerData>> = {},
-  upgrades: Upgrade[] = []
+  upgrades: Upgrade[] = [],
 ): Promise<GenerateLockFileResult> {
   const lockFileName = upath.join(lockFileDir, 'yarn.lock');
   logger.debug(`Spawning yarn install to create ${lockFileName}`);
@@ -225,7 +225,7 @@ export async function generateLockFile(
             .filter(is.string)
             .filter(uniqueStrings)
             .map(quote)
-            .join(' ')}${cmdOptions}`
+            .join(' ')}${cmdOptions}`,
         );
       } else {
         // `yarn up -R` updates to the latest release in each range
@@ -235,7 +235,7 @@ export async function generateLockFile(
             .map((update) => `${update.depName!}`)
             .filter(uniqueStrings)
             .map(quote)
-            .join(' ')}${cmdOptions}`
+            .join(' ')}${cmdOptions}`,
         );
       }
     }
@@ -244,7 +244,7 @@ export async function generateLockFile(
     ['fewer', 'highest'].forEach((s) => {
       if (
         config.postUpdateOptions?.includes(
-          `yarnDedupe${s.charAt(0).toUpperCase()}${s.slice(1)}`
+          `yarnDedupe${s.charAt(0).toUpperCase()}${s.slice(1)}`,
         )
       ) {
         logger.debug(`Performing yarn dedupe ${s}`);
@@ -262,7 +262,7 @@ export async function generateLockFile(
 
     if (upgrades.find((upgrade) => upgrade.isLockFileMaintenance)) {
       logger.debug(
-        `Removing ${lockFileName} first due to lock file maintenance upgrade`
+        `Removing ${lockFileName} first due to lock file maintenance upgrade`,
       );
 
       // Note: Instead of just deleting the `yarn.lock` file, we just wipe it
@@ -276,7 +276,7 @@ export async function generateLockFile(
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { err, lockFileName },
-          'Error clearing `yarn.lock` for lock file maintenance'
+          'Error clearing `yarn.lock` for lock file maintenance',
         );
       }
     }
@@ -295,7 +295,7 @@ export async function generateLockFile(
         err,
         type: 'yarn',
       },
-      'lock file error'
+      'lock file error',
     );
     const stdouterr = String(err.stdout) + String(err.stderr);
     if (
diff --git a/lib/modules/manager/npm/range.ts b/lib/modules/manager/npm/range.ts
index fdca40b6784ab522f2781226faced31c54bee84c..c657edc2e1c909b3da8be5d6f7eb257524008b85 100644
--- a/lib/modules/manager/npm/range.ts
+++ b/lib/modules/manager/npm/range.ts
@@ -10,7 +10,7 @@ export function getRangeStrategy(config: RangeConfig): RangeStrategy {
   if (rangeStrategy === 'bump' && isComplexRange) {
     logger.debug(
       { currentValue },
-      'Replacing bump strategy for complex range with widen'
+      'Replacing bump strategy for complex range with widen',
     );
     return 'widen';
   }
diff --git a/lib/modules/manager/npm/schema.ts b/lib/modules/manager/npm/schema.ts
index 98f50d5e96f154b413b8a8ea214462e88200dea9..00847ca8c0210e5ddaf295bffd67c286c14c2ef1 100644
--- a/lib/modules/manager/npm/schema.ts
+++ b/lib/modules/manager/npm/schema.ts
@@ -25,7 +25,7 @@ export const PackageLockV3Schema = z.object({
       .string()
       .transform((x) => x.replace(/^node_modules\//, ''))
       .refine((x) => x.trim() !== ''),
-    z.object({ version: z.string() })
+    z.object({ version: z.string() }),
   ),
 });
 
@@ -40,7 +40,7 @@ export const PackageLockPreV3Schema = z
   }));
 
 export const PackageLock = Json.pipe(
-  z.union([PackageLockV3Schema, PackageLockPreV3Schema])
+  z.union([PackageLockV3Schema, PackageLockPreV3Schema]),
 ).transform(({ packages, lockfileVersion }) => {
   const lockedVersions: Record<string, string> = {};
   for (const [entry, val] of Object.entries(packages)) {
diff --git a/lib/modules/manager/npm/update/dependency/index.spec.ts b/lib/modules/manager/npm/update/dependency/index.spec.ts
index e243622063f19739af9c90322eb1e3ec6d2dc254..2077d321849c79d700f40e1ac69a73855c82e853 100644
--- a/lib/modules/manager/npm/update/dependency/index.spec.ts
+++ b/lib/modules/manager/npm/update/dependency/index.spec.ts
@@ -154,7 +154,7 @@ describe('modules/manager/npm/update/dependency/index', () => {
         upgrade,
       });
       expect(JSON.parse(testContent!).resolutions['**/@angular/cli']).toBe(
-        '8.1.0'
+        '8.1.0',
       );
     });
 
diff --git a/lib/modules/manager/npm/update/dependency/index.ts b/lib/modules/manager/npm/update/dependency/index.ts
index afd824c937f65e5953dbd5ecbb129d8d9c4d332c..d354e56fed128128f2ea84a783daa08c2b016796 100644
--- a/lib/modules/manager/npm/update/dependency/index.ts
+++ b/lib/modules/manager/npm/update/dependency/index.ts
@@ -15,7 +15,7 @@ import type { NpmDepType, NpmManagerData } from '../../types';
 function renameObjKey(
   oldObj: DependenciesMeta,
   oldKey: string,
-  newKey: string
+  newKey: string,
 ): DependenciesMeta {
   const keys = Object.keys(oldObj);
   return keys.reduce((acc, key) => {
@@ -35,7 +35,7 @@ function replaceAsString(
   depName: string,
   oldValue: string,
   newValue: string,
-  parents?: string[]
+  parents?: string[],
 ): string {
   if (depType === 'packageManager') {
     parsedContents[depType] = newValue;
@@ -50,7 +50,7 @@ function replaceAsString(
         // TODO #22198
         parsedContents.dependenciesMeta!,
         oldValue,
-        newValue
+        newValue,
       );
     }
   } else if (parents && depType === 'overrides') {
@@ -58,7 +58,7 @@ function replaceAsString(
     const { depObjectReference, overrideDepName } = overrideDepPosition(
       parsedContents[depType]!,
       parents,
-      depName
+      depName,
     );
     if (depObjectReference) {
       depObjectReference[overrideDepName] = newValue;
@@ -93,7 +93,7 @@ function replaceAsString(
         fileContent,
         searchIndex,
         searchString,
-        newString
+        newString,
       );
       // Compare the parsed JSON structure of old and new
       if (dequal(parsedContents, JSON.parse(testContent))) {
@@ -119,13 +119,13 @@ export function updateDependency({
         upgrade.currentDigest,
         // TODO #22198
 
-        upgrade.newDigest!.substring(0, upgrade.currentDigest.length)
+        upgrade.newDigest!.substring(0, upgrade.currentDigest.length),
       );
     } else {
       logger.debug('Updating package.json git version tag');
       newValue = upgrade.currentRawValue.replace(
         upgrade.currentValue,
-        upgrade.newValue
+        upgrade.newValue,
       );
     }
   }
@@ -151,7 +151,7 @@ export function updateDependency({
         const { depObjectReference, overrideDepName } = overrideDepPosition(
           parsedContents['overrides']!,
           overrideDepParents,
-          depName
+          depName,
         );
         if (depObjectReference) {
           oldVersion = depObjectReference[overrideDepName]!;
@@ -174,7 +174,7 @@ export function updateDependency({
       depName,
       oldVersion!,
       newValue!,
-      overrideDepParents
+      overrideDepParents,
     );
     if (upgrade.newName) {
       newFileContent = replaceAsString(
@@ -184,14 +184,14 @@ export function updateDependency({
         depName,
         depName,
         upgrade.newName,
-        overrideDepParents
+        overrideDepParents,
       );
     }
     // istanbul ignore if
     if (!newFileContent) {
       logger.debug(
         { fileContent, parsedContents, depType, depName, newValue },
-        'Warning: updateDependency error'
+        'Warning: updateDependency error',
       );
       return fileContent;
     }
@@ -212,7 +212,7 @@ export function updateDependency({
               oldVersion,
               resolutionsVersion: parsedContents.resolutions[depKey],
             },
-            'Upgraded dependency exists in yarn resolutions but is different version'
+            'Upgraded dependency exists in yarn resolutions but is different version',
           );
         }
         newFileContent = replaceAsString(
@@ -223,7 +223,7 @@ export function updateDependency({
           // TODO #22198
           parsedContents.resolutions[depKey]!,
           // TODO #22198
-          newValue!
+          newValue!,
         );
         if (upgrade.newName) {
           if (depKey === `**/${depName}`) {
@@ -236,7 +236,7 @@ export function updateDependency({
             'resolutions',
             depKey,
             depKey,
-            upgrade.newName
+            upgrade.newName,
           );
         }
       }
@@ -251,7 +251,7 @@ export function updateDependency({
             depName,
             depKey,
             // TODO: types (#22198)
-            `${depName}@${newValue}`
+            `${depName}@${newValue}`,
           );
         }
       }
@@ -265,7 +265,7 @@ export function updateDependency({
 function overrideDepPosition(
   overrideBlock: OverrideDependency,
   parents: string[],
-  depName: string
+  depName: string,
 ): {
   depObjectReference: Record<string, string>;
   overrideDepName: string;
diff --git a/lib/modules/manager/npm/update/locked-dependency/common/parent-version.spec.ts b/lib/modules/manager/npm/update/locked-dependency/common/parent-version.spec.ts
index c745a89f12185b314faea06a72b2738cf43441a6..866af028cccf138d3020df591791841fc3a8a8d9 100644
--- a/lib/modules/manager/npm/update/locked-dependency/common/parent-version.spec.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/common/parent-version.spec.ts
@@ -27,7 +27,7 @@ describe('modules/manager/npm/update/locked-dependency/common/parent-version', (
         .reply(200, expressJson);
 
       expect(
-        await findFirstParentVersion('express', '4.0.0', 'send', '0.11.1')
+        await findFirstParentVersion('express', '4.0.0', 'send', '0.11.1'),
       ).toBe('4.11.1');
     });
 
@@ -49,8 +49,8 @@ describe('modules/manager/npm/update/locked-dependency/common/parent-version', (
           'express',
           '4.0.0',
           'buffer-crc32',
-          '10.0.0'
-        )
+          '10.0.0',
+        ),
       ).toBe('4.9.1');
     });
 
@@ -70,7 +70,7 @@ describe('modules/manager/npm/update/locked-dependency/common/parent-version', (
         });
 
       expect(
-        await findFirstParentVersion('express', '4.0.0', 'qs', '6.0.4')
+        await findFirstParentVersion('express', '4.0.0', 'qs', '6.0.4'),
       ).toBe('4.14.0');
     });
 
@@ -89,7 +89,7 @@ describe('modules/manager/npm/update/locked-dependency/common/parent-version', (
         });
 
       expect(
-        await findFirstParentVersion('express', '4.16.1', 'type-is', '1.2.1')
+        await findFirstParentVersion('express', '4.16.1', 'type-is', '1.2.1'),
       ).toBe('4.16.1');
     });
 
@@ -107,7 +107,7 @@ describe('modules/manager/npm/update/locked-dependency/common/parent-version', (
         });
 
       expect(
-        await findFirstParentVersion('express', '4.16.1', 'debug', '9.0.0')
+        await findFirstParentVersion('express', '4.16.1', 'debug', '9.0.0'),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/manager/npm/update/locked-dependency/common/parent-version.ts b/lib/modules/manager/npm/update/locked-dependency/common/parent-version.ts
index 5c0a95316fce206595e7a0db62d2af95b4cb9c0d..b1d877888064507bf20438685ec4f1dff7d265a3 100644
--- a/lib/modules/manager/npm/update/locked-dependency/common/parent-version.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/common/parent-version.ts
@@ -9,7 +9,7 @@ import { api as semver } from '../../../../../versioning/npm';
 const pkgCache = new Map<string, Promise<ReleaseResult | null>>();
 
 function getPkgReleasesCached(
-  packageName: string
+  packageName: string,
 ): Promise<ReleaseResult | null> {
   let cachedResult = pkgCache.get(packageName);
   if (!cachedResult) {
@@ -33,7 +33,7 @@ export async function findFirstParentVersion(
   parentName: string,
   parentStartingVersion: string,
   targetDepName: string,
-  targetVersion: string
+  targetVersion: string,
 ): Promise<string | null> {
   // istanbul ignore if
   if (!semver.isVersion(parentStartingVersion)) {
@@ -41,7 +41,7 @@ export async function findFirstParentVersion(
     return null;
   }
   logger.debug(
-    `Finding first version of ${parentName} starting with ${parentStartingVersion} which supports >= ${targetDepName}@${targetVersion}`
+    `Finding first version of ${parentName} starting with ${parentStartingVersion} which supports >= ${targetDepName}@${targetVersion}`,
   );
   try {
     const targetDep = await getPkgReleasesCached(targetDepName);
@@ -49,7 +49,7 @@ export async function findFirstParentVersion(
     if (!targetDep) {
       logger.info(
         { targetDepName },
-        'Could not look up target dependency for remediation'
+        'Could not look up target dependency for remediation',
       );
       return null;
     }
@@ -60,14 +60,14 @@ export async function findFirstParentVersion(
           semver.isVersion(version) &&
           semver.isStable(version) &&
           (version === targetVersion ||
-            semver.isGreaterThan(version, targetVersion))
+            semver.isGreaterThan(version, targetVersion)),
       );
     const parentDep = await getPkgReleasesCached(parentName);
     // istanbul ignore if
     if (!parentDep) {
       logger.info(
         { parentName },
-        'Could not look up parent dependency for remediation'
+        'Could not look up parent dependency for remediation',
       );
       return null;
     }
@@ -78,24 +78,24 @@ export async function findFirstParentVersion(
           semver.isVersion(version) &&
           semver.isStable(version) &&
           (version === parentStartingVersion ||
-            semver.isGreaterThan(version, parentStartingVersion))
+            semver.isGreaterThan(version, parentStartingVersion)),
       )
       .sort((v1, v2) => semver.sortVersions(v1, v2));
     // iterate through parentVersions in sorted order
     for (const parentVersion of parentVersions) {
       const constraint = parentDep.releases.find(
-        (release) => release.version === parentVersion
+        (release) => release.version === parentVersion,
       )?.dependencies?.[targetDepName];
       if (!constraint) {
         logger.debug(
-          `${targetDepName} has been removed from ${parentName}@${parentVersion}`
+          `${targetDepName} has been removed from ${parentName}@${parentVersion}`,
         );
         return parentVersion;
       }
       if (semver.matches(targetVersion, constraint)) {
         // could be version or range
         logger.debug(
-          `${targetDepName} needs ${parentName}@${parentVersion} which uses constraint "${constraint}" in order to update to ${targetVersion}`
+          `${targetDepName} needs ${parentName}@${parentVersion} which uses constraint "${constraint}" in order to update to ${targetVersion}`,
         );
         return parentVersion;
       }
@@ -103,7 +103,7 @@ export async function findFirstParentVersion(
         if (semver.isGreaterThan(constraint, targetVersion)) {
           // it's not the version we were after - the parent skipped to a higher version
           logger.debug(
-            `${targetDepName} needs ${parentName}@${parentVersion} which uses version "${constraint}" in order to update to greater than ${targetVersion}`
+            `${targetDepName} needs ${parentName}@${parentVersion} which uses version "${constraint}" in order to update to greater than ${targetVersion}`,
           );
           return parentVersion;
         }
@@ -113,7 +113,7 @@ export async function findFirstParentVersion(
       ) {
         // the constraint didn't match the version we wanted, but it matches one of the versions higher
         logger.debug(
-          `${targetDepName} needs ${parentName}@${parentVersion} which uses constraint "${constraint}" in order to update to greater than ${targetVersion}`
+          `${targetDepName} needs ${parentName}@${parentVersion} which uses constraint "${constraint}" in order to update to greater than ${targetVersion}`,
         );
         return parentVersion;
       }
@@ -121,7 +121,7 @@ export async function findFirstParentVersion(
   } catch (err) /* istanbul ignore next */ {
     logger.warn(
       { parentName, parentStartingVersion, targetDepName, targetVersion, err },
-      'findFirstParentVersion error'
+      'findFirstParentVersion error',
     );
     return null;
   }
diff --git a/lib/modules/manager/npm/update/locked-dependency/index.spec.ts b/lib/modules/manager/npm/update/locked-dependency/index.spec.ts
index 0e4290e3ecac2c09be2bc73c7a2754560c035e82..054d8a449caa642fa678be4adcbeac8c6978d6cb 100644
--- a/lib/modules/manager/npm/update/locked-dependency/index.spec.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/index.spec.ts
@@ -8,7 +8,7 @@ const packageFileContent = Fixtures.get('package.json', './package-lock');
 const lockFileContent = Fixtures.get('package-lock-v1.json', './package-lock');
 const lockFileV2Content = Fixtures.get(
   'package-lock-v2.json',
-  './package-lock'
+  './package-lock',
 );
 const acceptsJson = Fixtures.getJson('accepts.json', './package-lock');
 const expressJson = Fixtures.get('express.json', './common');
@@ -18,11 +18,11 @@ const sendJson = Fixtures.get('send.json', './package-lock');
 const typeIsJson = Fixtures.getJson('type-is.json', './package-lock');
 const bundledPackageJson = Fixtures.get(
   'bundled.package.json',
-  './package-lock'
+  './package-lock',
 );
 const bundledPackageLockJson = Fixtures.get(
   'bundled.package-lock.json',
-  './package-lock'
+  './package-lock',
 );
 
 describe('modules/manager/npm/update/locked-dependency/index', () => {
@@ -43,10 +43,10 @@ describe('modules/manager/npm/update/locked-dependency/index', () => {
 
     it('validates filename', async () => {
       expect(
-        await updateLockedDependency({ ...config, lockFile: 'yarn.lock' })
+        await updateLockedDependency({ ...config, lockFile: 'yarn.lock' }),
       ).toMatchObject({});
       expect(
-        await updateLockedDependency({ ...config, lockFile: 'yarn.lock2' })
+        await updateLockedDependency({ ...config, lockFile: 'yarn.lock2' }),
       ).toMatchObject({});
     });
 
@@ -55,13 +55,16 @@ describe('modules/manager/npm/update/locked-dependency/index', () => {
         await updateLockedDependency({
           ...config,
           newVersion: '^2.0.0',
-        })
+        }),
       ).toMatchObject({});
     });
 
     it('returns null for unparseable files', async () => {
       expect(
-        await updateLockedDependency({ ...config, lockFileContent: 'not json' })
+        await updateLockedDependency({
+          ...config,
+          lockFileContent: 'not json',
+        }),
       ).toMatchObject({});
     });
 
@@ -70,7 +73,7 @@ describe('modules/manager/npm/update/locked-dependency/index', () => {
         await updateLockedDependency({
           ...config,
           lockFileContent: lockFileContent.replace(': 1,', ': 2,'),
-        })
+        }),
       ).toMatchObject({});
     });
 
@@ -86,7 +89,7 @@ describe('modules/manager/npm/update/locked-dependency/index', () => {
           depName: 'accepts',
           currentVersion: '10.0.0',
           newVersion: '11.0.0',
-        })
+        }),
       ).toMatchObject({});
     });
 
@@ -98,7 +101,7 @@ describe('modules/manager/npm/update/locked-dependency/index', () => {
         newVersion: '1.2.12',
       });
       expect(
-        JSON.parse(res.files!['package-lock.json']).dependencies.mime.version
+        JSON.parse(res.files!['package-lock.json']).dependencies.mime.version,
       ).toBe('1.2.12');
     });
 
diff --git a/lib/modules/manager/npm/update/locked-dependency/index.ts b/lib/modules/manager/npm/update/locked-dependency/index.ts
index 1cba1b8f0df56673099a3806cc3b941c4143976c..7d6e10b961099e69249d381f2aa7007ced4acfa0 100644
--- a/lib/modules/manager/npm/update/locked-dependency/index.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/index.ts
@@ -5,7 +5,7 @@ import * as packageLock from './package-lock';
 import * as yarnLock from './yarn-lock';
 
 export async function updateLockedDependency(
-  config: UpdateLockedConfig
+  config: UpdateLockedConfig,
 ): Promise<UpdateLockedResult> {
   const { currentVersion, newVersion, lockFile } = config;
   if (!(semver.isVersion(currentVersion) && semver.isVersion(newVersion))) {
diff --git a/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.spec.ts b/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.spec.ts
index 765786bedb75a8a865fa514c63ef48238981d292..56fe90cfdd3ec9a3abcae86aad6239ed328b2466 100644
--- a/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.spec.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.spec.ts
@@ -15,8 +15,8 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/dep-constrai
           packageLockJson,
           'send',
           '0.2.0',
-          '0.2.1'
-        )
+          '0.2.1',
+        ),
       ).toEqual([
         {
           constraint: '0.2.0',
@@ -33,8 +33,8 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/dep-constrai
           packageLockJson,
           'express',
           '4.0.0',
-          '4.5.0'
-        )
+          '4.5.0',
+        ),
       ).toEqual([{ constraint: '4.0.0', depType: 'dependencies' }]);
     });
 
@@ -45,8 +45,8 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/dep-constrai
           packageLockJson,
           'express',
           '4.4.0',
-          '4.5.0'
-        )
+          '4.5.0',
+        ),
       ).toHaveLength(0);
     });
 
@@ -60,8 +60,8 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/dep-constrai
           packageLockJson,
           'express',
           '4.0.0',
-          '4.5.0'
-        )
+          '4.5.0',
+        ),
       ).toEqual([{ constraint: '4.0.0', depType: 'devDependencies' }]);
     });
   });
diff --git a/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.ts b/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.ts
index a484b1fb1cd52a057dccd7a4bda926655077207b..214e995330c4f5fcac5d59a84ab69819c8dbe6a9 100644
--- a/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/package-lock/dep-constraints.ts
@@ -11,7 +11,7 @@ export function findDepConstraints(
   depName: string,
   currentVersion: string,
   newVersion: string,
-  parentDepName?: string
+  parentDepName?: string,
 ): ParentDependency[] {
   let parents: ParentDependency[] = [];
   let packageJsonConstraint = packageJson.dependencies?.[depName];
@@ -55,7 +55,7 @@ export function findDepConstraints(
       } else {
         logger.warn(
           { parentDepName, depName, currentVersion, constraint },
-          'Parent constraint is invalid'
+          'Parent constraint is invalid',
         );
       }
     }
@@ -69,8 +69,8 @@ export function findDepConstraints(
           depName,
           currentVersion,
           newVersion,
-          packageName
-        )
+          packageName,
+        ),
       );
     }
   }
diff --git a/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.spec.ts b/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.spec.ts
index 29aad26ffea7a156c6eda56d9bdaee3309a1ed27..5cbb3a8737a786fe93cb1ef87eb2c8904f9e95d4 100644
--- a/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.spec.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.spec.ts
@@ -10,7 +10,7 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/get-locked',
   describe('getLockedDependencies()', () => {
     it('handles error', () => {
       expect(getLockedDependencies(null as any, 'some-dep', '1.0.0')).toEqual(
-        []
+        [],
       );
     });
 
@@ -20,7 +20,7 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/get-locked',
 
     it('finds direct dependency', () => {
       expect(
-        getLockedDependencies(packageLockJson, 'express', '4.0.0')
+        getLockedDependencies(packageLockJson, 'express', '4.0.0'),
       ).toMatchObject([
         {
           resolved: 'https://registry.npmjs.org/express/-/express-4.0.0.tgz',
@@ -31,7 +31,7 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/get-locked',
 
     it('finds indirect dependency', () => {
       expect(
-        getLockedDependencies(packageLockJson, 'send', '0.2.0')
+        getLockedDependencies(packageLockJson, 'send', '0.2.0'),
       ).toMatchObject([
         {
           resolved: 'https://registry.npmjs.org/send/-/send-0.2.0.tgz',
@@ -42,13 +42,13 @@ describe('modules/manager/npm/update/locked-dependency/package-lock/get-locked',
 
     it('finds any version', () => {
       expect(getLockedDependencies(packageLockJson, 'send', null)).toHaveLength(
-        2
+        2,
       );
     });
 
     it('finds bundled dependency', () => {
       expect(
-        getLockedDependencies(bundledPackageLockJson, 'ansi-regex', '3.0.0')
+        getLockedDependencies(bundledPackageLockJson, 'ansi-regex', '3.0.0'),
       ).toMatchObject([
         {
           bundled: true,
diff --git a/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.ts b/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.ts
index 4a9c6670782581205f2de6a0650c308b20f98c26..6aed6ef4f4528b0877004f668ced309d69c69929 100644
--- a/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/package-lock/get-locked.ts
@@ -6,7 +6,7 @@ export function getLockedDependencies(
   entry: PackageLockOrEntry,
   depName: string,
   currentVersion: string | null,
-  bundled = false
+  bundled = false,
 ): PackageLockDependency[] {
   let res: PackageLockDependency[] = [];
   try {
@@ -27,8 +27,8 @@ export function getLockedDependencies(
           dependency,
           depName,
           currentVersion,
-          bundled || !!entry.bundled
-        )
+          bundled || !!entry.bundled,
+        ),
       );
     }
   } catch (err) {
diff --git a/lib/modules/manager/npm/update/locked-dependency/package-lock/index.ts b/lib/modules/manager/npm/update/locked-dependency/package-lock/index.ts
index 3d220be894aa800c0190c3fb5de8cddf2247befc..5e95a770667f67aaf6587606b883ef3c60629d09 100644
--- a/lib/modules/manager/npm/update/locked-dependency/package-lock/index.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/package-lock/index.ts
@@ -11,7 +11,7 @@ import type { PackageLockOrEntry } from './types';
 
 export async function updateLockedDependency(
   config: UpdateLockedConfig,
-  isParentUpdate = false
+  isParentUpdate = false,
 ): Promise<UpdateLockedResult> {
   const {
     depName,
@@ -25,7 +25,7 @@ export async function updateLockedDependency(
     allowHigherOrRemoved = false,
   } = config;
   logger.debug(
-    `npm.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+    `npm.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`,
   );
   try {
     let packageJson: PackageJson;
@@ -45,11 +45,11 @@ export async function updateLockedDependency(
     const lockedDeps = getLockedDependencies(
       packageLockJson,
       depName,
-      currentVersion
+      currentVersion,
     );
     if (lockedDeps.some((dep) => dep.bundled)) {
       logger.info(
-        `Package ${depName}@${currentVersion} is bundled and cannot be updated`
+        `Package ${depName}@${currentVersion} is bundled and cannot be updated`,
       );
       return { status: 'update-failed' };
     }
@@ -57,19 +57,19 @@ export async function updateLockedDependency(
       const newLockedDeps = getLockedDependencies(
         packageLockJson,
         depName,
-        newVersion
+        newVersion,
       );
       let status: 'update-failed' | 'already-updated';
       if (newLockedDeps.length) {
         logger.debug(
-          `${depName}@${currentVersion} not found in ${lockFile} but ${depName}@${newVersion} was - looks like it's already updated`
+          `${depName}@${currentVersion} not found in ${lockFile} but ${depName}@${newVersion} was - looks like it's already updated`,
         );
         status = 'already-updated';
       } else {
         if (lockfileVersion !== 1) {
           logger.debug(
             // TODO: types (#22198)
-            `Found lockfileVersion ${packageLockJson.lockfileVersion!}`
+            `Found lockfileVersion ${packageLockJson.lockfileVersion!}`,
           );
           status = 'update-failed';
         } else if (allowHigherOrRemoved) {
@@ -77,34 +77,34 @@ export async function updateLockedDependency(
           const anyVersionLocked = getLockedDependencies(
             packageLockJson,
             depName,
-            null
+            null,
           );
           if (anyVersionLocked.length) {
             if (
               anyVersionLocked.every((dep) =>
-                semver.isGreaterThan(dep.version, newVersion)
+                semver.isGreaterThan(dep.version, newVersion),
               )
             ) {
               logger.debug(
-                `${depName} found in ${lockFile} with higher version - looks like it's already updated`
+                `${depName} found in ${lockFile} with higher version - looks like it's already updated`,
               );
               status = 'already-updated';
             } else {
               logger.debug(
                 { anyVersionLocked },
-                `Found alternative versions of qs`
+                `Found alternative versions of qs`,
               );
               status = 'update-failed';
             }
           } else {
             logger.debug(
-              `${depName} not found in ${lockFile} - looks like it's already removed`
+              `${depName} not found in ${lockFile} - looks like it's already removed`,
             );
             status = 'already-updated';
           }
         } else {
           logger.debug(
-            `${depName}@${currentVersion} not found in ${lockFile} - cannot update`
+            `${depName}@${currentVersion} not found in ${lockFile} - cannot update`,
           );
           status = 'update-failed';
         }
@@ -122,20 +122,20 @@ export async function updateLockedDependency(
       return { status };
     }
     logger.debug(
-      `Found matching dependencies with length ${lockedDeps.length}`
+      `Found matching dependencies with length ${lockedDeps.length}`,
     );
     const constraints = findDepConstraints(
       packageJson,
       packageLockJson,
       depName,
       currentVersion,
-      newVersion
+      newVersion,
     );
     logger.trace({ deps: lockedDeps, constraints }, 'Matching details');
     if (!constraints.length) {
       logger.info(
         { depName, currentVersion, newVersion },
-        'Could not find constraints for the locked dependency - cannot remediate'
+        'Could not find constraints for the locked dependency - cannot remediate',
       );
       return { status: 'update-failed' };
     }
@@ -154,12 +154,12 @@ export async function updateLockedDependency(
             parentDepName
               ? `${parentDepName}@${parentVersion!}`
               : /* istanbul ignore next: hard to test */ packageFile
-          }`
+          }`,
         );
       } else if (parentDepName && parentVersion) {
         if (!allowParentUpdates) {
           logger.debug(
-            `Cannot update ${depName} to ${newVersion} without an update to ${parentDepName}`
+            `Cannot update ${depName} to ${newVersion} without an update to ${parentDepName}`,
           );
           return { status: 'update-failed' };
         }
@@ -168,17 +168,17 @@ export async function updateLockedDependency(
           parentDepName,
           parentVersion,
           depName,
-          newVersion
+          newVersion,
         );
         if (parentNewVersion) {
           if (parentNewVersion === parentVersion) {
             logger.debug(
-              `Update of ${depName} to ${newVersion} already achieved in parent ${parentDepName}@${parentNewVersion}`
+              `Update of ${depName} to ${newVersion} already achieved in parent ${parentDepName}@${parentNewVersion}`,
             );
           } else {
             // Update the parent dependency so that we can update this dependency
             logger.debug(
-              `Update of ${depName} to ${newVersion} can be achieved due to parent ${parentDepName}`
+              `Update of ${depName} to ${newVersion} can be achieved due to parent ${parentDepName}`,
             );
             const parentUpdate: Partial<UpdateLockedConfig> = {
               depName: parentDepName,
@@ -190,7 +190,7 @@ export async function updateLockedDependency(
         } else {
           // For some reason it's not possible to update the parent to a version compatible with our desired dep version
           logger.debug(
-            `Update of ${depName} to ${newVersion} cannot be achieved due to parent ${parentDepName}`
+            `Update of ${depName} to ${newVersion} cannot be achieved due to parent ${parentDepName}`,
           );
           return { status: 'update-failed' };
         }
@@ -219,7 +219,7 @@ export async function updateLockedDependency(
     let newLockFileContent = JSON.stringify(
       packageLockJson,
       null,
-      detectedIndent
+      detectedIndent,
     );
     // iterate through the parent updates first
     for (const parentUpdate of parentUpdates) {
@@ -231,13 +231,13 @@ export async function updateLockedDependency(
       };
       const parentUpdateResult = await updateLockedDependency(
         parentUpdateConfig,
-        true
+        true,
       );
       // istanbul ignore if: hard to test due to recursion
       if (!parentUpdateResult.files) {
         logger.debug(
           // TODO: types (#22198)
-          `Update of ${depName} to ${newVersion} impossible due to failed update of parent ${parentUpdate.depName} to ${parentUpdate.newVersion}`
+          `Update of ${depName} to ${newVersion} impossible due to failed update of parent ${parentUpdate.depName} to ${parentUpdate.newVersion}`,
         );
         return { status: 'update-failed' };
       }
@@ -255,7 +255,7 @@ export async function updateLockedDependency(
       files[packageFile] = newPackageJsonContent;
     } else if (lockfileVersion !== 1) {
       logger.debug(
-        'Remediations which change package-lock.json only are not supported unless lockfileVersion=1'
+        'Remediations which change package-lock.json only are not supported unless lockfileVersion=1',
       );
       return { status: 'unsupported' };
     }
diff --git a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/get-locked.ts b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/get-locked.ts
index 7f4f5fa73db189555321e2bd076e590a5debcdfa..76164ada874d65763ab3c99a8a923f15e7bc34f1 100644
--- a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/get-locked.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/get-locked.ts
@@ -23,7 +23,7 @@ export function parseEntry(depNameConstraint: string): {
 export function getYarn1LockedDependencies(
   yarnLock: YarnLock,
   depName: string,
-  currentVersion: string
+  currentVersion: string,
 ): YarnLockEntrySummary[] {
   const res: YarnLockEntrySummary[] = [];
   try {
@@ -47,7 +47,7 @@ export function getYarn1LockedDependencies(
 export function getYarn2LockedDependencies(
   yarnLock: YarnLock,
   depName: string,
-  currentVersion: string
+  currentVersion: string,
 ): YarnLockEntrySummary[] {
   const res: YarnLockEntrySummary[] = [];
   try {
@@ -79,7 +79,7 @@ export function getYarn2LockedDependencies(
 export function getLockedDependencies(
   yarnLock: YarnLock,
   depName: string,
-  currentVersion: string
+  currentVersion: string,
 ): YarnLockEntrySummary[] {
   if ('__metadata' in yarnLock) {
     return getYarn2LockedDependencies(yarnLock, depName, currentVersion);
diff --git a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/index.ts b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/index.ts
index 6aba66c95c639bc518ec7994c335acf042576cf5..8d158853a7af7bdda01ac93901fc4722b9969b11 100644
--- a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/index.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/index.ts
@@ -7,12 +7,12 @@ import { replaceConstraintVersion } from './replace';
 import type { YarnLock, YarnLockEntryUpdate } from './types';
 
 export function updateLockedDependency(
-  config: UpdateLockedConfig
+  config: UpdateLockedConfig,
 ): UpdateLockedResult {
   const { depName, currentVersion, newVersion, lockFile, lockFileContent } =
     config;
   logger.debug(
-    `npm.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+    `npm.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`,
   );
   let yarnLock: YarnLock;
   try {
@@ -28,39 +28,39 @@ export function updateLockedDependency(
       const newLockedDeps = getLockedDependencies(
         yarnLock,
         depName,
-        newVersion
+        newVersion,
       );
       if (newLockedDeps.length) {
         logger.debug(
-          `${depName}@${currentVersion} not found in ${lockFile} but ${depName}@${newVersion} was - looks like it's already updated`
+          `${depName}@${currentVersion} not found in ${lockFile} but ${depName}@${newVersion} was - looks like it's already updated`,
         );
         return { status: 'already-updated' };
       }
       logger.debug(
-        `${depName}@${currentVersion} not found in ${lockFile} - cannot update`
+        `${depName}@${currentVersion} not found in ${lockFile} - cannot update`,
       );
       return { status: 'update-failed' };
     }
     if ('__metadata' in yarnLock) {
       logger.debug(
-        'Cannot patch Yarn 2+ lock file directly - falling back to using yarn'
+        'Cannot patch Yarn 2+ lock file directly - falling back to using yarn',
       );
       return { status: 'unsupported' };
     }
     logger.debug(
-      `Found matching dependencies with length ${lockedDeps.length}`
+      `Found matching dependencies with length ${lockedDeps.length}`,
     );
     const updateLockedDeps: YarnLockEntryUpdate[] = [];
     for (const lockedDep of lockedDeps) {
       if (semver.matches(newVersion, lockedDep.constraint)) {
         logger.debug(
-          `Dependency ${depName} can be updated from ${newVersion} to ${newVersion} in range ${lockedDep.constraint}`
+          `Dependency ${depName} can be updated from ${newVersion} to ${newVersion} in range ${lockedDep.constraint}`,
         );
         updateLockedDeps.push({ ...lockedDep, newVersion });
         continue;
       }
       logger.debug(
-        `Dependency ${depName} cannot be updated from ${newVersion} to ${newVersion} in range ${lockedDep.constraint}`
+        `Dependency ${depName} cannot be updated from ${newVersion} to ${newVersion} in range ${lockedDep.constraint}`,
       );
       return { status: 'update-failed' };
     }
@@ -72,7 +72,7 @@ export function updateLockedDependency(
         newLockFileContent,
         depName,
         constraint,
-        newVersion
+        newVersion,
       );
     }
     // istanbul ignore if: cannot test
diff --git a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.spec.ts b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.spec.ts
index 4f160fa466281c9593142f9322e94d09950079b5..bc2f114fd61d8138c7e02aec92d77ff31813a2e5 100644
--- a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.spec.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.spec.ts
@@ -13,7 +13,7 @@ describe('modules/manager/npm/update/locked-dependency/yarn-lock/replace', () =>
         yarn2Lock,
         'chalk',
         '^2.4.1',
-        '2.5.0'
+        '2.5.0',
       );
       expect(res).toBe(yarn2Lock);
     });
@@ -23,7 +23,7 @@ describe('modules/manager/npm/update/locked-dependency/yarn-lock/replace', () =>
         yarnLock1,
         'fresh',
         '~0.2.1',
-        '0.2.5'
+        '0.2.5',
       );
       expect(res).not.toEqual(yarnLock1);
       const diffRes = Diff.diffLines(yarnLock1, res);
@@ -48,7 +48,7 @@ describe('modules/manager/npm/update/locked-dependency/yarn-lock/replace', () =>
         yarnLock1,
         'express',
         '4.0.0',
-        '4.4.0'
+        '4.4.0',
       );
       expect(res).not.toEqual(yarnLock1);
       const diffRes = Diff.diffLines(yarnLock1, res);
@@ -74,7 +74,7 @@ describe('modules/manager/npm/update/locked-dependency/yarn-lock/replace', () =>
         'express',
         '4.0.0',
         '4.4.0',
-        '4.4.0'
+        '4.4.0',
       );
       expect(res).not.toEqual(yarnLock1);
       const diffRes = Diff.diffLines(yarnLock1, res);
@@ -101,7 +101,7 @@ describe('modules/manager/npm/update/locked-dependency/yarn-lock/replace', () =>
         yarnLock2,
         'string-width',
         '^1.0.1 || ^2.0.0',
-        '2.2.0'
+        '2.2.0',
       );
       expect(res).not.toEqual(yarnLock2);
       const diffRes = Diff.diffLines(yarnLock2, res);
@@ -126,7 +126,7 @@ describe('modules/manager/npm/update/locked-dependency/yarn-lock/replace', () =>
         yarnLock2,
         '@embroider/addon-shim',
         '^0.48.0',
-        '0.48.1'
+        '0.48.1',
       );
       expect(res).not.toEqual(yarnLock2);
       const diffRes = Diff.diffLines(yarnLock2, res);
diff --git a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.ts b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.ts
index 6611897e05c26e430dcd6cc6cbb06cebb3095992..5cafed0730bed0899d6c05e8ffc5d26a695ebc8d 100644
--- a/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.ts
+++ b/lib/modules/manager/npm/update/locked-dependency/yarn-lock/replace.ts
@@ -6,7 +6,7 @@ export function replaceConstraintVersion(
   depName: string,
   constraint: string,
   newVersion: string,
-  newConstraint?: string
+  newConstraint?: string,
 ): string {
   if (lockFileContent.startsWith('__metadata:')) {
     // Yarn 2+
@@ -21,7 +21,7 @@ export function replaceConstraintVersion(
   if (!matchResult) {
     logger.debug(
       { depName, constraint, newVersion },
-      'Could not find constraint in lock file'
+      'Could not find constraint in lock file',
     );
     return lockFileContent;
   }
@@ -30,11 +30,11 @@ export function replaceConstraintVersion(
     const newDepNameConstraint = `${depName}@${newConstraint}`;
     constraintLine = constraintLine.replace(
       depNameConstraint,
-      newDepNameConstraint
+      newDepNameConstraint,
     );
   }
   return lockFileContent.replace(
     regEx(matchString),
-    `${constraintLine}  version "${newVersion}"\n$5`
+    `${constraintLine}  version "${newVersion}"\n$5`,
   );
 }
diff --git a/lib/modules/manager/npm/update/package-version/index.spec.ts b/lib/modules/manager/npm/update/package-version/index.spec.ts
index e40b5fb14873704e28d9fe526fbf1f45c5a81f4a..155228c5abc5a18f504d969695acdd94b1b454e1 100644
--- a/lib/modules/manager/npm/update/package-version/index.spec.ts
+++ b/lib/modules/manager/npm/update/package-version/index.spec.ts
@@ -12,7 +12,7 @@ describe('modules/manager/npm/update/package-version/index', () => {
       const { bumpedContent } = npmUpdater.bumpPackageVersion(
         content,
         '0.0.2',
-        'mirror:chalk'
+        'mirror:chalk',
       );
       expect(bumpedContent).toMatchSnapshot();
       expect(bumpedContent).not.toEqual(content);
@@ -22,7 +22,7 @@ describe('modules/manager/npm/update/package-version/index', () => {
       const { bumpedContent } = npmUpdater.bumpPackageVersion(
         content,
         '0.0.2',
-        'mirror:a'
+        'mirror:a',
       );
       expect(bumpedContent).toEqual(content);
     });
@@ -31,7 +31,7 @@ describe('modules/manager/npm/update/package-version/index', () => {
       const { bumpedContent } = npmUpdater.bumpPackageVersion(
         content,
         '0.0.2',
-        'patch'
+        'patch',
       );
       expect(bumpedContent).toMatchSnapshot();
       expect(bumpedContent).not.toEqual(content);
@@ -41,7 +41,7 @@ describe('modules/manager/npm/update/package-version/index', () => {
       const { bumpedContent } = npmUpdater.bumpPackageVersion(
         content,
         '0.0.1',
-        'patch'
+        'patch',
       );
       expect(bumpedContent).toEqual(content);
     });
@@ -50,7 +50,7 @@ describe('modules/manager/npm/update/package-version/index', () => {
       const { bumpedContent } = npmUpdater.bumpPackageVersion(
         content,
         '0.0.1',
-        'minor'
+        'minor',
       );
       expect(bumpedContent).toMatchSnapshot();
       expect(bumpedContent).not.toEqual(content);
@@ -66,7 +66,7 @@ describe('modules/manager/npm/update/package-version/index', () => {
       const { bumpedContent } = npmUpdater1.bumpPackageVersion(
         content,
         '0.0.2',
-        true as any
+        true as any,
       );
       expect(bumpedContent).toEqual(content);
     });
diff --git a/lib/modules/manager/npm/update/package-version/index.ts b/lib/modules/manager/npm/update/package-version/index.ts
index 3e72b93c5d1cd86a060da341754221175ccf4e39..1c9f8a470439cc08981c10377161aab2ff015bdc 100644
--- a/lib/modules/manager/npm/update/package-version/index.ts
+++ b/lib/modules/manager/npm/update/package-version/index.ts
@@ -6,7 +6,7 @@ import type { BumpPackageVersionResult } from '../../../types';
 type MirrorBumpVersion = `mirror:${string}`;
 
 function isMirrorBumpVersion(
-  bumpVersion: string
+  bumpVersion: string,
 ): bumpVersion is MirrorBumpVersion {
   return bumpVersion.startsWith('mirror:');
 }
@@ -14,11 +14,11 @@ function isMirrorBumpVersion(
 export function bumpPackageVersion(
   content: string,
   currentValue: string,
-  bumpVersion: ReleaseType | `mirror:${string}`
+  bumpVersion: ReleaseType | `mirror:${string}`,
 ): BumpPackageVersionResult {
   logger.debug(
     { bumpVersion, currentValue },
-    'Checking if we should bump package.json version'
+    'Checking if we should bump package.json version',
   );
   // TODO: types (#22198)
   let newPjVersion: string | null;
@@ -44,7 +44,7 @@ export function bumpPackageVersion(
     logger.debug(`newPjVersion: ${newPjVersion!}`);
     bumpedContent = content.replace(
       regEx(`(?<version>"version":\\s*")[^"]*`),
-      `$<version>${newPjVersion!}`
+      `$<version>${newPjVersion!}`,
     );
     if (bumpedContent === content) {
       logger.debug('Version was already bumped');
@@ -58,7 +58,7 @@ export function bumpPackageVersion(
         currentValue,
         bumpVersion,
       },
-      'Failed to bumpVersion'
+      'Failed to bumpVersion',
     );
   }
   return { bumpedContent };
diff --git a/lib/modules/manager/nuget/artifacts.spec.ts b/lib/modules/manager/nuget/artifacts.spec.ts
index 9c338d7e35b4ebcd36c9104baefa627e52accf18..4dfbbc4a04de05f9ece7e791d8185ff6adbf0a94 100644
--- a/lib/modules/manager/nuget/artifacts.spec.ts
+++ b/lib/modules/manager/nuget/artifacts.spec.ts
@@ -54,7 +54,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toBeEmptyArray();
   });
@@ -62,7 +62,7 @@ describe('modules/manager/nuget/artifacts', () => {
   it('aborts if lock file is unchanged', async () => {
     const execSnapshots = mockExecAll();
     fs.getSiblingFileName.mockReturnValueOnce(
-      'path/with space/packages.lock.json'
+      'path/with space/packages.lock.json',
     );
     git.getFiles.mockResolvedValueOnce({
       'path/with space/packages.lock.json': 'Current packages.lock.json',
@@ -76,7 +76,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'foo' }, { depName: 'bar' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       {
@@ -108,7 +108,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'dep' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -148,7 +148,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'dep' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toBeEmptyArray();
   });
@@ -168,7 +168,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toBeEmptyArray();
   });
@@ -191,7 +191,7 @@ describe('modules/manager/nuget/artifacts', () => {
           ...config,
           isLockFileMaintenance: true,
         },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -236,7 +236,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'dep' }],
         newPackageFileContent: '{}',
         config: { ...config, constraints: { dotnet: '7.0.100' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -296,7 +296,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'dep' }],
         newPackageFileContent: '{}',
         config: { ...config, constraints: { dotnet: '7.0.100' } },
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -350,7 +350,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'dep' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -390,7 +390,7 @@ describe('modules/manager/nuget/artifacts', () => {
         updatedDeps: [{ depName: 'dep' }],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         artifactError: {
diff --git a/lib/modules/manager/nuget/artifacts.ts b/lib/modules/manager/nuget/artifacts.ts
index 08f8f8fc6025a2b8397498220967b33400d4c65e..484d2f0b36a172381f678be4de2820824f1f94b3 100644
--- a/lib/modules/manager/nuget/artifacts.ts
+++ b/lib/modules/manager/nuget/artifacts.ts
@@ -29,7 +29,7 @@ import { getConfiguredRegistries, getDefaultRegistries } from './util';
 
 async function createCachedNuGetConfigFile(
   nugetCacheDir: string,
-  packageFileName: string
+  packageFileName: string,
 ): Promise<string> {
   const registries =
     (await getConfiguredRegistries(packageFileName)) ?? getDefaultRegistries();
@@ -46,13 +46,13 @@ async function createCachedNuGetConfigFile(
 async function runDotnetRestore(
   packageFileName: string,
   dependentPackageFileNames: string[],
-  config: UpdateArtifactsConfig
+  config: UpdateArtifactsConfig,
 ): Promise<void> {
   const nugetCacheDir = join(privateCacheDir(), 'nuget');
 
   const nugetConfigFile = await createCachedNuGetConfigFile(
     nugetCacheDir,
-    packageFileName
+    packageFileName,
   );
 
   const execOptions: ExecOptions = {
@@ -70,8 +70,8 @@ async function runDotnetRestore(
     ...dependentPackageFileNames.map(
       (fileName) =>
         `dotnet restore ${quote(
-          fileName
-        )} --force-evaluate --configfile ${quote(nugetConfigFile)}`
+          fileName,
+        )} --force-evaluate --configfile ${quote(nugetConfigFile)}`,
     ),
   ];
   await exec(cmds, execOptions);
@@ -103,35 +103,35 @@ export async function updateArtifacts({
     // have been changed in such cases.
     logger.debug(
       { packageFileName },
-      'Not updating lock file for non project files'
+      'Not updating lock file for non project files',
     );
     return null;
   }
 
   const deps = await getDependentPackageFiles(
     packageFileName,
-    isCentralManament
+    isCentralManament,
   );
   const packageFiles = deps.filter((d) => d.isLeaf).map((d) => d.name);
 
   logger.trace(
     { packageFiles },
-    `Found ${packageFiles.length} dependent package files`
+    `Found ${packageFiles.length} dependent package files`,
   );
 
   const lockFileNames = deps.map((f) =>
-    getSiblingFileName(f.name, 'packages.lock.json')
+    getSiblingFileName(f.name, 'packages.lock.json'),
   );
 
   const existingLockFileContentMap = await getFiles(lockFileNames);
 
   const hasLockFileContent = Object.values(existingLockFileContentMap).some(
-    (val) => !!val
+    (val) => !!val,
   );
   if (!hasLockFileContent) {
     logger.debug(
       { packageFileName },
-      'No lock file found for package or dependents'
+      'No lock file found for package or dependents',
     );
     return null;
   }
@@ -139,7 +139,7 @@ export async function updateArtifacts({
   try {
     if (updatedDeps.length === 0 && config.isLockFileMaintenance !== true) {
       logger.debug(
-        `Not updating lock file because no deps changed and no lock file maintenance.`
+        `Not updating lock file because no deps changed and no lock file maintenance.`,
       );
       return null;
     }
diff --git a/lib/modules/manager/nuget/config-formatter.spec.ts b/lib/modules/manager/nuget/config-formatter.spec.ts
index edd1bcb01bf52ebf77b77a19f4c9464425823b79..115c99d6717fd350a380e9a6924b9a8fd64d15a0 100644
--- a/lib/modules/manager/nuget/config-formatter.spec.ts
+++ b/lib/modules/manager/nuget/config-formatter.spec.ts
@@ -31,27 +31,27 @@ describe('modules/manager/nuget/config-formatter', () => {
 
       const myRegistry = packageSources?.childWithAttribute(
         'key',
-        'myRegistry'
+        'myRegistry',
       );
       expect(myRegistry?.name).toBe('add');
       expect(myRegistry?.attr['value']).toBe(
-        'https://my-registry.example.org/'
+        'https://my-registry.example.org/',
       );
       expect(myRegistry?.attr['protocolVersion']).toBe('2');
 
       const myRegistry2 = packageSources?.childWithAttribute(
         'key',
-        'myRegistry2'
+        'myRegistry2',
       );
       expect(myRegistry2?.name).toBe('add');
       expect(myRegistry2?.attr['value']).toBe(
-        'https://my-registry2.example.org/index.json'
+        'https://my-registry2.example.org/index.json',
       );
       expect(myRegistry2?.attr['protocolVersion']).toBe('3');
 
       const myUnnamedRegistry = packageSources?.childWithAttribute(
         'value',
-        'https://my-unnamed-registry.example.org/index.json'
+        'https://my-unnamed-registry.example.org/index.json',
       );
       expect(myUnnamedRegistry?.name).toBe('add');
       expect(myUnnamedRegistry?.attr['key']).toBe('Package source 1');
@@ -88,53 +88,53 @@ describe('modules/manager/nuget/config-formatter', () => {
 
       const myRegistry = packageSources?.childWithAttribute(
         'key',
-        'myRegistry'
+        'myRegistry',
       );
       expect(myRegistry?.name).toBe('add');
 
       const myRegistry2 = packageSources?.childWithAttribute(
         'key',
-        'myRegistry2'
+        'myRegistry2',
       );
       expect(myRegistry2?.name).toBe('add');
 
       const myRegistryCredentials = xmlDocument.descendantWithPath(
-        'packageSourceCredentials.myRegistry'
+        'packageSourceCredentials.myRegistry',
       );
       expect(
         myRegistryCredentials?.childWithAttribute('key', 'Username')?.attr[
           'value'
-        ]
+        ],
       ).toBe('some-username');
 
       expect(
         myRegistryCredentials?.childWithAttribute('key', 'ClearTextPassword')
-          ?.attr['value']
+          ?.attr['value'],
       ).toBe('some-password');
 
       expect(
         myRegistryCredentials?.childWithAttribute(
           'key',
-          'ValidAuthenticationTypes'
-        )?.attr['value']
+          'ValidAuthenticationTypes',
+        )?.attr['value'],
       ).toBe('basic');
 
       const myRegistry2Credentials = xmlDocument.descendantWithPath(
-        'packageSourceCredentials.myRegistry2'
+        'packageSourceCredentials.myRegistry2',
       );
       expect(
-        myRegistry2Credentials?.childWithAttribute('key', 'Username')
+        myRegistry2Credentials?.childWithAttribute('key', 'Username'),
       ).toBeUndefined();
       expect(
         myRegistry2Credentials?.childWithAttribute('key', 'ClearTextPassword')
-          ?.attr['value']
+          ?.attr['value'],
       ).toBe('some-password');
 
       expect(
         myRegistry2Credentials?.childWithAttribute(
           'key',
-          'ValidAuthenticationTypes'
-        )?.attr['value']
+          'ValidAuthenticationTypes',
+        )?.attr['value'],
       ).toBe('basic');
     });
 
@@ -157,27 +157,27 @@ describe('modules/manager/nuget/config-formatter', () => {
       const xmlDocument = new XmlDocument(xml);
 
       const packageSourceCredentials = xmlDocument.childNamed(
-        'packageSourceCredentials'
+        'packageSourceCredentials',
       );
       expect(packageSourceCredentials).toBeDefined();
 
       const registryCredentialsWithSpecialName =
         packageSourceCredentials?.childNamed(
-          'my__x0020__very__x003f____x0020__weird__x0021__-regi__x0024__try_name'
+          'my__x0020__very__x003f____x0020__weird__x0021__-regi__x0024__try_name',
         );
 
       expect(
         registryCredentialsWithSpecialName?.childWithAttribute(
           'key',
-          'Username'
-        )?.attr['value']
+          'Username',
+        )?.attr['value'],
       ).toBe('some-username');
 
       expect(
         registryCredentialsWithSpecialName?.childWithAttribute(
           'key',
-          'ClearTextPassword'
-        )?.attr['value']
+          'ClearTextPassword',
+        )?.attr['value'],
       ).toBe('some-password');
     });
 
@@ -196,10 +196,10 @@ describe('modules/manager/nuget/config-formatter', () => {
 
       const myRegistry = packageSources?.childWithAttribute(
         'key',
-        'myRegistry'
+        'myRegistry',
       );
       expect(myRegistry?.attr['value']).toBe(
-        'https://my-registry.example.org/'
+        'https://my-registry.example.org/',
       );
       expect(myRegistry?.attr['protocolVersion']).toBe('3');
     });
@@ -224,26 +224,26 @@ describe('modules/manager/nuget/config-formatter', () => {
       const xml = createNuGetConfigXml(registries);
       const xmlDocument = new XmlDocument(xml);
       const packageSourceMapping = xmlDocument.childNamed(
-        'packageSourceMapping'
+        'packageSourceMapping',
       );
       expect(packageSourceMapping).toBeDefined();
 
       const myRegistryMaps = packageSourceMapping?.childWithAttribute(
         'key',
-        'myRegistry'
+        'myRegistry',
       );
       expect(myRegistryMaps?.name).toBe('packageSource');
       expect(myRegistryMaps?.childNamed('package')?.attr['pattern']).toBe('*');
 
       const myRegistry2Maps = packageSourceMapping?.childWithAttribute(
         'key',
-        'myRegistry2'
+        'myRegistry2',
       );
       expect(myRegistry2Maps?.name).toBe('packageSource');
       expect(
         myRegistry2Maps
           ?.childrenNamed('package')
-          .map((child) => child.attr['pattern'])
+          .map((child) => child.attr['pattern']),
       ).toEqual(['LimitedPackages.*', 'MySpecialPackage']);
     });
 
@@ -262,7 +262,7 @@ describe('modules/manager/nuget/config-formatter', () => {
       const xml = createNuGetConfigXml(registries);
       const xmlDocument = new XmlDocument(xml);
       const packageSourceMapping = xmlDocument.childNamed(
-        'packageSourceMapping'
+        'packageSourceMapping',
       );
       expect(packageSourceMapping).toBeUndefined();
     });
diff --git a/lib/modules/manager/nuget/config-formatter.ts b/lib/modules/manager/nuget/config-formatter.ts
index 9f5355fc77bc00221720f3203e5c7425fb6aae4a..66360fdc73358a5db2decfb3bbde77b1b0315a82 100644
--- a/lib/modules/manager/nuget/config-formatter.ts
+++ b/lib/modules/manager/nuget/config-formatter.ts
@@ -72,7 +72,7 @@ export function createNuGetConfigXml(registries: Registry[]): string {
 
 function formatPackageSourceElement(
   registryInfo: ParsedRegistryUrl,
-  name: string
+  name: string,
 ): string {
   let element = `<add key="${name}" value="${registryInfo.feedUrl}" `;
 
@@ -84,7 +84,7 @@ function formatPackageSourceElement(
 }
 
 function formatPackageSourceCredentialElement(
-  credential: PackageSourceCredential
+  credential: PackageSourceCredential,
 ): string {
   const escapedName = escapeName(credential.name);
 
diff --git a/lib/modules/manager/nuget/extract.spec.ts b/lib/modules/manager/nuget/extract.spec.ts
index 5156a9fbd406b11c2974d3efce7f19f03a141fc8..c60eec0e467813eb486c04647471a6f2290c970b 100644
--- a/lib/modules/manager/nuget/extract.spec.ts
+++ b/lib/modules/manager/nuget/extract.spec.ts
@@ -24,7 +24,7 @@ describe('modules/manager/nuget/extract', () => {
 
     it('returns null for invalid csproj', async () => {
       expect(
-        await extractPackageFile('nothing here', 'bogus', config)
+        await extractPackageFile('nothing here', 'bogus', config),
       ).toBeNull();
     });
 
@@ -221,7 +221,7 @@ describe('modules/manager/nuget/extract', () => {
         packageFileVersion: '0.1.0',
       });
       expect(
-        await extractPackageFile(otherContents, otherPackageFile, config)
+        await extractPackageFile(otherContents, otherPackageFile, config),
       ).toEqual({
         deps: [
           {
@@ -243,7 +243,7 @@ describe('modules/manager/nuget/extract', () => {
       const packageFile = 'msbuild-sdk-files/global.json';
       const contents = Fixtures.get(packageFile);
       expect(
-        await extractPackageFile(contents, packageFile, config)
+        await extractPackageFile(contents, packageFile, config),
       ).toMatchObject({
         deps: [
           {
@@ -266,7 +266,7 @@ describe('modules/manager/nuget/extract', () => {
       const packageFile = 'msbuild-sdk-files/global.1.json';
       const contents = Fixtures.get(packageFile);
       expect(
-        await extractPackageFile(contents, 'global.json', config)
+        await extractPackageFile(contents, 'global.json', config),
       ).toMatchObject({
         deps: [
           {
@@ -283,7 +283,7 @@ describe('modules/manager/nuget/extract', () => {
       const packageFile = 'msbuild-sdk-files/invalid-json/global.json';
       const contents = Fixtures.get(packageFile);
       expect(
-        await extractPackageFile(contents, packageFile, config)
+        await extractPackageFile(contents, packageFile, config),
       ).toBeNull();
     });
 
@@ -291,7 +291,7 @@ describe('modules/manager/nuget/extract', () => {
       const packageFile = 'msbuild-sdk-files/not-nuget/global.json';
       const contents = Fixtures.get(packageFile);
       expect(
-        await extractPackageFile(contents, packageFile, config)
+        await extractPackageFile(contents, packageFile, config),
       ).toBeNull();
     });
 
@@ -310,7 +310,7 @@ describe('modules/manager/nuget/extract', () => {
                 depType: 'nuget',
               },
             ],
-          }
+          },
         );
       });
 
@@ -319,8 +319,8 @@ describe('modules/manager/nuget/extract', () => {
           await extractPackageFile(
             contents,
             `with-config-file/${packageFile}`,
-            config
-          )
+            config,
+          ),
         ).toEqual({
           deps: [
             {
@@ -342,14 +342,14 @@ describe('modules/manager/nuget/extract', () => {
           await extractPackageFile(
             contents.replace('"version": 1,', '"version": 2,'),
             packageFile,
-            config
-          )
+            config,
+          ),
         ).toBeNull();
       });
 
       it('returns null for no deps', async () => {
         expect(
-          await extractPackageFile('{"version": 1}', packageFile, config)
+          await extractPackageFile('{"version": 1}', packageFile, config),
         ).toBeNull();
       });
 
diff --git a/lib/modules/manager/nuget/extract.ts b/lib/modules/manager/nuget/extract.ts
index 80d15ad9f0c0a6ae825e2be45b39a9b5c9f6b35f..4cd519bf63833ee5e5f061d154bd202da232fb76 100644
--- a/lib/modules/manager/nuget/extract.ts
+++ b/lib/modules/manager/nuget/extract.ts
@@ -26,7 +26,7 @@ import { findVersion, getConfiguredRegistries } from './util';
  * so we don't include it in the extracting regexp
  */
 const checkVersion = regEx(
-  `^\\s*(?:[[])?(?:(?<currentValue>[^"(,[\\]]+)\\s*(?:,\\s*[)\\]]|])?)\\s*$`
+  `^\\s*(?:[[])?(?:(?<currentValue>[^"(,[\\]]+)\\s*(?:,\\s*[)\\]]|])?)\\s*$`,
 );
 const elemNames = new Set([
   'PackageReference',
@@ -74,7 +74,7 @@ function extractDepsFromXml(xmlNode: XmlDocument): PackageDependency[] {
 export async function extractPackageFile(
   content: string,
   packageFile: string,
-  _config: ExtractConfig
+  _config: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   logger.trace(`nuget.extractPackageFile(${packageFile})`);
 
diff --git a/lib/modules/manager/nuget/extract/global-manifest.ts b/lib/modules/manager/nuget/extract/global-manifest.ts
index 5cb31deb72ca5e41f1b6535a3dd03bff9204e53e..45a61aba1f1e20d3337c4c49f5a8c957613c954b 100644
--- a/lib/modules/manager/nuget/extract/global-manifest.ts
+++ b/lib/modules/manager/nuget/extract/global-manifest.ts
@@ -6,7 +6,7 @@ import type { MsbuildGlobalManifest } from '../types';
 
 export function extractMsbuildGlobalManifest(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   const deps: PackageDependency[] = [];
   let manifest: MsbuildGlobalManifest;
diff --git a/lib/modules/manager/nuget/package-tree.spec.ts b/lib/modules/manager/nuget/package-tree.spec.ts
index ab210f02478a942ccbb75b3b3461944b975f5756..8cf2cd9e6f013cb342050213900138450ec68bb2 100644
--- a/lib/modules/manager/nuget/package-tree.spec.ts
+++ b/lib/modules/manager/nuget/package-tree.spec.ts
@@ -10,9 +10,9 @@ jest.mock('fs', () => memfs);
 jest.mock('fs-extra', () =>
   jest
     .requireActual<typeof import('../../../../test/fixtures')>(
-      '../../../../test/fixtures'
+      '../../../../test/fixtures',
     )
-    .fsExtra()
+    .fsExtra(),
 );
 jest.mock('../../../util/git');
 
@@ -36,7 +36,7 @@ describe('modules/manager/nuget/package-tree', () => {
       scm.getFileList.mockResolvedValue(['single.csproj']);
       Fixtures.mock({
         '/tmp/repo/single.csproj': Fixtures.get(
-          'single-project-file/single.csproj'
+          'single-project-file/single.csproj',
         ),
       });
 
@@ -64,10 +64,10 @@ describe('modules/manager/nuget/package-tree', () => {
       scm.getFileList.mockResolvedValue(['one/one.csproj', 'two/two.csproj']);
       Fixtures.mock({
         '/tmp/repo/one/one.csproj': Fixtures.get(
-          'two-one-reference/one/one.csproj'
+          'two-one-reference/one/one.csproj',
         ),
         '/tmp/repo/two/two.csproj': Fixtures.get(
-          'two-one-reference/two/two.csproj'
+          'two-one-reference/two/two.csproj',
         ),
       });
 
@@ -81,18 +81,18 @@ describe('modules/manager/nuget/package-tree', () => {
       scm.getFileList.mockResolvedValue(['one/one.csproj', 'two/two.csproj']);
       Fixtures.mock({
         '/tmp/repo/one/one.csproj': Fixtures.get(
-          'two-one-reference-with-central-versions/one/one.csproj'
+          'two-one-reference-with-central-versions/one/one.csproj',
         ),
         '/tmp/repo/two/two.csproj': Fixtures.get(
-          'two-one-reference-with-central-versions/two/two.csproj'
+          'two-one-reference-with-central-versions/two/two.csproj',
         ),
         '/tmp/repo/Directory.Packages.props': Fixtures.get(
-          'two-one-reference-with-central-versions/Directory.Packages.props'
+          'two-one-reference-with-central-versions/Directory.Packages.props',
         ),
       });
 
       expect(
-        await getDependentPackageFiles('Directory.Packages.props', true)
+        await getDependentPackageFiles('Directory.Packages.props', true),
       ).toEqual([
         { isLeaf: false, name: 'one/one.csproj' },
         { isLeaf: true, name: 'two/two.csproj' },
@@ -107,13 +107,13 @@ describe('modules/manager/nuget/package-tree', () => {
       ]);
       Fixtures.mock({
         '/tmp/repo/one/one.csproj': Fixtures.get(
-          'three-two-linear-references/one/one.csproj'
+          'three-two-linear-references/one/one.csproj',
         ),
         '/tmp/repo/two/two.csproj': Fixtures.get(
-          'three-two-linear-references/two/two.csproj'
+          'three-two-linear-references/two/two.csproj',
         ),
         '/tmp/repo/three/three.csproj': Fixtures.get(
-          'three-two-linear-references/three/three.csproj'
+          'three-two-linear-references/three/three.csproj',
         ),
       });
 
@@ -141,13 +141,13 @@ describe('modules/manager/nuget/package-tree', () => {
       ]);
       Fixtures.mock({
         '/tmp/repo/one/one.csproj': Fixtures.get(
-          'three-two-treelike-references/one/one.csproj'
+          'three-two-treelike-references/one/one.csproj',
         ),
         '/tmp/repo/two/two.csproj': Fixtures.get(
-          'three-two-treelike-references/two/two.csproj'
+          'three-two-treelike-references/two/two.csproj',
         ),
         '/tmp/repo/three/three.csproj': Fixtures.get(
-          'three-two-treelike-references/three/three.csproj'
+          'three-two-treelike-references/three/three.csproj',
         ),
       });
 
@@ -169,15 +169,15 @@ describe('modules/manager/nuget/package-tree', () => {
       scm.getFileList.mockResolvedValue(['one/one.csproj', 'two/two.csproj']);
       Fixtures.mock({
         '/tmp/repo/one/one.csproj': Fixtures.get(
-          'circular-reference/one/one.csproj'
+          'circular-reference/one/one.csproj',
         ),
         '/tmp/repo/two/two.csproj': Fixtures.get(
-          'circular-reference/two/two.csproj'
+          'circular-reference/two/two.csproj',
         ),
       });
 
       await expect(getDependentPackageFiles('one/one.csproj')).rejects.toThrow(
-        'Circular reference detected in NuGet package files'
+        'Circular reference detected in NuGet package files',
       );
     });
 
diff --git a/lib/modules/manager/nuget/package-tree.ts b/lib/modules/manager/nuget/package-tree.ts
index ca277baf52c53f0426e06460da7ec3fe221d175f..0720f69b6e2fef0242db30aaa42f2e12cdd9a43d 100644
--- a/lib/modules/manager/nuget/package-tree.ts
+++ b/lib/modules/manager/nuget/package-tree.ts
@@ -15,7 +15,7 @@ export const MSBUILD_CENTRAL_FILE = 'Packages.props';
  */
 export async function getDependentPackageFiles(
   packageFileName: string,
-  isCentralManament = false
+  isCentralManament = false,
 ): Promise<ProjectFile[]> {
   const packageFiles = await getAllPackageFiles();
   const graph: ReturnType<typeof Graph> = Graph();
@@ -52,10 +52,10 @@ export async function getDependentPackageFiles(
       .filter(is.nonEmptyString);
 
     const projectReferences = projectReferenceAttributes.map((a) =>
-      upath.normalize(a)
+      upath.normalize(a),
     );
     const normalizedRelativeProjectReferences = projectReferences.map((r) =>
-      reframeRelativePathToRootOfRepo(f, r)
+      reframeRelativePathToRootOfRepo(f, r),
     );
 
     for (const ref of normalizedRelativeProjectReferences) {
@@ -85,7 +85,7 @@ export async function getDependentPackageFiles(
 function recursivelyGetDependentPackageFiles(
   packageFileName: string,
   graph: ReturnType<typeof Graph>,
-  deps: Map<string, boolean>
+  deps: Map<string, boolean>,
 ): void {
   const dependents = graph.adjacent(packageFileName);
 
@@ -106,20 +106,20 @@ function recursivelyGetDependentPackageFiles(
  */
 function reframeRelativePathToRootOfRepo(
   dependentProjectRelativePath: string,
-  projectReference: string
+  projectReference: string,
 ): string {
   const virtualRepoRoot = '/';
   const absoluteDependentProjectPath = upath.resolve(
     virtualRepoRoot,
-    dependentProjectRelativePath
+    dependentProjectRelativePath,
   );
   const absoluteProjectReferencePath = upath.resolve(
     upath.dirname(absoluteDependentProjectPath),
-    projectReference
+    projectReference,
   );
   const relativeProjectReferencePath = upath.relative(
     virtualRepoRoot,
-    absoluteProjectReferencePath
+    absoluteProjectReferencePath,
   );
 
   return relativeProjectReferencePath;
@@ -131,7 +131,7 @@ function reframeRelativePathToRootOfRepo(
 async function getAllPackageFiles(): Promise<string[]> {
   const allFiles = await scm.getFileList();
   const filteredPackageFiles = allFiles.filter(
-    minimatch.filter('*.{cs,vb,fs}proj', { matchBase: true, nocase: true })
+    minimatch.filter('*.{cs,vb,fs}proj', { matchBase: true, nocase: true }),
   );
 
   logger.trace({ filteredPackageFiles }, 'Found package files');
diff --git a/lib/modules/manager/nuget/update.spec.ts b/lib/modules/manager/nuget/update.spec.ts
index a6abf1c90ade694c0d91474dc75bde33f9937f81..864b032cef14dfe69eec9846a234cb18c0257d06 100644
--- a/lib/modules/manager/nuget/update.spec.ts
+++ b/lib/modules/manager/nuget/update.spec.ts
@@ -18,7 +18,7 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         simpleContent,
         '0.0.1',
-        'patch'
+        'patch',
       );
 
       const project = new XmlDocument(bumpedContent!);
@@ -29,12 +29,12 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         simpleContent,
         '0.0.1',
-        'patch'
+        'patch',
       );
       const { bumpedContent: bumpedContent2 } = bumpPackageVersion(
         bumpedContent!,
         '0.0.1',
-        'patch'
+        'patch',
       );
 
       expect(bumpedContent).toEqual(bumpedContent2);
@@ -44,12 +44,12 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         issue23526InitialContent,
         '4.9.0',
-        'minor'
+        'minor',
       );
       const { bumpedContent: bumpedContent2 } = bumpPackageVersion(
         bumpedContent!,
         '4.9.0',
-        'minor'
+        'minor',
       );
 
       expect(bumpedContent2).toEqual(issue23526ExpectedContent);
@@ -59,7 +59,7 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         minimumContent,
         '1',
-        'patch'
+        'patch',
       );
 
       const project = new XmlDocument(bumpedContent!);
@@ -78,7 +78,7 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         originalContent,
         '0.0.1',
-        'patch'
+        'patch',
       );
 
       expect(bumpedContent).toEqual(originalContent);
@@ -88,7 +88,7 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         simpleContent,
         '0.0.1',
-        true as any
+        true as any,
       );
       expect(bumpedContent).toEqual(simpleContent);
     });
@@ -97,7 +97,7 @@ describe('modules/manager/nuget/update', () => {
       const { bumpedContent } = bumpPackageVersion(
         prereleaseContent,
         '1.0.0-1',
-        'prerelease'
+        'prerelease',
       );
 
       const project = new XmlDocument(bumpedContent!);
@@ -111,7 +111,7 @@ describe('modules/manager/nuget/update', () => {
 
       const project = new XmlDocument(bumpedContent!);
       expect(project.valueWithPath('PropertyGroup.VersionPrefix')).toBe(
-        '1.0.1'
+        '1.0.1',
       );
     });
   });
diff --git a/lib/modules/manager/nuget/update.ts b/lib/modules/manager/nuget/update.ts
index 052cfb29974721debbf3b7a7a566e1648df48dff..dd8202e8602abe00530c23d515e479770a99fc97 100644
--- a/lib/modules/manager/nuget/update.ts
+++ b/lib/modules/manager/nuget/update.ts
@@ -8,18 +8,18 @@ import { findVersion } from './util';
 export function bumpPackageVersion(
   content: string,
   currentValue: string,
-  bumpVersion: ReleaseType
+  bumpVersion: ReleaseType,
 ): BumpPackageVersionResult {
   logger.debug(
     { bumpVersion, currentValue },
-    'Checking if we should bump project version'
+    'Checking if we should bump project version',
   );
   let bumpedContent = content;
 
   if (!semver.valid(currentValue)) {
     logger.warn(
       { currentValue },
-      'Unable to bump project version, not a valid semver'
+      'Unable to bump project version, not a valid semver',
     );
     return { bumpedContent };
   }
@@ -29,7 +29,7 @@ export function bumpPackageVersion(
     const versionNode = findVersion(project);
     if (!versionNode) {
       logger.warn(
-        "Couldn't find Version or VersionPrefix in any PropertyGroup"
+        "Couldn't find Version or VersionPrefix in any PropertyGroup",
       );
       return { bumpedContent };
     }
@@ -38,7 +38,7 @@ export function bumpPackageVersion(
     if (currentProjVersion !== currentValue) {
       logger.warn(
         { currentValue, currentProjVersion },
-        "currentValue passed to bumpPackageVersion() doesn't match value found"
+        "currentValue passed to bumpPackageVersion() doesn't match value found",
       );
       return { bumpedContent };
     }
@@ -46,7 +46,7 @@ export function bumpPackageVersion(
     const startTagPosition = versionNode.startTagPosition;
     const versionPosition = content.indexOf(
       currentProjVersion,
-      startTagPosition
+      startTagPosition,
     );
 
     const newProjVersion = semver.inc(currentValue, bumpVersion);
@@ -59,7 +59,7 @@ export function bumpPackageVersion(
       content,
       versionPosition,
       currentValue,
-      newProjVersion
+      newProjVersion,
     );
   } catch (err) {
     logger.warn(
@@ -68,7 +68,7 @@ export function bumpPackageVersion(
         currentValue,
         bumpVersion,
       },
-      'Failed to bumpVersion'
+      'Failed to bumpVersion',
     );
   }
   return { bumpedContent };
diff --git a/lib/modules/manager/nuget/util.spec.ts b/lib/modules/manager/nuget/util.spec.ts
index 1a898ccdbd7efe9ba281caf71d718ee201ca8a99..80655a1ecd94dc0ca773155c87aefbcdb3772532 100644
--- a/lib/modules/manager/nuget/util.spec.ts
+++ b/lib/modules/manager/nuget/util.spec.ts
@@ -34,7 +34,7 @@ describe('modules/manager/nuget/util', () => {
   describe('getConfiguredRegistries', () => {
     it('reads nuget config file', async () => {
       fs.findUpLocal.mockReturnValue(
-        Promise.resolve<string | null>('NuGet.config')
+        Promise.resolve<string | null>('NuGet.config'),
       );
       fs.readLocalFile.mockResolvedValueOnce(
         codeBlock`
@@ -53,7 +53,7 @@ describe('modules/manager/nuget/util', () => {
                 <package pattern="NuGet.Common"/>
               </packageSource>
             </packageSourceMapping>
-          </configuration>`
+          </configuration>`,
       );
 
       const registries = await getConfiguredRegistries('NuGet.config');
@@ -72,7 +72,7 @@ describe('modules/manager/nuget/util', () => {
 
     it('reads nuget config file with default registry', async () => {
       fs.findUpLocal.mockReturnValue(
-        Promise.resolve<string | null>('NuGet.config')
+        Promise.resolve<string | null>('NuGet.config'),
       );
       fs.readLocalFile.mockResolvedValueOnce(
         codeBlock`
@@ -89,7 +89,7 @@ describe('modules/manager/nuget/util', () => {
               <package pattern="NuGet.Common"/>
             </packageSource>
           </packageSourceMapping>
-        </configuration>`
+        </configuration>`,
       );
 
       const registries = await getConfiguredRegistries('NuGet.config');
diff --git a/lib/modules/manager/nuget/util.ts b/lib/modules/manager/nuget/util.ts
index 17036469c628c031045a672f3af16068381265b6..8511c91b07f0d3b081a8cd7bf826fab6c193eaf8 100644
--- a/lib/modules/manager/nuget/util.ts
+++ b/lib/modules/manager/nuget/util.ts
@@ -7,7 +7,7 @@ import { nugetOrg } from '../../datasource/nuget';
 import type { Registry } from './types';
 
 export async function readFileAsXmlDocument(
-  file: string
+  file: string,
 ): Promise<XmlDocument | undefined> {
   try {
     // TODO #22198
@@ -29,14 +29,14 @@ export function getDefaultRegistries(): Registry[] {
 }
 
 export async function getConfiguredRegistries(
-  packageFile: string
+  packageFile: string,
 ): Promise<Registry[] | undefined> {
   // Valid file names taken from https://github.com/NuGet/NuGet.Client/blob/f64621487c0b454eda4b98af853bf4a528bef72a/src/NuGet.Core/NuGet.Configuration/Settings/Settings.cs#L34
   const nuGetConfigFileNames = ['nuget.config', 'NuGet.config', 'NuGet.Config'];
   // normalize paths, otherwise startsWith can fail because of path delimitter mismatch
   const nuGetConfigPath = await findUpLocal(
     nuGetConfigFileNames,
-    upath.dirname(packageFile)
+    upath.dirname(packageFile),
   );
   if (!nuGetConfigPath) {
     return undefined;
@@ -91,7 +91,7 @@ export async function getConfiguredRegistries(
               registryUrl,
               sourceMappedPackagePatterns,
             },
-            `Adding registry URL ${registryUrl}`
+            `Adding registry URL ${registryUrl}`,
           );
 
           registries.push({
@@ -102,7 +102,7 @@ export async function getConfiguredRegistries(
         } else {
           logger.debug(
             { registryUrl: child.attr.value },
-            'ignoring local registry URL'
+            'ignoring local registry URL',
           );
         }
       }
diff --git a/lib/modules/manager/osgi/extract.spec.ts b/lib/modules/manager/osgi/extract.spec.ts
index a6dec86575b1a86800c4c0f3ce66a19d987c5a36..7250b59f2dc4572bcacadc3c1f8162510c922cdf 100644
--- a/lib/modules/manager/osgi/extract.spec.ts
+++ b/lib/modules/manager/osgi/extract.spec.ts
@@ -115,13 +115,13 @@ describe('modules/manager/osgi/extract', () => {
 
     it('returns null for unsupported version of feature model definition', () => {
       expect(
-        extractPackageFile(unsupportedFeatureVersion, '', undefined)
+        extractPackageFile(unsupportedFeatureVersion, '', undefined),
       ).toBeNull();
     });
 
     it('returns null for an invalid version of feature model definition', () => {
       expect(
-        extractPackageFile(invalidFeatureVersion, '', undefined)
+        extractPackageFile(invalidFeatureVersion, '', undefined),
       ).toBeNull();
     });
 
@@ -137,7 +137,7 @@ describe('modules/manager/osgi/extract', () => {
       const packageFile = extractPackageFile(
         featureWithBundlesAsObjects,
         '',
-        undefined
+        undefined,
       );
       expect(packageFile).toEqual({
         deps: [
@@ -159,7 +159,7 @@ describe('modules/manager/osgi/extract', () => {
       const packageFile = extractPackageFile(
         featureWithBundlesAsStrings,
         '',
-        undefined
+        undefined,
       );
       expect(packageFile).toEqual({
         deps: [
@@ -207,7 +207,7 @@ describe('modules/manager/osgi/extract', () => {
       const packageFile = extractPackageFile(
         doubleSlashNotComment,
         '',
-        undefined
+        undefined,
       );
       expect(packageFile).toEqual({
         deps: [
@@ -242,7 +242,7 @@ describe('modules/manager/osgi/extract', () => {
       const packageFile = extractPackageFile(
         malformedDefinitions,
         '',
-        undefined
+        undefined,
       );
       expect(packageFile).toEqual({
         deps: [
@@ -263,7 +263,7 @@ describe('modules/manager/osgi/extract', () => {
       const packageFile = extractPackageFile(
         versionWithVariable,
         '',
-        undefined
+        undefined,
       );
       expect(packageFile).toEqual({
         deps: [
diff --git a/lib/modules/manager/osgi/extract.ts b/lib/modules/manager/osgi/extract.ts
index 5e45f4f8c1a8ca4fc6dfb4d5aceb43a4621f7dc4..659848b20c00e5aa855bcffaac2222c476b6947a 100644
--- a/lib/modules/manager/osgi/extract.ts
+++ b/lib/modules/manager/osgi/extract.ts
@@ -13,7 +13,7 @@ import type { Bundle, FeatureModel } from './types';
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  _config?: ExtractConfig
+  _config?: ExtractConfig,
 ): PackageFileContent | null {
   // References:
   // - OSGi compendium release 8 ( https://docs.osgi.org/specification/osgi.cmpn/8.0.0/service.feature.html )
@@ -102,14 +102,14 @@ export function extractPackageFile(
 
 function isSupportedFeatureResourceVersion(
   featureModel: FeatureModel,
-  fileName: string
+  fileName: string,
 ): boolean {
   const resourceVersion = featureModel['feature-resource-version'];
   if (resourceVersion) {
     const resourceSemVer = coerce(resourceVersion);
     if (!resourceSemVer) {
       logger.debug(
-        `Skipping file ${fileName} due to invalid feature-resource-version '${resourceVersion}'`
+        `Skipping file ${fileName} due to invalid feature-resource-version '${resourceVersion}'`,
       );
       return false;
     }
@@ -117,7 +117,7 @@ function isSupportedFeatureResourceVersion(
     // we only support 1.x, although no over version has been defined
     if (!satisfies(resourceSemVer, '^1')) {
       logger.debug(
-        `Skipping file ${fileName} due to unsupported feature-resource-version '${resourceVersion}'`
+        `Skipping file ${fileName} due to unsupported feature-resource-version '${resourceVersion}'`,
       );
       return false;
     }
@@ -128,7 +128,7 @@ function isSupportedFeatureResourceVersion(
 
 function extractArtifactList(
   sectionName: string,
-  sectionValue: unknown
+  sectionValue: unknown,
 ): Bundle[] {
   // The 'ARTIFACTS' key is supported by the Sling/OSGi feature model implementation
   if (sectionName.includes(':ARTIFACTS|') && is.array(sectionValue)) {
diff --git a/lib/modules/manager/pep621/artifacts.ts b/lib/modules/manager/pep621/artifacts.ts
index 2d913a805015fd643d9c222bc87506f9ecb939d4..5fa070e4ffaa1e55f0f8eeb4378d6b1dfa9171ad 100644
--- a/lib/modules/manager/pep621/artifacts.ts
+++ b/lib/modules/manager/pep621/artifacts.ts
@@ -5,7 +5,7 @@ import { processors } from './processors';
 import { parsePyProject } from './utils';
 
 export async function updateArtifacts(
-  updateArtifact: UpdateArtifact
+  updateArtifact: UpdateArtifact,
 ): Promise<UpdateArtifactsResult[] | null> {
   const { packageFileName, newPackageFileContent } = updateArtifact;
 
@@ -27,7 +27,7 @@ export async function updateArtifacts(
   for (const processor of processors) {
     const artifactUpdates = await processor.updateArtifacts(
       updateArtifact,
-      project
+      project,
     );
     if (is.array(artifactUpdates)) {
       result.push(...artifactUpdates);
diff --git a/lib/modules/manager/pep621/extract.spec.ts b/lib/modules/manager/pep621/extract.spec.ts
index 80cb00c1394fb34573e3ab7ac624a7e0cd4a40ef..31b6edbb20d5804bd884cbab170f86762e4567f8 100644
--- a/lib/modules/manager/pep621/extract.spec.ts
+++ b/lib/modules/manager/pep621/extract.spec.ts
@@ -18,7 +18,7 @@ describe('modules/manager/pep621/extract', () => {
         [project]
         name =
       `,
-        'pyproject.toml'
+        'pyproject.toml',
       );
       expect(result).toBeNull();
     });
@@ -32,7 +32,7 @@ describe('modules/manager/pep621/extract', () => {
         },
       });
       const dependencies = result?.deps.filter(
-        (dep) => dep.depType === 'project.dependencies'
+        (dep) => dep.depType === 'project.dependencies',
       );
       expect(dependencies).toEqual([
         {
@@ -122,7 +122,7 @@ describe('modules/manager/pep621/extract', () => {
       ]);
 
       const optionalDependencies = result?.deps.filter(
-        (dep) => dep.depType === 'project.optional-dependencies'
+        (dep) => dep.depType === 'project.optional-dependencies',
       );
       expect(optionalDependencies).toEqual([
         {
@@ -142,7 +142,7 @@ describe('modules/manager/pep621/extract', () => {
       ]);
 
       const pdmDevDependencies = result?.deps.filter(
-        (dep) => dep.depType === 'tool.pdm.dev-dependencies'
+        (dep) => dep.depType === 'tool.pdm.dev-dependencies',
       );
       expect(pdmDevDependencies).toEqual([
         {
@@ -251,7 +251,7 @@ describe('modules/manager/pep621/extract', () => {
       verify_ssl = true
       name = "internal"
       `,
-        'pyproject.toml'
+        'pyproject.toml',
       );
 
       expect(result?.deps).toEqual([
diff --git a/lib/modules/manager/pep621/extract.ts b/lib/modules/manager/pep621/extract.ts
index 19df56d9d5d8d149238abf7fef1d474e5ee022d0..f5c5a607f64574d92146762365f60ce77934f278 100644
--- a/lib/modules/manager/pep621/extract.ts
+++ b/lib/modules/manager/pep621/extract.ts
@@ -16,7 +16,7 @@ import {
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  _config?: ExtractConfig
+  _config?: ExtractConfig,
 ): PackageFileContent | null {
   logger.trace(`pep621.extractPackageFile(${packageFile})`);
 
@@ -33,13 +33,13 @@ export function extractPackageFile(
 
   // pyProject standard definitions
   deps.push(
-    ...parseDependencyList(depTypes.dependencies, def.project?.dependencies)
+    ...parseDependencyList(depTypes.dependencies, def.project?.dependencies),
   );
   deps.push(
     ...parseDependencyGroupRecord(
       depTypes.optionalDependencies,
-      def.project?.['optional-dependencies']
-    )
+      def.project?.['optional-dependencies'],
+    ),
   );
 
   // process specific tool sets
diff --git a/lib/modules/manager/pep621/processors/hatch.ts b/lib/modules/manager/pep621/processors/hatch.ts
index 3a709518482a057867d96049a433489ff4d23811..b482cb5b18cfc0519c0ee3d78b3a1f6a59280285 100644
--- a/lib/modules/manager/pep621/processors/hatch.ts
+++ b/lib/modules/manager/pep621/processors/hatch.ts
@@ -11,7 +11,7 @@ import type { PyProjectProcessor } from './types';
 export class HatchProcessor implements PyProjectProcessor {
   process(
     pyproject: PyProject,
-    deps: PackageDependency[]
+    deps: PackageDependency[],
   ): PackageDependency[] {
     const hatch_envs = pyproject.tool?.hatch?.envs;
     if (is.nullOrUndefined(hatch_envs)) {
@@ -24,7 +24,7 @@ export class HatchProcessor implements PyProjectProcessor {
       deps.push(...envDeps);
       const extraDeps = parseDependencyList(
         depType,
-        env?.['extra-dependencies']
+        env?.['extra-dependencies'],
       );
       deps.push(...extraDeps);
     }
@@ -34,7 +34,7 @@ export class HatchProcessor implements PyProjectProcessor {
 
   updateArtifacts(
     updateArtifact: UpdateArtifact,
-    project: PyProject
+    project: PyProject,
   ): Promise<UpdateArtifactsResult[] | null> {
     // Hatch does not have lock files at the moment
     // https://github.com/pypa/hatch/issues/749
diff --git a/lib/modules/manager/pep621/processors/pdm.spec.ts b/lib/modules/manager/pep621/processors/pdm.spec.ts
index 92b7127c3a96404d6360c977001631adc6db8c2d..3e44d383d6b12800dbabb9f7f8bc34fd1bdb010e 100644
--- a/lib/modules/manager/pep621/processors/pdm.spec.ts
+++ b/lib/modules/manager/pep621/processors/pdm.spec.ts
@@ -34,7 +34,7 @@ describe('modules/manager/pep621/processors/pdm', () => {
           config,
           updatedDeps,
         },
-        {}
+        {},
       );
       expect(result).toBeNull();
     });
@@ -66,7 +66,7 @@ describe('modules/manager/pep621/processors/pdm', () => {
           config: {},
           updatedDeps,
         },
-        {}
+        {},
       );
       expect(result).toBeNull();
       expect(execSnapshots).toMatchObject([
@@ -111,7 +111,7 @@ describe('modules/manager/pep621/processors/pdm', () => {
           config: {},
           updatedDeps,
         },
-        {}
+        {},
       );
       expect(result).toEqual([
         { artifactError: { lockFile: 'pdm.lock', stderr: 'test error' } },
@@ -163,7 +163,7 @@ describe('modules/manager/pep621/processors/pdm', () => {
           config: {},
           updatedDeps,
         },
-        {}
+        {},
       );
       expect(result).toEqual([
         {
@@ -214,7 +214,7 @@ describe('modules/manager/pep621/processors/pdm', () => {
           },
           updatedDeps: [],
         },
-        {}
+        {},
       );
       expect(result).toEqual([
         {
diff --git a/lib/modules/manager/pep621/processors/pdm.ts b/lib/modules/manager/pep621/processors/pdm.ts
index 9e630952f9e9574250fe1426148864c8095eb607..d80ea729c816adc6d49a9b5a1b88e2da512e8de3 100644
--- a/lib/modules/manager/pep621/processors/pdm.ts
+++ b/lib/modules/manager/pep621/processors/pdm.ts
@@ -27,8 +27,8 @@ export class PdmProcessor implements PyProjectProcessor {
     deps.push(
       ...parseDependencyGroupRecord(
         depTypes.pdmDevDependencies,
-        pdm['dev-dependencies']
-      )
+        pdm['dev-dependencies'],
+      ),
     );
 
     const pdmSource = pdm.source;
@@ -54,7 +54,7 @@ export class PdmProcessor implements PyProjectProcessor {
 
   async updateArtifacts(
     updateArtifact: UpdateArtifact,
-    project: PyProject
+    project: PyProject,
   ): Promise<UpdateArtifactsResult[] | null> {
     const { config, updatedDeps, packageFileName } = updateArtifact;
 
@@ -140,7 +140,7 @@ function generateCMDs(updatedDeps: Upgrade[]): string[] {
         addPackageToCMDRecord(
           packagesByCMD,
           `${pdmUpdateCMD} -G ${group}`,
-          name
+          name,
         );
         break;
       }
@@ -149,7 +149,7 @@ function generateCMDs(updatedDeps: Upgrade[]): string[] {
         addPackageToCMDRecord(
           packagesByCMD,
           `${pdmUpdateCMD} -dG ${group}`,
-          name
+          name,
         );
         break;
       }
@@ -171,7 +171,7 @@ function generateCMDs(updatedDeps: Upgrade[]): string[] {
 function addPackageToCMDRecord(
   packagesByCMD: Record<string, string[]>,
   commandPrefix: string,
-  packageName: string
+  packageName: string,
 ): void {
   if (is.nullOrUndefined(packagesByCMD[commandPrefix])) {
     packagesByCMD[commandPrefix] = [];
diff --git a/lib/modules/manager/pep621/processors/types.ts b/lib/modules/manager/pep621/processors/types.ts
index bfbf0357e9ef5e7dda8e857c0d6be76f0926f2b9..16a382ce08f788efb1852e124bace09f301a2a0d 100644
--- a/lib/modules/manager/pep621/processors/types.ts
+++ b/lib/modules/manager/pep621/processors/types.ts
@@ -8,7 +8,7 @@ import type { PyProject } from '../schema';
 export interface PyProjectProcessor {
   updateArtifacts(
     updateArtifact: UpdateArtifact,
-    project: PyProject
+    project: PyProject,
   ): Promise<UpdateArtifactsResult[] | null>;
 
   /**
diff --git a/lib/modules/manager/pep621/schema.ts b/lib/modules/manager/pep621/schema.ts
index 220cdc3ebd3d82dafaf30f15339f0c3ce3649145..dbf9e1f4f2345b78b765700b49859c585d931a4a 100644
--- a/lib/modules/manager/pep621/schema.ts
+++ b/lib/modules/manager/pep621/schema.ts
@@ -26,7 +26,7 @@ export const PyProjectSchema = z.object({
                 url: z.string(),
                 name: z.string(),
                 verify_ssl: z.boolean().optional(),
-              })
+              }),
             )
             .optional(),
         })
@@ -41,7 +41,7 @@ export const PyProjectSchema = z.object({
                   dependencies: DependencyListSchema,
                   'extra-dependencies': DependencyListSchema,
                 })
-                .optional()
+                .optional(),
             )
             .optional(),
         })
diff --git a/lib/modules/manager/pep621/utils.spec.ts b/lib/modules/manager/pep621/utils.spec.ts
index 6029a7b8ec8a51efba5a4c6ee8dfc4f9c65ec6cb..849bb0f26550cff50ee1952ffeffd547db5843b5 100644
--- a/lib/modules/manager/pep621/utils.spec.ts
+++ b/lib/modules/manager/pep621/utils.spec.ts
@@ -24,7 +24,7 @@ describe('modules/manager/pep621/utils', () => {
           ? clear({ packageName, currentValue, extras, marker })
           : null;
         expect(result).toEqual(expected);
-      }
+      },
     );
   });
 });
diff --git a/lib/modules/manager/pep621/utils.ts b/lib/modules/manager/pep621/utils.ts
index b4ce5f3be6b4a275cac452e6e60d04a1d15fffbe..4420992bcb4f43a3c38f44ce4894de590dc12ed7 100644
--- a/lib/modules/manager/pep621/utils.ts
+++ b/lib/modules/manager/pep621/utils.ts
@@ -8,7 +8,7 @@ import { PyProject, PyProjectSchema } from './schema';
 import type { Pep508ParseResult } from './types';
 
 const pep508Regex = regEx(
-  /^(?<packageName>[A-Z0-9._-]+)\s*(\[(?<extras>[A-Z0-9,._-]+)\])?\s*(?<currentValue>[^;]+)?(;\s*(?<marker>.*))?/i
+  /^(?<packageName>[A-Z0-9._-]+)\s*(\[(?<extras>[A-Z0-9,._-]+)\])?\s*(?<currentValue>[^;]+)?(;\s*(?<marker>.*))?/i,
 );
 
 export const depTypes = {
@@ -18,7 +18,7 @@ export const depTypes = {
 };
 
 export function parsePEP508(
-  value: string | null | undefined
+  value: string | null | undefined,
 ): Pep508ParseResult | null {
   if (is.nullOrUndefined(value)) {
     return null;
@@ -51,7 +51,7 @@ export function parsePEP508(
 
 export function pep508ToPackageDependency(
   depType: string,
-  value: string
+  value: string,
 ): PackageDependency | null {
   const parsed = parsePEP508(value);
   if (is.nullOrUndefined(parsed)) {
@@ -75,7 +75,7 @@ export function pep508ToPackageDependency(
 
 export function parseDependencyGroupRecord(
   depType: string,
-  records: Record<string, string[]> | null | undefined
+  records: Record<string, string[]> | null | undefined,
 ): PackageDependency[] {
   if (is.nullOrUndefined(records)) {
     return [];
@@ -92,7 +92,7 @@ export function parseDependencyGroupRecord(
 
 export function parseDependencyList(
   depType: string,
-  list: string[] | null | undefined
+  list: string[] | null | undefined,
 ): PackageDependency[] {
   if (is.nullOrUndefined(list)) {
     return [];
@@ -110,7 +110,7 @@ export function parseDependencyList(
 
 export function parsePyProject(
   packageFile: string,
-  content: string
+  content: string,
 ): PyProject | null {
   try {
     const jsonMap = parseToml(content);
@@ -118,7 +118,7 @@ export function parsePyProject(
   } catch (err) {
     logger.debug(
       { packageFile, err },
-      `Failed to parse and validate pyproject file`
+      `Failed to parse and validate pyproject file`,
     );
     return null;
   }
diff --git a/lib/modules/manager/pip-compile/artifacts.spec.ts b/lib/modules/manager/pip-compile/artifacts.spec.ts
index 350a88030de5ad5adbfa5499151789166231f61b..14a92a54a443e3210b73b361b3549c194036b578 100644
--- a/lib/modules/manager/pip-compile/artifacts.spec.ts
+++ b/lib/modules/manager/pip-compile/artifacts.spec.ts
@@ -58,7 +58,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toEqual([]);
   });
@@ -73,7 +73,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchObject([
       { cmd: 'pip-compile requirements.in' },
@@ -86,7 +86,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['requirements.txt'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New requirements.txt');
     expect(
@@ -95,7 +95,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config: { ...config, constraints: { python: '3.7' } },
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchObject([
       { cmd: 'pip-compile requirements.in' },
@@ -112,7 +112,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['requirements.txt'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     fs.ensureCacheDir.mockResolvedValueOnce('/tmp/renovate/cache/others/pip');
@@ -122,7 +122,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config: { ...config, constraints: { python: '3.10.2' } },
-      })
+      }),
     ).not.toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -158,7 +158,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['requirements.txt'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     expect(
@@ -167,7 +167,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config: { ...config, constraints: { python: '3.10.2' } },
-      })
+      }),
     ).not.toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -192,7 +192,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       {
         artifactError: { lockFile: 'requirements.txt', stderr: 'not found' },
@@ -207,7 +207,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['requirements.txt'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New requirements.txt');
     expect(
@@ -216,7 +216,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: lockMaintenanceConfig,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchObject([
       { cmd: 'pip-compile requirements.in' },
@@ -233,7 +233,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['requirements.txt'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     fs.ensureCacheDir.mockResolvedValueOnce('/tmp/renovate/cache/others/pip');
@@ -246,7 +246,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
           ...config,
           constraints: { python: '3.10.2', pipTools: '6.13.0' },
         },
-      })
+      }),
     ).not.toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -278,8 +278,8 @@ describe('modules/manager/pip-compile/artifacts', () => {
         constructPipCompileCmd(
           Fixtures.get('requirementsNoHeaders.txt'),
           'subdir/requirements.in',
-          'subdir/requirements.txt'
-        )
+          'subdir/requirements.txt',
+        ),
       ).toBe('pip-compile requirements.in');
     });
 
@@ -288,10 +288,10 @@ describe('modules/manager/pip-compile/artifacts', () => {
         constructPipCompileCmd(
           Fixtures.get('requirementsWithHashes.txt'),
           'subdir/requirements.in',
-          'subdir/requirements.txt'
-        )
+          'subdir/requirements.txt',
+        ),
       ).toBe(
-        'pip-compile --allow-unsafe --generate-hashes --no-emit-index-url --strip-extras --resolver=backtracking --output-file=requirements.txt requirements.in'
+        'pip-compile --allow-unsafe --generate-hashes --no-emit-index-url --strip-extras --resolver=backtracking --output-file=requirements.txt requirements.in',
       );
     });
 
@@ -300,16 +300,16 @@ describe('modules/manager/pip-compile/artifacts', () => {
         constructPipCompileCmd(
           Fixtures.get('requirementsWithUnknownArguments.txt'),
           'subdir/requirements.in',
-          'subdir/requirements.txt'
-        )
+          'subdir/requirements.txt',
+        ),
       ).toBe('pip-compile --generate-hashes requirements.in');
       expect(logger.trace).toHaveBeenCalledWith(
         { argument: '--version' },
-        'pip-compile argument is not (yet) supported'
+        'pip-compile argument is not (yet) supported',
       );
       expect(logger.warn).toHaveBeenCalledWith(
         { argument: '--resolver=foobar' },
-        'pip-compile was previously executed with an unexpected `--resolver` value'
+        'pip-compile was previously executed with an unexpected `--resolver` value',
       );
     });
 
@@ -318,14 +318,14 @@ describe('modules/manager/pip-compile/artifacts', () => {
         constructPipCompileCmd(
           Fixtures.get('requirementsWithExploitingArguments.txt'),
           'subdir/requirements.in',
-          'subdir/requirements.txt'
-        )
+          'subdir/requirements.txt',
+        ),
       ).toBe(
-        'pip-compile --generate-hashes --output-file=requirements.txt requirements.in'
+        'pip-compile --generate-hashes --output-file=requirements.txt requirements.in',
       );
       expect(logger.warn).toHaveBeenCalledWith(
         { argument: '--output-file=/etc/shadow' },
-        'pip-compile was previously executed with an unexpected `--output-file` filename'
+        'pip-compile was previously executed with an unexpected `--output-file` filename',
       );
     });
   });
@@ -338,7 +338,7 @@ describe('modules/manager/pip-compile/artifacts', () => {
       'returns expected value for supported %s resolver',
       (argument: string, expected: string) => {
         expect(extractResolver(argument)).toBe(expected);
-      }
+      },
     );
 
     it.each(['--resolver=foo', '--resolver='])(
@@ -347,9 +347,9 @@ describe('modules/manager/pip-compile/artifacts', () => {
         expect(extractResolver(argument)).toBeNull();
         expect(logger.warn).toHaveBeenCalledWith(
           { argument },
-          'pip-compile was previously executed with an unexpected `--resolver` value'
+          'pip-compile was previously executed with an unexpected `--resolver` value',
         );
-      }
+      },
     );
   });
 });
diff --git a/lib/modules/manager/pip-compile/artifacts.ts b/lib/modules/manager/pip-compile/artifacts.ts
index 66d1c2d25733751f287e532138f5e264aa9e79f2..15b51aa9fc2cd1195fa6c3d16c2c54ca4e579b31 100644
--- a/lib/modules/manager/pip-compile/artifacts.ts
+++ b/lib/modules/manager/pip-compile/artifacts.ts
@@ -20,7 +20,7 @@ import type {
 } from '../types';
 
 function getPythonConstraint(
-  config: UpdateArtifactsConfig
+  config: UpdateArtifactsConfig,
 ): string | undefined | null {
   const { constraints = {} } = config;
   const { python } = constraints;
@@ -46,7 +46,7 @@ function getPipToolsConstraint(config: UpdateArtifactsConfig): string {
 }
 
 const constraintLineRegex = regEx(
-  /^(#.*?\r?\n)+# {4}pip-compile(?<arguments>.*?)\r?\n/
+  /^(#.*?\r?\n)+# {4}pip-compile(?<arguments>.*?)\r?\n/,
 );
 const allowedPipArguments = [
   '--allow-unsafe',
@@ -58,7 +58,7 @@ const allowedPipArguments = [
 export function constructPipCompileCmd(
   content: string,
   inputFileName: string,
-  outputFileName: string
+  outputFileName: string,
 ): string {
   const headers = constraintLineRegex.exec(content);
   const args = ['pip-compile'];
@@ -73,7 +73,7 @@ export function constructPipCompileCmd(
           // we don't trust the user-supplied output-file argument; use our value here
           logger.warn(
             { argument },
-            'pip-compile was previously executed with an unexpected `--output-file` filename'
+            'pip-compile was previously executed with an unexpected `--output-file` filename',
           );
         }
         args.push(`--output-file=${file}`);
@@ -85,7 +85,7 @@ export function constructPipCompileCmd(
       } else if (argument.startsWith('--')) {
         logger.trace(
           { argument },
-          'pip-compile argument is not (yet) supported'
+          'pip-compile argument is not (yet) supported',
         );
       } else {
         // ignore position argument (.in file)
@@ -104,7 +104,7 @@ export async function updateArtifacts({
 }: UpdateArtifact): Promise<UpdateArtifactsResult[] | null> {
   const outputFileName = inputFileName.replace(regEx(/(\.in)?$/), '.txt');
   logger.debug(
-    `pipCompile.updateArtifacts(${inputFileName}->${outputFileName})`
+    `pipCompile.updateArtifacts(${inputFileName}->${outputFileName})`,
   );
   const existingOutput = await readLocalFile(outputFileName, 'utf8');
   if (!existingOutput) {
@@ -119,7 +119,7 @@ export async function updateArtifacts({
     const cmd = constructPipCompileCmd(
       existingOutput,
       inputFileName,
-      outputFileName
+      outputFileName,
     );
     const constraint = getPythonConstraint(config);
     const pipToolsConstraint = getPipToolsConstraint(config);
@@ -181,7 +181,7 @@ export function extractResolver(argument: string): string | null {
 
   logger.warn(
     { argument },
-    'pip-compile was previously executed with an unexpected `--resolver` value'
+    'pip-compile was previously executed with an unexpected `--resolver` value',
   );
   return null;
 }
diff --git a/lib/modules/manager/pip_requirements/artifacts.spec.ts b/lib/modules/manager/pip_requirements/artifacts.spec.ts
index 314a591e70d997ec196bee6cef6ccd969b7f1691..d82910c9df68d715a44900a268ea76416b80a085 100644
--- a/lib/modules/manager/pip_requirements/artifacts.spec.ts
+++ b/lib/modules/manager/pip_requirements/artifacts.spec.ts
@@ -52,7 +52,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -64,7 +64,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [{ depName: 'eventlet' }],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -77,7 +77,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [{ depName: 'atomicwrites' }, { depName: 'boto3-stubs' }],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toBeNull();
 
     expect(execSnapshots).toMatchObject([
@@ -101,7 +101,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [{ depName: 'atomicwrites' }, { depName: 'boto3-stubs' }],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -137,7 +137,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         ],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -167,7 +167,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [{ depName: 'atomicwrites' }],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toEqual([
       {
         artifactError: {
@@ -205,7 +205,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [{ depName: 'atomicwrites' }],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -254,7 +254,7 @@ describe('modules/manager/pip_requirements/artifacts', () => {
         updatedDeps: [{ depName: 'atomicwrites' }],
         newPackageFileContent,
         config,
-      })
+      }),
     ).toEqual([
       {
         file: {
diff --git a/lib/modules/manager/pip_requirements/artifacts.ts b/lib/modules/manager/pip_requirements/artifacts.ts
index 53fd1587f49f72f17d7f21d846f2effde83dc235..d1c92e7f5351bfb6e7470f80a130a15ffd45e624 100644
--- a/lib/modules/manager/pip_requirements/artifacts.ts
+++ b/lib/modules/manager/pip_requirements/artifacts.ts
@@ -30,7 +30,7 @@ function dependencyAndHashPattern(depName: string): RegExp {
   // include all but the last hash specifier into depConstraint.
   return regEx(
     `^\\s*(?<depConstraint>${escapedDepName}${extrasPattern}\\s*==.*?\\S)\\s+--hash=`,
-    'm'
+    'm',
   );
 }
 
@@ -53,7 +53,7 @@ export async function updateArtifacts({
         continue;
       }
       const depAndHashMatch = dependencyAndHashPattern(dep.depName).exec(
-        rewrittenContent
+        rewrittenContent,
       );
       if (depAndHashMatch) {
         // If there's a match, then the regular expression guarantees
diff --git a/lib/modules/manager/pip_requirements/extract.ts b/lib/modules/manager/pip_requirements/extract.ts
index fa3c9c7b9232acbaa5645f50bcf4775b3034f00a..4670d5edcfb95b11f4fedf7dc42b4695136992bb 100644
--- a/lib/modules/manager/pip_requirements/extract.ts
+++ b/lib/modules/manager/pip_requirements/extract.ts
@@ -13,13 +13,13 @@ export const packagePattern =
   '[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]';
 export const extrasPattern = '(?:\\s*\\[[^\\]]+\\])?';
 const packageGitRegex = regEx(
-  /(?<source>(?:git\+)(?<protocol>git|ssh|https):\/\/(?<gitUrl>(?:(?<user>[^@]+)@)?(?<hostname>[\w.-]+)(?<delimiter>\/)(?<scmPath>.*\/(?<depName>[\w/-]+))(\.git)?(?:@(?<version>.*))))/
+  /(?<source>(?:git\+)(?<protocol>git|ssh|https):\/\/(?<gitUrl>(?:(?<user>[^@]+)@)?(?<hostname>[\w.-]+)(?<delimiter>\/)(?<scmPath>.*\/(?<depName>[\w/-]+))(\.git)?(?:@(?<version>.*))))/,
 );
 
 const rangePattern: string = RANGE_PATTERN;
 const specifierPartPattern = `\\s*${rangePattern.replace(
   regEx(/\?<\w+>/g),
-  '?:'
+  '?:',
 )}`;
 const specifierPattern = `${specifierPartPattern}(?:\\s*,${specifierPartPattern})*`;
 export const dependencyPattern = `(${packagePattern})(${extrasPattern})(${specifierPattern})`;
@@ -41,7 +41,7 @@ export function cleanRegistryUrls(registryUrls: string[]): string[] {
           .replace(regEx(/}$/), '');
         const sub = process.env[envvar];
         return sub ?? match;
-      }
+      },
     );
   });
 }
diff --git a/lib/modules/manager/pip_setup/extract.ts b/lib/modules/manager/pip_setup/extract.ts
index f9c99eabff32b4c44856df6d82ddaf2eb1079771..cef10e2322de8c3da30b2b6e1c6a261bc3ce4e6e 100644
--- a/lib/modules/manager/pip_setup/extract.ts
+++ b/lib/modules/manager/pip_setup/extract.ts
@@ -36,7 +36,7 @@ const extractRegex = regEx(depPattern);
 // Extract dependency string
 function depStringHandler(
   ctx: Context,
-  token: lexer.StringValueToken
+  token: lexer.StringValueToken,
 ): Context {
   const depStr = token.value;
   const match = extractRegex.exec(depStr);
@@ -73,7 +73,7 @@ const depString = q
   .opt(
     q
       .opt(q.op<Context>(','))
-      .comment(/^#\s*renovate\s*:\s*ignore\s*$/, depSkipHandler)
+      .comment(/^#\s*renovate\s*:\s*ignore\s*$/, depSkipHandler),
   );
 
 const query = q.alt(incompleteDepString, depString);
@@ -81,7 +81,7 @@ const query = q.alt(incompleteDepString, depString);
 export function extractPackageFile(
   content: string,
   _packageFile: string,
-  _config: ExtractConfig
+  _config: ExtractConfig,
 ): PackageFileContent | null {
   const res = python.query<Context, parser.Node>(content, query, { deps: [] });
   return res?.deps?.length ? res : null;
diff --git a/lib/modules/manager/pipenv/artifacts.spec.ts b/lib/modules/manager/pipenv/artifacts.spec.ts
index 2ecfd2d2b4900df1996b0fa727e14ab37160dafb..ede5e04b9141c94c3d2a15d7af51248f71092f59 100644
--- a/lib/modules/manager/pipenv/artifacts.spec.ts
+++ b/lib/modules/manager/pipenv/artifacts.spec.ts
@@ -83,14 +83,14 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
   it('returns null if unchanged', async () => {
     pipFileLock._meta.requires.python_full_version = '3.7.6';
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce(JSON.stringify(pipFileLock));
     const execSnapshots = mockExecAll();
@@ -101,14 +101,14 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
 
   it('handles no constraint', async () => {
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce('unparseable pipfile lock');
     const execSnapshots = mockExecAll();
@@ -119,21 +119,21 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
 
   it('returns updated Pipfile.lock', async () => {
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce('current pipfile.lock');
     const execSnapshots = mockExecAll();
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New Pipfile.lock');
     expect(
@@ -142,7 +142,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config: { ...config, constraints: { python: '== 3.8.*' } },
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -151,7 +151,7 @@ describe('modules/manager/pipenv/artifacts', () => {
     GlobalConfig.set(dockerAdminConfig);
     pipFileLock._meta.requires.python_version = '3.7';
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.ensureCacheDir.mockResolvedValueOnce('/tmp/renovate/cache/others/pip');
     fs.readLocalFile.mockResolvedValueOnce(JSON.stringify(pipFileLock));
@@ -163,7 +163,7 @@ describe('modules/manager/pipenv/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     expect(
@@ -172,7 +172,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -181,7 +181,7 @@ describe('modules/manager/pipenv/artifacts', () => {
     GlobalConfig.set({ ...adminConfig, binarySource: 'install' });
     pipFileLock._meta.requires.python_full_version = '3.7.6';
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.ensureCacheDir.mockResolvedValueOnce('/tmp/renovate/cache/others/pip');
     fs.readLocalFile.mockResolvedValueOnce(JSON.stringify(pipFileLock));
@@ -193,7 +193,7 @@ describe('modules/manager/pipenv/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     expect(
@@ -202,7 +202,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchObject([
       { cmd: 'install-tool python 3.7.6' },
@@ -213,7 +213,7 @@ describe('modules/manager/pipenv/artifacts', () => {
 
   it('catches errors', async () => {
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce('Current Pipfile.lock');
     fs.writeLocalFile.mockImplementationOnce(() => {
@@ -225,7 +225,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config,
-      })
+      }),
     ).toEqual([
       { artifactError: { lockFile: 'Pipfile.lock', stderr: 'not found' } },
     ]);
@@ -233,14 +233,14 @@ describe('modules/manager/pipenv/artifacts', () => {
 
   it('returns updated Pipenv.lock when doing lockfile maintenance', async () => {
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce('Current Pipfile.lock');
     const execSnapshots = mockExecAll();
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('New Pipfile.lock');
     expect(
@@ -249,14 +249,14 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: '{}',
         config: lockMaintenanceConfig,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
 
   it('uses pipenv version from Pipfile', async () => {
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     GlobalConfig.set(dockerAdminConfig);
     pipFileLock.default.pipenv.version = '==2020.8.13';
@@ -265,7 +265,7 @@ describe('modules/manager/pipenv/artifacts', () => {
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     expect(
@@ -274,7 +274,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -283,14 +283,14 @@ describe('modules/manager/pipenv/artifacts', () => {
     GlobalConfig.set(dockerAdminConfig);
     pipFileLock.develop.pipenv.version = '==2020.8.13';
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce(JSON.stringify(pipFileLock));
     const execSnapshots = mockExecAll();
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     expect(
@@ -299,7 +299,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config,
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
@@ -308,14 +308,14 @@ describe('modules/manager/pipenv/artifacts', () => {
     GlobalConfig.set(dockerAdminConfig);
     pipFileLock.default.pipenv.version = '==2020.8.13';
     fs.ensureCacheDir.mockResolvedValueOnce(
-      '/tmp/renovate/cache/others/pipenv'
+      '/tmp/renovate/cache/others/pipenv',
     );
     fs.readLocalFile.mockResolvedValueOnce(JSON.stringify(pipFileLock));
     const execSnapshots = mockExecAll();
     git.getRepoStatus.mockResolvedValue(
       partial<StatusResult>({
         modified: ['Pipfile.lock'],
-      })
+      }),
     );
     fs.readLocalFile.mockResolvedValueOnce('new lock');
     expect(
@@ -324,7 +324,7 @@ describe('modules/manager/pipenv/artifacts', () => {
         updatedDeps: [],
         newPackageFileContent: 'some new content',
         config: { ...config, constraints: { pipenv: '==2020.1.1' } },
-      })
+      }),
     ).not.toBeNull();
     expect(execSnapshots).toMatchSnapshot();
   });
diff --git a/lib/modules/manager/pipenv/artifacts.ts b/lib/modules/manager/pipenv/artifacts.ts
index db98b3df96bb79174d9997980431973d012f6749..347d3ad61b16180c0f66759b72cf27960c21eaa0 100644
--- a/lib/modules/manager/pipenv/artifacts.ts
+++ b/lib/modules/manager/pipenv/artifacts.ts
@@ -18,7 +18,7 @@ import { PipfileLockSchema } from './schema';
 
 export function getPythonConstraint(
   existingLockFileContent: string,
-  config: UpdateArtifactsConfig
+  config: UpdateArtifactsConfig,
 ): string | undefined {
   const { constraints = {} } = config;
   const { python } = constraints;
@@ -50,7 +50,7 @@ export function getPythonConstraint(
 
 export function getPipenvConstraint(
   existingLockFileContent: string,
-  config: UpdateArtifactsConfig
+  config: UpdateArtifactsConfig,
 ): string {
   const { constraints = {} } = config;
   const { pipenv } = constraints;
@@ -100,7 +100,7 @@ export async function updateArtifacts({
     const tagConstraint = getPythonConstraint(existingLockFileContent, config);
     const pipenvConstraint = getPipenvConstraint(
       existingLockFileContent,
-      config
+      config,
     );
     const execOptions: ExecOptions = {
       cwdFile: pipfileName,
diff --git a/lib/modules/manager/pipenv/extract.ts b/lib/modules/manager/pipenv/extract.ts
index e0e7f2c947f2c198103bb8fac21482f1fb06b1a7..f7d27f4206fc0e90211eed5ec518175a1b594199 100644
--- a/lib/modules/manager/pipenv/extract.ts
+++ b/lib/modules/manager/pipenv/extract.ts
@@ -15,13 +15,13 @@ const rangePattern: string = RANGE_PATTERN;
 
 const specifierPartPattern = `\\s*${rangePattern.replace(
   regEx(/\?<\w+>/g),
-  '?:'
+  '?:',
 )}\\s*`;
 const specifierPattern = `${specifierPartPattern}(?:,${specifierPartPattern})*`;
 const specifierRegex = regEx(`^${specifierPattern}$`);
 function extractFromSection(
   pipfile: PipFile,
-  section: 'packages' | 'dev-packages'
+  section: 'packages' | 'dev-packages',
 ): PackageDependency[] {
   const pipfileSection = pipfile[section];
   if (!pipfileSection) {
@@ -55,7 +55,7 @@ function extractFromSection(
         const packageMatches = packageRegex.exec(depName);
         if (!packageMatches) {
           logger.debug(
-            `Skipping dependency with malformed package name "${depName}".`
+            `Skipping dependency with malformed package name "${depName}".`,
           );
           skipReason = 'invalid-name';
         }
@@ -63,7 +63,7 @@ function extractFromSection(
         const specifierMatches = specifierRegex.exec(currentValue!);
         if (!specifierMatches) {
           logger.debug(
-            `Skipping dependency with malformed version specifier "${currentValue!}".`
+            `Skipping dependency with malformed version specifier "${currentValue!}".`,
           );
           skipReason = 'invalid-version';
         }
@@ -88,7 +88,7 @@ function extractFromSection(
       if (requirements.index) {
         if (is.array(pipfile.source)) {
           const source = pipfile.source.find(
-            (item) => item.name === requirements.index
+            (item) => item.name === requirements.index,
           );
           if (source) {
             dep.registryUrls = [source.url];
@@ -103,7 +103,7 @@ function extractFromSection(
 
 export async function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   logger.trace(`pipenv.extractPackageFile(${packageFile})`);
 
diff --git a/lib/modules/manager/pipenv/schema.ts b/lib/modules/manager/pipenv/schema.ts
index e39a9b112c25fd572370c6640d038e7187774ae0..ff92d014841324aeebbc551ed39c6f100ce44863 100644
--- a/lib/modules/manager/pipenv/schema.ts
+++ b/lib/modules/manager/pipenv/schema.ts
@@ -6,7 +6,7 @@ const PipfileLockEntrySchema = z
     z.string(),
     z.object({
       version: z.string().optional(),
-    })
+    }),
   )
   .optional();
 
@@ -24,5 +24,5 @@ export const PipfileLockSchema = Json.pipe(
       .optional(),
     default: PipfileLockEntrySchema,
     develop: PipfileLockEntrySchema,
-  })
+  }),
 );
diff --git a/lib/modules/manager/poetry/artifacts.spec.ts b/lib/modules/manager/poetry/artifacts.spec.ts
index 46b511a3b0a0132e8865e66f01d7f09776c7e469..0ea60e8bb8a633bff2ad7ac61923afc040f1a7b4 100644
--- a/lib/modules/manager/poetry/artifacts.spec.ts
+++ b/lib/modules/manager/poetry/artifacts.spec.ts
@@ -49,7 +49,7 @@ describe('modules/manager/poetry/artifacts', () => {
         python = "${pythonVersion}"
       `;
       expect(getPythonConstraint(pyprojectContent, poetryLock)).toBe(
-        pythonVersion
+        pythonVersion,
       );
     });
 
@@ -65,14 +65,14 @@ describe('modules/manager/poetry/artifacts', () => {
     it('detects poetry from first line of poetry.lock', () => {
       const pyprojectContent = '';
       expect(getPoetryRequirement(pyprojectContent, poetry142lock)).toBe(
-        '1.4.2'
+        '1.4.2',
       );
     });
 
     it('detects poetry from metadata', () => {
       const pyprojectContent = '';
       expect(getPoetryRequirement(pyprojectContent, poetry12lock)).toBe(
-        '<1.3.0'
+        '<1.3.0',
       );
     });
   });
@@ -94,7 +94,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps,
           newPackageFileContent: '',
           config,
-        })
+        }),
       ).toBeNull();
       expect(execSnapshots).toEqual([]);
     });
@@ -107,7 +107,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps: [],
           newPackageFileContent: '',
           config,
-        })
+        }),
       ).toBeNull();
       expect(execSnapshots).toEqual([]);
     });
@@ -124,7 +124,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps,
           newPackageFileContent: '',
           config,
-        })
+        }),
       ).toBeNull();
       expect(execSnapshots).toMatchObject([
         {
@@ -150,7 +150,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps,
           newPackageFileContent: '{}',
           config,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -188,7 +188,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps,
           newPackageFileContent: pyproject10toml,
           config,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -228,7 +228,7 @@ describe('modules/manager/poetry/artifacts', () => {
           url = "some.url"
         `,
           config,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -259,7 +259,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps,
           newPackageFileContent: '{}',
           config,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -306,7 +306,7 @@ describe('modules/manager/poetry/artifacts', () => {
               python: '~2.7 || ^3.4',
             },
           },
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -382,7 +382,7 @@ describe('modules/manager/poetry/artifacts', () => {
               python: '~2.7 || ^3.4',
             },
           },
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -440,7 +440,7 @@ describe('modules/manager/poetry/artifacts', () => {
       // poetry.lock
       fs.getSiblingFileName.mockReturnValueOnce('poetry.lock');
       fs.readLocalFile.mockResolvedValueOnce(
-        '[metadata]\npython-versions = "~2.7 || ^3.4"'
+        '[metadata]\npython-versions = "~2.7 || ^3.4"',
       );
       fs.readLocalFile.mockResolvedValueOnce('New poetry.lock');
       // python
@@ -461,7 +461,7 @@ describe('modules/manager/poetry/artifacts', () => {
             ...config,
             constraints: {},
           },
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -500,7 +500,7 @@ describe('modules/manager/poetry/artifacts', () => {
       // poetry.lock
       fs.getSiblingFileName.mockReturnValueOnce('poetry.lock');
       fs.readLocalFile.mockResolvedValueOnce(
-        '[metadata]\npython-versions = "~2.7 || ^3.4"'
+        '[metadata]\npython-versions = "~2.7 || ^3.4"',
       );
       fs.readLocalFile.mockResolvedValueOnce('New poetry.lock');
       // python
@@ -521,7 +521,7 @@ describe('modules/manager/poetry/artifacts', () => {
             ...config,
             constraints: {},
           },
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -554,7 +554,7 @@ describe('modules/manager/poetry/artifacts', () => {
           updatedDeps,
           newPackageFileContent: '{}',
           config,
-        })
+        }),
       ).toMatchObject([{ artifactError: { lockFile: 'poetry.lock' } }]);
       expect(execSnapshots).toMatchObject([]);
     });
@@ -574,7 +574,7 @@ describe('modules/manager/poetry/artifacts', () => {
             ...config,
             updateType: 'lockFileMaintenance',
           },
-        })
+        }),
       ).toEqual([
         {
           file: {
diff --git a/lib/modules/manager/poetry/artifacts.ts b/lib/modules/manager/poetry/artifacts.ts
index 1aa6576c553cb1db88deae9d5dbde50a473d460b..edaa03f4701ec994c20505fa69185621d9acf058 100644
--- a/lib/modules/manager/poetry/artifacts.ts
+++ b/lib/modules/manager/poetry/artifacts.ts
@@ -24,7 +24,7 @@ import type { PoetryFile, PoetrySource } from './types';
 
 export function getPythonConstraint(
   pyProjectContent: string,
-  existingLockFileContent: string
+  existingLockFileContent: string,
 ): string | null {
   // Read Python version from `pyproject.toml` first as it could have been updated
   const pyprojectPythonConstraint = Result.parse(
@@ -32,8 +32,8 @@ export function getPythonConstraint(
     PoetrySchemaToml.transform(
       ({ packageFileContent }) =>
         packageFileContent.deps.find((dep) => dep.depName === 'python')
-          ?.currentValue
-    )
+          ?.currentValue,
+    ),
   ).unwrapOrNull();
   if (pyprojectPythonConstraint) {
     logger.debug('Using python version from pyproject.toml');
@@ -42,7 +42,7 @@ export function getPythonConstraint(
 
   const lockfilePythonConstraint = Result.parse(
     existingLockFileContent,
-    Lockfile.transform(({ pythonVersions }) => pythonVersions)
+    Lockfile.transform(({ pythonVersions }) => pythonVersions),
   ).unwrapOrNull();
   if (lockfilePythonConstraint) {
     logger.debug('Using python version from poetry.lock');
@@ -54,7 +54,7 @@ export function getPythonConstraint(
 
 export function getPoetryRequirement(
   pyProjectContent: string,
-  existingLockFileContent: string
+  existingLockFileContent: string,
 ): undefined | string | null {
   // Read Poetry version from first line of poetry.lock
   const firstLine = existingLockFileContent.split('\n')[0];
@@ -66,7 +66,7 @@ export function getPoetryRequirement(
 
   const { val: lockfilePoetryConstraint } = Result.parse(
     existingLockFileContent,
-    Lockfile.transform(({ poetryConstraint }) => poetryConstraint)
+    Lockfile.transform(({ poetryConstraint }) => poetryConstraint),
   ).unwrap();
   if (lockfilePoetryConstraint) {
     logger.debug('Using poetry version from poetry.lock metadata');
@@ -75,7 +75,7 @@ export function getPoetryRequirement(
 
   const { val: pyprojectPoetryConstraint } = Result.parse(
     pyProjectContent,
-    PoetrySchemaToml.transform(({ poetryRequirement }) => poetryRequirement)
+    PoetrySchemaToml.transform(({ poetryRequirement }) => poetryRequirement),
   ).unwrap();
   if (pyprojectPoetryConstraint) {
     logger.debug('Using poetry version from pyproject.toml');
@@ -115,7 +115,7 @@ function getMatchingHostRule(url: string | undefined): HostRule {
 
 function getSourceCredentialVars(
   pyprojectContent: string,
-  packageFileName: string
+  packageFileName: string,
 ): NodeJS.ProcessEnv {
   const poetrySources = getPoetrySources(pyprojectContent, packageFileName);
   const envVars: NodeJS.ProcessEnv = {};
@@ -175,7 +175,7 @@ export async function updateArtifacts({
           .map((dep) => dep.depName)
           .filter(is.string)
           .map((dep) => quote(dep))
-          .join(' ')}`
+          .join(' ')}`,
       );
     }
     const pythonConstraint =
diff --git a/lib/modules/manager/poetry/extract.ts b/lib/modules/manager/poetry/extract.ts
index a0a50369f72452766a4e4cf52330823bf2069c97..3146224b722b4d09afb3e6cff11b3f92ac04725c 100644
--- a/lib/modules/manager/poetry/extract.ts
+++ b/lib/modules/manager/poetry/extract.ts
@@ -13,12 +13,12 @@ import { Lockfile, PoetrySchemaToml } from './schema';
 
 export async function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): Promise<PackageFileContent | null> {
   logger.trace(`poetry.extractPackageFile(${packageFile})`);
   const { val: res, err } = Result.parse(
     content,
-    PoetrySchemaToml.transform(({ packageFileContent }) => packageFileContent)
+    PoetrySchemaToml.transform(({ packageFileContent }) => packageFileContent),
   ).unwrap();
   if (err) {
     logger.debug({ packageFile, err }, `Poetry: error parsing pyproject.toml`);
@@ -29,7 +29,7 @@ export async function extractPackageFile(
   const lockContents = (await readLocalFile(lockfileName, 'utf8'))!;
   const lockfileMapping = Result.parse(
     lockContents,
-    Lockfile.transform(({ lock }) => lock)
+    Lockfile.transform(({ lock }) => lock),
   ).unwrapOrElse({});
 
   let pythonVersion: string | undefined;
diff --git a/lib/modules/manager/poetry/schema.ts b/lib/modules/manager/poetry/schema.ts
index 219861c7e8b9d06f4c3054a21a32a6ff379af848..afe3f8ec6db36d7b6716b5e268cce2ebe8abd093 100644
--- a/lib/modules/manager/poetry/schema.ts
+++ b/lib/modules/manager/poetry/schema.ts
@@ -88,7 +88,7 @@ const PoetryPypiDependency = z.union([
       datasource: PypiDatasource.id,
       currentValue: version,
       managerData: { nestedVersion: false },
-    })
+    }),
   ),
 ]);
 
@@ -96,7 +96,7 @@ const PoetryArrayDependency = z.array(z.unknown()).transform(
   (): PackageDependency => ({
     datasource: PypiDatasource.id,
     skipReason: 'multiple-constraint-dep',
-  })
+  }),
 );
 
 const PoetryDependency = z.union([
@@ -131,7 +131,7 @@ export const PoetryDependencies = LooseRecord(
 
     dep.skipReason = 'invalid-version';
     return dep;
-  })
+  }),
 ).transform((record) => {
   const deps: PackageDependency[] = [];
   for (const [depName, dep] of Object.entries(record)) {
@@ -152,7 +152,7 @@ export const PoetryDependencies = LooseRecord(
 
 function withDepType<
   Output extends PackageDependency[],
-  Schema extends ZodType<Output, ZodTypeDef, unknown>
+  Schema extends ZodType<Output, ZodTypeDef, unknown>,
 >(schema: Schema, depType: string): ZodEffects<Schema> {
   return schema.transform((deps) => {
     for (const dep of deps) {
@@ -166,7 +166,7 @@ export const PoetryGroupDependencies = LooseRecord(
   z.string(),
   z
     .object({ dependencies: PoetryDependencies })
-    .transform(({ dependencies }) => dependencies)
+    .transform(({ dependencies }) => dependencies),
 ).transform((record) => {
   const deps: PackageDependency[] = [];
   for (const [groupName, group] of Object.entries(record)) {
@@ -183,7 +183,7 @@ export const PoetrySectionSchema = z
     dependencies: withDepType(PoetryDependencies, 'dependencies').optional(),
     'dev-dependencies': withDepType(
       PoetryDependencies,
-      'dev-dependencies'
+      'dev-dependencies',
     ).optional(),
     extras: withDepType(PoetryDependencies, 'extras').optional(),
     group: PoetryGroupDependencies.optional(),
@@ -192,7 +192,7 @@ export const PoetrySectionSchema = z
         .object({
           url: z.string(),
         })
-        .transform(({ url }) => url)
+        .transform(({ url }) => url),
     )
       .refine((urls) => urls.length > 0)
       .transform((urls) => [
@@ -225,7 +225,7 @@ export const PoetrySectionSchema = z
       }
 
       return res;
-    }
+    },
   );
 
 export type PoetrySectionSchema = z.infer<typeof PoetrySectionSchema>;
@@ -258,11 +258,11 @@ export const PoetrySchema = z
           // https://python-poetry.org/docs/pyproject/#poetry-and-pep-517
           (buildBackend) =>
             buildBackend === 'poetry.masonry.api' ||
-            buildBackend === 'poetry.core.masonry.api'
+            buildBackend === 'poetry.core.masonry.api',
         ),
         requires: LooseArray(BuildSystemRequireVal).transform((vals) => {
           const req = vals.find(
-            ({ depName }) => depName === 'poetry' || depName === 'poetry_core'
+            ({ depName }) => depName === 'poetry' || depName === 'poetry_core',
           );
           return req?.poetryRequirement;
         }),
@@ -275,7 +275,7 @@ export const PoetrySchema = z
     ({ tool: packageFileContent, 'build-system': poetryRequirement }) => ({
       packageFileContent,
       poetryRequirement,
-    })
+    }),
   );
 
 export type PoetrySchema = z.infer<typeof PoetrySchema>;
@@ -296,7 +296,7 @@ export const Lockfile = Toml.pipe(
           name: z.string(),
           version: z.string(),
         })
-        .transform(({ name, version }): [string, string] => [name, version])
+        .transform(({ name, version }): [string, string] => [name, version]),
     )
       .transform((entries) => Object.fromEntries(entries))
       .catch({}),
@@ -316,17 +316,17 @@ export const Lockfile = Toml.pipe(
         }) => ({
           poetryConstraint,
           pythonVersions,
-        })
+        }),
       )
       .catch({
         poetryConstraint: undefined,
         pythonVersions: undefined,
       }),
-  })
+  }),
 ).transform(
   ({ package: lock, metadata: { poetryConstraint, pythonVersions } }) => ({
     lock,
     poetryConstraint,
     pythonVersions,
-  })
+  }),
 );
diff --git a/lib/modules/manager/poetry/update-locked.ts b/lib/modules/manager/poetry/update-locked.ts
index fc0ca5fa886d17bf97f610f2230bb165df9e3856..27ad335ee8bc09489dc45b6012ba6e8b3be519d2 100644
--- a/lib/modules/manager/poetry/update-locked.ts
+++ b/lib/modules/manager/poetry/update-locked.ts
@@ -4,12 +4,12 @@ import type { UpdateLockedConfig, UpdateLockedResult } from '../types';
 import { Lockfile } from './schema';
 
 export function updateLockedDependency(
-  config: UpdateLockedConfig
+  config: UpdateLockedConfig,
 ): UpdateLockedResult {
   const { depName, currentVersion, newVersion, lockFile, lockFileContent } =
     config;
   logger.debug(
-    `poetry.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+    `poetry.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`,
   );
 
   const LockedVersionSchema = Lockfile.transform(({ lock }) => lock[depName]);
@@ -18,7 +18,7 @@ export function updateLockedDependency(
       (lockedVersion): UpdateLockedResult =>
         lockedVersion === newVersion
           ? { status: 'already-updated' }
-          : { status: 'unsupported' }
+          : { status: 'unsupported' },
     )
     .unwrapOrElse({ status: 'unsupported' });
 }
diff --git a/lib/modules/manager/pre-commit/extract.spec.ts b/lib/modules/manager/pre-commit/extract.spec.ts
index cefbcfc63275a21e5325200dd319a622785baf99..49958c6b728352145c981e9cc24d7c1a2a9b40ee 100644
--- a/lib/modules/manager/pre-commit/extract.spec.ts
+++ b/lib/modules/manager/pre-commit/extract.spec.ts
@@ -11,14 +11,14 @@ const filename = '.pre-commit.yaml';
 const complexPrecommitConfig = Fixtures.get('complex.pre-commit-config.yaml');
 const examplePrecommitConfig = Fixtures.get('.pre-commit-config.yaml');
 const emptyReposPrecommitConfig = Fixtures.get(
-  'empty_repos.pre-commit-config.yaml'
+  'empty_repos.pre-commit-config.yaml',
 );
 const noReposPrecommitConfig = Fixtures.get('no_repos.pre-commit-config.yaml');
 const invalidRepoPrecommitConfig = Fixtures.get(
-  'invalid_repo.pre-commit-config.yaml'
+  'invalid_repo.pre-commit-config.yaml',
 );
 const enterpriseGitPrecommitConfig = Fixtures.get(
-  'enterprise.pre-commit-config.yaml'
+  'enterprise.pre-commit-config.yaml',
 );
 
 describe('modules/manager/pre-commit/extract', () => {
diff --git a/lib/modules/manager/pre-commit/extract.ts b/lib/modules/manager/pre-commit/extract.ts
index 174cd1bf3554b1f74fdda179df0e3bd41ebf438a..adb3ca1e5bf6583454fbf60b3fd63e37d7e124db 100644
--- a/lib/modules/manager/pre-commit/extract.ts
+++ b/lib/modules/manager/pre-commit/extract.ts
@@ -26,7 +26,7 @@ import type { PreCommitConfig } from './types';
  */
 function determineDatasource(
   repository: string,
-  hostname: string
+  hostname: string,
 ): { datasource?: string; registryUrls?: string[]; skipReason?: SkipReason } {
   if (hostname === 'github.com' || detectPlatform(repository) === 'github') {
     logger.debug({ repository, hostname }, 'Found github dependency');
@@ -42,7 +42,7 @@ function determineDatasource(
     // 1 check, to possibly prevent 3 failures in combined query of hostType & url.
     logger.debug(
       { repository, hostUrl },
-      'Provided hostname does not match any hostRules. Ignoring'
+      'Provided hostname does not match any hostRules. Ignoring',
     );
     return { skipReason: 'unknown-registry', registryUrls: [hostname] };
   }
@@ -53,21 +53,21 @@ function determineDatasource(
     if (is.nonEmptyObject(find({ hostType, url: hostUrl }))) {
       logger.debug(
         { repository, hostUrl, hostType },
-        `Provided hostname matches a ${hostType} hostrule.`
+        `Provided hostname matches a ${hostType} hostrule.`,
       );
       return { datasource: sourceId, registryUrls: [hostname] };
     }
   }
   logger.debug(
     { repository, registry: hostUrl },
-    'Provided hostname did not match any of the hostRules of hostType github nor gitlab'
+    'Provided hostname did not match any of the hostRules of hostType github nor gitlab',
   );
   return { skipReason: 'unknown-registry', registryUrls: [hostname] };
 }
 
 function extractDependency(
   tag: string,
-  repository: string
+  repository: string,
 ): {
   depName?: string;
   depType?: string;
@@ -103,7 +103,7 @@ function extractDependency(
   }
   logger.info(
     { repository },
-    'Could not separate hostname from full dependency url.'
+    'Could not separate hostname from full dependency url.',
   );
   return {
     depName: undefined,
@@ -143,7 +143,7 @@ function findDependencies(precommitFile: PreCommitConfig): PackageDependency[] {
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   type ParsedContent = Record<string, unknown> | PreCommitConfig;
   let parsedContent: ParsedContent;
@@ -152,21 +152,21 @@ export function extractPackageFile(
   } catch (err) {
     logger.debug(
       { filename: packageFile, err },
-      'Failed to parse pre-commit config YAML'
+      'Failed to parse pre-commit config YAML',
     );
     return null;
   }
   if (!is.plainObject<Record<string, unknown>>(parsedContent)) {
     logger.debug(
       { packageFile },
-      `Parsing of pre-commit config YAML returned invalid result`
+      `Parsing of pre-commit config YAML returned invalid result`,
     );
     return null;
   }
   if (!matchesPrecommitConfigHeuristic(parsedContent)) {
     logger.debug(
       { packageFile },
-      `File does not look like a pre-commit config file`
+      `File does not look like a pre-commit config file`,
     );
     return null;
   }
@@ -179,7 +179,7 @@ export function extractPackageFile(
   } catch (err) /* istanbul ignore next */ {
     logger.debug(
       { packageFile, err },
-      'Error scanning parsed pre-commit config'
+      'Error scanning parsed pre-commit config',
     );
   }
   return null;
diff --git a/lib/modules/manager/pre-commit/parsing.ts b/lib/modules/manager/pre-commit/parsing.ts
index b500502025ee6ff8ed6d8dd2c9c366a542ced8fa..53fb5defa84269b8a50fb04a677ed25ee8510259 100644
--- a/lib/modules/manager/pre-commit/parsing.ts
+++ b/lib/modules/manager/pre-commit/parsing.ts
@@ -10,7 +10,7 @@ import type { PreCommitConfig, PreCommitDependency } from './types';
  *     rev: v1.0.0
  */
 export function matchesPrecommitConfigHeuristic(
-  data: unknown
+  data: unknown,
 ): data is PreCommitConfig {
   return !!(data && typeof data === 'object' && hasKey('repos', data));
 }
@@ -23,7 +23,7 @@ export function matchesPrecommitConfigHeuristic(
  *     rev: v1.0.0
  */
 export function matchesPrecommitDependencyHeuristic(
-  data: unknown
+  data: unknown,
 ): data is PreCommitDependency {
   return !!(
     data &&
diff --git a/lib/modules/manager/pub/artifacts.spec.ts b/lib/modules/manager/pub/artifacts.spec.ts
index 762158dade532baa7263475bf87cd6502967cd22..57fd3aa12705e2c5096f2646a86f5425cfb35692 100644
--- a/lib/modules/manager/pub/artifacts.spec.ts
+++ b/lib/modules/manager/pub/artifacts.spec.ts
@@ -65,7 +65,7 @@ describe('modules/manager/pub/artifacts', () => {
 
   it('returns null if updatedDeps is empty', async () => {
     expect(
-      await pub.updateArtifacts({ ...updateArtifact, updatedDeps: [] })
+      await pub.updateArtifacts({ ...updateArtifact, updatedDeps: [] }),
     ).toBeNull();
   });
 
@@ -83,7 +83,7 @@ describe('modules/manager/pub/artifacts', () => {
             flutter: 2.0.0
         `,
         updatedDeps: [{ depName: 'dart' }, { depName: 'flutter' }],
-      })
+      }),
     ).toEqual([
       {
         file: {
@@ -112,7 +112,7 @@ describe('modules/manager/pub/artifacts', () => {
         await pub.updateArtifacts({
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
-        })
+        }),
       ).toBeNull();
       expect(execSnapshots).toMatchObject([
         {
@@ -130,7 +130,7 @@ describe('modules/manager/pub/artifacts', () => {
         await pub.updateArtifacts({
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -157,7 +157,7 @@ describe('modules/manager/pub/artifacts', () => {
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
           updatedDeps: [{ depName: params.sdk }],
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -184,7 +184,7 @@ describe('modules/manager/pub/artifacts', () => {
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
           config: { ...config, updateType: 'lockFileMaintenance' },
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -215,7 +215,7 @@ describe('modules/manager/pub/artifacts', () => {
         await pub.updateArtifacts({
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -260,7 +260,7 @@ describe('modules/manager/pub/artifacts', () => {
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
           config: { ...config, constraints: { dart: '3.3.9' } },
-        })
+        }),
       ).toEqual([
         {
           file: {
@@ -288,7 +288,7 @@ describe('modules/manager/pub/artifacts', () => {
         await pub.updateArtifacts({
           ...updateArtifact,
           newPackageFileContent: params.packageFileContent,
-        })
+        }),
       ).toEqual([{ artifactError: { lockFile, stderr } }]);
     });
   });
diff --git a/lib/modules/manager/pub/artifacts.ts b/lib/modules/manager/pub/artifacts.ts
index 82bbdb2f05a78b941cd79e3ce3b06af71d3e8c5b..5daac2d60b3d94c9fe350f9f149f0ad73efcb863 100644
--- a/lib/modules/manager/pub/artifacts.ts
+++ b/lib/modules/manager/pub/artifacts.ts
@@ -94,7 +94,7 @@ export async function updateArtifacts({
 function getExecCommand(
   toolName: string,
   updatedDeps: Upgrade<Record<string, unknown>>[],
-  isLockFileMaintenance: boolean
+  isLockFileMaintenance: boolean,
 ): string {
   if (isLockFileMaintenance) {
     return `${toolName} pub upgrade`;
diff --git a/lib/modules/manager/pub/extract.ts b/lib/modules/manager/pub/extract.ts
index da56397eb43aa8f10b40b64210624a9cd11dcebc..87d37223e818cebf23def048804f5577ec8ff520 100644
--- a/lib/modules/manager/pub/extract.ts
+++ b/lib/modules/manager/pub/extract.ts
@@ -9,7 +9,7 @@ import { parsePubspec } from './utils';
 
 function extractFromSection(
   pubspec: PubspecSchema,
-  sectionKey: keyof Pick<PubspecSchema, 'dependencies' | 'dev_dependencies'>
+  sectionKey: keyof Pick<PubspecSchema, 'dependencies' | 'dev_dependencies'>,
 ): PackageDependency[] {
   const sectionContent = pubspec[sectionKey];
   if (!sectionContent) {
@@ -84,7 +84,7 @@ function extractFlutter(pubspec: PubspecSchema): PackageDependency[] {
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   const pubspec = parsePubspec(packageFile, content);
   if (!pubspec) {
diff --git a/lib/modules/manager/pub/schema.ts b/lib/modules/manager/pub/schema.ts
index 9e9bba91ec46d30bf1679bf30a1d22fe8027c4b6..667cc063b5c3c0f25b9e61ff74112d48fbe3efa9 100644
--- a/lib/modules/manager/pub/schema.ts
+++ b/lib/modules/manager/pub/schema.ts
@@ -6,7 +6,7 @@ const PubspecDependencySchema = LooseRecord(
   z.union([
     z.string(),
     z.object({ version: z.string().optional(), path: z.string().optional() }),
-  ])
+  ]),
 );
 
 export const PubspecSchema = z.object({
diff --git a/lib/modules/manager/pub/utils.ts b/lib/modules/manager/pub/utils.ts
index dda0df348ec989f6ebd8c902e128e11620b0be9b..a0643e9728eca1051bf165eab11d0e8932bc22e5 100644
--- a/lib/modules/manager/pub/utils.ts
+++ b/lib/modules/manager/pub/utils.ts
@@ -8,7 +8,7 @@ import {
 
 export function parsePubspec(
   fileName: string,
-  fileContent: string
+  fileContent: string,
 ): PubspecSchema | null {
   const res = PubspecYaml.safeParse(fileContent);
   if (res.success) {
@@ -21,7 +21,7 @@ export function parsePubspec(
 
 export function parsePubspecLock(
   fileName: string,
-  fileContent: string
+  fileContent: string,
 ): PubspecLockSchema | null {
   const res = PubspecLockYaml.safeParse(fileContent);
   if (res.success) {
@@ -29,7 +29,7 @@ export function parsePubspecLock(
   } else {
     logger.debug(
       { err: res.error, fileName },
-      'Error parsing pubspec lockfile.'
+      'Error parsing pubspec lockfile.',
     );
   }
   return null;
diff --git a/lib/modules/manager/puppet/common.spec.ts b/lib/modules/manager/puppet/common.spec.ts
index 48a050c39d4c33c74b29be6a34067804dfd8a376..4cc56027a7a649aad57ee29b75505ccffdf83330 100644
--- a/lib/modules/manager/puppet/common.spec.ts
+++ b/lib/modules/manager/puppet/common.spec.ts
@@ -7,21 +7,21 @@ describe('modules/manager/puppet/common', () => {
   describe('RE_REPOSITORY_GENERIC_GIT_SSH_FORMAT', () => {
     it('access by index', () => {
       const regex = RE_REPOSITORY_GENERIC_GIT_SSH_FORMAT.exec(
-        'git@gitlab.com:dir1/dir2/project.git'
+        'git@gitlab.com:dir1/dir2/project.git',
       );
       expect(regex).not.toBeNull();
       expect(String(regex)).toBe(
-        'git@gitlab.com:dir1/dir2/project.git,dir1/dir2/project.git'
+        'git@gitlab.com:dir1/dir2/project.git,dir1/dir2/project.git',
       );
     });
 
     it('access by named group', () => {
       const regex = RE_REPOSITORY_GENERIC_GIT_SSH_FORMAT.exec(
-        'git@gitlab.com:dir1/dir2/project.git'
+        'git@gitlab.com:dir1/dir2/project.git',
       );
       expect(regex).not.toBeNull();
       expect(String(regex)).toBe(
-        'git@gitlab.com:dir1/dir2/project.git,dir1/dir2/project.git'
+        'git@gitlab.com:dir1/dir2/project.git,dir1/dir2/project.git',
       );
       expect(regex?.groups).not.toBeNull();
       expect(regex?.groups?.repository).toBe('dir1/dir2/project.git');
@@ -36,7 +36,7 @@ describe('modules/manager/puppet/common', () => {
     it('parseable url', () => {
       const url = parseGitOwnerRepo(
         'https://gitlab.com/example/example',
-        false
+        false,
       );
       expect(url).toBe('example/example');
     });
diff --git a/lib/modules/manager/puppet/common.ts b/lib/modules/manager/puppet/common.ts
index 4b501d05296e79afafa67d86dc1ce51a63dc0ac7..82e8ac819dca5e1fdd432ccd92c37cf31c516e80 100644
--- a/lib/modules/manager/puppet/common.ts
+++ b/lib/modules/manager/puppet/common.ts
@@ -2,12 +2,12 @@ import { regEx } from '../../../util/regex';
 import { parseUrl } from '../../../util/url';
 
 export const RE_REPOSITORY_GENERIC_GIT_SSH_FORMAT = regEx(
-  /^git@[^:]*:(?<repository>.+)$/
+  /^git@[^:]*:(?<repository>.+)$/,
 );
 
 export function parseGitOwnerRepo(
   git: string,
-  githubUrl: boolean
+  githubUrl: boolean,
 ): string | null {
   const genericGitSsh = RE_REPOSITORY_GENERIC_GIT_SSH_FORMAT.exec(git);
 
diff --git a/lib/modules/manager/puppet/extract.spec.ts b/lib/modules/manager/puppet/extract.spec.ts
index 304ae6a36b857224fa8cb37d9ffd047f13c5b847..14ee82a00587ac5d655a5b0fcd21e312188ba3ce 100644
--- a/lib/modules/manager/puppet/extract.spec.ts
+++ b/lib/modules/manager/puppet/extract.spec.ts
@@ -17,7 +17,7 @@ describe('modules/manager/puppet/extract', () => {
           "mod 'puppetlabs/stdlib', '8.0.0'",
           "mod 'puppetlabs/apache', '6.5.1'",
           "mod 'puppetlabs/puppetdb', '7.9.0'",
-        ].join(EOL)
+        ].join(EOL),
       );
 
       expect(res).toMatchObject({
@@ -46,7 +46,7 @@ describe('modules/manager/puppet/extract', () => {
 
     it('extracts multiple modules from Puppetfile with multiple forges/registries', () => {
       const res = extractPackageFile(
-        Fixtures.get('Puppetfile.multiple_forges')
+        Fixtures.get('Puppetfile.multiple_forges'),
       );
 
       expect(res).toMatchObject({
@@ -124,7 +124,7 @@ describe('modules/manager/puppet/extract', () => {
 
     it('Use GithubTagsDatasource only if host is exactly github.com', () => {
       const res = extractPackageFile(
-        `mod 'apache', :git => 'https://github.com.example.com/puppetlabs/puppetlabs-apache', :tag => '0.9.0'`
+        `mod 'apache', :git => 'https://github.com.example.com/puppetlabs/puppetlabs-apache', :tag => '0.9.0'`,
       );
 
       expect(res).toEqual({
@@ -145,7 +145,7 @@ describe('modules/manager/puppet/extract', () => {
 
     it('Github url without https is skipped', () => {
       const res = extractPackageFile(
-        `mod 'apache', :git => 'http://github.com/puppetlabs/puppetlabs-apache', :tag => '0.9.0'`
+        `mod 'apache', :git => 'http://github.com/puppetlabs/puppetlabs-apache', :tag => '0.9.0'`,
       );
 
       expect(res).toMatchObject({
@@ -164,7 +164,7 @@ describe('modules/manager/puppet/extract', () => {
         [
           "mod 'stdlib',",
           "  :git => 'git@github.com:puppetlabs/puppetlabs-stdlib.git',",
-        ].join(EOL)
+        ].join(EOL),
       );
 
       expect(res).toEqual({
@@ -183,7 +183,7 @@ describe('modules/manager/puppet/extract', () => {
         [
           "mod 'stdlib', '0.1.0', 'i create a skip reason'",
           "  :git => 'git@github.com:puppetlabs/puppetlabs-stdlib.git',",
-        ].join(EOL)
+        ].join(EOL),
       );
 
       expect(res).toMatchObject({
diff --git a/lib/modules/manager/puppet/extract.ts b/lib/modules/manager/puppet/extract.ts
index 9df1a5a998b96a29a53dbb70ca67f5b37964fab4..84e165af1be05562af329aae6fb985f5e9be0677 100644
--- a/lib/modules/manager/puppet/extract.ts
+++ b/lib/modules/manager/puppet/extract.ts
@@ -10,7 +10,7 @@ import type { PuppetfileModule } from './types';
 
 function parseForgeDependency(
   module: PuppetfileModule,
-  forgeUrl: string | null
+  forgeUrl: string | null,
 ): PackageDependency {
   const dep: PackageDependency = {
     depName: module.name,
@@ -45,7 +45,7 @@ function parseGitDependency(module: PuppetfileModule): PackageDependency {
 
   if (githubUrl && parsedUrl && parsedUrl.protocol !== 'https:') {
     logger.debug(
-      `Access to github is only allowed for https, your url was: ${git}`
+      `Access to github is only allowed for https, your url was: ${git}`,
     );
     return {
       depName: moduleName,
diff --git a/lib/modules/manager/puppet/puppetfile-parser.spec.ts b/lib/modules/manager/puppet/puppetfile-parser.spec.ts
index 03d6a7f46edd715139e2156cb15220588faacb6b..87fff080d8b0a49005fac68c98e61e90490c7dea 100644
--- a/lib/modules/manager/puppet/puppetfile-parser.spec.ts
+++ b/lib/modules/manager/puppet/puppetfile-parser.spec.ts
@@ -33,7 +33,7 @@ describe('modules/manager/puppet/puppetfile-parser', () => {
         [
           "mod 'apache', :git => 'https://github.com/puppetlabs/puppetlabs-apache', :tag => '0.9.0'",
           "mod 'stdlib', :git => 'git@github.com:puppetlabs/puppetlabs-stdlib.git', :tag => '5.0.0'",
-        ].join(EOL)
+        ].join(EOL),
       );
       const defaultRegistryModules = puppetfile.getModulesOfForge(undefined);
 
@@ -81,18 +81,18 @@ describe('modules/manager/puppet/puppetfile-parser', () => {
       const defaultRegistryModulesNull = puppetfile.getModulesOfForge(null);
 
       expect(defaultRegistryModulesUndefined).toEqual(
-        defaultRegistryModulesNull
+        defaultRegistryModulesNull,
       );
     });
 
     it('Puppetfile_multiple_forges', () => {
       const puppetfile = parsePuppetfile(
-        Fixtures.get('Puppetfile.multiple_forges')
+        Fixtures.get('Puppetfile.multiple_forges'),
       );
       expect(puppetfile.getForges()).toHaveLength(2);
 
       const defaultRegistryModules = puppetfile.getModulesOfForge(
-        puppetLabsRegistryUrl
+        puppetLabsRegistryUrl,
       );
 
       expect(defaultRegistryModules).toEqual([
@@ -111,7 +111,7 @@ describe('modules/manager/puppet/puppetfile-parser', () => {
       ]);
 
       const someOtherPuppetForgeModules = puppetfile.getModulesOfForge(
-        'https://some-other-puppet-forge.com'
+        'https://some-other-puppet-forge.com',
       );
 
       expect(someOtherPuppetForgeModules).toEqual([
@@ -136,7 +136,7 @@ describe('modules/manager/puppet/puppetfile-parser', () => {
           "mod 'puppetlabs/stdlib', '8.0.0'",
           "mod 'puppetlabs/apache', '6.5.1'",
           "mod 'puppetlabs/puppetdb', '7.9.0'",
-        ].join(EOL)
+        ].join(EOL),
       );
       expect(puppetfile.getForges()).toHaveLength(1);
 
@@ -165,12 +165,12 @@ describe('modules/manager/puppet/puppetfile-parser', () => {
           "mod 'puppetlabs/stdlib', '8.0.0'",
           "mod 'puppetlabs/apache', '6.5.1'",
           "mod 'puppetlabs/puppetdb', '7.9.0'",
-        ].join(EOL)
+        ].join(EOL),
       );
       expect(puppetfile.getForges()).toHaveLength(1);
 
       const defaultRegistryModules = puppetfile.getModulesOfForge(
-        puppetLabsRegistryUrl
+        puppetLabsRegistryUrl,
       );
 
       expect(defaultRegistryModules).toEqual([
@@ -191,7 +191,7 @@ describe('modules/manager/puppet/puppetfile-parser', () => {
 
     it('Puppetfile_with_comments', () => {
       const puppetfile = parsePuppetfile(
-        Fixtures.get('Puppetfile.with_comments')
+        Fixtures.get('Puppetfile.with_comments'),
       );
       expect(puppetfile.getForges()).toHaveLength(1);
 
diff --git a/lib/modules/manager/puppet/puppetfile-parser.ts b/lib/modules/manager/puppet/puppetfile-parser.ts
index 51fbdbe43c37f971d87e0d435747888a1b27fbda..d0dced630ba09b0c24dab4e9d92a3fa6df1646de 100644
--- a/lib/modules/manager/puppet/puppetfile-parser.ts
+++ b/lib/modules/manager/puppet/puppetfile-parser.ts
@@ -31,7 +31,7 @@ export class Puppetfile {
   }
 
   public getModulesOfForge(
-    forgeUrl: string | null | undefined
+    forgeUrl: string | null | undefined,
   ): PuppetfileModule[] {
     const modules = this.forgeModules.get(forgeUrl ?? null);
 
@@ -90,7 +90,7 @@ export function parsePuppetfile(content: string): Puppetfile {
 
 function fillPuppetfileModule(
   currentPuppetfileModule: PuppetfileModule,
-  value: string
+  value: string,
 ): void {
   // "positional" module values
   if (currentPuppetfileModule.name === undefined) {
diff --git a/lib/modules/manager/sbt/extract.spec.ts b/lib/modules/manager/sbt/extract.spec.ts
index 24bce088c6bd82d45a787c2a3decf8a35cb8285e..6ebed93eff7102d8ef890ff65a2b05d1e4f3e8c8 100644
--- a/lib/modules/manager/sbt/extract.spec.ts
+++ b/lib/modules/manager/sbt/extract.spec.ts
@@ -9,7 +9,7 @@ const sbtScalaVersionVariable = Fixtures.get(`scala-version-variable.sbt`);
 const sbtMissingScalaVersion = Fixtures.get(`missing-scala-version.sbt`);
 const sbtDependencyFile = Fixtures.get(`dependency-file.scala`);
 const sbtPrivateVariableDependencyFile = Fixtures.get(
-  `private-variable-dependency-file.scala`
+  `private-variable-dependency-file.scala`,
 );
 
 describe('modules/manager/sbt/extract', () => {
@@ -19,22 +19,22 @@ describe('modules/manager/sbt/extract', () => {
       expect(extractPackageFile('non-sense')).toBeNull();
       expect(extractPackageFile('version := "1.2.3"')).toBeNull();
       expect(
-        extractPackageFile('libraryDependencies += "foo" % "bar" % ???')
+        extractPackageFile('libraryDependencies += "foo" % "bar" % ???'),
       ).toBeNull();
       expect(
-        extractPackageFile('libraryDependencies += "foo" % "bar" %% "baz"')
+        extractPackageFile('libraryDependencies += "foo" % "bar" %% "baz"'),
       ).toBeNull();
       expect(
-        extractPackageFile('libraryDependencies += ??? % "bar" % "baz"')
+        extractPackageFile('libraryDependencies += ??? % "bar" % "baz"'),
       ).toBeNull();
       expect(
-        extractPackageFile('libraryDependencies += "foo" % ??? % "baz"')
+        extractPackageFile('libraryDependencies += "foo" % ??? % "baz"'),
       ).toBeNull();
 
       expect(extractPackageFile('libraryDependencies += ')).toBeNull();
       expect(extractPackageFile('libraryDependencies += "foo"')).toBeNull();
       expect(
-        extractPackageFile('libraryDependencies += "foo" % "bar" %')
+        extractPackageFile('libraryDependencies += "foo" % "bar" %'),
       ).toBeNull();
     });
 
@@ -262,7 +262,7 @@ describe('modules/manager/sbt/extract', () => {
 
     it('extract deps from native scala file with private variables', () => {
       expect(
-        extractPackageFile(sbtPrivateVariableDependencyFile)
+        extractPackageFile(sbtPrivateVariableDependencyFile),
       ).toMatchSnapshot({
         deps: [
           {
@@ -367,7 +367,7 @@ describe('modules/manager/sbt/extract', () => {
       expect(
         extractPackageFile(`
         addCompilerPlugin("org.scala-tools.sxr" %% "sxr" % "0.3.0")
-        `)
+        `),
       ).toMatchObject({
         deps: [
           {
@@ -386,8 +386,8 @@ describe('modules/manager/sbt/extract', () => {
           codeBlock`
             sbt.version=1.6.0
           `,
-          'project/build.properties'
-        )
+          'project/build.properties',
+        ),
       ).toMatchObject({
         deps: [
           {
@@ -410,8 +410,8 @@ describe('modules/manager/sbt/extract', () => {
             sbt.version=1.6.0
             another.conf=1.4.0
           `,
-          'project/build.properties'
-        )
+          'project/build.properties',
+        ),
       ).toMatchObject({
         deps: [
           {
@@ -433,8 +433,8 @@ describe('modules/manager/sbt/extract', () => {
           codeBlock`
             another.conf=1.4.0
           `,
-          'project/build.properties'
-        )
+          'project/build.properties',
+        ),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/manager/sbt/extract.ts b/lib/modules/manager/sbt/extract.ts
index d54d1843266d2075f9b0b2f61344551a8e40a1a3..ff07ca18ccb0cf0a101e0059a9aae5d1c0a92915 100644
--- a/lib/modules/manager/sbt/extract.ts
+++ b/lib/modules/manager/sbt/extract.ts
@@ -39,7 +39,7 @@ interface Ctx {
 const scala = lang.createLang('scala');
 
 const sbtVersionRegex = regEx(
-  'sbt\\.version *= *(?<version>\\d+\\.\\d+\\.\\d+)'
+  'sbt\\.version *= *(?<version>\\d+\\.\\d+\\.\\d+)',
 );
 
 const scalaVersionMatch = q
@@ -53,7 +53,7 @@ const scalaVersionMatch = q
         ctx.scalaVersion = scalaVersion;
       }
       return ctx;
-    })
+    }),
   )
   .handler((ctx) => {
     if (ctx.scalaVersion) {
@@ -91,7 +91,7 @@ const packageFileVersionMatch = q
         ctx.packageFileVersion = packageFileVersion;
       }
       return ctx;
-    })
+    }),
   );
 
 const variableNameMatch = q
@@ -113,7 +113,7 @@ const variableDefinitionMatch = q
   .alt(
     q.sym<Ctx>('lazy').join(assignmentMatch),
     assignmentMatch,
-    variableNameMatch.op(':=')
+    variableNameMatch.op(':='),
   )
   .join(variableValueMatch);
 
@@ -125,7 +125,7 @@ const groupIdMatch = q.alt<Ctx>(
     }
     return ctx;
   }),
-  q.str<Ctx>((ctx, { value: groupId }) => ({ ...ctx, groupId }))
+  q.str<Ctx>((ctx, { value: groupId }) => ({ ...ctx, groupId })),
 );
 
 const artifactIdMatch = q.alt<Ctx>(
@@ -136,7 +136,7 @@ const artifactIdMatch = q.alt<Ctx>(
     }
     return ctx;
   }),
-  q.str<Ctx>((ctx, { value: artifactId }) => ({ ...ctx, artifactId }))
+  q.str<Ctx>((ctx, { value: artifactId }) => ({ ...ctx, artifactId })),
 );
 
 const versionMatch = q.alt<Ctx>(
@@ -148,7 +148,7 @@ const versionMatch = q.alt<Ctx>(
     }
     return ctx;
   }),
-  q.str<Ctx>((ctx, { value: currentValue }) => ({ ...ctx, currentValue }))
+  q.str<Ctx>((ctx, { value: currentValue }) => ({ ...ctx, currentValue })),
 );
 
 const simpleDependencyMatch = groupIdMatch
@@ -228,8 +228,8 @@ const sbtPackageMatch = q
     q.alt<Ctx>(
       q.sym<Ctx>('classifier').str(depTypeHandler),
       q.op<Ctx>('%').sym(depTypeHandler),
-      q.op<Ctx>('%').str(depTypeHandler)
-    )
+      q.op<Ctx>('%').str(depTypeHandler),
+    ),
   )
   .handler(depHandler);
 
@@ -262,7 +262,7 @@ const addResolverMatch = q.sym<Ctx>('resolvers').alt(
     type: 'wrapped-tree',
     maxDepth: 1,
     search: resolverMatch,
-  })
+  }),
 );
 
 function registryUrlHandler(ctx: Ctx): Ctx {
@@ -284,14 +284,14 @@ const query = q.tree<Ctx>({
     sbtPackageMatch,
     sbtPluginMatch,
     addResolverMatch,
-    variableDefinitionMatch
+    variableDefinitionMatch,
   ),
   postHandler: registryUrlHandler,
 });
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   if (
     packageFile === 'project/build.properties' ||
diff --git a/lib/modules/manager/sbt/update.spec.ts b/lib/modules/manager/sbt/update.spec.ts
index b3336b956c6e56d1ebf12fde258e48727b380ac1..bd1b8fe021a9f524fc0fc1eacc1d96705400b19f 100644
--- a/lib/modules/manager/sbt/update.spec.ts
+++ b/lib/modules/manager/sbt/update.spec.ts
@@ -11,7 +11,7 @@ describe('modules/manager/sbt/update', () => {
       const { bumpedContent } = sbtUpdater.bumpPackageVersion(
         content,
         '0.0.2',
-        'patch'
+        'patch',
       );
 
       expect(bumpedContent).toEqual(content.replace('0.0.2', '0.0.3'));
@@ -22,7 +22,7 @@ describe('modules/manager/sbt/update', () => {
       const { bumpedContent } = sbtUpdater.bumpPackageVersion(
         content,
         '0.0.1',
-        'patch'
+        'patch',
       );
 
       expect(bumpedContent).toEqual(content);
@@ -32,7 +32,7 @@ describe('modules/manager/sbt/update', () => {
       const { bumpedContent } = sbtUpdater.bumpPackageVersion(
         content,
         '0.0.1',
-        'minor'
+        'minor',
       );
       expect(bumpedContent).toEqual(content.replace('0.0.2', '0.1.0'));
       expect(bumpedContent).not.toEqual(content);
@@ -42,7 +42,7 @@ describe('modules/manager/sbt/update', () => {
       const { bumpedContent } = sbtUpdater.bumpPackageVersion(
         content,
         '0.0.2',
-        true as any
+        true as any,
       );
 
       expect(bumpedContent).toEqual(content);
diff --git a/lib/modules/manager/sbt/update.ts b/lib/modules/manager/sbt/update.ts
index 0fa3ecfe75e40797df82c7b811aa0b1c7501cc09..df85b8ce5bfa75756d4b1c8d4e3d5f5b1915a428 100644
--- a/lib/modules/manager/sbt/update.ts
+++ b/lib/modules/manager/sbt/update.ts
@@ -6,11 +6,11 @@ import type { BumpPackageVersionResult } from '../types';
 export function bumpPackageVersion(
   content: string,
   currentValue: string,
-  bumpVersion: ReleaseType
+  bumpVersion: ReleaseType,
 ): BumpPackageVersionResult {
   logger.debug(
     { bumpVersion, currentValue },
-    'Checking if we should bump build.sbt version'
+    'Checking if we should bump build.sbt version',
   );
   let bumpedContent = content;
   const bumpedVersion = semver.inc(currentValue, bumpVersion);
@@ -20,7 +20,7 @@ export function bumpPackageVersion(
   }
   bumpedContent = content.replace(
     regEx(/^(version\s*:=\s*).*$/m),
-    `$1"${bumpedVersion}"`
+    `$1"${bumpedVersion}"`,
   );
 
   if (bumpedContent === content) {
diff --git a/lib/modules/manager/setup-cfg/extract.ts b/lib/modules/manager/setup-cfg/extract.ts
index 6328cbf6673e8bd16b27c428e6b966da68880efa..3aaca6352305a1813c2061430825d17aff1e9b55 100644
--- a/lib/modules/manager/setup-cfg/extract.ts
+++ b/lib/modules/manager/setup-cfg/extract.ts
@@ -17,7 +17,7 @@ function getSectionRecord(str: string): string {
 
 function getDepType(
   section: string | null,
-  record: string | null
+  record: string | null,
 ): null | string {
   if (section === 'options') {
     if (record === 'install_requires') {
@@ -39,7 +39,7 @@ function getDepType(
 function parseDep(
   line: string,
   section: string | null,
-  record: string | null
+  record: string | null,
 ): PackageDependency | null {
   const packagePattern = '[a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9._-]*[a-zA-Z0-9]';
   const extrasPattern = '(?:\\s*\\[[^\\]]+\\])?';
@@ -47,7 +47,7 @@ function parseDep(
   const rangePattern: string = RANGE_PATTERN;
   const specifierPartPattern = `\\s*${rangePattern.replace(
     regEx(/\?<\w+>/g),
-    '?:'
+    '?:',
   )}`;
   const specifierPattern = `${specifierPartPattern}(?:\\s*,${specifierPartPattern})*`;
   const dependencyPattern = `(${packagePattern})(${extrasPattern})(${specifierPattern})`;
@@ -86,7 +86,7 @@ function parseDep(
 }
 
 export function extractPackageFile(
-  content: string
+  content: string,
 ): Result<PackageFileContent | null> {
   logger.trace('setup-cfg.extractPackageFile()');
 
diff --git a/lib/modules/manager/swift/index.spec.ts b/lib/modules/manager/swift/index.spec.ts
index 0cbaecbdca6759e3e2e33b1cebea3307f9849609..64378889a0eacbbe4d563d81caedcbddbfda96b6 100644
--- a/lib/modules/manager/swift/index.spec.ts
+++ b/lib/modules/manager/swift/index.spec.ts
@@ -28,130 +28,130 @@ describe('modules/manager/swift/index', () => {
       expect(extractPackageFile(`dependencies:[.package(]`)).toBeNull();
       expect(extractPackageFile(`dependencies:[.package(url],`)).toBeNull();
       expect(
-        extractPackageFile(`dependencies:[.package(url.package(]`)
+        extractPackageFile(`dependencies:[.package(url.package(]`),
       ).toBeNull();
       expect(
-        extractPackageFile(`dependencies:[.package(url:.package(`)
+        extractPackageFile(`dependencies:[.package(url:.package(`),
       ).toBeNull();
       expect(extractPackageFile(`dependencies:[.package(url:]`)).toBeNull();
       expect(extractPackageFile(`dependencies:[.package(url:"fo`)).toBeNull();
       expect(extractPackageFile(`dependencies:[.package(url:"fo]`)).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://example.com/something.git"]`
-        )
+          `dependencies:[.package(url:"https://example.com/something.git"]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git"]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git"]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git".package(]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git".package(]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", ]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", ]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", .package(]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", .package(]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", .exact(]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", .exact(]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", exact:]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", exact:]`,
+        ),
       ).toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", exact:.package()]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", exact:.package()]`,
+        ),
       ).toBeNull();
     });
 
     it('parses packages with invalid versions', () => {
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from]`,
+        ),
       ).not.toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from.package(`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from.package(`,
+        ),
       ).not.toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from:]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from:]`,
+        ),
       ).not.toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from:.package(`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git", from:.package(`,
+        ),
       ).not.toBeNull();
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3")]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3")]`,
+        ),
       ).not.toBeNull();
     });
 
     it('parses package descriptions', () => {
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",from:"1.2.3")]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",from:"1.2.3")]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: 'from:"1.2.3"' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3"...)]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3"...)]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '"1.2.3"...' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3"..."1.2.4")]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3"..."1.2.4")]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '"1.2.3"..."1.2.4"' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3"..<"1.2.4")]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git","1.2.3"..<"1.2.4")]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '"1.2.3"..<"1.2.4"' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",..."1.2.3")]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",..."1.2.3")]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '..."1.2.3"' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",..<"1.2.3")]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",..<"1.2.3")]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '..<"1.2.3"' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",.exact("1.2.3"))]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",.exact("1.2.3"))]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '1.2.3' }] });
       expect(
         extractPackageFile(
-          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",exact:"1.2.3"))]`
-        )
+          `dependencies:[.package(url:"https://github.com/vapor/vapor.git",exact:"1.2.3"))]`,
+        ),
       ).toMatchObject({ deps: [{ currentValue: '1.2.3' }] });
     });
 
     it('parses multiple packages', () => {
       expect(
-        extractPackageFile(Fixtures.get(`SamplePackage.swift`))
+        extractPackageFile(Fixtures.get(`SamplePackage.swift`)),
       ).toMatchSnapshot();
     });
   });
diff --git a/lib/modules/manager/tekton/extract.spec.ts b/lib/modules/manager/tekton/extract.spec.ts
index 1415f9325aeaeea0c81962a4e4f367a4133bb6ab..74df01953ae2588bc3354a03c6295686d0ee55cc 100644
--- a/lib/modules/manager/tekton/extract.spec.ts
+++ b/lib/modules/manager/tekton/extract.spec.ts
@@ -6,7 +6,7 @@ describe('modules/manager/tekton/extract', () => {
     it('extracts deps from a file', () => {
       const result = extractPackageFile(
         Fixtures.get('multi-doc.yaml'),
-        'test-file.yaml'
+        'test-file.yaml',
       );
       expect(result).toMatchSnapshot();
       expect(result?.deps).toHaveLength(39);
@@ -23,8 +23,8 @@ describe('modules/manager/tekton/extract', () => {
         ---
         bundle: registry.com/repo
       `,
-          'test-file.yaml'
-        )
+          'test-file.yaml',
+        ),
       ).toBeNull();
     });
 
diff --git a/lib/modules/manager/tekton/extract.ts b/lib/modules/manager/tekton/extract.ts
index e43f789f1025ef123db0067e83963ab5570e48b8..810a4a8673bb8826ecf68cb4f9568c6a15cd948d 100644
--- a/lib/modules/manager/tekton/extract.ts
+++ b/lib/modules/manager/tekton/extract.ts
@@ -13,7 +13,7 @@ import type {
 
 export function extractPackageFile(
   content: string,
-  packageFile: string
+  packageFile: string,
 ): PackageFileContent | null {
   logger.trace(`tekton.extractPackageFile(${packageFile})`);
   const deps: PackageDependency[] = [];
@@ -23,7 +23,7 @@ export function extractPackageFile(
   } catch (err) {
     logger.debug(
       { err, packageFile },
-      'Failed to parse YAML resource as a Tekton resource'
+      'Failed to parse YAML resource as a Tekton resource',
     );
     return null;
   }
@@ -108,14 +108,14 @@ function addDep(ref: TektonBundle, deps: PackageDependency[]): void {
       currentValue: dep.currentValue,
       currentDigest: dep.currentDigest,
     },
-    'Tekton bundle dependency found'
+    'Tekton bundle dependency found',
   );
   deps.push(dep);
 }
 
 function addStepImageSpec(
   spec: TektonResourceSpec | undefined,
-  deps: PackageDependency[]
+  deps: PackageDependency[],
 ): void {
   if (is.nullOrUndefined(spec)) {
     return;
@@ -138,14 +138,14 @@ function addStepImageSpec(
         currentValue: dep.currentValue,
         currentDigest: dep.currentDigest,
       },
-      'Tekton step image dependency found'
+      'Tekton step image dependency found',
     );
     deps.push(dep);
   }
 }
 
 function getBundleValue(
-  fields: TektonResolverParamsField[] | undefined
+  fields: TektonResolverParamsField[] | undefined,
 ): string | undefined {
   for (const field of coerceArray(fields)) {
     if (field.name === 'bundle') {
diff --git a/lib/modules/manager/terraform/base.ts b/lib/modules/manager/terraform/base.ts
index 242a6ca617db48c4bd8190179fcb81bebfd4ca6f..786824c4144e7d68e782cdadc5661b9e406237f0 100644
--- a/lib/modules/manager/terraform/base.ts
+++ b/lib/modules/manager/terraform/base.ts
@@ -21,19 +21,19 @@ export abstract class DependencyExtractor {
   abstract extract(
     hclRoot: TerraformDefinitionFile,
     locks: ProviderLock[],
-    config: ExtractConfig
+    config: ExtractConfig,
   ): PackageDependency[];
 }
 
 export abstract class TerraformProviderExtractor extends DependencyExtractor {
   sourceExtractionRegex = regEx(
-    /^(?:(?<hostname>(?:[a-zA-Z0-9-_]+\.+)+[a-zA-Z0-9-_]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/
+    /^(?:(?<hostname>(?:[a-zA-Z0-9-_]+\.+)+[a-zA-Z0-9-_]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/,
   );
 
   protected analyzeTerraformProvider(
     dep: PackageDependency,
     locks: ProviderLock[],
-    depType: string
+    depType: string,
   ): PackageDependency {
     dep.depType = depType;
     dep.depName = dep.managerData?.moduleName;
diff --git a/lib/modules/manager/terraform/extract.spec.ts b/lib/modules/manager/terraform/extract.spec.ts
index f2f59d12a83ea0fa4803732894e001bad0799bc3..273537bc332c5397c831be368a4753b0f828a0ac 100644
--- a/lib/modules/manager/terraform/extract.spec.ts
+++ b/lib/modules/manager/terraform/extract.spec.ts
@@ -279,7 +279,7 @@ describe('modules/manager/terraform/extract', () => {
       const res = await extractPackageFile(
         azureDevOpsModules,
         'modules.tf',
-        {}
+        {},
       );
       expect(res?.deps).toHaveLength(3);
       expect(res?.deps).toIncludeAllPartialMembers([
@@ -683,7 +683,7 @@ describe('modules/manager/terraform/extract', () => {
       const res = await extractPackageFile(
         lockedVersion,
         'lockedVersion.tf',
-        {}
+        {},
       );
       expect(res?.deps).toHaveLength(3);
       expect(res?.deps.filter((dep) => dep.skipReason)).toHaveLength(0);
@@ -719,7 +719,7 @@ describe('modules/manager/terraform/extract', () => {
       const res = await extractPackageFile(
         terraformBlock,
         'terraformBlock.tf',
-        {}
+        {},
       );
       expect(res?.deps).toHaveLength(1);
       expect(res?.deps.filter((dep) => dep.skipReason)).toHaveLength(0);
@@ -739,7 +739,7 @@ describe('modules/manager/terraform/extract', () => {
       const res = await extractPackageFile(
         tfeWorkspaceBlock,
         'tfeWorkspace.tf',
-        {}
+        {},
       );
       expect(res?.deps).toHaveLength(3);
       expect(res?.deps.filter((dep) => dep.skipReason)).toHaveLength(1);
@@ -770,7 +770,7 @@ describe('modules/manager/terraform/extract', () => {
           resource my provider
         `,
         'tfeWorkspace.tf',
-        {}
+        {},
       );
       expect(res).toBeNull();
     });
diff --git a/lib/modules/manager/terraform/extract.ts b/lib/modules/manager/terraform/extract.ts
index 9196c45aae3cd3091489ea6cacda0734004093a5..ff3805d34d59dea2f6577d4c183eccdd5e8bc7e5 100644
--- a/lib/modules/manager/terraform/extract.ts
+++ b/lib/modules/manager/terraform/extract.ts
@@ -16,7 +16,7 @@ import {
 export async function extractPackageFile(
   content: string,
   packageFile: string,
-  config: ExtractConfig
+  config: ExtractConfig,
 ): Promise<PackageFileContent | null> {
   logger.trace({ content }, `terraform.extractPackageFile(${packageFile})`);
 
@@ -30,7 +30,7 @@ export async function extractPackageFile(
   if (!passedExtractors.length) {
     logger.debug(
       { packageFile },
-      'preflight content check has not found any relevant content'
+      'preflight content check has not found any relevant content',
     );
     return null;
   }
@@ -38,7 +38,7 @@ export async function extractPackageFile(
     { packageFile },
     `preflight content check passed for extractors: [${passedExtractors
       .map((value) => value.constructor.name)
-      .toString()}]`
+      .toString()}]`,
   );
 
   const dependencies: PackageDependency[] = [];
diff --git a/lib/modules/manager/terraform/extractors/others/modules.spec.ts b/lib/modules/manager/terraform/extractors/others/modules.spec.ts
index eb2ff9844bc401ed977f777bd36b26262a95e959..82be37043c13870ae0d7da6ac5994ef3a3f67f0b 100644
--- a/lib/modules/manager/terraform/extractors/others/modules.spec.ts
+++ b/lib/modules/manager/terraform/extractors/others/modules.spec.ts
@@ -17,13 +17,13 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
   describe('githubRefMatchRegex', () => {
     it('should split project and tag from source', () => {
       const groups = githubRefMatchRegex.exec(
-        'github.com/hashicorp/example?ref=v1.0.0'
+        'github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       const depth = githubRefMatchRegex.exec(
-        'github.com/hashicorp/example?depth=1&ref=v1.0.0'
+        'github.com/hashicorp/example?depth=1&ref=v1.0.0',
       )?.groups;
       const depth2 = githubRefMatchRegex.exec(
-        'github.com/hashicorp/example?ref=v1.0.0&depth=1'
+        'github.com/hashicorp/example?ref=v1.0.0&depth=1',
       )?.groups;
       expect(groups).toEqual({
         project: 'hashicorp/example',
@@ -41,7 +41,7 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should parse alpha-numeric characters as well as dots, underscores, and dashes in repo names', () => {
       const groups = githubRefMatchRegex.exec(
-        'github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
       expect(groups).toEqual({
         project: 'hashicorp/example.repo-123',
@@ -53,22 +53,22 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
   describe('gitTagsRefMatchRegex', () => {
     it('should split project and tag from source', () => {
       const http = gitTagsRefMatchRegex.exec(
-        'http://github.com/hashicorp/example?ref=v1.0.0'
+        'http://github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       const https = gitTagsRefMatchRegex.exec(
-        'https://github.com/hashicorp/example?ref=v1.0.0'
+        'https://github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       const ssh = gitTagsRefMatchRegex.exec(
-        'ssh://github.com/hashicorp/example?ref=v1.0.0'
+        'ssh://github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       const depth = gitTagsRefMatchRegex.exec(
-        'ssh://github.com/hashicorp/example?depth=1&ref=v1.0.0'
+        'ssh://github.com/hashicorp/example?depth=1&ref=v1.0.0',
       )?.groups;
       const depth2 = gitTagsRefMatchRegex.exec(
-        'ssh://github.com/hashicorp/example?ref=v1.0.0&depth=1'
+        'ssh://github.com/hashicorp/example?ref=v1.0.0&depth=1',
       )?.groups;
       const folder = gitTagsRefMatchRegex.exec(
-        'git::ssh://git@git.example.com/modules/foo-module.git//bar?depth=1&ref=v1.0.0'
+        'git::ssh://git@git.example.com/modules/foo-module.git//bar?depth=1&ref=v1.0.0',
       )?.groups;
 
       expect(http).toMatchObject({
@@ -99,17 +99,17 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should parse alpha-numeric characters as well as dots, underscores, and dashes in repo names', () => {
       const http = gitTagsRefMatchRegex.exec(
-        'http://github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'http://github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
       const https = gitTagsRefMatchRegex.exec(
-        'https://github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'https://github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
       const ssh = gitTagsRefMatchRegex.exec(
-        'ssh://github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'ssh://github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
 
       const withoutSshHttpHttps = gitTagsRefMatchRegex.exec(
-        'git@my-gitlab-instance.local:devops/terraform/instance.git?ref=v5.0.0'
+        'git@my-gitlab-instance.local:devops/terraform/instance.git?ref=v5.0.0',
       )?.groups;
 
       expect(http).toMatchObject({
@@ -134,25 +134,25 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
   describe('bitbucketRefMatchRegex', () => {
     it('should split workspace, project and tag from source', () => {
       const ssh = bitbucketRefMatchRegex.exec(
-        'git::ssh://git@bitbucket.org/hashicorp/example.git?ref=v1.0.0'
+        'git::ssh://git@bitbucket.org/hashicorp/example.git?ref=v1.0.0',
       )?.groups;
       const https = bitbucketRefMatchRegex.exec(
-        'git::https://git@bitbucket.org/hashicorp/example.git?ref=v1.0.0'
+        'git::https://git@bitbucket.org/hashicorp/example.git?ref=v1.0.0',
       )?.groups;
       const plain = bitbucketRefMatchRegex.exec(
-        'bitbucket.org/hashicorp/example.git?ref=v1.0.0'
+        'bitbucket.org/hashicorp/example.git?ref=v1.0.0',
       )?.groups;
       const subfolder = bitbucketRefMatchRegex.exec(
-        'bitbucket.org/hashicorp/example.git/terraform?ref=v1.0.0'
+        'bitbucket.org/hashicorp/example.git/terraform?ref=v1.0.0',
       )?.groups;
       const subfolderWithDoubleSlash = bitbucketRefMatchRegex.exec(
-        'bitbucket.org/hashicorp/example.git//terraform?ref=v1.0.0'
+        'bitbucket.org/hashicorp/example.git//terraform?ref=v1.0.0',
       )?.groups;
       const depth = bitbucketRefMatchRegex.exec(
-        'git::https://git@bitbucket.org/hashicorp/example.git?depth=1&ref=v1.0.0'
+        'git::https://git@bitbucket.org/hashicorp/example.git?depth=1&ref=v1.0.0',
       )?.groups;
       const depth2 = bitbucketRefMatchRegex.exec(
-        'git::https://git@bitbucket.org/hashicorp/example.git?ref=v1.0.0&depth=1'
+        'git::https://git@bitbucket.org/hashicorp/example.git?ref=v1.0.0&depth=1',
       )?.groups;
 
       expect(ssh).toMatchObject({
@@ -194,7 +194,7 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should parse alpha-numeric characters as well as dots, underscores, and dashes in repo names', () => {
       const dots = bitbucketRefMatchRegex.exec(
-        'bitbucket.org/hashicorp/example.repo-123.git?ref=v1.0.0'
+        'bitbucket.org/hashicorp/example.repo-123.git?ref=v1.0.0',
       )?.groups;
 
       expect(dots).toMatchObject({
@@ -208,7 +208,7 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
   describe('azureDevOpsSshRefMatchRegex', () => {
     it('should split organization, project, repository and tag from source url', () => {
       const ssh = azureDevOpsSshRefMatchRegex.exec(
-        'git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository?ref=1.0.0'
+        'git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository?ref=1.0.0',
       )?.groups;
 
       expect(ssh).toEqual({
@@ -223,7 +223,7 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should split organization, project, repository and tag from source url with git prefix', () => {
       const sshGit = azureDevOpsSshRefMatchRegex.exec(
-        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository?ref=1.0.0'
+        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository?ref=1.0.0',
       )?.groups;
 
       expect(sshGit).toEqual({
@@ -238,7 +238,7 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should split organization, project, repository and tag from source url with subfolder', () => {
       const subfolder = azureDevOpsSshRefMatchRegex.exec(
-        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?ref=1.0.0'
+        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?ref=1.0.0',
       )?.groups;
 
       expect(subfolder).toEqual({
@@ -253,10 +253,10 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should split organization, project, repository and tag from source url with depth argument', () => {
       const depth = azureDevOpsSshRefMatchRegex.exec(
-        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?depth=1&ref=1.0.0'
+        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?depth=1&ref=1.0.0',
       )?.groups;
       const depth2 = azureDevOpsSshRefMatchRegex.exec(
-        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?ref=1.0.0&depth=1'
+        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?ref=1.0.0&depth=1',
       )?.groups;
 
       expect(depth).toEqual({
@@ -279,7 +279,7 @@ describe('modules/manager/terraform/extractors/others/modules', () => {
 
     it('should parse alpha-numeric characters as well as dots, underscores, and dashes in repo names', () => {
       const dots = azureDevOpsSshRefMatchRegex.exec(
-        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?ref=v1.0.0'
+        'git::git@ssh.dev.azure.com:v3/MyOrg/MyProject/MyRepository//some-module/path?ref=v1.0.0',
       )?.groups;
 
       expect(dots).toEqual({
diff --git a/lib/modules/manager/terraform/extractors/others/modules.ts b/lib/modules/manager/terraform/extractors/others/modules.ts
index 012add6e899a5fbb8e09189698809fe6d0ef20c7..c6d129dcf5cf5e1d23e2e980c20c2e56d9a7ca2d 100644
--- a/lib/modules/manager/terraform/extractors/others/modules.ts
+++ b/lib/modules/manager/terraform/extractors/others/modules.ts
@@ -10,16 +10,16 @@ import { DependencyExtractor } from '../../base';
 import type { TerraformDefinitionFile } from '../../hcl/types';
 
 export const githubRefMatchRegex = regEx(
-  /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/i
+  /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/i,
 );
 export const bitbucketRefMatchRegex = regEx(
-  /(?:git::)?(?<url>(?:http|https|ssh)?(?::\/\/)?(?:.*@)?(?<path>bitbucket\.org\/(?<workspace>.*)\/(?<project>.*).git\/?(?<subfolder>.*)))\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/
+  /(?:git::)?(?<url>(?:http|https|ssh)?(?::\/\/)?(?:.*@)?(?<path>bitbucket\.org\/(?<workspace>.*)\/(?<project>.*).git\/?(?<subfolder>.*)))\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/,
 );
 export const gitTagsRefMatchRegex = regEx(
-  /(?:git::)?(?<url>(?:(?:http|https|ssh):\/\/)?(?:.*@)?(?<path>.*\/(?<project>.*\/.*)))\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/
+  /(?:git::)?(?<url>(?:(?:http|https|ssh):\/\/)?(?:.*@)?(?<path>.*\/(?<project>.*\/.*)))\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/,
 );
 export const azureDevOpsSshRefMatchRegex = regEx(
-  /(?:git::)?(?<url>git@ssh\.dev\.azure\.com:v3\/(?<organization>[^/]*)\/(?<project>[^/]*)\/(?<repository>[^/]*))(?<modulepath>.*)?\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/
+  /(?:git::)?(?<url>git@ssh\.dev\.azure\.com:v3\/(?<organization>[^/]*)\/(?<project>[^/]*)\/(?<repository>[^/]*))(?<modulepath>.*)?\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/,
 );
 const hostnameMatchRegex = regEx(/^(?<hostname>([\w|\d]+\.)+[\w|\d]+)/);
 
@@ -69,7 +69,7 @@ export class ModuleExtractor extends DependencyExtractor {
     if (githubRefMatch?.groups) {
       dep.packageName = githubRefMatch.groups.project.replace(
         regEx(/\.git$/),
-        ''
+        '',
       );
       dep.depName = 'github.com/' + dep.packageName;
       dep.currentValue = githubRefMatch.groups.tag;
diff --git a/lib/modules/manager/terraform/extractors/others/providers.ts b/lib/modules/manager/terraform/extractors/others/providers.ts
index 11332966171a6647f2f6ab61b054e2a5cd865f99..2a3f8b9019452f3d9828d3ebe88673ed37d114e5 100644
--- a/lib/modules/manager/terraform/extractors/others/providers.ts
+++ b/lib/modules/manager/terraform/extractors/others/providers.ts
@@ -12,7 +12,7 @@ export class ProvidersExtractor extends TerraformProviderExtractor {
 
   extract(
     hclRoot: TerraformDefinitionFile,
-    locks: ProviderLock[]
+    locks: ProviderLock[],
   ): PackageDependency[] {
     const providerTypes = hclRoot?.provider;
     if (is.nullOrUndefined(providerTypes)) {
@@ -23,7 +23,7 @@ export class ProvidersExtractor extends TerraformProviderExtractor {
     if (!is.plainObject(providerTypes)) {
       logger.debug(
         { providerTypes },
-        'Terraform: unexpected `providerTypes` value'
+        'Terraform: unexpected `providerTypes` value',
       );
       return [];
     }
@@ -39,7 +39,7 @@ export class ProvidersExtractor extends TerraformProviderExtractor {
             },
           },
           locks,
-          'provider'
+          'provider',
         );
         dependencies.push(dep);
       }
diff --git a/lib/modules/manager/terraform/extractors/resources/generic-docker-image-ref.ts b/lib/modules/manager/terraform/extractors/resources/generic-docker-image-ref.ts
index 7e89f12fd8200e9dee063c248636bf400e7dc10e..5dcbd169c8fc6f61e3d713dabbc11038044c995e 100644
--- a/lib/modules/manager/terraform/extractors/resources/generic-docker-image-ref.ts
+++ b/lib/modules/manager/terraform/extractors/resources/generic-docker-image-ref.ts
@@ -14,7 +14,7 @@ export class GenericDockerImageRefExtractor extends DependencyExtractor {
   extract(
     hclMap: TerraformDefinitionFile,
     _locks: ProviderLock[],
-    config: ExtractConfig
+    config: ExtractConfig,
   ): PackageDependency[] {
     const resourceTypMap = hclMap.resource;
     if (is.nullOrUndefined(resourceTypMap)) {
@@ -34,7 +34,7 @@ export class GenericDockerImageRefExtractor extends DependencyExtractor {
       // loop over instances of a resource type
       for (const instance of Object.values(resourceInstancesMap).flat()) {
         dependencies.push(
-          ...this.walkPath({ depType: type }, instance, path, config)
+          ...this.walkPath({ depType: type }, instance, path, config),
         );
       }
     }
@@ -53,7 +53,7 @@ export class GenericDockerImageRefExtractor extends DependencyExtractor {
     abstractDep: PackageDependency,
     parentElement: unknown,
     leftPath: string[],
-    config: ExtractConfig
+    config: ExtractConfig,
   ): PackageDependency[] {
     const dependencies: PackageDependency[] = [];
     // if there are no path elements left, we have reached the end of the path
@@ -95,7 +95,12 @@ export class GenericDockerImageRefExtractor extends DependencyExtractor {
     if (is.array(element)) {
       for (const arrayElement of element) {
         dependencies.push(
-          ...this.walkPath(abstractDep, arrayElement, leftPath.slice(1), config)
+          ...this.walkPath(
+            abstractDep,
+            arrayElement,
+            leftPath.slice(1),
+            config,
+          ),
         );
       }
       return dependencies;
diff --git a/lib/modules/manager/terraform/extractors/resources/helm-release.ts b/lib/modules/manager/terraform/extractors/resources/helm-release.ts
index 4e2ec94052ff35e8143a1d0a3dfad31d65133aa4..c2f5f8ef6b6aac889b5468092c27b1a18dfa9770 100644
--- a/lib/modules/manager/terraform/extractors/resources/helm-release.ts
+++ b/lib/modules/manager/terraform/extractors/resources/helm-release.ts
@@ -18,7 +18,7 @@ export class HelmReleaseExtractor extends DependencyExtractor {
   override extract(
     hclMap: TerraformDefinitionFile,
     _locks: ProviderLock[],
-    config: ExtractConfig
+    config: ExtractConfig,
   ): PackageDependency[] {
     const dependencies = [];
 
@@ -31,7 +31,7 @@ export class HelmReleaseExtractor extends DependencyExtractor {
     if (!is.plainObject(helmReleases)) {
       logger.debug(
         { helmReleases },
-        'Terraform: unexpected `helmReleases` value'
+        'Terraform: unexpected `helmReleases` value',
       );
       return [];
     }
@@ -60,10 +60,10 @@ export class HelmReleaseExtractor extends DependencyExtractor {
           this.processOCI(
             joinUrlParts(
               helmRelease.repository.replace('oci://', ''),
-              helmRelease.chart
+              helmRelease.chart,
             ),
             config,
-            dep
+            dep,
           );
         } else {
           dep.registryUrls = [helmRelease.repository];
@@ -77,12 +77,12 @@ export class HelmReleaseExtractor extends DependencyExtractor {
   private processOCI(
     depName: string,
     config: ExtractConfig,
-    dep: PackageDependency
+    dep: PackageDependency,
   ): void {
     const { depName: packageName, datasource } = getDep(
       depName,
       false,
-      config.registryAliases
+      config.registryAliases,
     );
     dep.packageName = packageName;
     dep.datasource = datasource;
diff --git a/lib/modules/manager/terraform/extractors/terraform-block/required-provider.ts b/lib/modules/manager/terraform/extractors/terraform-block/required-provider.ts
index 79c447f3830a8982bbabceb49f3b5eeb55e2d870..5062d5fbedfc3365075d5d5d2550467c633ec206 100644
--- a/lib/modules/manager/terraform/extractors/terraform-block/required-provider.ts
+++ b/lib/modules/manager/terraform/extractors/terraform-block/required-provider.ts
@@ -14,7 +14,7 @@ export class RequiredProviderExtractor extends TerraformProviderExtractor {
 
   extract(
     hclRoot: TerraformDefinitionFile,
-    locks: ProviderLock[]
+    locks: ProviderLock[],
   ): PackageDependency[] {
     const terraformBlocks = hclRoot?.terraform;
     if (is.nullOrUndefined(terraformBlocks)) {
@@ -51,7 +51,7 @@ export class RequiredProviderExtractor extends TerraformProviderExtractor {
           };
         }
         dependencies.push(
-          this.analyzeTerraformProvider(dep, locks, 'required_provider')
+          this.analyzeTerraformProvider(dep, locks, 'required_provider'),
         );
       }
     }
diff --git a/lib/modules/manager/terraform/extractors/terraform-block/terraform-version.ts b/lib/modules/manager/terraform/extractors/terraform-block/terraform-version.ts
index a7b31810ed242efc6978fa7a94c699931992b26c..9322492c80c558a5eaa18825380b155398589c08 100644
--- a/lib/modules/manager/terraform/extractors/terraform-block/terraform-version.ts
+++ b/lib/modules/manager/terraform/extractors/terraform-block/terraform-version.ts
@@ -26,7 +26,7 @@ export class TerraformVersionExtractor extends DependencyExtractor {
       dependencies.push(
         this.analyseTerraformVersion({
           currentValue: requiredVersion,
-        })
+        }),
       );
     }
     return dependencies;
diff --git a/lib/modules/manager/terraform/hcl/index.ts b/lib/modules/manager/terraform/hcl/index.ts
index 8691daf3801a5f89efbed2aafba3e52c1c9a7dc0..6b3df514b069a4999404026161749f9e000300c9 100644
--- a/lib/modules/manager/terraform/hcl/index.ts
+++ b/lib/modules/manager/terraform/hcl/index.ts
@@ -3,7 +3,7 @@ import type { TerraformDefinitionFile } from './types';
 
 export async function parseHCL(
   content: string,
-  fileName: string
+  fileName: string,
 ): Promise<TerraformDefinitionFile | null> {
   try {
     return await parse(fileName, content);
diff --git a/lib/modules/manager/terraform/lockfile/hash.spec.ts b/lib/modules/manager/terraform/lockfile/hash.spec.ts
index 79c14232fba2f1c52868e95d2071dcafc42c4838..0dd528599b27e3b3b4089455aa5cbc98288d64bb 100644
--- a/lib/modules/manager/terraform/lockfile/hash.spec.ts
+++ b/lib/modules/manager/terraform/lockfile/hash.spec.ts
@@ -12,17 +12,17 @@ const terraformCloudReleaseBackendUrl =
   TerraformProviderDatasource.defaultRegistryUrls[0];
 const releaseBackendAzurerm = Fixtures.get('releaseBackendAzurerm_2_56_0.json');
 const releaseBackendGoogleSha256 = Fixtures.get(
-  'releaseBackendGoogle_4_84_0_SHA256SUMS'
+  'releaseBackendGoogle_4_84_0_SHA256SUMS',
 );
 const terraformCloudSDCJson = Fixtures.get(
   'service-discovery.json',
-  '../../../../modules/datasource/terraform-provider/'
+  '../../../../modules/datasource/terraform-provider/',
 );
 const terraformCloudBackendAzurermVersions = Fixtures.get(
-  'terraformCloudBackendAzurermVersions.json'
+  'terraformCloudBackendAzurermVersions.json',
 );
 const terraformCloudBackendGoogleVersions = Fixtures.get(
-  'terraformCloudBackendGoogleVersions.json'
+  'terraformCloudBackendGoogleVersions.json',
 );
 
 const log = logger.logger;
@@ -45,7 +45,7 @@ describe('modules/manager/terraform/lockfile/hash', () => {
     const result = await TerraformProviderHash.createHashes(
       'https://example.com',
       'test/gitlab',
-      '2.56.0'
+      '2.56.0',
     );
     expect(result).toBeNull();
   });
@@ -59,7 +59,7 @@ describe('modules/manager/terraform/lockfile/hash', () => {
     const result = await TerraformProviderHash.createHashes(
       'https://releases.hashicorp.com',
       'hashicorp/azurerm',
-      '2.59.0'
+      '2.59.0',
     );
     expect(result).toBeNull();
   });
@@ -73,28 +73,28 @@ describe('modules/manager/terraform/lockfile/hash', () => {
     const result = await TerraformProviderHash.createHashes(
       'https://releases.hashicorp.com',
       'hashicorp/azurerm',
-      '2.56.0'
+      '2.56.0',
     );
     expect(result).toBeNull();
   });
 
   it('fail to create hashes', async () => {
     const readStreamLinux = createReadStream(
-      getFixturePath('releaseBackendAzurerm_2_56_0.json')
+      getFixturePath('releaseBackendAzurerm_2_56_0.json'),
     );
     const readStreamDarwin = createReadStream(
-      getFixturePath('releaseBackendAzurerm_2_56_0.json')
+      getFixturePath('releaseBackendAzurerm_2_56_0.json'),
     );
     httpMock
       .scope(releaseBackendUrl)
       .get('/terraform-provider-azurerm/2.56.0/index.json')
       .reply(200, releaseBackendAzurerm)
       .get(
-        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip'
+        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip',
       )
       .reply(200, readStreamLinux)
       .get(
-        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip'
+        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip',
       )
       .reply(200, readStreamDarwin);
 
@@ -102,35 +102,35 @@ describe('modules/manager/terraform/lockfile/hash', () => {
       TerraformProviderHash.createHashes(
         'https://releases.hashicorp.com',
         'hashicorp/azurerm',
-        '2.56.0'
-      )
+        '2.56.0',
+      ),
     ).rejects.toThrow();
   });
 
   it('full walkthrough', async () => {
     const readStreamLinux = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     const readStreamDarwin = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     httpMock
       .scope(releaseBackendUrl)
       .get('/terraform-provider-azurerm/2.56.0/index.json')
       .reply(200, releaseBackendAzurerm)
       .get(
-        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip'
+        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip',
       )
       .reply(200, readStreamLinux)
       .get(
-        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip'
+        '/terraform-provider-azurerm/2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip',
       )
       .reply(200, readStreamDarwin);
 
     const result = await TerraformProviderHash.createHashes(
       'https://releases.hashicorp.com',
       'hashicorp/azurerm',
-      '2.56.0'
+      '2.56.0',
     );
     expect(log.error.mock.calls).toMatchSnapshot();
     expect(result).not.toBeNull();
@@ -143,10 +143,10 @@ describe('modules/manager/terraform/lockfile/hash', () => {
 
   it('full walkthrough on terraform cloud', async () => {
     const readStreamLinux = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     const readStreamDarwin = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     httpMock
       .scope(terraformCloudReleaseBackendUrl)
@@ -178,22 +178,22 @@ describe('modules/manager/terraform/lockfile/hash', () => {
     httpMock
       .scope('https://github.com')
       .get(
-        '/hashicorp/terraform-provider-google/releases/download/v4.84.0/terraform-provider-google_4.84.0_SHA256SUMS'
+        '/hashicorp/terraform-provider-google/releases/download/v4.84.0/terraform-provider-google_4.84.0_SHA256SUMS',
       )
       .reply(200, releaseBackendGoogleSha256)
       .get(
-        '/hashicorp/terraform-provider-google/releases/download/v4.84.0/terraform-provider-google_4.84.0_linux_amd64.zip'
+        '/hashicorp/terraform-provider-google/releases/download/v4.84.0/terraform-provider-google_4.84.0_linux_amd64.zip',
       )
       .reply(200, readStreamLinux)
       .get(
-        '/hashicorp/terraform-provider-google/releases/download/v4.84.0/terraform-provider-google_4.84.0_darwin_amd64.zip'
+        '/hashicorp/terraform-provider-google/releases/download/v4.84.0/terraform-provider-google_4.84.0_darwin_amd64.zip',
       )
       .reply(200, readStreamDarwin);
 
     const result = await TerraformProviderHash.createHashes(
       'https://registry.terraform.io',
       'hashicorp/google',
-      '4.84.0'
+      '4.84.0',
     );
     expect(log.error.mock.calls).toBeEmptyArray();
     expect(result).toMatchObject([
@@ -208,10 +208,10 @@ describe('modules/manager/terraform/lockfile/hash', () => {
 
   it('full walkthrough without ziphashes available', async () => {
     const readStreamLinux = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     const readStreamDarwin = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     httpMock
       .scope(terraformCloudReleaseBackendUrl)
@@ -239,18 +239,18 @@ describe('modules/manager/terraform/lockfile/hash', () => {
     httpMock
       .scope('https://github.com')
       .get(
-        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip'
+        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip',
       )
       .reply(200, readStreamLinux)
       .get(
-        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip'
+        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip',
       )
       .reply(200, readStreamDarwin);
 
     const result = await TerraformProviderHash.createHashes(
       'https://registry.terraform.io',
       'hashicorp/azurerm',
-      '2.56.0'
+      '2.56.0',
     );
     expect(log.error.mock.calls).toBeEmptyArray();
     expect(result).toMatchObject([
@@ -261,10 +261,10 @@ describe('modules/manager/terraform/lockfile/hash', () => {
 
   it('it does not add any ziphashes when the shasums endpoint fails`', async () => {
     const readStreamLinux = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
     const readStreamDarwin = createReadStream(
-      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip'
+      'lib/modules/manager/terraform/lockfile/__fixtures__/test.zip',
     );
 
     httpMock
@@ -297,22 +297,22 @@ describe('modules/manager/terraform/lockfile/hash', () => {
     httpMock
       .scope('https://github.com')
       .get(
-        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS'
+        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_SHA256SUMS',
       )
       .replyWithError('endoint failed')
       .get(
-        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip'
+        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_linux_amd64.zip',
       )
       .reply(200, readStreamLinux)
       .get(
-        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip'
+        '/hashicorp/terraform-provider-azurerm/releases/download/v2.56.0/terraform-provider-azurerm_2.56.0_darwin_amd64.zip',
       )
       .reply(200, readStreamDarwin);
 
     const result = await TerraformProviderHash.createHashes(
       'https://registry.terraform.io',
       'hashicorp/azurerm',
-      '2.56.0'
+      '2.56.0',
     );
 
     expect(log.error.mock.calls).toBeEmptyArray();
diff --git a/lib/modules/manager/terraform/lockfile/hash.ts b/lib/modules/manager/terraform/lockfile/hash.ts
index 08ebab3967deb1514e044dcac64a46789a41a2ce..62357e30fe75b7cbd1bb27f276592cb87e0ce5e4 100644
--- a/lib/modules/manager/terraform/lockfile/hash.ts
+++ b/lib/modules/manager/terraform/lockfile/hash.ts
@@ -42,7 +42,7 @@ export class TerraformProviderHash {
 
   static async hashOfZipContent(
     zipFilePath: string,
-    extractPath: string
+    extractPath: string,
   ): Promise<string> {
     await extract(zipFilePath, { dir: extractPath });
     const files = await fs.listCacheDir(extractPath);
@@ -65,12 +65,12 @@ export class TerraformProviderHash {
   })
   static async calculateSingleHash(
     build: TerraformBuild,
-    cacheDir: string
+    cacheDir: string,
   ): Promise<string> {
     const downloadFileName = upath.join(cacheDir, build.filename);
     const extractPath = upath.join(cacheDir, 'extract', build.filename);
     logger.trace(
-      `Downloading archive and generating hash for ${build.name}-${build.version}...`
+      `Downloading archive and generating hash for ${build.name}-${build.version}...`,
     );
     const readStream = TerraformProviderHash.http.stream(build.url);
     const writeStream = fs.createCacheWriteStream(downloadFileName);
@@ -81,7 +81,7 @@ export class TerraformProviderHash {
       const hash = await this.hashOfZipContent(downloadFileName, extractPath);
       logger.trace(
         { hash },
-        `Generated hash for ${build.name}-${build.version}`
+        `Generated hash for ${build.name}-${build.version}`,
       );
       return hash;
     } finally {
@@ -91,7 +91,7 @@ export class TerraformProviderHash {
   }
 
   static async calculateHashScheme1Hashes(
-    builds: TerraformBuild[]
+    builds: TerraformBuild[],
   ): Promise<string[]> {
     const cacheDir = await ensureCacheDir('./others/terraform');
 
@@ -104,12 +104,12 @@ export class TerraformProviderHash {
   static async createHashes(
     registryURL: string,
     repository: string,
-    version: string
+    version: string,
   ): Promise<string[] | null> {
     const builds = await TerraformProviderHash.terraformDatasource.getBuilds(
       registryURL,
       repository,
-      version
+      version,
     );
     if (!builds) {
       return null;
@@ -119,12 +119,12 @@ export class TerraformProviderHash {
     if (builds.length > 0 && builds[0].shasums_url) {
       zhHashes =
         (await TerraformProviderHash.terraformDatasource.getZipHashes(
-          builds[0].shasums_url
+          builds[0].shasums_url,
         )) ?? [];
     }
 
     const h1Hashes = await TerraformProviderHash.calculateHashScheme1Hashes(
-      builds
+      builds,
     );
 
     const hashes = [];
diff --git a/lib/modules/manager/terraform/lockfile/index.spec.ts b/lib/modules/manager/terraform/lockfile/index.spec.ts
index 9db80efa401a4699cdb74b68ab8c30fefb3ba600..8681a6c4f4b76892223530823fd8833b500c96e3 100644
--- a/lib/modules/manager/terraform/lockfile/index.spec.ts
+++ b/lib/modules/manager/terraform/lockfile/index.spec.ts
@@ -41,7 +41,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
         updatedDeps: [{ depName: 'aws' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -55,7 +55,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
         updatedDeps: [{ depName: 'aws' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -69,7 +69,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
         updatedDeps: [{ depName: 'aws' }],
         newPackageFileContent: '',
         config,
-      })
+      }),
     ).toBeNull();
   });
 
@@ -335,7 +335,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
       }
     `);
     fs.findLocalSiblingOrParent.mockResolvedValueOnce(
-      'test/.terraform.lock.hcl'
+      'test/.terraform.lock.hcl',
     );
 
     mockHash.mockResolvedValueOnce([
@@ -421,7 +421,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
       }
     `);
     fs.findLocalSiblingOrParent.mockResolvedValueOnce(
-      'test/.terraform.lock.hcl'
+      'test/.terraform.lock.hcl',
     );
 
     mockHash.mockResolvedValue([
@@ -658,7 +658,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
       }
     `);
     fs.findLocalSiblingOrParent.mockResolvedValueOnce(
-      'subfolder/.terraform.lock.hcl'
+      'subfolder/.terraform.lock.hcl',
     );
 
     mockGetPkgReleases
@@ -680,7 +680,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
       })
       .mockResolvedValueOnce(
         // random
-        null
+        null,
       );
     mockHash.mockResolvedValue([
       'h1:lDsKRxDRXPEzA4AxkK4t+lJd3IQIP2UoaplJGjQSp2s=',
@@ -743,7 +743,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
           "2.56.0",
         ],
       ]
-    `
+    `,
     );
   });
 
@@ -893,7 +893,7 @@ describe('modules/manager/terraform/lockfile/index', () => {
           "2.2.2",
         ],
       ]
-    `
+    `,
     );
   });
 
diff --git a/lib/modules/manager/terraform/lockfile/index.ts b/lib/modules/manager/terraform/lockfile/index.ts
index 287df2bff9b17977338d37a44d180ebfe09d5116..fa69ebe6f88b8ce139c516cc7389d0d0457bc92b 100644
--- a/lib/modules/manager/terraform/lockfile/index.ts
+++ b/lib/modules/manager/terraform/lockfile/index.ts
@@ -21,7 +21,7 @@ import {
 } from './util';
 
 async function updateAllLocks(
-  locks: ProviderLock[]
+  locks: ProviderLock[],
 ): Promise<ProviderLockUpdate[]> {
   const updates = await p.map(
     locks,
@@ -39,7 +39,7 @@ async function updateAllLocks(
       const versionsList = releases.map((release) => release.version);
       const newVersion = versioning.getSatisfyingVersion(
         versionsList,
-        lock.constraints
+        lock.constraints,
       );
 
       // if the new version is the same as the last, signal that no update is needed
@@ -53,13 +53,13 @@ async function updateAllLocks(
           (await TerraformProviderHash.createHashes(
             lock.registryUrl,
             lock.packageName,
-            newVersion
+            newVersion,
           )) ?? [],
         ...lock,
       };
       return update;
     },
-    { concurrency: 4 }
+    { concurrency: 4 },
   );
 
   return updates.filter(is.truthy);
@@ -67,14 +67,14 @@ async function updateAllLocks(
 
 function getNewConstraint(
   dep: Upgrade<Record<string, unknown>>,
-  oldConstraint: string | undefined
+  oldConstraint: string | undefined,
 ): string | undefined {
   const { currentValue, currentVersion, newValue, newVersion, packageName } =
     dep;
 
   if (oldConstraint && currentValue && newValue && currentValue === newValue) {
     logger.debug(
-      `Leaving constraints "${oldConstraint}" unchanged for "${packageName}" as current and new values are the same`
+      `Leaving constraints "${oldConstraint}" unchanged for "${packageName}" as current and new values are the same`,
     );
     return oldConstraint;
   }
@@ -86,7 +86,7 @@ function getNewConstraint(
     oldConstraint.includes(currentValue)
   ) {
     logger.debug(
-      `Updating constraint "${oldConstraint}" to replace "${currentValue}" with "${newValue}" for "${packageName}"`
+      `Updating constraint "${oldConstraint}" to replace "${currentValue}" with "${newValue}" for "${packageName}"`,
     );
     return oldConstraint.replace(currentValue, newValue);
   }
@@ -98,7 +98,7 @@ function getNewConstraint(
     oldConstraint.includes(currentVersion)
   ) {
     logger.debug(
-      `Updating constraint "${oldConstraint}" to replace "${currentVersion}" with "${newVersion}" for "${packageName}"`
+      `Updating constraint "${oldConstraint}" to replace "${currentVersion}" with "${newVersion}" for "${packageName}"`,
     );
     return oldConstraint.replace(currentVersion, newVersion);
   }
@@ -109,7 +109,7 @@ function getNewConstraint(
   }
 
   logger.debug(
-    `Could not detect constraint to update for "${packageName}" so setting to newValue "${newValue}"`
+    `Could not detect constraint to update for "${packageName}" so setting to newValue "${newValue}"`,
   );
   return newValue;
 }
@@ -148,7 +148,7 @@ export async function updateArtifacts({
     } else {
       const providerDeps = updatedDeps.filter((dep) =>
         // TODO #22198
-        ['provider', 'required_provider'].includes(dep.depType!)
+        ['provider', 'required_provider'].includes(dep.depType!),
       );
       for (const dep of providerDeps) {
         massageProviderLookupName(dep);
@@ -158,7 +158,7 @@ export async function updateArtifacts({
           ? registryUrls[0]
           : TerraformProviderDatasource.defaultRegistryUrls[0];
         const updateLock = locks.find(
-          (value) => value.packageName === packageName
+          (value) => value.packageName === packageName,
         );
         // istanbul ignore if: needs test
         if (!updateLock) {
@@ -173,7 +173,7 @@ export async function updateArtifacts({
             (await TerraformProviderHash.createHashes(
               registryUrl,
               updateLock.packageName,
-              newVersion!
+              newVersion!,
             )) ?? /* istanbul ignore next: needs test */ [],
           ...updateLock,
         };
diff --git a/lib/modules/manager/terraform/lockfile/update-locked.ts b/lib/modules/manager/terraform/lockfile/update-locked.ts
index b2bc6eb5f34aa888599837fccdecb2b1713b518d..d4351033e3ee0f1575fa83931b9b141e1b427fb0 100644
--- a/lib/modules/manager/terraform/lockfile/update-locked.ts
+++ b/lib/modules/manager/terraform/lockfile/update-locked.ts
@@ -4,18 +4,18 @@ import type { UpdateLockedConfig, UpdateLockedResult } from '../../types';
 import { extractLocks } from './util';
 
 export function updateLockedDependency(
-  config: UpdateLockedConfig
+  config: UpdateLockedConfig,
 ): UpdateLockedResult {
   const { depName, currentVersion, newVersion, lockFile, lockFileContent } =
     config;
   // TODO: fix types (#22198)
   logger.debug(
-    `terraform.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`
+    `terraform.updateLockedDependency: ${depName}@${currentVersion} -> ${newVersion} [${lockFile}]`,
   );
   try {
     const locked = extractLocks(coerceString(lockFileContent));
     const lockedDep = locked?.find(
-      (dep) => dep.packageName === coerceString(depName)
+      (dep) => dep.packageName === coerceString(depName),
     );
     if (lockedDep?.version === newVersion) {
       return { status: 'already-updated' };
diff --git a/lib/modules/manager/terraform/lockfile/util.ts b/lib/modules/manager/terraform/lockfile/util.ts
index 5054f950ebb59b35d3546b6cca2f565316382839..b3193ec1e0f4794cb8604bc9fe152e6cf0d390b6 100644
--- a/lib/modules/manager/terraform/lockfile/util.ts
+++ b/lib/modules/manager/terraform/lockfile/util.ts
@@ -10,13 +10,13 @@ import type {
 } from './types';
 
 const providerStartLineRegex = regEx(
-  `^provider "(?<registryUrl>[^/]*)/(?<namespace>[^/]*)/(?<depName>[^/]*)"`
+  `^provider "(?<registryUrl>[^/]*)/(?<namespace>[^/]*)/(?<depName>[^/]*)"`,
 );
 const versionLineRegex = regEx(
-  `^(?<prefix>[\\s]*version[\\s]*=[\\s]*")(?<version>[^"']+)(?<suffix>".*)$`
+  `^(?<prefix>[\\s]*version[\\s]*=[\\s]*")(?<version>[^"']+)(?<suffix>".*)$`,
 );
 const constraintLineRegex = regEx(
-  `^(?<prefix>[\\s]*constraints[\\s]*=[\\s]*")(?<constraint>[^"']+)(?<suffix>".*)$`
+  `^(?<prefix>[\\s]*constraints[\\s]*=[\\s]*")(?<constraint>[^"']+)(?<suffix>".*)$`,
 );
 const hashLineRegex = regEx(`^(?<prefix>\\s*")(?<hash>[^"]+)(?<suffix>",.*)$`);
 
@@ -132,7 +132,7 @@ export function isPinnedVersion(value: string | undefined): boolean {
 export function writeLockUpdates(
   updates: ProviderLockUpdate[],
   lockFilePath: string,
-  oldLockFileContent: string
+  oldLockFileContent: string,
 ): UpdateArtifactsResult {
   const lines = oldLockFileContent.split(newlineRegex);
 
@@ -141,7 +141,7 @@ export function writeLockUpdates(
   // sort updates in order of appearance in the lockfile
   // TODO #22198
   updates.sort(
-    (a, b) => a.lineNumbers.block!.start - b.lineNumbers.block!.start
+    (a, b) => a.lineNumbers.block!.start - b.lineNumbers.block!.start,
   );
   updates.forEach((update, index, array) => {
     // re add leading whitespace
@@ -154,14 +154,14 @@ export function writeLockUpdates(
     const leadingNonRelevantLines = lines.slice(
       startWhitespace,
       // TODO #22198
-      update.lineNumbers.block!.start
+      update.lineNumbers.block!.start,
     );
     sections.push(leadingNonRelevantLines);
 
     const providerBlockLines = lines.slice(
       // TODO #22198
       update.lineNumbers.block!.start,
-      update.lineNumbers.block!.end
+      update.lineNumbers.block!.end,
     );
     const newProviderBlockLines: string[] = [];
     let hashLinePrefix = '';
@@ -169,7 +169,7 @@ export function writeLockUpdates(
     providerBlockLines.forEach((providerBlockLine, providerBlockIndex) => {
       const versionLine = providerBlockLine.replace(
         versionLineRegex,
-        `$<prefix>${update.newVersion}$<suffix>`
+        `$<prefix>${update.newVersion}$<suffix>`,
       );
       if (versionLine !== providerBlockLine) {
         newProviderBlockLines.push(versionLine);
@@ -178,7 +178,7 @@ export function writeLockUpdates(
 
       const constraintLine = providerBlockLine.replace(
         constraintLineRegex,
-        `$<prefix>${update.newConstraint}$<suffix>`
+        `$<prefix>${update.newConstraint}$<suffix>`,
       );
       if (constraintLine !== providerBlockLine) {
         newProviderBlockLines.push(constraintLine);
@@ -196,24 +196,24 @@ export function writeLockUpdates(
     });
 
     const hashesWithWhitespace = update.newHashes.map(
-      (value) => `${hashLinePrefix}${value}${hashLineSuffix}`
+      (value) => `${hashLinePrefix}${value}${hashLineSuffix}`,
     );
     newProviderBlockLines.splice(
       // TODO #22198
       update.lineNumbers.hashes.start!,
       0,
-      ...hashesWithWhitespace
+      ...hashesWithWhitespace,
     );
     sections.push(newProviderBlockLines);
   });
 
   const trailingNotUpdatedLines = lines.slice(
-    updates[updates.length - 1].lineNumbers.block?.end
+    updates[updates.length - 1].lineNumbers.block?.end,
   );
   sections.push(trailingNotUpdatedLines);
 
   const newLines = sections.reduce((previousValue, currentValue) =>
-    previousValue.concat(currentValue)
+    previousValue.concat(currentValue),
   );
   const newContent = newLines.join('\n');
 
diff --git a/lib/modules/manager/terraform/util.ts b/lib/modules/manager/terraform/util.ts
index 7400949c66c2cfca247b8311c6b63290aa530f34..0f1c8ff86d670d8628ed51e5174ecbd5d3d350cd 100644
--- a/lib/modules/manager/terraform/util.ts
+++ b/lib/modules/manager/terraform/util.ts
@@ -7,7 +7,7 @@ import { extractLocks, findLockFile, readLockFile } from './lockfile/util';
 
 export function checkFileContainsDependency(
   content: string,
-  checkList: string[]
+  checkList: string[],
 ): boolean {
   return checkList.some((check) => content.includes(check));
 }
@@ -35,7 +35,7 @@ export function massageProviderLookupName(dep: PackageDependency): void {
 
 export function getLockedVersion(
   dep: PackageDependency,
-  locks: ProviderLock[]
+  locks: ProviderLock[],
 ): string | undefined {
   const depRegistryUrl = dep.registryUrls
     ? dep.registryUrls[0]
@@ -43,7 +43,7 @@ export function getLockedVersion(
   const foundLock = locks.find(
     (lock) =>
       lock.packageName === dep.packageName &&
-      lock.registryUrl === depRegistryUrl
+      lock.registryUrl === depRegistryUrl,
   );
   if (foundLock) {
     return foundLock.version;
@@ -52,7 +52,7 @@ export function getLockedVersion(
 }
 
 export async function extractLocksForPackageFile(
-  fileName: string
+  fileName: string,
 ): Promise<ProviderLock[]> {
   const locks: ProviderLock[] = [];
   const lockFilePath = await findLockFile(fileName);
diff --git a/lib/modules/manager/terragrunt/artifacts.spec.ts b/lib/modules/manager/terragrunt/artifacts.spec.ts
index d69cc1ba354d5bfe1213c9b5dfa310d69b087146..592aa0f521053f94a0ff67c1fb0be99c8cd74db1 100644
--- a/lib/modules/manager/terragrunt/artifacts.spec.ts
+++ b/lib/modules/manager/terragrunt/artifacts.spec.ts
@@ -66,6 +66,6 @@ describe('modules/manager/terragrunt/artifacts', () => {
         config: localConfig,
       });
       expect(terraformLockfile.updateArtifacts).not.toHaveBeenCalled();
-    }
+    },
   );
 });
diff --git a/lib/modules/manager/terragrunt/artifacts.ts b/lib/modules/manager/terragrunt/artifacts.ts
index 865ee1195dba61677b82ede4d0a4dc17d271f262..d04ecb9d2298e67d8b9666965427450032412012 100644
--- a/lib/modules/manager/terragrunt/artifacts.ts
+++ b/lib/modules/manager/terragrunt/artifacts.ts
@@ -3,13 +3,13 @@ import { updateArtifacts as updateTerraformArtifacts } from '../terraform/lockfi
 import type { UpdateArtifact, UpdateArtifactsResult } from '../types';
 
 export async function updateArtifacts(
-  artifact: UpdateArtifact
+  artifact: UpdateArtifact,
 ): Promise<UpdateArtifactsResult[] | null> {
   if (artifact.config.updateType !== 'lockFileMaintenance') {
     logger.debug(
       `UpdateType ${
         artifact.config.updateType as string
-      } is not supported for terragrunt`
+      } is not supported for terragrunt`,
     );
     return null;
   }
diff --git a/lib/modules/manager/terragrunt/extract.spec.ts b/lib/modules/manager/terragrunt/extract.spec.ts
index d4720df6168e5a268b00d11403bab72634f6e0ee..5b7af54845281cd5bb0d9ebd91a1af50f8d6d8c0 100644
--- a/lib/modules/manager/terragrunt/extract.spec.ts
+++ b/lib/modules/manager/terragrunt/extract.spec.ts
@@ -507,7 +507,7 @@ describe('modules/manager/terragrunt/extract', () => {
         extractPackageFile(`terragrunt {
         source = "../fe"
       }
-      `)
+      `),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/manager/terragrunt/extract.ts b/lib/modules/manager/terragrunt/extract.ts
index 3f3e704b42f77bb759ce58664ad5e4484a39b9ef..c253ab22a1e09a0fb3e194073062f03afc5e0990 100644
--- a/lib/modules/manager/terragrunt/extract.ts
+++ b/lib/modules/manager/terragrunt/extract.ts
@@ -13,7 +13,7 @@ const contentCheckList = ['terraform {'];
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   logger.trace({ content }, `terragrunt.extractPackageFile(${packageFile!})`);
   if (!checkFileContainsDependency(content, contentCheckList)) {
@@ -27,10 +27,10 @@ export function extractPackageFile(
       const terragruntDependency = dependencyBlockExtractionRegex.exec(line);
       if (terragruntDependency?.groups) {
         logger.trace(
-          `Matched ${terragruntDependency.groups.type} on line ${lineNumber}`
+          `Matched ${terragruntDependency.groups.type} on line ${lineNumber}`,
         );
         const tfDepType = getTerragruntDependencyType(
-          terragruntDependency.groups.type
+          terragruntDependency.groups.type,
         );
         let result: ExtractionResult | null = null;
         switch (tfDepType) {
@@ -41,7 +41,7 @@ export function extractPackageFile(
           /* istanbul ignore next */
           default:
             logger.trace(
-              `Could not identify TerragruntDependencyType ${terragruntDependency.groups.type} on line ${lineNumber}.`
+              `Could not identify TerragruntDependencyType ${terragruntDependency.groups.type} on line ${lineNumber}.`,
             );
             break;
         }
diff --git a/lib/modules/manager/terragrunt/modules.spec.ts b/lib/modules/manager/terragrunt/modules.spec.ts
index 35f8fc625ca3e57e4c682554f93b62cee8c6436d..b2b303618607043aabdcd505066ac78131b51e40 100644
--- a/lib/modules/manager/terragrunt/modules.spec.ts
+++ b/lib/modules/manager/terragrunt/modules.spec.ts
@@ -4,7 +4,7 @@ describe('modules/manager/terragrunt/modules', () => {
   describe('githubRefMatchRegex', () => {
     it('should split project and tag from source', () => {
       const groups = githubRefMatchRegex.exec(
-        'github.com/hashicorp/example?ref=v1.0.0'
+        'github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       expect(groups).toEqual({
         project: 'hashicorp/example',
@@ -14,7 +14,7 @@ describe('modules/manager/terragrunt/modules', () => {
 
     it('should parse alpha-numeric characters as well as dots, underscores, and dashes in repo names', () => {
       const groups = githubRefMatchRegex.exec(
-        'github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
       expect(groups).toEqual({
         project: 'hashicorp/example.repo-123',
@@ -26,13 +26,13 @@ describe('modules/manager/terragrunt/modules', () => {
   describe('gitTagsRefMatchRegex', () => {
     it('should split project and tag from source', () => {
       const http = gitTagsRefMatchRegex.exec(
-        'http://github.com/hashicorp/example?ref=v1.0.0'
+        'http://github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       const https = gitTagsRefMatchRegex.exec(
-        'https://github.com/hashicorp/example?ref=v1.0.0'
+        'https://github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
       const ssh = gitTagsRefMatchRegex.exec(
-        'ssh://github.com/hashicorp/example?ref=v1.0.0'
+        'ssh://github.com/hashicorp/example?ref=v1.0.0',
       )?.groups;
 
       expect(http).toMatchObject({
@@ -51,13 +51,13 @@ describe('modules/manager/terragrunt/modules', () => {
 
     it('should parse alpha-numeric characters as well as dots, underscores, and dashes in repo names', () => {
       const http = gitTagsRefMatchRegex.exec(
-        'http://github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'http://github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
       const https = gitTagsRefMatchRegex.exec(
-        'https://github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'https://github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
       const ssh = gitTagsRefMatchRegex.exec(
-        'ssh://github.com/hashicorp/example.repo-123?ref=v1.0.0'
+        'ssh://github.com/hashicorp/example.repo-123?ref=v1.0.0',
       )?.groups;
 
       expect(http).toMatchObject({
diff --git a/lib/modules/manager/terragrunt/modules.ts b/lib/modules/manager/terragrunt/modules.ts
index 6b6004a1628c8d6073c4940ef12e37f088ebcc17..56722fe7a12323e66418ab7eaa1d7c5edc412bd9 100644
--- a/lib/modules/manager/terragrunt/modules.ts
+++ b/lib/modules/manager/terragrunt/modules.ts
@@ -8,16 +8,16 @@ import { extractTerragruntProvider } from './providers';
 import type { ExtractionResult, TerraformManagerData } from './types';
 
 export const githubRefMatchRegex = regEx(
-  /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/i
+  /github\.com([/:])(?<project>[^/]+\/[a-z0-9-_.]+).*\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/i,
 );
 export const gitTagsRefMatchRegex = regEx(
-  /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/
+  /(?:git::)?(?<url>(?:http|https|ssh):\/\/(?:.*@)?(?<path>.*.*\/(?<project>.*\/.*)))\?(depth=\d+&)?ref=(?<tag>.*?)(&depth=\d+)?$/,
 );
 const hostnameMatchRegex = regEx(/^(?<hostname>([\w|\d]+\.)+[\w|\d]+)/);
 
 export function extractTerragruntModule(
   startingLine: number,
-  lines: string[]
+  lines: string[],
 ): ExtractionResult {
   const moduleName = 'terragrunt';
   const result = extractTerragruntProvider(startingLine, lines, moduleName);
@@ -29,7 +29,7 @@ export function extractTerragruntModule(
 }
 
 export function analyseTerragruntModule(
-  dep: PackageDependency<TerraformManagerData>
+  dep: PackageDependency<TerraformManagerData>,
 ): void {
   // TODO #22198
   const source = dep.managerData!.source;
@@ -40,7 +40,7 @@ export function analyseTerragruntModule(
     dep.depType = 'github';
     dep.packageName = githubRefMatch.groups.project.replace(
       regEx(/\.git$/),
-      ''
+      '',
     );
     dep.depName = 'github.com/' + dep.packageName;
     dep.currentValue = githubRefMatch.groups.tag;
diff --git a/lib/modules/manager/terragrunt/providers.ts b/lib/modules/manager/terragrunt/providers.ts
index 914b1917d1cdf66456ea78f5508a562920ac88fb..cd16b53b1c0e38ec1337019be953312d0b1dbb5a 100644
--- a/lib/modules/manager/terragrunt/providers.ts
+++ b/lib/modules/manager/terragrunt/providers.ts
@@ -4,7 +4,7 @@ import type { ExtractionResult, TerraformManagerData } from './types';
 import { keyValueExtractionRegex } from './util';
 
 export const sourceExtractionRegex = regEx(
-  /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/
+  /^(?:(?<hostname>(?:[a-zA-Z0-9]+\.+)+[a-zA-Z0-9]+)\/)?(?:(?<namespace>[^/]+)\/)?(?<type>[^/]+)/,
 );
 
 function extractBracesContent(content: string): number {
@@ -26,7 +26,7 @@ function extractBracesContent(content: string): number {
 export function extractTerragruntProvider(
   startingLine: number,
   lines: string[],
-  moduleName: string
+  moduleName: string,
 ): ExtractionResult {
   const lineNumber = startingLine;
   let line: string;
diff --git a/lib/modules/manager/terragrunt/util.spec.ts b/lib/modules/manager/terragrunt/util.spec.ts
index 65577b8503af4e82cfca7985c9c966423beae204..7a13ed04c27f38d07bef9392b3a89fe9db0aa43b 100644
--- a/lib/modules/manager/terragrunt/util.spec.ts
+++ b/lib/modules/manager/terragrunt/util.spec.ts
@@ -16,7 +16,7 @@ describe('modules/manager/terragrunt/util', () => {
 
     it('returns unknown on string with random chars', () => {
       expect(getTerragruntDependencyType('sdfsgdsfadfhfghfhgdfsdf')).toBe(
-        'unknown'
+        'unknown',
       );
     });
   });
diff --git a/lib/modules/manager/terragrunt/util.ts b/lib/modules/manager/terragrunt/util.ts
index e9dd1d382cf678f6f65a618f1858ab3bfe83e394..35c37cd6c49df22f01ffa5a2673a0adb7e81ed9b 100644
--- a/lib/modules/manager/terragrunt/util.ts
+++ b/lib/modules/manager/terragrunt/util.ts
@@ -2,11 +2,11 @@ import { regEx } from '../../../util/regex';
 import type { TerragruntDependencyTypes } from './common';
 
 export const keyValueExtractionRegex = regEx(
-  /^\s*source\s+=\s+"(?<value>[^"]+)"\s*$/
+  /^\s*source\s+=\s+"(?<value>[^"]+)"\s*$/,
 );
 
 export function getTerragruntDependencyType(
-  value: string
+  value: string,
 ): TerragruntDependencyTypes {
   switch (value) {
     case 'terraform': {
@@ -20,7 +20,7 @@ export function getTerragruntDependencyType(
 
 export function checkFileContainsDependency(
   content: string,
-  checkList: string[]
+  checkList: string[],
 ): boolean {
   return checkList.some((check) => content.includes(check));
 }
diff --git a/lib/modules/manager/tflint-plugin/extract.spec.ts b/lib/modules/manager/tflint-plugin/extract.spec.ts
index b2ff137d6355353f1424d09a1c619fd9754e827b..52cd0dbbc723eb1871e51a68cf5da42f579ede02 100644
--- a/lib/modules/manager/tflint-plugin/extract.spec.ts
+++ b/lib/modules/manager/tflint-plugin/extract.spec.ts
@@ -21,7 +21,7 @@ describe('modules/manager/tflint-plugin/extract', () => {
   describe('extractPackageFile()', () => {
     it('returns null for empty', () => {
       expect(
-        extractPackageFile('nothing here', 'doesnt-exist.hcl', {})
+        extractPackageFile('nothing here', 'doesnt-exist.hcl', {}),
       ).toBeNull();
     });
 
@@ -31,7 +31,7 @@ describe('modules/manager/tflint-plugin/extract', () => {
       `;
 
       expect(
-        extractPackageFile(configNoVersion, 'doesnt-exist.hcl', {})
+        extractPackageFile(configNoVersion, 'doesnt-exist.hcl', {}),
       ).toBeNull();
     });
 
diff --git a/lib/modules/manager/tflint-plugin/extract.ts b/lib/modules/manager/tflint-plugin/extract.ts
index f77e9ecb39ec77518644011e4e1b42f3f805b8eb..6da3cf2614748dd474d533cc49f080b29145b706 100644
--- a/lib/modules/manager/tflint-plugin/extract.ts
+++ b/lib/modules/manager/tflint-plugin/extract.ts
@@ -10,19 +10,19 @@ import type { ExtractionResult } from './types';
 import { checkFileContainsPlugins } from './util';
 
 const dependencyBlockExtractionRegex = regEx(
-  /^\s*plugin\s+"(?<pluginName>[^"]+)"\s+{\s*$/
+  /^\s*plugin\s+"(?<pluginName>[^"]+)"\s+{\s*$/,
 );
 
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  _config: ExtractConfig
+  _config: ExtractConfig,
 ): PackageFileContent | null {
   logger.trace({ content }, `tflint.extractPackageFile(${packageFile})`);
   if (!checkFileContainsPlugins(content)) {
     logger.debug(
       { packageFile },
-      'preflight content check has not found any relevant content'
+      'preflight content check has not found any relevant content',
     );
     return null;
   }
@@ -41,7 +41,7 @@ export function extractPackageFile(
         result = extractTFLintPlugin(
           lineNumber,
           lines,
-          tfLintPlugin.groups.pluginName
+          tfLintPlugin.groups.pluginName,
         );
         if (result) {
           lineNumber = result.lineNumber;
diff --git a/lib/modules/manager/tflint-plugin/plugins.ts b/lib/modules/manager/tflint-plugin/plugins.ts
index 726206bc45d91622482cfff7e8d021a66abf9a64..65b8d18b199c214dc95483da517810ae762346fe 100644
--- a/lib/modules/manager/tflint-plugin/plugins.ts
+++ b/lib/modules/manager/tflint-plugin/plugins.ts
@@ -9,7 +9,7 @@ import { keyValueExtractionRegex } from './util';
 export function extractTFLintPlugin(
   startingLine: number,
   lines: string[],
-  pluginName: string
+  pluginName: string,
 ): ExtractionResult {
   let lineNumber = startingLine;
   const deps: PackageDependency[] = [];
@@ -62,7 +62,7 @@ export function extractTFLintPlugin(
 
 function analyseTFLintPlugin(
   source: string | null,
-  version: string | null
+  version: string | null,
 ): PackageDependency {
   const dep: PackageDependency = {};
 
diff --git a/lib/modules/manager/tflint-plugin/util.ts b/lib/modules/manager/tflint-plugin/util.ts
index 9a598f0dbd1370bec48d6cce7b2796630f28e0af..2ec510106cb9a98843e25915222fb0903afba3c2 100644
--- a/lib/modules/manager/tflint-plugin/util.ts
+++ b/lib/modules/manager/tflint-plugin/util.ts
@@ -1,7 +1,7 @@
 import { regEx } from '../../../util/regex';
 
 export const keyValueExtractionRegex = regEx(
-  /^\s*(?<key>[^\s]+)\s+=\s+"(?<value>[^"]+)"\s*$/
+  /^\s*(?<key>[^\s]+)\s+=\s+"(?<value>[^"]+)"\s*$/,
 );
 
 export function checkFileContainsPlugins(content: string): boolean {
diff --git a/lib/modules/manager/travis/extract.ts b/lib/modules/manager/travis/extract.ts
index 07cd7b9dbcfccab000126146ff0a6fe9558f5af5..886cc4a0f4b1e80a80f02c58227ceb2cffa82128 100644
--- a/lib/modules/manager/travis/extract.ts
+++ b/lib/modules/manager/travis/extract.ts
@@ -7,7 +7,7 @@ import type { TravisMatrixItem, TravisYaml } from './types';
 
 export function extractPackageFile(
   content: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   let doc: TravisYaml;
   try {
diff --git a/lib/modules/manager/types.ts b/lib/modules/manager/types.ts
index 29a13410b27af0062794da224135169f6d3cc9d3..8435b4e9b9d4673a8b2eea0b9e5f0fdd92aa2500 100644
--- a/lib/modules/manager/types.ts
+++ b/lib/modules/manager/types.ts
@@ -234,34 +234,34 @@ export interface ManagerApi extends ModuleApi {
   bumpPackageVersion?(
     content: string,
     currentValue: string,
-    bumpVersion: ReleaseType
+    bumpVersion: ReleaseType,
   ): Result<BumpPackageVersionResult>;
 
   detectGlobalConfig?(): Result<GlobalManagerConfig>;
 
   extractAllPackageFiles?(
     config: ExtractConfig,
-    files: string[]
+    files: string[],
   ): Result<PackageFile[] | null>;
 
   extractPackageFile?(
     content: string,
     packageFile?: string,
-    config?: ExtractConfig
+    config?: ExtractConfig,
   ): Result<PackageFileContent | null>;
 
   getRangeStrategy?(config: RangeConfig): RangeStrategy;
 
   updateArtifacts?(
-    updateArtifact: UpdateArtifact
+    updateArtifact: UpdateArtifact,
   ): Result<UpdateArtifactsResult[] | null>;
 
   updateDependency?(
-    updateDependencyConfig: UpdateDependencyConfig
+    updateDependencyConfig: UpdateDependencyConfig,
   ): Result<string | null>;
 
   updateLockedDependency?(
-    config: UpdateLockedConfig
+    config: UpdateLockedConfig,
   ): Result<UpdateLockedResult>;
 }
 
diff --git a/lib/modules/manager/velaci/extract.ts b/lib/modules/manager/velaci/extract.ts
index bd77b593b247642a48d9f62f59aea66d0c289d09..50ea39a6ea3aa29195ac3ecd231630782e731d16 100644
--- a/lib/modules/manager/velaci/extract.ts
+++ b/lib/modules/manager/velaci/extract.ts
@@ -7,7 +7,7 @@ import type { VelaPipelineConfiguration } from './types';
 
 export function extractPackageFile(
   file: string,
-  packageFile?: string
+  packageFile?: string,
 ): PackageFileContent | null {
   let doc: VelaPipelineConfiguration | undefined;
 
diff --git a/lib/modules/manager/woodpecker/extract.spec.ts b/lib/modules/manager/woodpecker/extract.spec.ts
index 5288df6e775cbfb4bf70629c11ba0c151f3a26bf..9149b024d0a1ed8efc41388c5f93b2673410b5d4 100644
--- a/lib/modules/manager/woodpecker/extract.spec.ts
+++ b/lib/modules/manager/woodpecker/extract.spec.ts
@@ -128,7 +128,7 @@ describe('modules/manager/woodpecker/extract', () => {
           registryAliases: {
             'quay.io': 'my-quay-mirror.registry.com',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -157,7 +157,7 @@ describe('modules/manager/woodpecker/extract', () => {
           registryAliases: {
             'index.docker.io': 'my-docker-mirror.registry.com',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -187,7 +187,7 @@ describe('modules/manager/woodpecker/extract', () => {
             'quay.io': 'my-quay-mirror.registry.com',
             'my-quay-mirror.registry.com': 'quay.io',
           },
-        }
+        },
       );
       expect(res).toEqual({
         deps: [
@@ -212,7 +212,7 @@ describe('modules/manager/woodpecker/extract', () => {
             image: quay.io/something/redis:alpine
           `,
         '',
-        {}
+        {},
       );
       expect(res).toEqual({
         deps: [
@@ -240,7 +240,7 @@ describe('modules/manager/woodpecker/extract', () => {
             image: quay.io/something/redis:alpine
         `,
         '',
-        {}
+        {},
       );
 
       expect(res).toEqual({
@@ -275,7 +275,7 @@ describe('modules/manager/woodpecker/extract', () => {
             image: woodpeckerci/plugin-git:latest
         `,
         '',
-        {}
+        {},
       );
 
       expect(res).toEqual({
@@ -301,7 +301,7 @@ describe('modules/manager/woodpecker/extract', () => {
             3.5
         `,
         '',
-        {}
+        {},
       );
 
       expect(res).toBeNull();
diff --git a/lib/modules/manager/woodpecker/extract.ts b/lib/modules/manager/woodpecker/extract.ts
index a0f4f1eaa73df7bc032d86dca6755a96a12a316b..6012709aaff281e52cc9fa40727f40c611930f94 100644
--- a/lib/modules/manager/woodpecker/extract.ts
+++ b/lib/modules/manager/woodpecker/extract.ts
@@ -6,18 +6,18 @@ import type { ExtractConfig, PackageFileContent } from '../types';
 import type { WoodpeckerConfig } from './types';
 
 function woodpeckerVersionDecider(
-  woodpeckerConfig: WoodpeckerConfig
+  woodpeckerConfig: WoodpeckerConfig,
 ): (keyof WoodpeckerConfig)[] {
   const keys = ['clone', 'steps', 'pipeline', 'services'];
   return Object.keys(woodpeckerConfig).filter((key) =>
-    keys.includes(key)
+    keys.includes(key),
   ) as (keyof WoodpeckerConfig)[];
 }
 
 export function extractPackageFile(
   content: string,
   packageFile: string,
-  extractConfig: ExtractConfig
+  extractConfig: ExtractConfig,
 ): PackageFileContent | null {
   logger.debug('woodpecker.extractPackageFile()');
   let config: WoodpeckerConfig;
@@ -27,21 +27,21 @@ export function extractPackageFile(
     if (!config) {
       logger.debug(
         { packageFile },
-        'Null config when parsing Woodpecker Configuration content'
+        'Null config when parsing Woodpecker Configuration content',
       );
       return null;
     }
     if (typeof config !== 'object') {
       logger.debug(
         { packageFile, type: typeof config },
-        'Unexpected type for Woodpecker Configuration content'
+        'Unexpected type for Woodpecker Configuration content',
       );
       return null;
     }
   } catch (err) {
     logger.debug(
       { packageFile, err },
-      'Error parsing Woodpecker Configuration config YAML'
+      'Error parsing Woodpecker Configuration config YAML',
     );
     return null;
   }
@@ -58,7 +58,7 @@ export function extractPackageFile(
   const deps = pipelineKeys.flatMap((pipelineKey) =>
     Object.values(config[pipelineKey] ?? {})
       .filter((step) => is.string(step?.image))
-      .map((step) => getDep(step.image, true, extractConfig.registryAliases))
+      .map((step) => getDep(step.image, true, extractConfig.registryAliases)),
   );
 
   logger.trace({ deps }, 'Woodpecker Configuration image');
diff --git a/lib/modules/platform/azure/azure-helper.spec.ts b/lib/modules/platform/azure/azure-helper.spec.ts
index d1cb1a939ad640e33b867cb1c22796afbc1c5df6..4707ef8560f6004b5eb19f474a85b464e6562f23 100644
--- a/lib/modules/platform/azure/azure-helper.spec.ts
+++ b/lib/modules/platform/azure/azure-helper.spec.ts
@@ -20,7 +20,7 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getRefs: jest.fn(() => [{ objectId: 132 }]),
-          } as any)
+          } as any),
       );
       const res = await azureHelper.getRefs('123', 'branch');
       expect(res).toMatchSnapshot();
@@ -31,7 +31,7 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getRefs: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       const res = await azureHelper.getRefs('123');
       expect(res).toHaveLength(0);
@@ -42,7 +42,7 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getRefs: jest.fn(() => [{ objectId: '132' }]),
-          } as any)
+          } as any),
       );
       const res = await azureHelper.getRefs('123', 'refs/head/branch1');
       expect(res).toMatchSnapshot();
@@ -55,12 +55,12 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getRefs: jest.fn(() => [{ objectId: '132' }]),
-          } as any)
+          } as any),
       );
       const res = await azureHelper.getAzureBranchObj(
         '123',
         'branchName',
-        'base'
+        'base',
       );
       expect(res).toMatchSnapshot();
     });
@@ -70,7 +70,7 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getRefs: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       const res = await azureHelper.getAzureBranchObj('123', 'branchName');
       expect(res).toMatchSnapshot();
@@ -96,13 +96,13 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getItemText: jest.fn(() => mockEventStream),
-          } as any)
+          } as any),
       );
 
       const res = await azureHelper.getFile(
         '123',
         'repository',
-        './myFilePath/test'
+        './myFilePath/test',
       );
       expect(res).toBeNull();
     });
@@ -125,13 +125,13 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getItemText: jest.fn(() => mockEventStream),
-          } as any)
+          } as any),
       );
 
       const res = await azureHelper.getFile(
         '123',
         'repository',
-        './myFilePath/test'
+        './myFilePath/test',
       );
       expect(res).toBeNull();
     });
@@ -154,13 +154,13 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getItemText: jest.fn(() => mockEventStream),
-          } as any)
+          } as any),
       );
 
       const res = await azureHelper.getFile(
         '123',
         'repository',
-        './myFilePath/test'
+        './myFilePath/test',
       );
       expect(res).toMatchSnapshot();
     });
@@ -172,13 +172,13 @@ describe('modules/platform/azure/azure-helper', () => {
             getItemText: jest.fn(() => ({
               readable: false,
             })),
-          } as any)
+          } as any),
       );
 
       const res = await azureHelper.getFile(
         '123',
         'repository',
-        './myFilePath/test'
+        './myFilePath/test',
       );
       expect(res).toBeNull();
     });
@@ -192,7 +192,7 @@ describe('modules/platform/azure/azure-helper', () => {
             getCommit: jest.fn(() => ({
               parents: ['123456'],
             })),
-          } as any)
+          } as any),
       );
       const res = await azureHelper.getCommitDetails('123', '123456');
       expect(res).toMatchSnapshot();
@@ -205,10 +205,10 @@ describe('modules/platform/azure/azure-helper', () => {
         () =>
           ({
             getPolicyConfigurations: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       expect(await azureHelper.getMergeMethod('', '')).toEqual(
-        GitPullRequestMergeStrategy.NoFastForward
+        GitPullRequestMergeStrategy.NoFastForward,
       );
     });
 
@@ -231,10 +231,10 @@ describe('modules/platform/azure/azure-helper', () => {
                 },
               },
             ]),
-          } as any)
+          } as any),
       );
       expect(await azureHelper.getMergeMethod('', '')).toEqual(
-        GitPullRequestMergeStrategy.Squash
+        GitPullRequestMergeStrategy.Squash,
       );
     });
 
@@ -270,10 +270,10 @@ describe('modules/platform/azure/azure-helper', () => {
                 },
               },
             ]),
-          } as any)
+          } as any),
       );
       expect(await azureHelper.getMergeMethod('', '')).toEqual(
-        GitPullRequestMergeStrategy.Rebase
+        GitPullRequestMergeStrategy.Rebase,
       );
     });
 
@@ -339,10 +339,10 @@ describe('modules/platform/azure/azure-helper', () => {
                 },
               },
             ]),
-          } as any)
+          } as any),
       );
       expect(
-        await azureHelper.getMergeMethod('', '', refMock, defaultBranchMock)
+        await azureHelper.getMergeMethod('', '', refMock, defaultBranchMock),
       ).toEqual(GitPullRequestMergeStrategy.Rebase);
     });
 
@@ -395,10 +395,10 @@ describe('modules/platform/azure/azure-helper', () => {
                 },
               },
             ]),
-          } as any)
+          } as any),
       );
       expect(
-        await azureHelper.getMergeMethod('', '', refMock, defaultBranchMock)
+        await azureHelper.getMergeMethod('', '', refMock, defaultBranchMock),
       ).toEqual(GitPullRequestMergeStrategy.Rebase);
     });
   });
diff --git a/lib/modules/platform/azure/azure-helper.ts b/lib/modules/platform/azure/azure-helper.ts
index 8273d9e04d54eaad13566e2f6bb89f26cf764754..d4c2ade19a155978c5e7a97ac4ab73b6852c45e1 100644
--- a/lib/modules/platform/azure/azure-helper.ts
+++ b/lib/modules/platform/azure/azure-helper.ts
@@ -17,14 +17,14 @@ const mergePolicyGuid = 'fa4e907d-c16b-4a4c-9dfa-4916e5d171ab'; // Magic GUID fo
 
 export async function getRefs(
   repoId: string,
-  branchName?: string
+  branchName?: string,
 ): Promise<GitRef[]> {
   logger.debug(`getRefs(${repoId}, ${branchName!})`);
   const azureApiGit = await azureApi.gitApi();
   const refs = await azureApiGit.getRefs(
     repoId,
     undefined,
-    getBranchNameWithoutRefsPrefix(branchName)
+    getBranchNameWithoutRefsPrefix(branchName),
   );
   return refs;
 }
@@ -37,7 +37,7 @@ export interface AzureBranchObj {
 export async function getAzureBranchObj(
   repoId: string,
   branchName: string,
-  from?: string
+  from?: string,
 ): Promise<AzureBranchObj> {
   const fromBranchName = getNewBranchName(from);
   const refs = await getRefs(repoId, fromBranchName);
@@ -60,7 +60,7 @@ export async function getAzureBranchObj(
 export async function getFile(
   repoId: string,
   filePath: string,
-  branchName: string
+  branchName: string,
 ): Promise<string | null> {
   logger.trace(`getFile(filePath=${filePath}, branchName=${branchName})`);
   const azureApiGit = await azureApi.gitApi();
@@ -77,7 +77,7 @@ export async function getFile(
       versionType: 0, // branch
       versionOptions: 0,
       version: getBranchNameWithoutRefsheadsPrefix(branchName),
-    }
+    },
   );
 
   if (item?.readable) {
@@ -105,7 +105,7 @@ export async function getFile(
 
 export async function getCommitDetails(
   commit: string,
-  repoId: string
+  repoId: string,
 ): Promise<GitCommit> {
   logger.debug(`getCommitDetails(${commit}, ${repoId})`);
   const azureApiGit = await azureApi.gitApi();
@@ -117,7 +117,7 @@ export async function getMergeMethod(
   repoId: string,
   project: string,
   branchRef?: string | null,
-  defaultBranch?: string
+  defaultBranch?: string,
 ): Promise<GitPullRequestMergeStrategy> {
   type Scope = {
     repositoryId: string;
@@ -157,8 +157,8 @@ export async function getMergeMethod(
     `getMergeMethod(${repoId}, ${project}, ${branchRef!}) determining mergeMethod from matched policy:\n${JSON.stringify(
       policyConfigurations,
       null,
-      4
-    )}`
+      4,
+    )}`,
   );
 
   try {
@@ -168,7 +168,7 @@ export async function getMergeMethod(
         (p) =>
           GitPullRequestMergeStrategy[
             p.slice(5) as never
-          ] as never as GitPullRequestMergeStrategy
+          ] as never as GitPullRequestMergeStrategy,
       )
       .find((p) => p)!;
   } catch (err) {
diff --git a/lib/modules/platform/azure/index.spec.ts b/lib/modules/platform/azure/index.spec.ts
index fc0c05077ede1f4f70336c3b6b548b23702d1634..3f4b4a40743e599890f0fc655519671f1e3699c9 100644
--- a/lib/modules/platform/azure/index.spec.ts
+++ b/lib/modules/platform/azure/index.spec.ts
@@ -23,7 +23,7 @@ jest.mock('./azure-helper');
 jest.mock('../../../util/git');
 jest.mock('../../../util/host-rules', () => mockDeep());
 jest.mock('../../../util/sanitize', () =>
-  mockDeep({ sanitize: (s: string) => s })
+  mockDeep({ sanitize: (s: string) => s }),
 );
 jest.mock('timers/promises');
 
@@ -75,7 +75,7 @@ describe('modules/platform/azure/index', () => {
               },
             },
           ]),
-        } as any)
+        } as any),
     );
     return azure.getRepos();
   }
@@ -91,7 +91,7 @@ describe('modules/platform/azure/index', () => {
       expect(() =>
         azure.initPlatform({
           endpoint: 'https://dev.azure.com/renovate12345',
-        })
+        }),
       ).toThrow();
     });
 
@@ -101,7 +101,7 @@ describe('modules/platform/azure/index', () => {
         azure.initPlatform({
           endpoint: 'https://dev.azure.com/renovate12345',
           username: 'user',
-        })
+        }),
       ).toThrow();
     });
 
@@ -111,7 +111,7 @@ describe('modules/platform/azure/index', () => {
         azure.initPlatform({
           endpoint: 'https://dev.azure.com/renovate12345',
           password: 'pass',
-        })
+        }),
       ).toThrow();
     });
 
@@ -120,7 +120,7 @@ describe('modules/platform/azure/index', () => {
         await azure.initPlatform({
           endpoint: 'https://dev.azure.com/renovate12345',
           token: 'token',
-        })
+        }),
       ).toMatchSnapshot();
     });
   });
@@ -129,7 +129,7 @@ describe('modules/platform/azure/index', () => {
     it('should return an array of repos', async () => {
       const repos = await getRepos(
         'sometoken',
-        'https://dev.azure.com/renovate12345'
+        'https://dev.azure.com/renovate12345',
       );
       expect(azureApi.gitApi.mock.calls).toMatchSnapshot();
       expect(repos).toMatchSnapshot();
@@ -164,7 +164,7 @@ describe('modules/platform/azure/index', () => {
             isDisabled: true,
           },
         ]),
-      })
+      }),
     );
 
     if (is.string(args)) {
@@ -192,7 +192,7 @@ describe('modules/platform/azure/index', () => {
       await expect(
         initRepo({
           repository: 'some/repo3',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_ARCHIVED);
     });
 
@@ -200,7 +200,7 @@ describe('modules/platform/azure/index', () => {
       await expect(
         initRepo({
           repository: 'some/missing',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_NOT_FOUND);
     });
   });
@@ -229,7 +229,7 @@ describe('modules/platform/azure/index', () => {
                 },
               ]),
             getPullRequestCommits: jest.fn().mockReturnValue([]),
-          } as any)
+          } as any),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -270,7 +270,7 @@ describe('modules/platform/azure/index', () => {
                 },
               ]),
             getPullRequestCommits: jest.fn().mockReturnValue([]),
-          } as any)
+          } as any),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -311,7 +311,7 @@ describe('modules/platform/azure/index', () => {
                 },
               ]),
             getPullRequestCommits: jest.fn().mockReturnValue([]),
-          } as any)
+          } as any),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -352,7 +352,7 @@ describe('modules/platform/azure/index', () => {
                 },
               ]),
             getPullRequestCommits: jest.fn().mockReturnValue([]),
-          } as any)
+          } as any),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -398,7 +398,7 @@ describe('modules/platform/azure/index', () => {
               },
             ]),
           getPullRequestCommits: jest.fn().mockReturnValue([]),
-        })
+        }),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -446,7 +446,7 @@ describe('modules/platform/azure/index', () => {
               },
             ]),
           getPullRequestCommits: jest.fn().mockReturnValue([]),
-        })
+        }),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -475,7 +475,7 @@ describe('modules/platform/azure/index', () => {
       azureApi.gitApi.mockResolvedValueOnce(
         partial<IGitApi>({
           getPullRequests: jest.fn().mockRejectedValueOnce(new Error()),
-        })
+        }),
       );
       const res = await azure.findPr({
         branchName: 'branch-a',
@@ -491,7 +491,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             getPullRequests: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       expect(await azure.getPrList()).toEqual([]);
     });
@@ -503,7 +503,7 @@ describe('modules/platform/azure/index', () => {
       azureApi.gitApi.mockResolvedValue(
         partial<IGitApi>({
           getPullRequests: jest.fn().mockResolvedValueOnce([]),
-        })
+        }),
       );
       const pr = await azure.getBranchPr('somebranch');
       expect(pr).toBeNull();
@@ -526,7 +526,7 @@ describe('modules/platform/azure/index', () => {
             ])
             .mockResolvedValueOnce([]),
           getPullRequestLabels: jest.fn().mockResolvedValue([]),
-        })
+        }),
       );
       const pr = await azure.getBranchPr('branch-a', 'branch-b');
       expect(pr).toEqual({
@@ -561,11 +561,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'a-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('green');
     });
@@ -582,11 +582,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'a-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('green');
     });
@@ -603,11 +603,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'a-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('red');
     });
@@ -624,11 +624,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'a-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('red');
     });
@@ -645,11 +645,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'a-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('yellow');
     });
@@ -666,11 +666,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'a-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('yellow');
     });
@@ -688,11 +688,11 @@ describe('modules/platform/azure/index', () => {
               context: { genre: 'a-genre', name: 'a-name' },
             },
           ]),
-        })
+        }),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBe('yellow');
     });
@@ -709,11 +709,11 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'another-genre', name: 'a-name' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatusCheck(
         'somebranch',
-        'a-genre/a-name'
+        'a-genre/a-name',
       );
       expect(res).toBeNull();
     });
@@ -732,7 +732,7 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'renovate' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatus('somebranch', true);
       expect(res).toBe('green');
@@ -750,7 +750,7 @@ describe('modules/platform/azure/index', () => {
                 context: { genre: 'renovate' },
               },
             ]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatus('somebranch', false);
       expect(res).toBe('yellow');
@@ -763,7 +763,7 @@ describe('modules/platform/azure/index', () => {
           ({
             getBranch: jest.fn(() => ({ commit: { commitId: 'abcd1234' } })),
             getStatuses: jest.fn(() => [{ state: GitStatusState.Error }]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatus('somebranch', true);
       expect(res).toBe('red');
@@ -776,7 +776,7 @@ describe('modules/platform/azure/index', () => {
           ({
             getBranch: jest.fn(() => ({ commit: { commitId: 'abcd1234' } })),
             getStatuses: jest.fn(() => [{ state: GitStatusState.Pending }]),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatus('somebranch', true);
       expect(res).toBe('yellow');
@@ -789,7 +789,7 @@ describe('modules/platform/azure/index', () => {
           ({
             getBranch: jest.fn(() => ({ commit: { commitId: 'abcd1234' } })),
             getStatuses: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       const res = await azure.getBranchStatus('somebranch', true);
       expect(res).toBe('yellow');
@@ -808,7 +808,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             getPullRequests: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       const pr = await azure.getPr(1234);
       expect(pr).toBeNull();
@@ -837,7 +837,7 @@ describe('modules/platform/azure/index', () => {
                 },
               },
             ]),
-          } as any)
+          } as any),
       );
       const pr = await azure.getPr(1234);
       expect(pr).toMatchSnapshot();
@@ -854,7 +854,7 @@ describe('modules/platform/azure/index', () => {
               pullRequestId: 456,
             })),
             createPullRequestLabel: jest.fn(() => ({})),
-          } as any)
+          } as any),
       );
       const pr = await azure.createPr({
         sourceBranch: 'some-branch',
@@ -875,7 +875,7 @@ describe('modules/platform/azure/index', () => {
               pullRequestId: 456,
             })),
             createPullRequestLabel: jest.fn(() => ({})),
-          } as any)
+          } as any),
       );
       const pr = await azure.createPr({
         sourceBranch: 'some-branch',
@@ -914,7 +914,7 @@ describe('modules/platform/azure/index', () => {
             createPullRequest: jest.fn().mockResolvedValue(prResult),
             createPullRequestLabel: jest.fn().mockResolvedValue({}),
             updatePullRequest: updateFn,
-          })
+          }),
         );
         const pr = await azure.createPr({
           sourceBranch: 'some-branch',
@@ -971,21 +971,21 @@ describe('modules/platform/azure/index', () => {
           },
         ];
         const updateFn = jest.fn(() =>
-          Promise.resolve(prUpdateResults.shift()!)
+          Promise.resolve(prUpdateResults.shift()!),
         );
 
         azureHelper.getMergeMethod.mockResolvedValueOnce(
-          GitPullRequestMergeStrategy.Squash
+          GitPullRequestMergeStrategy.Squash,
         );
 
         azureApi.gitApi.mockResolvedValue(
           partial<IGitApi>({
             createPullRequest: jest.fn(() =>
-              Promise.resolve(prResult.shift()!)
+              Promise.resolve(prResult.shift()!),
             ),
             createPullRequestLabel: jest.fn().mockResolvedValue({}),
             updatePullRequest: updateFn,
-          })
+          }),
         );
         await azure.createPr({
           sourceBranch: 'some-branch',
@@ -1034,7 +1034,7 @@ describe('modules/platform/azure/index', () => {
             createPullRequest: jest.fn(() => prResult),
             createPullRequestLabel: jest.fn(() => ({})),
             createPullRequestReviewer: updateFn,
-          } as any)
+          } as any),
       );
       const pr = await azure.createPr({
         sourceBranch: 'some-branch',
@@ -1057,7 +1057,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             updatePullRequest,
-          } as any)
+          } as any),
       );
       await azure.updatePr({
         number: 1234,
@@ -1075,7 +1075,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             updatePullRequest,
-          } as any)
+          } as any),
       );
       await azure.updatePr({
         number: 1234,
@@ -1091,7 +1091,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             updatePullRequest,
-          } as any)
+          } as any),
       );
       await azure.updatePr({
         number: 1234,
@@ -1109,7 +1109,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             updatePullRequest,
-          } as any)
+          } as any),
       );
       await azure.updatePr({
         number: 1234,
@@ -1145,7 +1145,7 @@ describe('modules/platform/azure/index', () => {
               pullRequestId: prResult.pullRequestId,
               createdBy: prResult.createdBy,
             })),
-          } as any)
+          } as any),
       );
       const pr = await azure.updatePr({
         number: prResult.pullRequestId,
@@ -1278,10 +1278,10 @@ describe('modules/platform/azure/index', () => {
 
       const commentContent = gitApiMock.createThread.mock.calls[0];
       expect(JSON.stringify(commentContent)).not.toContain(
-        'checking the rebase/retry box above'
+        'checking the rebase/retry box above',
       );
       expect(JSON.stringify(commentContent)).toContain(
-        'renaming the PR to start with \\"rebase!\\"'
+        'renaming the PR to start with \\"rebase!\\"',
       );
     });
   });
@@ -1319,7 +1319,7 @@ describe('modules/platform/azure/index', () => {
         { status: 4 },
         '1',
         42,
-        123
+        123,
       );
     });
 
@@ -1335,7 +1335,7 @@ describe('modules/platform/azure/index', () => {
         { status: 4 },
         '1',
         42,
-        124
+        124,
       );
     });
 
@@ -1360,7 +1360,7 @@ describe('modules/platform/azure/index', () => {
             getRepositories: jest.fn(() => [{ id: '1', project: { id: 2 } }]),
             createThread: jest.fn(() => [{ id: 123 }]),
             getThreads: jest.fn(() => []),
-          } as any)
+          } as any),
       );
       azureApi.coreApi.mockImplementation(
         () =>
@@ -1372,7 +1372,7 @@ describe('modules/platform/azure/index', () => {
             getTeamMembersWithExtendedProperties: jest.fn(() => [
               { identity: { displayName: 'jyc', uniqueName: 'jyc', id: 123 } },
             ]),
-          } as any)
+          } as any),
       );
       await azure.addAssignees(123, ['test@bonjour.fr', 'jyc', 'def']);
       expect(azureApi.gitApi).toHaveBeenCalledTimes(3);
@@ -1387,7 +1387,7 @@ describe('modules/platform/azure/index', () => {
           ({
             getRepositories: jest.fn(() => [{ id: '1', project: { id: 2 } }]),
             createPullRequestReviewer: jest.fn(),
-          } as any)
+          } as any),
       );
       azureApi.coreApi.mockImplementation(
         () =>
@@ -1399,7 +1399,7 @@ describe('modules/platform/azure/index', () => {
             getTeamMembersWithExtendedProperties: jest.fn(() => [
               { identity: { displayName: 'jyc', uniqueName: 'jyc', id: 123 } },
             ]),
-          } as any)
+          } as any),
       );
       await azure.addReviewers(123, ['test@bonjour.fr', 'jyc', 'required:def']);
       expect(azureApi.gitApi).toHaveBeenCalledTimes(3);
@@ -1412,7 +1412,7 @@ describe('modules/platform/azure/index', () => {
         '\n---\n\n - [ ] <!-- rebase-check --> rebase\n<!--renovate-config-hash:-->' +
         'plus also [a link](https://github.com/foo/bar/issues/5)';
       expect(azure.massageMarkdown(prBody)).toBe(
-        'plus also [a link](https://github.com/foo/bar/issues/5)'
+        'plus also [a link](https://github.com/foo/bar/issues/5)',
       );
     });
 
@@ -1421,7 +1421,7 @@ describe('modules/platform/azure/index', () => {
         'You can manually request rebase by checking the rebase/retry box above.\n\n' +
         'plus also [a link](https://github.com/foo/bar/issues/5)';
       expect(azure.massageMarkdown(commentContent)).toBe(
-        'You can manually request rebase by renaming the PR to start with "rebase!".\n\nplus also [a link](https://github.com/foo/bar/issues/5)'
+        'You can manually request rebase by renaming the PR to start with "rebase!".\n\nplus also [a link](https://github.com/foo/bar/issues/5)',
       );
     });
   });
@@ -1435,7 +1435,7 @@ describe('modules/platform/azure/index', () => {
           ({
             getBranch: jest.fn(() => ({ commit: { commitId: 'abcd1234' } })),
             createCommitStatus: createCommitStatusMock,
-          } as any)
+          } as any),
       );
       await azure.setBranchStatus({
         branchName: 'test',
@@ -1455,7 +1455,7 @@ describe('modules/platform/azure/index', () => {
           targetUrl: 'test.com',
         },
         'abcd1234',
-        '1'
+        '1',
       );
     });
 
@@ -1467,7 +1467,7 @@ describe('modules/platform/azure/index', () => {
           ({
             getBranch: jest.fn(() => ({ commit: { commitId: 'abcd1234' } })),
             createCommitStatus: createCommitStatusMock,
-          } as any)
+          } as any),
       );
       await azure.setBranchStatus({
         branchName: 'test',
@@ -1487,7 +1487,7 @@ describe('modules/platform/azure/index', () => {
           targetUrl: 'test.com',
         },
         'abcd1234',
-        '1'
+        '1',
       );
     });
   });
@@ -1510,7 +1510,7 @@ describe('modules/platform/azure/index', () => {
               title: 'title',
             })),
             updatePullRequest: updatePullRequestMock,
-          } as any)
+          } as any),
       );
 
       azureHelper.getMergeMethod = jest
@@ -1533,7 +1533,7 @@ describe('modules/platform/azure/index', () => {
           },
         },
         '1',
-        pullRequestIdMock
+        pullRequestIdMock,
       );
       expect(res).toBeTrue();
     });
@@ -1552,7 +1552,7 @@ describe('modules/platform/azure/index', () => {
             updatePullRequest: jest
               .fn()
               .mockRejectedValue(new Error(`oh no pr couldn't be updated`)),
-          } as any)
+          } as any),
       );
 
       azureHelper.getMergeMethod = jest
@@ -1576,7 +1576,7 @@ describe('modules/platform/azure/index', () => {
               targetRefName: 'refs/heads/ding',
             })),
             updatePullRequest: jest.fn(),
-          } as any)
+          } as any),
       );
       azureHelper.getMergeMethod = jest
         .fn()
@@ -1611,7 +1611,7 @@ describe('modules/platform/azure/index', () => {
             updatePullRequest: jest.fn(() => ({
               status: 1,
             })),
-          } as any)
+          } as any),
       );
       azureHelper.getMergeMethod = jest
         .fn()
@@ -1643,7 +1643,7 @@ describe('modules/platform/azure/index', () => {
             updatePullRequest: jest.fn(() => ({
               status: 1,
             })),
-          } as any)
+          } as any),
       );
       azureHelper.getMergeMethod = jest
         .fn()
@@ -1653,7 +1653,7 @@ describe('modules/platform/azure/index', () => {
         id: pullRequestIdMock,
       });
       expect(getPullRequestByIdMock).toHaveBeenCalledTimes(
-        expectedNumRetries + 1
+        expectedNumRetries + 1,
       );
       expect(logger.warn).toHaveBeenCalled();
       expect(res).toBeTrue();
@@ -1667,7 +1667,7 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             deletePullRequestLabels: jest.fn(),
-          } as any)
+          } as any),
       );
       await azure.deleteLabel(1234, 'rebase');
       expect(azureApi.gitApi.mock.calls).toMatchSnapshot();
@@ -1685,9 +1685,9 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             getItemContent: jest.fn(() =>
-              Promise.resolve(Readable.from(JSON.stringify(data)))
+              Promise.resolve(Readable.from(JSON.stringify(data))),
             ),
-          } as any)
+          } as any),
       );
       const res = await azure.getJsonFile('file.json');
       expect(res).toEqual(data);
@@ -1704,9 +1704,9 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             getItemContent: jest.fn(() =>
-              Promise.resolve(Readable.from(json5Data))
+              Promise.resolve(Readable.from(json5Data)),
             ),
-          } as any)
+          } as any),
       );
       const res = await azure.getJsonFile('file.json5');
       expect(res).toEqual({ foo: 'bar' });
@@ -1718,9 +1718,9 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             getItemContent: jest.fn(() =>
-              Promise.resolve(Readable.from(JSON.stringify(data)))
+              Promise.resolve(Readable.from(JSON.stringify(data))),
             ),
-          } as any)
+          } as any),
       );
       const res = await azure.getJsonFile('file.json', undefined, 'dev');
       expect(res).toEqual(data);
@@ -1731,9 +1731,9 @@ describe('modules/platform/azure/index', () => {
         () =>
           ({
             getItemContent: jest.fn(() =>
-              Promise.resolve(Readable.from('!@#'))
+              Promise.resolve(Readable.from('!@#')),
             ),
-          } as any)
+          } as any),
       );
       await expect(azure.getJsonFile('file.json')).rejects.toThrow();
     });
@@ -1745,7 +1745,7 @@ describe('modules/platform/azure/index', () => {
             getItemContent: jest.fn(() => {
               throw new Error('some error');
             }),
-          } as any)
+          } as any),
       );
       await expect(azure.getJsonFile('file.json')).rejects.toThrow();
     });
@@ -1754,12 +1754,12 @@ describe('modules/platform/azure/index', () => {
       const data = { foo: 'bar' };
       const gitApiMock = {
         getItemContent: jest.fn(() =>
-          Promise.resolve(Readable.from(JSON.stringify(data)))
+          Promise.resolve(Readable.from(JSON.stringify(data))),
         ),
         getRepositories: jest.fn(() =>
           Promise.resolve([
             { id: '123456', name: 'bar', project: { name: 'foo' } },
-          ])
+          ]),
         ),
       };
       azureApi.gitApi.mockImplementationOnce(() => gitApiMock as any);
@@ -1772,7 +1772,7 @@ describe('modules/platform/azure/index', () => {
       azureApi.gitApi.mockResolvedValueOnce(
         partial<IGitApi>({
           getRepositories: jest.fn(() => Promise.resolve([])),
-        })
+        }),
       );
       const res = await azure.getJsonFile('file.json', 'foo/bar');
       expect(res).toBeNull();
diff --git a/lib/modules/platform/azure/index.ts b/lib/modules/platform/azure/index.ts
index e367d9c1423b7df50bcdcb28d436522c496deecc..874da300b9ff6dc94c3c2f2cfc77be3d27dff245 100644
--- a/lib/modules/platform/azure/index.ts
+++ b/lib/modules/platform/azure/index.ts
@@ -95,7 +95,7 @@ export function initPlatform({
   }
   if (!token && !(username && password)) {
     throw new Error(
-      'Init: You must configure an Azure DevOps token, or a username and password'
+      'Init: You must configure an Azure DevOps token, or a username and password',
     );
   }
   // TODO: Add a connection check that endpoint/token combination are valid (#9593)
@@ -120,7 +120,7 @@ export async function getRepos(): Promise<string[]> {
 export async function getRawFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<string | null> {
   try {
     const azureApiGit = await azureApi.gitApi();
@@ -152,7 +152,7 @@ export async function getRawFile(
       undefined,
       undefined,
       undefined,
-      branchOrTag ? versionDescriptor : undefined
+      branchOrTag ? versionDescriptor : undefined,
     );
 
     const str = await streamToString(buf);
@@ -162,7 +162,7 @@ export async function getRawFile(
       err.message?.includes('<title>Azure DevOps Services Unavailable</title>')
     ) {
       logger.debug(
-        'Azure DevOps is currently unavailable when attempting to fetch file - throwing ExternalHostError'
+        'Azure DevOps is currently unavailable when attempting to fetch file - throwing ExternalHostError',
       );
       throw new ExternalHostError(err, id);
     }
@@ -179,7 +179,7 @@ export async function getRawFile(
 export async function getJsonFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<any> {
   const raw = await getRawFile(fileName, repoName, branchOrTag);
   return parseJson(raw, fileName);
@@ -227,7 +227,7 @@ export async function initRepo({
   });
   // TODO: types (#22198)
   const manualUrl = `${defaults.endpoint!}${encodeURIComponent(
-    projectName
+    projectName,
   )}/_git/${encodeURIComponent(repoName)}`;
   const url = repo.remoteUrl ?? manualUrl;
   await git.initRepo({
@@ -262,7 +262,7 @@ export async function getPrList(): Promise<AzurePr[]> {
         config.project,
         0,
         skip,
-        100
+        100,
       );
       prs = prs.concat(fetchedPrs);
       skip += 100;
@@ -280,7 +280,7 @@ export async function getPr(pullRequestId: number): Promise<Pr | null> {
     return null;
   }
   const azurePr = (await getPrList()).find(
-    (item) => item.number === pullRequestId
+    (item) => item.number === pullRequestId,
   );
 
   if (!azurePr) {
@@ -290,7 +290,7 @@ export async function getPr(pullRequestId: number): Promise<Pr | null> {
   const azureApiGit = await azureApi.gitApi();
   const labels = await azureApiGit.getPullRequestLabels(
     config.repoId,
-    pullRequestId
+    pullRequestId,
   );
 
   azurePr.labels = labels
@@ -311,12 +311,12 @@ export async function findPr({
     const prs = await getPrList();
 
     prsFiltered = prs.filter(
-      (item) => item.sourceRefName === getNewBranchName(branchName)
+      (item) => item.sourceRefName === getNewBranchName(branchName),
     );
 
     if (prTitle) {
       prsFiltered = prsFiltered.filter(
-        (item) => item.title.toUpperCase() === prTitle.toUpperCase()
+        (item) => item.title.toUpperCase() === prTitle.toUpperCase(),
       );
     }
 
@@ -348,7 +348,7 @@ export async function findPr({
 
 export async function getBranchPr(
   branchName: string,
-  targetBranch?: string
+  targetBranch?: string,
 ): Promise<Pr | null> {
   logger.debug(`getBranchPr(${branchName}, ${targetBranch})`);
   const existingPr = await findPr({
@@ -365,7 +365,7 @@ async function getStatusCheck(branchName: string): Promise<GitStatus[]> {
     config.repoId,
 
     // TODO: fix undefined (#22198)
-    getBranchNameWithoutRefsheadsPrefix(branchName)!
+    getBranchNameWithoutRefsheadsPrefix(branchName)!,
   );
   // only grab the latest statuses, it will group any by context
   return azureApiGit.getStatuses(
@@ -375,7 +375,7 @@ async function getStatusCheck(branchName: string): Promise<GitStatus[]> {
     undefined,
     undefined,
     undefined,
-    true
+    true,
   );
 }
 
@@ -390,7 +390,7 @@ const azureToRenovateStatusMapping: Record<GitStatusState, BranchStatus> = {
 
 export async function getBranchStatusCheck(
   branchName: string,
-  context: string
+  context: string,
 ): Promise<BranchStatus | null> {
   const res = await getStatusCheck(branchName);
   for (const check of res) {
@@ -404,7 +404,7 @@ export async function getBranchStatusCheck(
 
 export async function getBranchStatus(
   branchName: string,
-  internalChecksAsSuccess: boolean
+  internalChecksAsSuccess: boolean,
 ): Promise<BranchStatus> {
   logger.debug(`getBranchStatus(${branchName})`);
   const statuses = await getStatusCheck(branchName);
@@ -416,7 +416,7 @@ export async function getBranchStatus(
   const noOfFailures = statuses.filter(
     (status) =>
       status.state === GitStatusState.Error ||
-      status.state === GitStatusState.Failed
+      status.state === GitStatusState.Failed,
   ).length;
   if (noOfFailures) {
     return 'red';
@@ -424,7 +424,7 @@ export async function getBranchStatus(
   const noOfPending = statuses.filter(
     (status) =>
       status.state === GitStatusState.NotSet ||
-      status.state === GitStatusState.Pending
+      status.state === GitStatusState.Pending,
   ).length;
   if (noOfPending) {
     return 'yellow';
@@ -434,11 +434,11 @@ export async function getBranchStatus(
     statuses.every(
       (status) =>
         status.state === GitStatusState.Succeeded &&
-        status.context?.genre === 'renovate'
+        status.context?.genre === 'renovate',
     )
   ) {
     logger.debug(
-      'Successful checks are all internal renovate/ checks, so returning "pending" branch status'
+      'Successful checks are all internal renovate/ checks, so returning "pending" branch status',
     );
     return 'yellow';
   }
@@ -446,7 +446,7 @@ export async function getBranchStatus(
 }
 
 async function getMergeStrategy(
-  targetRefName: string
+  targetRefName: string,
 ): Promise<GitPullRequestMergeStrategy> {
   return (
     config.mergeMethods[targetRefName] ??
@@ -454,7 +454,7 @@ async function getMergeStrategy(
       config.repoId,
       config.project,
       targetRefName,
-      config.defaultBranch
+      config.defaultBranch,
     ))
   );
 }
@@ -486,7 +486,7 @@ export async function createPr({
       workItemRefs,
       isDraft: draftPR,
     },
-    config.repoId
+    config.repoId,
   );
   if (platformOptions?.usePlatformAutomerge) {
     const mergeStrategy = await getMergeStrategy(pr.targetRefName!);
@@ -504,7 +504,7 @@ export async function createPr({
       },
       config.repoId,
       // TODO #22198
-      pr.pullRequestId!
+      pr.pullRequestId!,
     );
   }
   if (platformOptions?.autoApprove) {
@@ -518,7 +518,7 @@ export async function createPr({
       config.repoId,
       // TODO #22198
       pr.pullRequestId!,
-      pr.createdBy!.id!
+      pr.createdBy!.id!,
     );
   }
   await Promise.all(
@@ -529,9 +529,9 @@ export async function createPr({
         },
         config.repoId,
         // TODO #22198
-        pr.pullRequestId!
-      )
-    )
+        pr.pullRequestId!,
+      ),
+    ),
   );
   return getRenovatePRFormat(pr);
 }
@@ -565,7 +565,7 @@ export async function updatePr({
         status: PullRequestStatus.Active,
       },
       config.repoId,
-      prNo
+      prNo,
     );
   } else if (state === 'closed') {
     objToUpdate.status = PullRequestStatus.Abandoned;
@@ -582,7 +582,7 @@ export async function updatePr({
       config.repoId,
       // TODO #22198
       pr.pullRequestId!,
-      pr.createdBy!.id!
+      pr.createdBy!.id!,
     );
   }
 
@@ -622,11 +622,11 @@ export async function ensureComment({
         status: 1,
       },
       config.repoId,
-      number
+      number,
     );
     logger.info(
       { repository: config.repository, issueNo: number, topic },
-      'Comment added'
+      'Comment added',
     );
   } else if (commentNeedsUpdating) {
     await azureApiGit.updateComment(
@@ -637,16 +637,16 @@ export async function ensureComment({
       number,
       threadIdFound,
       // TODO #22198
-      commentIdFound!
+      commentIdFound!,
     );
     logger.debug(
       { repository: config.repository, issueNo: number, topic },
-      'Comment updated'
+      'Comment updated',
     );
   } else {
     logger.debug(
       { repository: config.repository, issueNo: number, topic },
-      'Comment is already update-to-date'
+      'Comment is already update-to-date',
     );
   }
 
@@ -654,7 +654,7 @@ export async function ensureComment({
 }
 
 export async function ensureCommentRemoval(
-  removeConfig: EnsureCommentRemovalConfig
+  removeConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   const { number: issueNo } = removeConfig;
   const key =
@@ -671,14 +671,14 @@ export async function ensureCommentRemoval(
     const thread = threads.find(
       (thread: GitPullRequestCommentThread): boolean =>
         !!thread.comments?.[0].content?.startsWith(
-          `### ${removeConfig.topic}\n\n`
-        )
+          `### ${removeConfig.topic}\n\n`,
+        ),
     );
     threadIdFound = thread?.id;
   } else {
     const thread = threads.find(
       (thread: GitPullRequestCommentThread): boolean =>
-        thread.comments?.[0].content?.trim() === removeConfig.content
+        thread.comments?.[0].content?.trim() === removeConfig.content,
     );
     threadIdFound = thread?.id;
   }
@@ -690,7 +690,7 @@ export async function ensureCommentRemoval(
       },
       config.repoId,
       issueNo,
-      threadIdFound
+      threadIdFound,
     );
   }
 }
@@ -709,12 +709,12 @@ export async function setBranchStatus({
   url: targetUrl,
 }: BranchStatusConfig): Promise<void> {
   logger.debug(
-    `setBranchStatus(${branchName}, ${context}, ${description}, ${state}, ${targetUrl!})`
+    `setBranchStatus(${branchName}, ${context}, ${description}, ${state}, ${targetUrl!})`,
   );
   const azureApiGit = await azureApi.gitApi();
   const branch = await azureApiGit.getBranch(
     config.repoId,
-    getBranchNameWithoutRefsheadsPrefix(branchName)!
+    getBranchNameWithoutRefsheadsPrefix(branchName)!,
   );
   const statusToCreate: GitStatus = {
     description,
@@ -726,7 +726,7 @@ export async function setBranchStatus({
     statusToCreate,
     // TODO #22198
     branch.commit!.commitId!,
-    config.repoId
+    config.repoId,
   );
   logger.trace(`Created commit status of ${state} on branch ${branchName}`);
 }
@@ -759,14 +759,14 @@ export async function mergePr({
       pr.lastMergeSourceCommit?.commitId
     } using mergeStrategy ${mergeStrategy} (${
       GitPullRequestMergeStrategy[mergeStrategy]
-    })`
+    })`,
   );
 
   try {
     const response = await azureApiGit.updatePullRequest(
       objToUpdate,
       config.repoId,
-      pullRequestId
+      pullRequestId,
     );
 
     let retries = 0;
@@ -776,7 +776,7 @@ export async function mergePr({
       const sleepMs = retries * 1000;
       logger.trace(
         { pullRequestId, status: pr.status, retries },
-        `Updated PR to closed status but change has not taken effect yet. Retrying...`
+        `Updated PR to closed status but change has not taken effect yet. Retrying...`,
       );
 
       await setTimeout(sleepMs);
@@ -790,7 +790,7 @@ export async function mergePr({
         `Expected PR to have status ${
           PullRequestStatus[PullRequestStatus.Completed]
           // TODO #22198
-        }. However, it is ${PullRequestStatus[pr.status!]}.`
+        }. However, it is ${PullRequestStatus[pr.status!]}.`,
       );
     }
     return true;
@@ -805,11 +805,11 @@ export function massageMarkdown(input: string): string {
   return smartTruncate(input, 4000)
     .replace(
       'you tick the rebase/retry checkbox',
-      'rename PR to start with "rebase!"'
+      'rename PR to start with "rebase!"',
     )
     .replace(
       'checking the rebase/retry box above',
-      'renaming the PR to start with "rebase!"'
+      'renaming the PR to start with "rebase!"',
     )
     .replace(regEx(`\n---\n\n.*?<!-- rebase-check -->.*?\n`), '')
     .replace(regEx(/<!--renovate-(?:debug|config-hash):.*?-->/g), '');
@@ -854,9 +854,9 @@ async function getUserIds(users: string[]): Promise<User[]> {
         await azureApiCore.getTeamMembersWithExtendedProperties(
           // TODO #22198
           repo.project!.id!,
-          t.id!
-        )
-    )
+          t.id!,
+        ),
+    ),
   );
 
   const ids: { id: string; name: string; isRequired: boolean }[] = [];
@@ -913,7 +913,7 @@ async function getUserIds(users: string[]): Promise<User[]> {
  */
 export async function addAssignees(
   issueNo: number,
-  assignees: string[]
+  assignees: string[],
 ): Promise<void> {
   logger.trace(`addAssignees(${issueNo}, [${assignees.join(', ')}])`);
   const ids = await getUserIds(assignees);
@@ -931,7 +931,7 @@ export async function addAssignees(
  */
 export async function addReviewers(
   prNo: number,
-  reviewers: string[]
+  reviewers: string[],
 ): Promise<void> {
   logger.trace(`addReviewers(${prNo}, [${reviewers.join(', ')}])`);
   const azureApiGit = await azureApi.gitApi();
@@ -946,16 +946,16 @@ export async function addReviewers(
         },
         config.repoId,
         prNo,
-        obj.id
+        obj.id,
       );
       logger.debug(`Reviewer added: ${obj.name}`);
-    })
+    }),
   );
 }
 
 export async function deleteLabel(
   prNumber: number,
-  label: string
+  label: string,
 ): Promise<void> {
   logger.debug(`Deleting label ${label} from #${prNumber}`);
   const azureApiGit = await azureApi.gitApi();
diff --git a/lib/modules/platform/azure/schema.ts b/lib/modules/platform/azure/schema.ts
index 57406229d105cf9d787fe51ef3e3b5c57991894d..1d266ce6f29a55c07d37b8dda46c76e984b1fe8d 100644
--- a/lib/modules/platform/azure/schema.ts
+++ b/lib/modules/platform/azure/schema.ts
@@ -13,7 +13,7 @@ const WrappedException: z.ZodSchema<WrappedException> = z.lazy(() =>
     stackTrace: z.string().optional(),
     typeKey: z.string().optional(),
     typeName: z.string().optional(),
-  })
+  }),
 );
 
 export const WrappedExceptionSchema = Json.pipe(WrappedException);
diff --git a/lib/modules/platform/azure/util.spec.ts b/lib/modules/platform/azure/util.spec.ts
index 38755391de04b60679fd603e0310613726186dfe..bfdea61b104c98b206237151b938ea692aae3ca6 100644
--- a/lib/modules/platform/azure/util.spec.ts
+++ b/lib/modules/platform/azure/util.spec.ts
@@ -43,7 +43,7 @@ describe('modules/platform/azure/util', () => {
 
     it('should parse valid genre and name with slash', () => {
       const context = getGitStatusContextFromCombinedName(
-        'my-genre/status-name'
+        'my-genre/status-name',
       );
       expect(context).toEqual({
         genre: 'my-genre',
@@ -53,7 +53,7 @@ describe('modules/platform/azure/util', () => {
 
     it('should parse valid genre and name with multiple slashes', () => {
       const context = getGitStatusContextFromCombinedName(
-        'my-genre/sub-genre/status-name'
+        'my-genre/sub-genre/status-name',
       );
       expect(context).toEqual({
         genre: 'my-genre/sub-genre',
@@ -170,8 +170,8 @@ describe('modules/platform/azure/util', () => {
     it('should return an error', () => {
       expect(() => getProjectAndRepo('prjName/myRepoName/blalba')).toThrow(
         Error(
-          `prjName/myRepoName/blalba can be only structured this way : 'repository' or 'projectName/repository'!`
-        )
+          `prjName/myRepoName/blalba can be only structured this way : 'repository' or 'projectName/repository'!`,
+        ),
       );
     });
   });
@@ -185,7 +185,7 @@ describe('modules/platform/azure/util', () => {
 
     it('returns null when repo is not found', () => {
       expect(
-        getRepoByName('foo/foo', [{ name: 'bar', project: { name: 'bar' } }])
+        getRepoByName('foo/foo', [{ name: 'bar', project: { name: 'bar' } }]),
       ).toBeNull();
     });
 
@@ -198,7 +198,7 @@ describe('modules/platform/azure/util', () => {
           { id: '2', name: 'bar' },
           { id: '3', name: 'bar', project: { name: 'foo' } },
           { id: '4', name: 'bar', project: { name: 'foo' } },
-        ])
+        ]),
       ).toMatchObject({ id: '3' });
     });
 
@@ -207,7 +207,7 @@ describe('modules/platform/azure/util', () => {
         getRepoByName('foo', [
           { id: '1', name: 'bar', project: { name: 'bar' } },
           { id: '2', name: 'foo', project: { name: 'foo' } },
-        ])
+        ]),
       ).toMatchObject({ id: '2' });
     });
 
diff --git a/lib/modules/platform/azure/util.ts b/lib/modules/platform/azure/util.ts
index de52dd46cc9bc17ea8be05fff2799d43761a191e..adbb2560a6ff868147e25cd5e9e58a3242fa7cf3 100644
--- a/lib/modules/platform/azure/util.ts
+++ b/lib/modules/platform/azure/util.ts
@@ -13,7 +13,7 @@ import { getPrBodyStruct } from '../pr-body';
 import type { AzurePr } from './types';
 
 export function getGitStatusContextCombinedName(
-  context: GitStatusContext | null | undefined
+  context: GitStatusContext | null | undefined,
 ): string | undefined {
   if (!context) {
     return undefined;
@@ -27,7 +27,7 @@ export function getGitStatusContextCombinedName(
 }
 
 export function getGitStatusContextFromCombinedName(
-  context: string | undefined | null
+  context: string | undefined | null,
 ): GitStatusContext | undefined {
   if (!context) {
     return undefined;
@@ -46,7 +46,7 @@ export function getGitStatusContextFromCombinedName(
 }
 
 export function getBranchNameWithoutRefsheadsPrefix(
-  branchPath: string | undefined
+  branchPath: string | undefined,
 ): string | undefined {
   if (!branchPath) {
     logger.error(`getBranchNameWithoutRefsheadsPrefix(undefined)`);
@@ -54,7 +54,7 @@ export function getBranchNameWithoutRefsheadsPrefix(
   }
   if (!branchPath.startsWith('refs/heads/')) {
     logger.trace(
-      `The refs/heads/ name should have started with 'refs/heads/' but it didn't. (${branchPath})`
+      `The refs/heads/ name should have started with 'refs/heads/' but it didn't. (${branchPath})`,
     );
     return branchPath;
   }
@@ -62,7 +62,7 @@ export function getBranchNameWithoutRefsheadsPrefix(
 }
 
 export function getBranchNameWithoutRefsPrefix(
-  branchPath?: string
+  branchPath?: string,
 ): string | undefined {
   if (!branchPath) {
     logger.error(`getBranchNameWithoutRefsPrefix(undefined)`);
@@ -70,7 +70,7 @@ export function getBranchNameWithoutRefsPrefix(
   }
   if (!branchPath.startsWith('refs/')) {
     logger.trace(
-      `The ref name should have started with 'refs/' but it didn't. (${branchPath})`
+      `The ref name should have started with 'refs/' but it didn't. (${branchPath})`,
     );
     return branchPath;
   }
@@ -86,10 +86,10 @@ export function getRenovatePRFormat(azurePr: GitPullRequest): AzurePr {
   const number = azurePr.pullRequestId;
 
   const sourceBranch = getBranchNameWithoutRefsheadsPrefix(
-    azurePr.sourceRefName
+    azurePr.sourceRefName,
   );
   const targetBranch = getBranchNameWithoutRefsheadsPrefix(
-    azurePr.targetRefName
+    azurePr.targetRefName,
   );
   const bodyStruct = getPrBodyStruct(azurePr.description);
 
@@ -163,7 +163,7 @@ export function getProjectAndRepo(str: string): {
 
 export function getRepoByName(
   name: string,
-  repos: (GitRepository | null | undefined)[] | undefined | null
+  repos: (GitRepository | null | undefined)[] | undefined | null,
 ): GitRepository | null {
   logger.trace(`getRepoByName(${name})`);
 
@@ -174,7 +174,7 @@ export function getRepoByName(
   const foundRepo = repos?.find(
     (r) =>
       project === r?.project?.name?.toLowerCase() &&
-      repo === r?.name?.toLowerCase()
+      repo === r?.name?.toLowerCase(),
   );
   if (!foundRepo) {
     logger.debug(`Repo not found: ${name}`);
diff --git a/lib/modules/platform/bitbucket-server/index.spec.ts b/lib/modules/platform/bitbucket-server/index.spec.ts
index 72c6c9c094280e31f8458322d1c35f2692a3fa30..b5236718daade1b0d6eab8d3edebbb6fb64b297f 100644
--- a/lib/modules/platform/bitbucket-server/index.spec.ts
+++ b/lib/modules/platform/bitbucket-server/index.spec.ts
@@ -20,7 +20,7 @@ function sshLink(projectKey: string, repositorySlug: string): string {
 function httpLink(
   endpointStr: string,
   projectKey: string,
-  repositorySlug: string
+  repositorySlug: string,
 ): string {
   return `${endpointStr}scm/${projectKey.toLowerCase()}/${repositorySlug}.git`;
 }
@@ -31,7 +31,7 @@ function repoMock(
   repositorySlug: string,
   options: { cloneUrl: { https: boolean; ssh: boolean } } = {
     cloneUrl: { https: true, ssh: true },
-  }
+  },
 ) {
   const endpointStr = endpoint.toString();
   const links: {
@@ -92,7 +92,7 @@ function repoMock(
 function prMock(
   endpoint: URL | string,
   projectKey: string,
-  repositorySlug: string
+  repositorySlug: string,
 ) {
   const endpointStr = endpoint.toString();
   return {
@@ -193,7 +193,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
           .reply(200, repoMock(url, 'SOME', 'repo'))
           .get(
-            `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+            `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
           )
           .reply(200, {
             displayId: 'master',
@@ -215,7 +215,7 @@ describe('modules/platform/bitbucket-server/index', () => {
         git.branchExists.mockReturnValue(true);
         git.isBranchBehindBase.mockResolvedValue(false);
         git.getBranchCommit.mockReturnValue(
-          '0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         );
         const endpoint =
           scenarioName === 'endpoint with path'
@@ -241,7 +241,7 @@ describe('modules/platform/bitbucket-server/index', () => {
         it('should throw if no username/password', () => {
           expect.assertions(1);
           expect(() =>
-            bitbucket.initPlatform({ endpoint: 'endpoint' })
+            bitbucket.initPlatform({ endpoint: 'endpoint' }),
           ).toThrow();
         });
 
@@ -251,7 +251,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               endpoint: 'https://stash.renovatebot.com',
               username: 'abc',
               password: '123',
-            })
+            }),
           ).toMatchSnapshot();
         });
       });
@@ -262,7 +262,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           httpMock
             .scope(urlHost)
             .get(
-              `${urlPath}/rest/api/1.0/repos?permission=REPO_WRITE&state=AVAILABLE&limit=100`
+              `${urlPath}/rest/api/1.0/repos?permission=REPO_WRITE&state=AVAILABLE&limit=100`,
             )
             .reply(200, {
               size: 1,
@@ -283,7 +283,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, repoMock(url, 'SOME', 'repo'))
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -292,7 +292,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             await bitbucket.initRepo({
               endpoint: 'https://stash.renovatebot.com/vcs/',
               repository: 'SOME/repo',
-            })
+            }),
           ).toMatchSnapshot();
         });
 
@@ -303,7 +303,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, repoMock(url, 'SOME', 'repo'))
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -312,7 +312,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             await bitbucket.initRepo({
               endpoint: 'https://stash.renovatebot.com/vcs/',
               repository: 'SOME/repo',
-            })
+            }),
           ).toEqual({
             defaultBranch: 'master',
             isFork: false,
@@ -330,7 +330,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, responseMock)
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -341,7 +341,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             gitUrl: 'ssh',
           });
           expect(git.initRepo).toHaveBeenCalledWith(
-            expect.objectContaining({ url: sshLink('SOME', 'repo') })
+            expect.objectContaining({ url: sshLink('SOME', 'repo') }),
           );
           expect(res).toEqual({
             defaultBranch: 'master',
@@ -361,7 +361,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, responseMock)
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -375,7 +375,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           expect(git.initRepo).toHaveBeenCalledWith(
             expect.objectContaining({
               url: link,
-            })
+            }),
           );
           expect(res).toEqual({
             defaultBranch: 'master',
@@ -394,7 +394,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, responseMock)
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -408,9 +408,9 @@ describe('modules/platform/bitbucket-server/index', () => {
             expect.objectContaining({
               url: httpLink(url.toString(), 'SOME', 'repo').replace(
                 'https://',
-                `https://${username}:${password}@`
+                `https://${username}:${password}@`,
               ),
-            })
+            }),
           );
           expect(res).toEqual({
             defaultBranch: 'master',
@@ -429,7 +429,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, responseMock)
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -439,7 +439,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             repository: 'SOME/repo',
           });
           expect(git.initRepo).toHaveBeenCalledWith(
-            expect.objectContaining({ url: sshLink('SOME', 'repo') })
+            expect.objectContaining({ url: sshLink('SOME', 'repo') }),
           );
           expect(res).toMatchSnapshot();
         });
@@ -454,7 +454,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, responseMock)
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -467,9 +467,9 @@ describe('modules/platform/bitbucket-server/index', () => {
             expect.objectContaining({
               url: httpLink(url.toString(), 'SOME', 'repo').replace(
                 'https://',
-                `https://${username}:${password}@`
+                `https://${username}:${password}@`,
               ),
-            })
+            }),
           );
           expect(res).toMatchSnapshot();
         });
@@ -485,7 +485,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, responseMock)
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(200, {
               displayId: 'master',
@@ -498,7 +498,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           expect(git.initRepo).toHaveBeenCalledWith(
             expect.objectContaining({
               url: link,
-            })
+            }),
           );
           expect(res).toMatchSnapshot();
         });
@@ -510,14 +510,14 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, repoMock(url, 'SOME', 'repo'))
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/branches/default`,
             )
             .reply(204);
           await expect(
             bitbucket.initRepo({
               endpoint: 'https://stash.renovatebot.com/vcs/',
               repository: 'SOME/repo',
-            })
+            }),
           ).rejects.toThrow(REPOSITORY_EMPTY);
         });
       });
@@ -528,7 +528,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           httpMock
             .scope(urlHost)
             .get(
-              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`
+              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`,
             )
             .reply(200, {
               mergeConfig: null,
@@ -542,7 +542,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           httpMock
             .scope(urlHost)
             .get(
-              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`
+              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`,
             )
             .reply(200, {
               mergeConfig: {
@@ -560,7 +560,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             httpMock
               .scope(urlHost)
               .get(
-                `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`
+                `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`,
               )
               .reply(200, {
                 mergeConfig: {
@@ -571,7 +571,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               });
             const actual = await bitbucket.getRepoForceRebase();
             expect(actual).toBeTrue();
-          }
+          },
         );
 
         it.each(['no-ff', 'ff', 'rebase-no-ff', 'squash'])(
@@ -581,7 +581,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             httpMock
               .scope(urlHost)
               .get(
-                `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`
+                `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/settings/pull-requests`,
               )
               .reply(200, {
                 mergeConfig: {
@@ -592,7 +592,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               });
             const actual = await bitbucket.getRepoForceRebase();
             expect(actual).toBeFalse();
-          }
+          },
         );
       });
 
@@ -607,12 +607,12 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .twice()
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
@@ -624,12 +624,12 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .twice()
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
@@ -639,7 +639,7 @@ describe('modules/platform/bitbucket-server/index', () => {
         it('throws not-found 1', async () => {
           await initRepo();
           await expect(
-            bitbucket.addReviewers(null as any, ['name'])
+            bitbucket.addReviewers(null as any, ['name']),
           ).rejects.toThrow(REPOSITORY_NOT_FOUND);
         });
 
@@ -647,12 +647,12 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/4`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/4`,
             )
             .reply(404);
 
           await expect(bitbucket.addReviewers(4, ['name'])).rejects.toThrow(
-            REPOSITORY_NOT_FOUND
+            REPOSITORY_NOT_FOUND,
           );
         });
 
@@ -660,16 +660,16 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(404);
 
           await expect(bitbucket.addReviewers(5, ['name'])).rejects.toThrow(
-            REPOSITORY_NOT_FOUND
+            REPOSITORY_NOT_FOUND,
           );
         });
 
@@ -677,15 +677,15 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(409);
           await expect(bitbucket.addReviewers(5, ['name'])).rejects.toThrow(
-            REPOSITORY_CHANGED
+            REPOSITORY_CHANGED,
           );
         });
 
@@ -693,11 +693,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(409, {
               errors: [
@@ -720,7 +720,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             });
 
           await expect(
-            bitbucket.addReviewers(5, ['name'])
+            bitbucket.addReviewers(5, ['name']),
           ).rejects.toThrowErrorMatchingSnapshot();
         });
 
@@ -728,15 +728,15 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(405);
           await expect(
-            bitbucket.addReviewers(5, ['name'])
+            bitbucket.addReviewers(5, ['name']),
           ).rejects.toThrowErrorMatchingSnapshot();
         });
       });
@@ -752,7 +752,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           httpMock
             .scope(urlHost)
             .get(
-              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/pull-requests/3/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/pull-requests/3/activities?limit=100`,
             )
             .reply(200);
           const res = await bitbucket.ensureComment({
@@ -767,7 +767,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -786,14 +786,14 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ action: 'OTHER' }],
             })
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments`,
             )
             .reply(200);
 
@@ -802,7 +802,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               number: 5,
               topic: 'topic',
               content: 'content',
-            })
+            }),
           ).toBeTrue();
         });
 
@@ -810,7 +810,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -829,14 +829,14 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ action: 'OTHER' }],
             })
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments`,
             )
             .reply(200);
 
@@ -845,7 +845,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               number: 5,
               topic: null,
               content: 'content',
-            })
+            }),
           ).toBeTrue();
         });
 
@@ -853,7 +853,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -872,20 +872,20 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ action: 'OTHER' }],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21`,
             )
             .reply(200, {
               version: 1,
             })
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21`,
             )
             .reply(200);
 
@@ -894,7 +894,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               number: 5,
               topic: 'some-subject',
               content: 'some\ncontent',
-            })
+            }),
           ).toBeTrue();
         });
 
@@ -902,7 +902,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -921,14 +921,14 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ action: 'OTHER' }],
             })
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments`,
             )
             .reply(200);
 
@@ -937,7 +937,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               number: 5,
               topic: null,
               content: 'some\ncontent',
-            })
+            }),
           ).toBeTrue();
         });
 
@@ -945,7 +945,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -964,7 +964,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
@@ -976,7 +976,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               number: 5,
               topic: 'some-subject',
               content: 'blablabla',
-            })
+            }),
           ).toBeTrue();
         });
 
@@ -984,7 +984,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -1003,7 +1003,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1024,7 +1024,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           httpMock
             .scope(urlHost)
             .get(
-              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -1043,7 +1043,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/undefined/repos/undefined/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1054,7 +1054,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               type: 'by-topic',
               number: 5,
               topic: 'topic',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1062,7 +1062,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -1081,20 +1081,20 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ action: 'OTHER' }],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21`,
             )
             .reply(200, {
               version: 1,
             })
             .delete(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21?version=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/21?version=1`,
             )
             .reply(200);
 
@@ -1103,7 +1103,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               type: 'by-topic',
               number: 5,
               topic: 'some-subject',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1111,7 +1111,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -1130,20 +1130,20 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ action: 'OTHER' }],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/22`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/22`,
             )
             .reply(200, {
               version: 1,
             })
             .delete(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/22?version=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/comments/22?version=1`,
             )
             .reply(200);
 
@@ -1152,7 +1152,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               type: 'by-content',
               number: 5,
               content: '!merge',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1160,7 +1160,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100`,
             )
             .reply(200, {
               isLastPage: false,
@@ -1179,7 +1179,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/activities?limit=100&start=1`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1191,7 +1191,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               type: 'by-topic',
               number: 5,
               topic: 'topic',
-            })
+            }),
           ).toResolve();
         });
       });
@@ -1201,7 +1201,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1216,19 +1216,19 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`,
             )
             .reply(200, {
               isLastPage: true,
               values: [prMock(url, 'SOME', 'repo')],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
           expect(
-            await bitbucket.getBranchPr('userName1/pullRequest5')
+            await bitbucket.getBranchPr('userName1/pullRequest5'),
           ).toMatchSnapshot();
         });
 
@@ -1236,7 +1236,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1244,7 +1244,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             });
 
           expect(
-            await bitbucket.getBranchPr('userName1/pullRequest1')
+            await bitbucket.getBranchPr('userName1/pullRequest1'),
           ).toBeNull();
         });
 
@@ -1252,7 +1252,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1260,7 +1260,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             });
 
           expect(
-            await bitbucket.getBranchPr('userName1/pullRequest1')
+            await bitbucket.getBranchPr('userName1/pullRequest1'),
           ).toBeNull();
         });
       });
@@ -1270,7 +1270,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1282,7 +1282,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               branchName: 'userName1/pullRequest5',
               prTitle: 'title',
               state: 'open',
-            })
+            }),
           ).toMatchSnapshot();
         });
 
@@ -1290,7 +1290,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests?state=ALL&role.1=AUTHOR&username.1=abc&limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1302,7 +1302,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               branchName: 'userName1/pullRequest5',
               prTitle: 'title',
               state: 'closed',
-            })
+            }),
           ).toBeNull();
         });
       });
@@ -1314,11 +1314,11 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, prMock(url, 'SOME', 'repo'))
             .get(
-              `${urlPath}/rest/default-reviewers/1.0/projects/SOME/repos/repo/reviewers?sourceRefId=refs/heads/branch&targetRefId=refs/heads/master&sourceRepoId=5&targetRepoId=5`
+              `${urlPath}/rest/default-reviewers/1.0/projects/SOME/repos/repo/reviewers?sourceRefId=refs/heads/branch&targetRefId=refs/heads/master&sourceRepoId=5&targetRepoId=5`,
             )
             .reply(200, [{ name: 'jcitizen' }])
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
@@ -1340,11 +1340,11 @@ describe('modules/platform/bitbucket-server/index', () => {
             .get(`${urlPath}/rest/api/1.0/projects/SOME/repos/repo`)
             .reply(200, prMock(url, 'SOME', 'repo'))
             .get(
-              `${urlPath}/rest/default-reviewers/1.0/projects/SOME/repos/repo/reviewers?sourceRefId=refs/heads/branch&targetRefId=refs/heads/master&sourceRepoId=5&targetRepoId=5`
+              `${urlPath}/rest/default-reviewers/1.0/projects/SOME/repos/repo/reviewers?sourceRefId=refs/heads/branch&targetRefId=refs/heads/master&sourceRepoId=5&targetRepoId=5`,
             )
             .reply(200, [{ name: 'jcitizen' }])
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
@@ -1372,7 +1372,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
@@ -1383,11 +1383,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/3`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/3`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .twice()
             .reply(200, prMock(url, 'SOME', 'repo'));
@@ -1403,7 +1403,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, {
               version: 0,
@@ -1423,11 +1423,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200);
 
@@ -1437,7 +1437,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               prTitle: 'title',
               prBody: 'body',
               targetBranch: 'new_base',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1445,15 +1445,15 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, { state: 'OPEN', version: 42 })
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/decline?version=42`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/decline?version=42`,
             )
             .reply(200, { status: 'DECLINED' });
 
@@ -1463,7 +1463,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               prTitle: 'title',
               prBody: 'body',
               state: 'closed',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1471,15 +1471,15 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, { state: 'DECLINED', version: 42 })
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/reopen?version=42`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/reopen?version=42`,
             )
             .reply(200, { status: 'OPEN' });
 
@@ -1489,7 +1489,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               prTitle: 'title',
               prBody: 'body',
               state: 'open',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1500,7 +1500,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               number: null as any,
               prTitle: 'title',
               prBody: 'body',
-            })
+            }),
           ).rejects.toThrow(REPOSITORY_NOT_FOUND);
         });
 
@@ -1508,11 +1508,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/4`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/4`,
             )
             .reply(404);
           await expect(
-            bitbucket.updatePr({ number: 4, prTitle: 'title', prBody: 'body' })
+            bitbucket.updatePr({ number: 4, prTitle: 'title', prBody: 'body' }),
           ).rejects.toThrow(REPOSITORY_NOT_FOUND);
         });
 
@@ -1520,16 +1520,16 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(404);
 
           await expect(
-            bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+            bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
           ).rejects.toThrow(REPOSITORY_NOT_FOUND);
         });
 
@@ -1537,11 +1537,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(409, {
               errors: [
@@ -1563,12 +1563,12 @@ describe('modules/platform/bitbucket-server/index', () => {
               ],
             })
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
               `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
-              (body) => body.reviewers.length === 0
+              (body) => body.reviewers.length === 0,
             )
             .reply(200, prMock(url, 'SOME', 'repo'));
 
@@ -1578,7 +1578,7 @@ describe('modules/platform/bitbucket-server/index', () => {
               prTitle: 'title',
               prBody: 'body',
               state: 'open',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1586,16 +1586,16 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(409);
 
           await expect(
-            bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+            bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
           ).rejects.toThrow(REPOSITORY_CHANGED);
         });
 
@@ -1603,16 +1603,16 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .put(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(405);
 
           await expect(
-            bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+            bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
           ).rejects.toThrowErrorMatchingSnapshot();
         });
       });
@@ -1622,11 +1622,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`,
             )
             .reply(200);
 
@@ -1634,7 +1634,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             await bitbucket.mergePr({
               branchName: 'branch',
               id: 5,
-            })
+            }),
           ).toBeTrue();
         });
 
@@ -1651,7 +1651,7 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/4`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/4`,
             )
             .reply(404);
 
@@ -1659,7 +1659,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             bitbucket.mergePr({
               branchName: 'branch',
               id: 4,
-            })
+            }),
           ).rejects.toThrow(REPOSITORY_NOT_FOUND);
         });
 
@@ -1667,11 +1667,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`,
             )
             .reply(404);
 
@@ -1679,7 +1679,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             bitbucket.mergePr({
               branchName: 'branch',
               id: 5,
-            })
+            }),
           ).rejects.toThrow(REPOSITORY_NOT_FOUND);
         });
 
@@ -1687,11 +1687,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`,
             )
             .reply(409);
 
@@ -1699,7 +1699,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             await bitbucket.mergePr({
               branchName: 'branch',
               id: 5,
-            })
+            }),
           ).toBeFalsy();
         });
 
@@ -1707,11 +1707,11 @@ describe('modules/platform/bitbucket-server/index', () => {
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5`,
             )
             .reply(200, prMock(url, 'SOME', 'repo'))
             .post(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/pull-requests/5/merge?version=1`,
             )
             .reply(405);
 
@@ -1719,7 +1719,7 @@ describe('modules/platform/bitbucket-server/index', () => {
             bitbucket.mergePr({
               branchName: 'branch',
               id: 5,
-            })
+            }),
           ).resolves.toBeFalse();
         });
       });
@@ -1728,8 +1728,8 @@ describe('modules/platform/bitbucket-server/index', () => {
         it('returns diff files', () => {
           expect(
             bitbucket.massageMarkdown(
-              '<details><summary>foo</summary>bar</details>text<details>'
-            )
+              '<details><summary>foo</summary>bar</details>text<details>',
+            ),
           ).toMatchSnapshot();
         });
 
@@ -1754,7 +1754,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {
               successful: 3,
@@ -1763,7 +1763,7 @@ Followed by some information.
             });
 
           expect(await bitbucket.getBranchStatus('somebranch', true)).toBe(
-            'green'
+            'green',
           );
         });
 
@@ -1771,7 +1771,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {
               successful: 3,
@@ -1780,12 +1780,12 @@ Followed by some information.
             });
 
           expect(await bitbucket.getBranchStatus('somebranch', true)).toBe(
-            'yellow'
+            'yellow',
           );
 
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {
               successful: 0,
@@ -1794,7 +1794,7 @@ Followed by some information.
             });
 
           expect(await bitbucket.getBranchStatus('somebranch', true)).toBe(
-            'yellow'
+            'yellow',
           );
         });
 
@@ -1802,7 +1802,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {
               successful: 1,
@@ -1811,17 +1811,17 @@ Followed by some information.
             });
 
           expect(await bitbucket.getBranchStatus('somebranch', true)).toBe(
-            'red'
+            'red',
           );
 
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .replyWithError('requst-failed');
 
           expect(await bitbucket.getBranchStatus('somebranch', true)).toBe(
-            'red'
+            'red',
           );
         });
 
@@ -1829,7 +1829,7 @@ Followed by some information.
           git.branchExists.mockReturnValue(false);
           await initRepo();
           await expect(
-            bitbucket.getBranchStatus('somebranch', true)
+            bitbucket.getBranchStatus('somebranch', true),
           ).rejects.toThrow(REPOSITORY_CHANGED);
         });
       });
@@ -1839,7 +1839,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1853,7 +1853,7 @@ Followed by some information.
             });
 
           expect(
-            await bitbucket.getBranchStatusCheck('somebranch', 'context-2')
+            await bitbucket.getBranchStatusCheck('somebranch', 'context-2'),
           ).toBe('green');
         });
 
@@ -1861,7 +1861,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1875,7 +1875,7 @@ Followed by some information.
             });
 
           expect(
-            await bitbucket.getBranchStatusCheck('somebranch', 'context-2')
+            await bitbucket.getBranchStatusCheck('somebranch', 'context-2'),
           ).toBe('yellow');
         });
 
@@ -1883,7 +1883,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1897,7 +1897,7 @@ Followed by some information.
             });
 
           expect(
-            await bitbucket.getBranchStatusCheck('somebranch', 'context-2')
+            await bitbucket.getBranchStatusCheck('somebranch', 'context-2'),
           ).toBe('red');
         });
 
@@ -1905,17 +1905,17 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .replyWithError('requst-failed');
 
           expect(
-            await bitbucket.getBranchStatusCheck('somebranch', 'context-2')
+            await bitbucket.getBranchStatusCheck('somebranch', 'context-2'),
           ).toBeNull();
 
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -1923,7 +1923,7 @@ Followed by some information.
             });
 
           expect(
-            await bitbucket.getBranchStatusCheck('somebranch', 'context-2')
+            await bitbucket.getBranchStatusCheck('somebranch', 'context-2'),
           ).toBeNull();
         });
       });
@@ -1933,7 +1933,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .twice()
             .reply(200, {
@@ -1941,11 +1941,11 @@ Followed by some information.
               values: [{ key: 'context-1', state: 'SUCCESSFUL' }],
             })
             .post(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200)
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {});
 
@@ -1955,7 +1955,7 @@ Followed by some information.
               context: 'context-2',
               description: null as any,
               state: 'green',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1963,7 +1963,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .twice()
             .reply(200, {
@@ -1971,11 +1971,11 @@ Followed by some information.
               values: [{ key: 'context-1', state: 'SUCCESSFUL' }],
             })
             .post(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200)
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {});
 
@@ -1985,7 +1985,7 @@ Followed by some information.
               context: 'context-2',
               description: null as any,
               state: 'red',
-            })
+            }),
           ).toResolve();
         });
 
@@ -1993,7 +1993,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .twice()
             .reply(200, {
@@ -2001,11 +2001,11 @@ Followed by some information.
               values: [{ key: 'context-1', state: 'SUCCESSFUL' }],
             })
             .post(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200)
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {});
 
@@ -2015,7 +2015,7 @@ Followed by some information.
               context: 'context-2',
               description: null as any,
               state: 'red',
-            })
+            }),
           ).toResolve();
         });
 
@@ -2023,7 +2023,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .twice()
             .reply(200, {
@@ -2031,11 +2031,11 @@ Followed by some information.
               values: [{ key: 'context-1', state: 'SUCCESSFUL' }],
             })
             .post(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200)
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/stats/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .reply(200, {});
 
@@ -2045,7 +2045,7 @@ Followed by some information.
               context: 'context-2',
               description: null as any,
               state: 'yellow',
-            })
+            }),
           ).toResolve();
         });
 
@@ -2053,14 +2053,14 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .reply(200, {
               isLastPage: true,
               values: [{ key: 'context-1', state: 'SUCCESSFUL' }],
             })
             .post(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e`,
             )
             .replyWithError('requst-failed');
 
@@ -2070,7 +2070,7 @@ Followed by some information.
               context: 'context-2',
               description: null as any,
               state: 'green',
-            })
+            }),
           ).toResolve();
         });
 
@@ -2078,7 +2078,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`
+              `${urlPath}/rest/build-status/1.0/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e?limit=100`,
             )
             .reply(200, {
               isLastPage: true,
@@ -2091,7 +2091,7 @@ Followed by some information.
               context: 'context-1',
               description: null as any,
               state: 'green',
-            })
+            }),
           ).toResolve();
         });
       });
@@ -2102,7 +2102,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`,
             )
             .reply(200, {
               isLastPage: true,
@@ -2122,7 +2122,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json5?limit=20000`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json5?limit=20000`,
             )
             .reply(200, {
               isLastPage: true,
@@ -2137,7 +2137,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/DIFFERENT/repos/repo/browse/file.json?limit=20000`
+              `${urlPath}/rest/api/1.0/projects/DIFFERENT/repos/repo/browse/file.json?limit=20000`,
             )
             .reply(200, {
               isLastPage: true,
@@ -2145,7 +2145,7 @@ Followed by some information.
             });
           const res = await bitbucket.getJsonFile(
             'file.json',
-            'DIFFERENT/repo'
+            'DIFFERENT/repo',
           );
           expect(res).toEqual(data);
         });
@@ -2155,7 +2155,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000&at=dev`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000&at=dev`,
             )
             .reply(200, {
               isLastPage: true,
@@ -2164,7 +2164,7 @@ Followed by some information.
           const res = await bitbucket.getJsonFile(
             'file.json',
             'SOME/repo',
-            'dev'
+            'dev',
           );
           expect(res).toEqual(data);
         });
@@ -2173,7 +2173,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`,
             )
             .reply(200, {
               isLastPage: true,
@@ -2186,7 +2186,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`,
             )
             .reply(200, {
               isLastPage: false,
@@ -2199,7 +2199,7 @@ Followed by some information.
           const scope = await initRepo();
           scope
             .get(
-              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`
+              `${urlPath}/rest/api/1.0/projects/SOME/repos/repo/browse/file.json?limit=20000`,
             )
             .replyWithError('some error');
           await expect(bitbucket.getJsonFile('file.json')).rejects.toThrow();
diff --git a/lib/modules/platform/bitbucket-server/index.ts b/lib/modules/platform/bitbucket-server/index.ts
index eaea2c62f02e7e1bcfe18b734e58e86d33c86626..bd9a0be19c34c0a0d8f413c462d9b22b8c237379 100644
--- a/lib/modules/platform/bitbucket-server/index.ts
+++ b/lib/modules/platform/bitbucket-server/index.ts
@@ -89,7 +89,7 @@ export function initPlatform({
   }
   if (!(username && password)) {
     throw new Error(
-      'Init: You must configure a Bitbucket Server username/password'
+      'Init: You must configure a Bitbucket Server username/password',
     );
   }
   // TODO: Add a connection check that endpoint/username/password combination are valid (#9595)
@@ -106,11 +106,11 @@ export async function getRepos(): Promise<string[]> {
   logger.debug('Autodiscovering Bitbucket Server repositories');
   try {
     const repos = await utils.accumulateValues(
-      `./rest/api/1.0/repos?permission=REPO_WRITE&state=AVAILABLE`
+      `./rest/api/1.0/repos?permission=REPO_WRITE&state=AVAILABLE`,
     );
     const result = repos.map(
       (r: { project: { key: string }; slug: string }) =>
-        `${r.project.key}/${r.slug}`
+        `${r.project.key}/${r.slug}`,
     );
     logger.debug({ result }, 'result of getRepos()');
     return result;
@@ -123,7 +123,7 @@ export async function getRepos(): Promise<string[]> {
 export async function getRawFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<string | null> {
   const repo = repoName ?? config.repository;
   const [project, slug] = repo.split('/');
@@ -143,7 +143,7 @@ export async function getRawFile(
 export async function getJsonFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<any> {
   // TODO #22198
   const raw = await getRawFile(fileName, repoName, branchOrTag);
@@ -177,13 +177,13 @@ export async function initRepo({
   try {
     const info = (
       await bitbucketServerHttp.getJson<BbsRestRepo>(
-        `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}`
+        `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}`,
       )
     ).body;
     config.owner = info.project.key;
     logger.debug(`${repository} owner = ${config.owner}`);
     const branchRes = await bitbucketServerHttp.getJson<BbsRestBranch>(
-      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/branches/default`
+      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/branches/default`,
     );
 
     // 204 means empty, 404 means repo not found or missing default branch. repo must exist here.
@@ -197,7 +197,7 @@ export async function initRepo({
       defaults.endpoint!,
       gitUrl,
       info,
-      opts
+      opts,
     );
 
     await git.initRepo({
@@ -235,7 +235,7 @@ export async function getRepoForceRebase(): Promise<boolean> {
   const res = await bitbucketServerHttp.getJson<{
     mergeConfig: { defaultStrategy: { id: string } };
   }>(
-    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/settings/pull-requests`
+    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/settings/pull-requests`,
   );
 
   // If the default merge strategy contains `ff-only` the PR can only be merged
@@ -243,13 +243,13 @@ export async function getRepoForceRebase(): Promise<boolean> {
   // The current options for id are:
   // no-ff, ff, ff-only, rebase-no-ff, rebase-ff-only, squash, squash-ff-only
   return Boolean(
-    res.body?.mergeConfig?.defaultStrategy?.id.includes('ff-only')
+    res.body?.mergeConfig?.defaultStrategy?.id.includes('ff-only'),
   );
 }
 // Gets details for a PR
 export async function getPr(
   prNo: number,
-  refreshCache?: boolean
+  refreshCache?: boolean,
 ): Promise<BbsPr | null> {
   logger.debug(`getPr(${prNo})`);
   if (!prNo) {
@@ -258,7 +258,7 @@ export async function getPr(
 
   const res = await bitbucketServerHttp.getJson<BbsRestPr>(
     `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}`,
-    { memCache: !refreshCache }
+    { memCache: !refreshCache },
   );
 
   const pr: BbsPr = {
@@ -306,7 +306,7 @@ export async function getPrList(refreshCache?: boolean): Promise<Pr[]> {
     }
     const query = getQueryString(searchParams);
     const values = await utils.accumulateValues(
-      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${query}`
+      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests?${query}`,
     );
 
     config.prList = values.map(utils.prInfo);
@@ -356,7 +356,7 @@ export async function refreshPr(number: number): Promise<void> {
 
 async function getStatus(
   branchName: string,
-  memCache = true
+  memCache = true,
 ): Promise<utils.BitbucketCommitStatus> {
   const branchCommit = git.getBranchCommit(branchName);
 
@@ -364,7 +364,7 @@ async function getStatus(
     await bitbucketServerHttp.getJson<utils.BitbucketCommitStatus>(
       // TODO: types (#22198)
       `./rest/build-status/1.0/commits/stats/${branchCommit!}`,
-      { memCache }
+      { memCache },
     )
   ).body;
 }
@@ -373,7 +373,7 @@ async function getStatus(
 // umbrella for status checks
 // https://docs.atlassian.com/bitbucket-server/rest/6.0.0/bitbucket-build-rest.html#idp2
 export async function getBranchStatus(
-  branchName: string
+  branchName: string,
 ): Promise<BranchStatus> {
   logger.debug(`getBranchStatus(${branchName})`);
 
@@ -402,7 +402,7 @@ export async function getBranchStatus(
 
 function getStatusCheck(
   branchName: string,
-  memCache = true
+  memCache = true,
 ): Promise<utils.BitbucketStatus[]> {
   const branchCommit = git.getBranchCommit(branchName);
 
@@ -410,14 +410,14 @@ function getStatusCheck(
     // TODO: types (#22198)
     `./rest/build-status/1.0/commits/${branchCommit!}`,
     'get',
-    { memCache }
+    { memCache },
   );
 }
 
 // https://docs.atlassian.com/bitbucket-server/rest/6.0.0/bitbucket-build-rest.html#idp2
 export async function getBranchStatusCheck(
   branchName: string,
-  context: string
+  context: string,
 ): Promise<BranchStatus | null> {
   logger.debug(`getBranchStatusCheck(${branchName}, context=${context})`);
 
@@ -483,7 +483,7 @@ export async function setBranchStatus({
     await bitbucketServerHttp.postJson(
       // TODO: types (#22198)
       `./rest/build-status/1.0/commits/${branchCommit!}`,
-      { body }
+      { body },
     );
 
     // update status cache
@@ -554,7 +554,7 @@ export function addAssignees(iid: number, assignees: string[]): Promise<void> {
 
 export async function addReviewers(
   prNo: number,
-  reviewers: string[]
+  reviewers: string[],
 ): Promise<void> {
   logger.debug(`Adding reviewers '${reviewers.join(', ')}' to #${prNo}`);
 
@@ -577,7 +577,7 @@ export async function addReviewers(
             user: { name },
           })),
         },
-      }
+      },
     );
     await getPr(prNo, true);
   } catch (err) {
@@ -589,7 +589,7 @@ export async function addReviewers(
       !utils.isInvalidReviewersResponse(err)
     ) {
       logger.debug(
-        '409 response to adding reviewers - has repository changed?'
+        '409 response to adding reviewers - has repository changed?',
       );
       throw new Error(REPOSITORY_CHANGED);
     } else {
@@ -611,13 +611,13 @@ type Comment = { text: string; id: number };
 async function getComments(prNo: number): Promise<Comment[]> {
   // GET /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/activities
   let comments = await utils.accumulateValues(
-    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/activities`
+    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/activities`,
   );
 
   comments = comments
     .filter(
       (a: { action: string; commentAction: string }) =>
-        a.action === 'COMMENTED' && a.commentAction === 'ADDED'
+        a.action === 'COMMENTED' && a.commentAction === 'ADDED',
     )
     .map((a: { comment: Comment }) => a.comment);
 
@@ -632,18 +632,18 @@ async function addComment(prNo: number, text: string): Promise<void> {
     `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/comments`,
     {
       body: { text },
-    }
+    },
   );
 }
 
 async function getCommentVersion(
   prNo: number,
-  commentId: number
+  commentId: number,
 ): Promise<number> {
   // GET /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/comments/{commentId}
   const { version } = (
     await bitbucketServerHttp.getJson<{ version: number }>(
-      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/comments/${commentId}`
+      `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/comments/${commentId}`,
     )
   ).body;
 
@@ -653,7 +653,7 @@ async function getCommentVersion(
 async function editComment(
   prNo: number,
   commentId: number,
-  text: string
+  text: string,
 ): Promise<void> {
   const version = await getCommentVersion(prNo, commentId);
 
@@ -662,7 +662,7 @@ async function editComment(
     `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/comments/${commentId}`,
     {
       body: { text, version },
-    }
+    },
   );
 }
 
@@ -671,7 +671,7 @@ async function deleteComment(prNo: number, commentId: number): Promise<void> {
 
   // DELETE /rest/api/1.0/projects/{projectKey}/repos/{repositorySlug}/pull-requests/{pullRequestId}/comments/{commentId}
   await bitbucketServerHttp.deleteJson(
-    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/comments/${commentId}?version=${version}`
+    `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}/comments/${commentId}?version=${version}`,
   );
 }
 
@@ -709,13 +709,13 @@ export async function ensureComment({
       await addComment(number, body);
       logger.info(
         { repository: config.repository, prNo: number, topic },
-        'Comment added'
+        'Comment added',
       );
     } else if (commentNeedsUpdating) {
       await editComment(number, commentId, body);
       logger.debug(
         { repository: config.repository, prNo: number },
-        'Comment updated'
+        'Comment updated',
       );
     } else {
       logger.debug('Comment is already update-to-date');
@@ -728,7 +728,7 @@ export async function ensureComment({
 }
 
 export async function ensureCommentRemoval(
-  deleteConfig: EnsureCommentRemovalConfig
+  deleteConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   try {
     const { number: prNo } = deleteConfig;
@@ -780,7 +780,7 @@ export async function createPr({
     logger.debug(`fetching default reviewers`);
     const { id } = (
       await bitbucketServerHttp.getJson<{ id: number }>(
-        `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}`
+        `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}`,
       )
     ).body;
 
@@ -789,8 +789,8 @@ export async function createPr({
         `./rest/default-reviewers/1.0/projects/${config.projectKey}/repos/${
           config.repositorySlug
         }/reviewers?sourceRefId=refs/heads/${escapeHash(
-          sourceBranch
-        )}&targetRefId=refs/heads/${base}&sourceRepoId=${id}&targetRepoId=${id}`
+          sourceBranch,
+        )}&targetRefId=refs/heads/${base}&sourceRepoId=${id}&targetRepoId=${id}`,
       )
     ).body;
 
@@ -814,7 +814,7 @@ export async function createPr({
   try {
     prInfoRes = await bitbucketServerHttp.postJson<BbsRestPr>(
       `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests`,
-      { body }
+      { body },
     );
   } catch (err) /* istanbul ignore next */ {
     if (
@@ -822,7 +822,7 @@ export async function createPr({
       'com.atlassian.bitbucket.pull.EmptyPullRequestException'
     ) {
       logger.debug(
-        'Empty pull request - deleting branch so it can be recreated next run'
+        'Empty pull request - deleting branch so it can be recreated next run',
       );
       await deleteBranch(sourceBranch);
       throw new Error(REPOSITORY_CHANGED);
@@ -883,7 +883,7 @@ export async function updatePr({
       state: string;
     }>(
       `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${prNo}`,
-      { body }
+      { body },
     );
 
     updatePrVersion(prNo, updatedPr.version);
@@ -904,7 +904,7 @@ export async function updatePr({
       const { body: updatedStatePr } = await bitbucketServerHttp.postJson<{
         version: number;
       }>(
-        `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${pr.number}/${command}?version=${updatedPr.version}`
+        `./rest/api/1.0/projects/${config.projectKey}/repos/${config.repositorySlug}/pull-requests/${pr.number}/${command}?version=${updatedPr.version}`,
       );
 
       updatePrVersion(pr.number, updatedStatePr.version);
@@ -949,7 +949,7 @@ export async function mergePr({
       // TODO: types (#22198)
       `./rest/api/1.0/projects/${config.projectKey}/repos/${
         config.repositorySlug
-      }/pull-requests/${prNo}/merge?version=${pr.version!}`
+      }/pull-requests/${prNo}/merge?version=${pr.version!}`,
     );
     updatePrVersion(prNo, body.version);
   } catch (err) {
@@ -974,11 +974,11 @@ export function massageMarkdown(input: string): string {
   return smartTruncate(input, 30000)
     .replace(
       'you tick the rebase/retry checkbox',
-      'rename PR to start with "rebase!"'
+      'rename PR to start with "rebase!"',
     )
     .replace(
       'checking the rebase/retry box above',
-      'renaming the PR to start with "rebase!"'
+      'renaming the PR to start with "rebase!"',
     )
     .replace(regEx(/<\/?summary>/g), '**')
     .replace(regEx(/<\/?details>/g), '')
diff --git a/lib/modules/platform/bitbucket-server/utils.spec.ts b/lib/modules/platform/bitbucket-server/utils.spec.ts
index 55154a87749effcffee0ee6d53a065da08d480b4..24c6c54cbb7228b472e0e74902f30849ae7454de 100644
--- a/lib/modules/platform/bitbucket-server/utils.spec.ts
+++ b/lib/modules/platform/bitbucket-server/utils.spec.ts
@@ -19,7 +19,7 @@ function sshLink(projectKey: string, repositorySlug: string): string {
 function httpLink(
   endpointStr: string,
   projectKey: string,
-  repositorySlug: string
+  repositorySlug: string,
 ): string {
   return `${endpointStr}scm/${projectKey}/${repositorySlug}.git`;
 }
@@ -30,7 +30,7 @@ function infoMock(
   repositorySlug: string,
   options: { cloneUrl: { https: boolean; ssh: boolean } } = {
     cloneUrl: { https: true, ssh: true },
-  }
+  },
 ): BbsRestRepo {
   const endpointStr = endpoint.toString();
   const links: {
@@ -79,7 +79,7 @@ function infoMock(
 
 describe('modules/platform/bitbucket-server/utils', () => {
   function createError(
-    body: Partial<BitbucketErrorResponse> | undefined = undefined
+    body: Partial<BitbucketErrorResponse> | undefined = undefined,
   ) {
     return partial<BitbucketError>({
       response: partial<Response<BitbucketErrorResponse>>({ body }),
@@ -96,16 +96,16 @@ describe('modules/platform/bitbucket-server/utils', () => {
               reviewerErrors: [{ context: 'dummy' }, {}],
             },
           ],
-        })
-      )
+        }),
+      ),
     ).toStrictEqual(['dummy']);
     expect(getInvalidReviewers(createError())).toStrictEqual([]);
     expect(
       getInvalidReviewers(
         createError({
           errors: [{ exceptionName: BITBUCKET_INVALID_REVIEWERS_EXCEPTION }],
-        })
-      )
+        }),
+      ),
     ).toStrictEqual([]);
   });
 
@@ -133,13 +133,13 @@ describe('modules/platform/bitbucket-server/utils', () => {
               infoMock(url, 'SOME', 'repo', {
                 cloneUrl: { https: false, ssh: false },
               }),
-              opts
-            )
+              opts,
+            ),
           ).toBe(
             httpLink(url.toString(), 'SOME', 'repo').replace(
               'https://',
-              `https://${username}:${password}@`
-            )
+              `https://${username}:${password}@`,
+            ),
           );
         });
 
@@ -152,13 +152,13 @@ describe('modules/platform/bitbucket-server/utils', () => {
               infoMock(url, 'SOME', 'repo', {
                 cloneUrl: { https: true, ssh: false },
               }),
-              opts
-            )
+              opts,
+            ),
           ).toBe(
             httpLink(url.toString(), 'SOME', 'repo').replace(
               'https://',
-              `https://${username}:${password}@`
-            )
+              `https://${username}:${password}@`,
+            ),
           );
         });
 
@@ -171,8 +171,8 @@ describe('modules/platform/bitbucket-server/utils', () => {
               infoMock(url, 'SOME', 'repo', {
                 cloneUrl: { https: false, ssh: true },
               }),
-              opts
-            )
+              opts,
+            ),
           ).toBe(sshLink('SOME', 'repo'));
         });
 
@@ -183,13 +183,13 @@ describe('modules/platform/bitbucket-server/utils', () => {
               url.toString(),
               'default',
               infoMock(url, 'SOME', 'repo'),
-              opts
-            )
+              opts,
+            ),
           ).toBe(
             httpLink(url.toString(), 'SOME', 'repo').replace(
               'https://',
-              `https://${username}:${password}@`
-            )
+              `https://${username}:${password}@`,
+            ),
           );
         });
 
@@ -202,8 +202,8 @@ describe('modules/platform/bitbucket-server/utils', () => {
               infoMock('invalidUrl', 'SOME', 'repo', {
                 cloneUrl: { https: true, ssh: false },
               }),
-              opts
-            )
+              opts,
+            ),
           ).toThrow(Error(CONFIG_GIT_URL_UNAVAILABLE));
         });
 
@@ -216,13 +216,13 @@ describe('modules/platform/bitbucket-server/utils', () => {
               infoMock(url, 'SOME', 'repo', {
                 cloneUrl: { https: false, ssh: false },
               }),
-              opts
-            )
+              opts,
+            ),
           ).toBe(
             httpLink(url.toString(), 'SOME', 'repo').replace(
               'https://',
-              `https://${username}:${password}@`
-            )
+              `https://${username}:${password}@`,
+            ),
           );
         });
 
@@ -235,8 +235,8 @@ describe('modules/platform/bitbucket-server/utils', () => {
               infoMock(url, 'SOME', 'repo', {
                 cloneUrl: { https: false, ssh: false },
               }),
-              opts
-            )
+              opts,
+            ),
           ).toThrow(Error(CONFIG_GIT_URL_UNAVAILABLE));
         });
 
@@ -247,8 +247,8 @@ describe('modules/platform/bitbucket-server/utils', () => {
               url.toString(),
               'ssh',
               infoMock(url, 'SOME', 'repo'),
-              opts
-            )
+              opts,
+            ),
           ).toBe(sshLink('SOME', 'repo'));
         });
 
@@ -259,13 +259,13 @@ describe('modules/platform/bitbucket-server/utils', () => {
               url.toString(),
               'endpoint',
               infoMock(url, 'SOME', 'repo'),
-              opts
-            )
+              opts,
+            ),
           ).toBe(
             httpLink(url.toString(), 'SOME', 'repo').replace(
               'https://',
-              `https://${username}:${password}@`
-            )
+              `https://${username}:${password}@`,
+            ),
           );
         });
       });
diff --git a/lib/modules/platform/bitbucket-server/utils.ts b/lib/modules/platform/bitbucket-server/utils.ts
index c083785d13bc2ffe1f86d0a091ee984f63ae54ee..882451953cbc90150d0598b86670c532ebb5a9c4 100644
--- a/lib/modules/platform/bitbucket-server/utils.ts
+++ b/lib/modules/platform/bitbucket-server/utils.ts
@@ -50,7 +50,7 @@ const addMaxLength = (inputUrl: string, limit = 100): string => {
 function callApi<T>(
   apiUrl: string,
   method: string,
-  options?: HttpOptions
+  options?: HttpOptions,
 ): Promise<HttpResponse<T>> {
   /* istanbul ignore next */
   switch (method.toLowerCase()) {
@@ -76,7 +76,7 @@ export async function accumulateValues<T = any>(
   reqUrl: string,
   method = 'get',
   options?: HttpOptions,
-  limit?: number
+  limit?: number,
 ): Promise<T[]> {
   let accumulator: T[] = [];
   let nextUrl = addMaxLength(reqUrl, limit);
@@ -128,7 +128,7 @@ export function isInvalidReviewersResponse(err: BitbucketError): boolean {
   return (
     errors.length > 0 &&
     errors.every(
-      (error) => error.exceptionName === BITBUCKET_INVALID_REVIEWERS_EXCEPTION
+      (error) => error.exceptionName === BITBUCKET_INVALID_REVIEWERS_EXCEPTION,
     )
   );
 }
@@ -141,7 +141,7 @@ export function getInvalidReviewers(err: BitbucketError): string[] {
       invalidReviewers = invalidReviewers.concat(
         error.reviewerErrors
           ?.map(({ context }) => context)
-          .filter(is.nonEmptyString) ?? []
+          .filter(is.nonEmptyString) ?? [],
       );
     }
   }
@@ -152,7 +152,7 @@ export function getInvalidReviewers(err: BitbucketError): string[] {
 function generateUrlFromEndpoint(
   defaultEndpoint: string,
   opts: HostRule,
-  repository: string
+  repository: string,
 ): string {
   const url = new URL(defaultEndpoint);
   const generatedUrl = git.getUrl({
@@ -185,7 +185,7 @@ export function getRepoGitUrl(
   defaultEndpoint: string,
   gitUrl: GitUrlOption | undefined,
   info: BbsRestRepo,
-  opts: HostRule
+  opts: HostRule,
 ): string {
   if (gitUrl === 'ssh') {
     const sshUrl = info.links.clone?.find(({ name }) => name === 'ssh');
diff --git a/lib/modules/platform/bitbucket/comments.spec.ts b/lib/modules/platform/bitbucket/comments.spec.ts
index 7b6ed8f0cd345c43e3161f7f1f9f3b0eeed24765..6e9dd0ea0cb13ed4895f6e720bc15f7e96c68c75 100644
--- a/lib/modules/platform/bitbucket/comments.spec.ts
+++ b/lib/modules/platform/bitbucket/comments.spec.ts
@@ -24,7 +24,7 @@ describe('modules/platform/bitbucket/comments', () => {
           number: 3,
           topic: 'topic',
           content: 'content',
-        })
+        }),
       ).toBeFalse();
     });
 
@@ -43,7 +43,7 @@ describe('modules/platform/bitbucket/comments', () => {
           number: 5,
           topic: 'topic',
           content: 'content',
-        })
+        }),
       ).toBeTrue();
     });
 
@@ -136,7 +136,7 @@ describe('modules/platform/bitbucket/comments', () => {
           number: 5,
           topic: null,
           content: 'blablabla',
-        })
+        }),
       ).toBeTrue();
     });
   });
@@ -153,7 +153,7 @@ describe('modules/platform/bitbucket/comments', () => {
           type: 'by-topic',
           number: 5,
           topic: 'topic',
-        })
+        }),
       ).toResolve();
     });
 
@@ -178,7 +178,7 @@ describe('modules/platform/bitbucket/comments', () => {
           type: 'by-topic',
           number: 5,
           topic: 'some-subject',
-        })
+        }),
       ).toResolve();
     });
 
@@ -203,7 +203,7 @@ describe('modules/platform/bitbucket/comments', () => {
           type: 'by-content',
           number: 5,
           content: 'some-content',
-        })
+        }),
       ).toResolve();
     });
 
@@ -219,7 +219,7 @@ describe('modules/platform/bitbucket/comments', () => {
           type: 'by-content',
           number: 5,
           content: 'topic',
-        })
+        }),
       ).toResolve();
     });
   });
diff --git a/lib/modules/platform/bitbucket/comments.ts b/lib/modules/platform/bitbucket/comments.ts
index f601701d891f3738f7c0394d0abd8692aae5ec49..2d91964a30d53f5ed3304aa9c4df3a0f78e1d6fc 100644
--- a/lib/modules/platform/bitbucket/comments.ts
+++ b/lib/modules/platform/bitbucket/comments.ts
@@ -21,14 +21,14 @@ interface EnsureBitbucketCommentConfig extends EnsureCommentConfig {
 
 async function getComments(
   config: CommentsConfig,
-  prNo: number
+  prNo: number,
 ): Promise<Comment[]> {
   const comments = (
     await bitbucketHttp.getJson<PagedResult<Comment>>(
       `/2.0/repositories/${config.repository}/pullrequests/${prNo}/comments`,
       {
         paginate: true,
-      }
+      },
     )
   ).body.values;
 
@@ -39,13 +39,13 @@ async function getComments(
 async function addComment(
   config: CommentsConfig,
   prNo: number,
-  raw: string
+  raw: string,
 ): Promise<void> {
   await bitbucketHttp.postJson(
     `/2.0/repositories/${config.repository}/pullrequests/${prNo}/comments`,
     {
       body: { content: { raw } },
-    }
+    },
   );
 }
 
@@ -53,23 +53,23 @@ async function editComment(
   config: CommentsConfig,
   prNo: number,
   commentId: number,
-  raw: string
+  raw: string,
 ): Promise<void> {
   await bitbucketHttp.putJson(
     `/2.0/repositories/${config.repository}/pullrequests/${prNo}/comments/${commentId}`,
     {
       body: { content: { raw } },
-    }
+    },
   );
 }
 
 async function deleteComment(
   config: CommentsConfig,
   prNo: number,
-  commentId: number
+  commentId: number,
 ): Promise<void> {
   await bitbucketHttp.deleteJson(
-    `/2.0/repositories/${config.repository}/pullrequests/${prNo}/comments/${commentId}`
+    `/2.0/repositories/${config.repository}/pullrequests/${prNo}/comments/${commentId}`,
   );
 }
 
@@ -111,7 +111,7 @@ export async function ensureComment({
       await addComment(config, prNo, body);
       logger.info(
         { repository: config.repository, prNo, topic },
-        'Comment added'
+        'Comment added',
       );
     } else if (commentNeedsUpdating) {
       await editComment(config, prNo, commentId, body);
@@ -128,12 +128,12 @@ export async function ensureComment({
 
 export async function reopenComments(
   config: CommentsConfig,
-  prNo: number
+  prNo: number,
 ): Promise<Comment[]> {
   const comments = await getComments(config, prNo);
 
   const reopenComments = comments.filter((comment) =>
-    comment.content.raw.startsWith(REOPEN_PR_COMMENT_KEYWORD)
+    comment.content.raw.startsWith(REOPEN_PR_COMMENT_KEYWORD),
   );
 
   return reopenComments;
@@ -141,7 +141,7 @@ export async function reopenComments(
 
 export async function ensureCommentRemoval(
   config: CommentsConfig,
-  deleteConfig: EnsureCommentRemovalConfig
+  deleteConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   try {
     const { number: prNo } = deleteConfig;
@@ -176,10 +176,10 @@ function sanitizeCommentBody(body: string): string {
   return body
     .replace(
       'checking the rebase/retry box above',
-      'renaming this PR to start with "rebase!"'
+      'renaming this PR to start with "rebase!"',
     )
     .replace(
       'rename this PR to get a fresh replacement',
-      'add a comment starting with "reopen!" to get a fresh replacement'
+      'add a comment starting with "reopen!" to get a fresh replacement',
     );
 }
diff --git a/lib/modules/platform/bitbucket/index.spec.ts b/lib/modules/platform/bitbucket/index.spec.ts
index 5070c067247f2c83e5cfe7591e20f00916af6d12..8a6695384d53efe3cb104e90cae7f39ad640b350 100644
--- a/lib/modules/platform/bitbucket/index.spec.ts
+++ b/lib/modules/platform/bitbucket/index.spec.ts
@@ -47,7 +47,7 @@ describe('modules/platform/bitbucket/index', () => {
   async function initRepoMock(
     config?: Partial<RepoParams>,
     repoResp?: any,
-    existingScope?: httpMock.Scope
+    existingScope?: httpMock.Scope,
   ): Promise<httpMock.Scope> {
     const repository = config?.repository ?? 'some/repo';
 
@@ -80,7 +80,7 @@ describe('modules/platform/bitbucket/index', () => {
         password: '123',
       });
       expect(logger.warn).toHaveBeenCalledWith(
-        'Init: Bitbucket Cloud endpoint should generally be https://api.bitbucket.org/ but is being configured to a different value. Did you mean to use Bitbucket Server?'
+        'Init: Bitbucket Cloud endpoint should generally be https://api.bitbucket.org/ but is being configured to a different value. Did you mean to use Bitbucket Server?',
       );
     });
 
@@ -94,7 +94,7 @@ describe('modules/platform/bitbucket/index', () => {
           endpoint: baseUrl,
           username: 'abc',
           password: '123',
-        })
+        }),
       ).toEqual(expectedResult);
     });
 
@@ -107,7 +107,7 @@ describe('modules/platform/bitbucket/index', () => {
         await bitbucket.initPlatform({
           endpoint: baseUrl,
           token: 'abc',
-        })
+        }),
       ).toEqual(expectedResult);
     });
 
@@ -118,7 +118,7 @@ describe('modules/platform/bitbucket/index', () => {
         .reply(403, { error: { detail: { required: ['account'] } } });
       await bitbucket.initPlatform({ username: 'renovate', password: 'pass' });
       expect(logger.warn).toHaveBeenCalledWith(
-        `Bitbucket: missing 'account' scope for password`
+        `Bitbucket: missing 'account' scope for password`,
       );
     });
   });
@@ -145,7 +145,7 @@ describe('modules/platform/bitbucket/index', () => {
       expect(
         await bitbucket.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -161,7 +161,7 @@ describe('modules/platform/bitbucket/index', () => {
       expect(
         await bitbucket.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).toEqual({
         defaultBranch: 'master',
         isFork: false,
@@ -235,7 +235,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, { values: [pr] })
         .get('/2.0/repositories/some/repo/pullrequests/5')
@@ -248,7 +248,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, { values: [pr] });
 
@@ -267,7 +267,7 @@ describe('modules/platform/bitbucket/index', () => {
           target: { hash: 'master_hash' },
         })
         .get(
-          '/2.0/repositories/some/repo/commit/master_hash/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/master_hash/statuses?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -292,7 +292,7 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/repositories/some/repo/commit/branch_hash/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/branch_hash/statuses?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -317,7 +317,7 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/repositories/some/repo/commit/pending/branch_hash/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/pending/branch_hash/statuses?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -328,7 +328,7 @@ describe('modules/platform/bitbucket/index', () => {
           ],
         });
       expect(await bitbucket.getBranchStatus('pending/branch', true)).toBe(
-        'yellow'
+        'yellow',
       );
     });
 
@@ -336,7 +336,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/refs/branches/branch-with-empty-status'
+          '/2.0/repositories/some/repo/refs/branches/branch-with-empty-status',
         )
         .reply(200, {
           name: 'branch-with-empty-status',
@@ -346,13 +346,13 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/repositories/some/repo/commit/branch-with-empty-status/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/branch-with-empty-status/statuses?pagelen=100',
         )
         .reply(200, {
           values: [],
         });
       expect(
-        await bitbucket.getBranchStatus('branch-with-empty-status', true)
+        await bitbucket.getBranchStatus('branch-with-empty-status', true),
       ).toBe('yellow');
     });
 
@@ -368,7 +368,7 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/repositories/some/repo/commit/branch_hash/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/branch_hash/statuses?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -392,7 +392,7 @@ describe('modules/platform/bitbucket/index', () => {
           target: { hash: 'master_hash' },
         })
         .get(
-          '/2.0/repositories/some/repo/commit/master_hash/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/master_hash/statuses?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -433,7 +433,7 @@ describe('modules/platform/bitbucket/index', () => {
         .post('/2.0/repositories/some/repo/commit/branch_hash/statuses/build')
         .reply(200)
         .get(
-          '/2.0/repositories/some/repo/commit/branch_hash/statuses?pagelen=100'
+          '/2.0/repositories/some/repo/commit/branch_hash/statuses?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -450,7 +450,7 @@ describe('modules/platform/bitbucket/index', () => {
           description: 'description',
           state: 'red',
           url: 'targetUrl',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -462,7 +462,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { has_issues: true });
       scope
         .get(
-          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)%20AND%20reporter.uuid%3D%2212345%22'
+          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)%20AND%20reporter.uuid%3D%2212345%22',
         )
         .reply(200, {
           values: [
@@ -486,11 +486,11 @@ describe('modules/platform/bitbucket/index', () => {
         {
           repository: 'some/empty',
         },
-        { has_issues: true }
+        { has_issues: true },
       );
       scope
         .get(
-          '/2.0/repositories/some/empty/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)'
+          '/2.0/repositories/some/empty/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)',
         )
         .reply(200, {
           values: [],
@@ -504,7 +504,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { has_issues: true });
       scope
         .get(
-          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)'
+          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)',
         )
         .reply(200, {
           values: [
@@ -525,22 +525,22 @@ describe('modules/platform/bitbucket/index', () => {
         .put('/2.0/repositories/some/repo/issues/26')
         .reply(200);
       expect(
-        await bitbucket.ensureIssue({ title: 'title', body: 'body' })
+        await bitbucket.ensureIssue({ title: 'title', body: 'body' }),
       ).toBe('updated');
     });
 
     it('creates new issue', async () => {
       const scope = await initRepoMock(
         { repository: 'some/empty' },
-        { has_issues: true }
+        { has_issues: true },
       );
       scope
         .get(
-          '/2.0/repositories/some/empty/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)'
+          '/2.0/repositories/some/empty/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)',
         )
         .reply(200, { values: [] })
         .get(
-          '/2.0/repositories/some/empty/issues?q=title%3D%22old-title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)'
+          '/2.0/repositories/some/empty/issues?q=title%3D%22old-title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)',
         )
         .reply(200, { values: [] })
         .post('/2.0/repositories/some/empty/issues')
@@ -550,7 +550,7 @@ describe('modules/platform/bitbucket/index', () => {
           title: 'title',
           reuseTitle: 'old-title',
           body: 'body',
-        })
+        }),
       ).toBe('created');
     });
 
@@ -558,7 +558,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { has_issues: true });
       scope
         .get(
-          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)'
+          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)',
         )
         .reply(200, {
           values: [
@@ -580,7 +580,7 @@ describe('modules/platform/bitbucket/index', () => {
         await bitbucket.ensureIssue({
           title: 'title',
           body: '\n content \n',
-        })
+        }),
       ).toBeNull();
     });
   });
@@ -595,7 +595,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { has_issues: true });
       scope
         .get(
-          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)'
+          '/2.0/repositories/some/repo/issues?q=title%3D%22title%22%20AND%20(state%20%3D%20%22new%22%20OR%20state%20%3D%20%22open%22)',
         )
         .reply(200, {
           values: [
@@ -683,7 +683,7 @@ describe('modules/platform/bitbucket/index', () => {
         .put('/2.0/repositories/some/repo/pullrequests/5')
         .reply(200);
       await expect(
-        bitbucket.addReviewers(5, ['someuser', 'someotheruser'])
+        bitbucket.addReviewers(5, ['someuser', 'someotheruser']),
       ).toResolve();
     });
 
@@ -704,7 +704,7 @@ describe('modules/platform/bitbucket/index', () => {
         bitbucket.addReviewers(5, [
           'someuser',
           '{90b6646d-1724-4a64-9fd9-539515fe94e9}',
-        ])
+        ]),
       ).toResolve();
     });
   });
@@ -720,7 +720,7 @@ describe('modules/platform/bitbucket/index', () => {
           number: 3,
           topic: 'topic',
           content: 'content',
-        })
+        }),
       ).toMatchSnapshot();
     });
   });
@@ -736,7 +736,7 @@ describe('modules/platform/bitbucket/index', () => {
           type: 'by-topic',
           number: 3,
           topic: 'topic',
-        })
+        }),
       ).toMatchSnapshot();
     });
   });
@@ -753,7 +753,7 @@ describe('modules/platform/bitbucket/index', () => {
       await initRepoMock(undefined, null, scope);
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&q=author.uuid="12345"&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&q=author.uuid="12345"&pagelen=50',
         )
         .reply(200, {
           values: [
@@ -779,14 +779,14 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, { values: [pr] });
       expect(
         await bitbucket.findPr({
           branchName: 'branch',
           prTitle: 'title',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -805,7 +805,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, {
           values: [
@@ -843,7 +843,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { is_private: true });
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, {
           values: [
@@ -883,7 +883,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { is_private: false });
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, {
           values: [
@@ -899,7 +899,7 @@ describe('modules/platform/bitbucket/index', () => {
         .get('/2.0/repositories/some/repo/pullrequests/5/comments?pagelen=100')
         .reply(200, { values: [prComment] })
         .get(
-          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D'
+          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D',
         )
         .reply(200, { values: [workspaceMember] });
 
@@ -927,7 +927,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock({}, { is_private: false });
       scope
         .get(
-          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50'
+          '/2.0/repositories/some/repo/pullrequests?state=OPEN&state=MERGED&state=DECLINED&state=SUPERSEDED&pagelen=50',
         )
         .reply(200, {
           values: [
@@ -943,7 +943,7 @@ describe('modules/platform/bitbucket/index', () => {
         .get('/2.0/repositories/some/repo/pullrequests/5/comments?pagelen=100')
         .reply(200, { values: [prComment] })
         .get(
-          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D'
+          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D',
         )
         .reply(404);
 
@@ -978,7 +978,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: [projectReviewer, repoReviewer],
@@ -1022,7 +1022,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: [
@@ -1046,7 +1046,7 @@ describe('modules/platform/bitbucket/index', () => {
           account_status: 'active',
         })
         .get(
-          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D'
+          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D',
         )
         .reply(200)
         .get('/2.0/users/%7Ba10e0228-ad84-11ed-afa1-0242ac120002%7D')
@@ -1054,7 +1054,7 @@ describe('modules/platform/bitbucket/index', () => {
           account_status: 'active',
         })
         .get(
-          '/2.0/workspaces/some/members/%7Ba10e0228-ad84-11ed-afa1-0242ac120002%7D'
+          '/2.0/workspaces/some/members/%7Ba10e0228-ad84-11ed-afa1-0242ac120002%7D',
         )
         .reply(404)
         .get('/2.0/users/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D')
@@ -1093,7 +1093,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: [memberReviewer, notMemberReviewer],
@@ -1112,11 +1112,11 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D'
+          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D',
         )
         .reply(404)
         .get(
-          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D'
+          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D',
         )
         .reply(200)
         .post('/2.0/repositories/some/repo/pullrequests')
@@ -1144,7 +1144,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: [reviewer],
@@ -1163,7 +1163,7 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D'
+          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D',
         )
         .reply(401);
       await expect(() =>
@@ -1175,7 +1175,7 @@ describe('modules/platform/bitbucket/index', () => {
           platformOptions: {
             bbUseDefaultReviewers: true,
           },
-        })
+        }),
       ).rejects.toThrow(new Error('Response code 401 (Unauthorized)'));
     });
 
@@ -1199,7 +1199,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: reviewers,
@@ -1240,7 +1240,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: [reviewer],
@@ -1264,7 +1264,7 @@ describe('modules/platform/bitbucket/index', () => {
           platformOptions: {
             bbUseDefaultReviewers: true,
           },
-        })
+        }),
       ).rejects.toThrow(new Error('Response code 400 (Bad Request)'));
     });
 
@@ -1279,7 +1279,7 @@ describe('modules/platform/bitbucket/index', () => {
       const scope = await initRepoMock();
       scope
         .get(
-          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100'
+          '/2.0/repositories/some/repo/effective-default-reviewers?pagelen=100',
         )
         .reply(200, {
           values: [reviewer],
@@ -1303,7 +1303,7 @@ describe('modules/platform/bitbucket/index', () => {
           platformOptions: {
             bbUseDefaultReviewers: true,
           },
-        })
+        }),
       ).rejects.toThrow(new Error('Response code 400 (Bad Request)'));
     });
   });
@@ -1394,7 +1394,7 @@ describe('modules/platform/bitbucket/index', () => {
           prTitle: 'title',
           prBody: 'body',
           targetBranch: 'new_base',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1439,7 +1439,7 @@ describe('modules/platform/bitbucket/index', () => {
           account_status: 'active',
         })
         .get(
-          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D'
+          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D',
         )
         .reply(200)
         .get('/2.0/users/%7Ba10e0228-ad84-11ed-afa1-0242ac120002%7D')
@@ -1447,7 +1447,7 @@ describe('modules/platform/bitbucket/index', () => {
           account_status: 'active',
         })
         .get(
-          '/2.0/workspaces/some/members/%7Ba10e0228-ad84-11ed-afa1-0242ac120002%7D'
+          '/2.0/workspaces/some/members/%7Ba10e0228-ad84-11ed-afa1-0242ac120002%7D',
         )
         .reply(404)
         .get('/2.0/users/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D')
@@ -1457,7 +1457,7 @@ describe('modules/platform/bitbucket/index', () => {
         .put('/2.0/repositories/some/repo/pullrequests/5')
         .reply(200);
       await expect(
-        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
       ).toResolve();
     });
 
@@ -1490,18 +1490,18 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D'
+          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D',
         )
         .reply(404)
         .get(
-          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D'
+          '/2.0/workspaces/some/members/%7B90b6646d-1724-4a64-9fd9-539515fe94e9%7D',
         )
         .reply(200)
         .put('/2.0/repositories/some/repo/pullrequests/5')
         .reply(200);
 
       await expect(
-        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
       ).toResolve();
     });
 
@@ -1529,11 +1529,11 @@ describe('modules/platform/bitbucket/index', () => {
           },
         })
         .get(
-          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D'
+          '/2.0/workspaces/some/members/%7Bd2238482-2e9f-48b3-8630-de22ccb9e42f%7D',
         )
         .reply(401);
       await expect(() =>
-        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
       ).rejects.toThrow(new Error('Response code 401 (Unauthorized)'));
     });
 
@@ -1558,7 +1558,7 @@ describe('modules/platform/bitbucket/index', () => {
           },
         });
       await expect(() =>
-        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
       ).rejects.toThrowErrorMatchingSnapshot();
     });
 
@@ -1583,7 +1583,7 @@ describe('modules/platform/bitbucket/index', () => {
           },
         });
       await expect(() =>
-        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
       ).rejects.toThrow(new Error('Response code 400 (Bad Request)'));
     });
 
@@ -1593,7 +1593,7 @@ describe('modules/platform/bitbucket/index', () => {
         .get('/2.0/repositories/some/repo/pullrequests/5')
         .reply(500, undefined);
       await expect(() =>
-        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' })
+        bitbucket.updatePr({ number: 5, prTitle: 'title', prBody: 'body' }),
       ).rejects.toThrowErrorMatchingSnapshot();
     });
 
@@ -1612,7 +1612,7 @@ describe('modules/platform/bitbucket/index', () => {
           number: pr.id,
           prTitle: pr.title,
           state: 'closed',
-        })
+        }),
       ).toBeUndefined();
     });
   });
@@ -1625,7 +1625,7 @@ describe('modules/platform/bitbucket/index', () => {
         await bitbucket.mergePr({
           branchName: 'branch',
           id: 5,
-        })
+        }),
       ).toBeTrue();
     });
 
@@ -1637,7 +1637,7 @@ describe('modules/platform/bitbucket/index', () => {
           branchName: 'branch',
           id: 5,
           strategy: 'auto',
-        })
+        }),
       ).toBeTrue();
     });
 
@@ -1649,7 +1649,7 @@ describe('modules/platform/bitbucket/index', () => {
           branchName: 'branch',
           id: 5,
           strategy: 'merge-commit',
-        })
+        }),
       ).toBeTrue();
     });
 
@@ -1661,7 +1661,7 @@ describe('modules/platform/bitbucket/index', () => {
           branchName: 'branch',
           id: 5,
           strategy: 'squash',
-        })
+        }),
       ).toBe(true);
     });
 
@@ -1682,7 +1682,7 @@ describe('modules/platform/bitbucket/index', () => {
           branchName: 'branch',
           id: 5,
           strategy: 'fast-forward',
-        })
+        }),
       ).toBeTrue();
     });
   });
@@ -1745,7 +1745,7 @@ describe('modules/platform/bitbucket/index', () => {
       const res = await bitbucket.getJsonFile(
         'file.json',
         'some/repo',
-        'feat/123-test'
+        'feat/123-test',
       );
       expect(res).toEqual(data);
     });
diff --git a/lib/modules/platform/bitbucket/index.ts b/lib/modules/platform/bitbucket/index.ts
index b1b918ee20ed12a9fa5640e45af94860c24d384d..9f30b6e2aef8ff1e2ecdd2b93f17104d39f3dcba 100644
--- a/lib/modules/platform/bitbucket/index.ts
+++ b/lib/modules/platform/bitbucket/index.ts
@@ -68,12 +68,12 @@ export async function initPlatform({
 }: PlatformParams): Promise<PlatformResult> {
   if (!(username && password) && !token) {
     throw new Error(
-      'Init: You must configure either a Bitbucket token or username and password'
+      'Init: You must configure either a Bitbucket token or username and password',
     );
   }
   if (endpoint && endpoint !== BITBUCKET_PROD_ENDPOINT) {
     logger.warn(
-      `Init: Bitbucket Cloud endpoint should generally be ${BITBUCKET_PROD_ENDPOINT} but is being configured to a different value. Did you mean to use Bitbucket Server?`
+      `Init: Bitbucket Cloud endpoint should generally be ${BITBUCKET_PROD_ENDPOINT} but is being configured to a different value. Did you mean to use Bitbucket Server?`,
     );
     defaults.endpoint = endpoint;
   }
@@ -119,7 +119,7 @@ export async function getRepos(): Promise<string[]> {
         `/2.0/repositories/?role=contributor`,
         {
           paginate: true,
-        }
+        },
       )
     ).body.values;
     return repos.map((repo) => repo.full_name);
@@ -132,7 +132,7 @@ export async function getRepos(): Promise<string[]> {
 export async function getRawFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<string | null> {
   // See: https://developer.atlassian.com/bitbucket/api/2/reference/resource/repositories/%7Bworkspace%7D/%7Brepo_slug%7D/src/%7Bcommit%7D/%7Bpath%7D
   const repo = repoName ?? config.repository;
@@ -155,7 +155,7 @@ export async function getRawFile(
 export async function getJsonFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<any> {
   // TODO #22198
   const raw = await getRawFile(fileName, repoName, branchOrTag);
@@ -184,9 +184,9 @@ export async function initRepo({
     info = utils.repoInfoTransformer(
       (
         await bitbucketHttp.getJson<RepoInfoBody>(
-          `/2.0/repositories/${repository}`
+          `/2.0/repositories/${repository}`,
         )
-      ).body
+      ).body,
     );
 
     mainBranch = info.mainbranch;
@@ -195,7 +195,7 @@ export async function initRepo({
       // Fetch Bitbucket development branch
       const developmentBranch = (
         await bitbucketHttp.getJson<RepoBranchingModel>(
-          `/2.0/repositories/${repository}/branching-model`
+          `/2.0/repositories/${repository}/branching-model`,
         )
       ).body.development?.branch?.name;
 
@@ -303,7 +303,7 @@ export async function findPr({
     (p) =>
       p.sourceBranch === branchName &&
       (!prTitle || p.title.toUpperCase() === prTitle.toUpperCase()) &&
-      matchesState(p.state, state)
+      matchesState(p.state, state),
   );
   if (pr) {
     logger.debug(`Found PR #${pr.number}`);
@@ -320,7 +320,7 @@ export async function findPr({
       if (config.is_private) {
         // Only workspace members could have commented on a private repository
         logger.debug(
-          `Found '${comments.REOPEN_PR_COMMENT_KEYWORD}' comment from workspace member. Renovate will reopen PR ${pr.number} as a new PR`
+          `Found '${comments.REOPEN_PR_COMMENT_KEYWORD}' comment from workspace member. Renovate will reopen PR ${pr.number} as a new PR`,
         );
         return null;
       }
@@ -328,7 +328,7 @@ export async function findPr({
       for (const comment of reopenComments) {
         if (await isAccountMemberOfWorkspace(comment.user, config.repository)) {
           logger.debug(
-            `Found '${comments.REOPEN_PR_COMMENT_KEYWORD}' comment from workspace member. Renovate will reopen PR ${pr.number} as a new PR`
+            `Found '${comments.REOPEN_PR_COMMENT_KEYWORD}' comment from workspace member. Renovate will reopen PR ${pr.number} as a new PR`,
           );
           return null;
         }
@@ -343,7 +343,7 @@ export async function findPr({
 export async function getPr(prNo: number): Promise<Pr | null> {
   const pr = (
     await bitbucketHttp.getJson<PrResponse>(
-      `/2.0/repositories/${config.repository}/pullrequests/${prNo}`
+      `/2.0/repositories/${config.repository}/pullrequests/${prNo}`,
     )
   ).body;
 
@@ -370,14 +370,14 @@ const escapeHash = (input: string): string =>
 
 // Return the commit SHA for a branch
 async function getBranchCommit(
-  branchName: string
+  branchName: string,
 ): Promise<string | undefined> {
   try {
     const branch = (
       await bitbucketHttp.getJson<BranchResponse>(
         `/2.0/repositories/${config.repository}/refs/branches/${escapeHash(
-          branchName
-        )}`
+          branchName,
+        )}`,
       )
     ).body;
     return branch.target.hash;
@@ -399,7 +399,7 @@ export async function getBranchPr(branchName: string): Promise<Pr | null> {
 
 async function getStatus(
   branchName: string,
-  memCache = true
+  memCache = true,
 ): Promise<BitbucketStatus[]> {
   const sha = await getBranchCommit(branchName);
   return (
@@ -408,14 +408,14 @@ async function getStatus(
       {
         paginate: true,
         memCache,
-      }
+      },
     )
   ).body.values;
 }
 // Returns the combined status for a branch.
 export async function getBranchStatus(
   branchName: string,
-  internalChecksAsSuccess: boolean
+  internalChecksAsSuccess: boolean,
 ): Promise<BranchStatus> {
   logger.debug(`getBranchStatus(${branchName})`);
   const statuses = await getStatus(branchName);
@@ -426,13 +426,13 @@ export async function getBranchStatus(
   }
   const noOfFailures = statuses.filter(
     (status: { state: string }) =>
-      status.state === 'FAILED' || status.state === 'STOPPED'
+      status.state === 'FAILED' || status.state === 'STOPPED',
   ).length;
   if (noOfFailures) {
     return 'red';
   }
   const noOfPending = statuses.filter(
-    (status: { state: string }) => status.state === 'INPROGRESS'
+    (status: { state: string }) => status.state === 'INPROGRESS',
   ).length;
   if (noOfPending) {
     return 'yellow';
@@ -441,11 +441,11 @@ export async function getBranchStatus(
     !internalChecksAsSuccess &&
     statuses.every(
       (status) =>
-        status.state === 'SUCCESSFUL' && status.key?.startsWith('renovate/')
+        status.state === 'SUCCESSFUL' && status.key?.startsWith('renovate/'),
     )
   ) {
     logger.debug(
-      'Successful checks are all internal renovate/ checks, so returning "pending" branch status'
+      'Successful checks are all internal renovate/ checks, so returning "pending" branch status',
     );
     return 'yellow';
   }
@@ -460,7 +460,7 @@ const bbToRenovateStatusMapping: Record<string, BranchStatus> = {
 
 export async function getBranchStatusCheck(
   branchName: string,
-  context: string
+  context: string,
 ): Promise<BranchStatus | null> {
   const statuses = await getStatus(branchName);
   const bbState = statuses.find((status) => status.key === context)?.state;
@@ -490,7 +490,7 @@ export async function setBranchStatus({
 
   await bitbucketHttp.postJson(
     `/2.0/repositories/${config.repository}/commit/${sha}/statuses/build`,
-    { body }
+    { body },
   );
   // update status cache
   await getStatus(branchName, false);
@@ -511,7 +511,7 @@ async function findOpenIssues(title: string): Promise<BbIssue[]> {
     return (
       (
         await bitbucketHttp.getJson<{ values: BbIssue[] }>(
-          `/2.0/repositories/${config.repository}/issues?q=${filter}`
+          `/2.0/repositories/${config.repository}/issues?q=${filter}`,
         )
       ).body.values || /* istanbul ignore next */ []
     );
@@ -545,7 +545,7 @@ async function closeIssue(issueNumber: number): Promise<void> {
     `/2.0/repositories/${config.repository}/issues/${issueNumber}`,
     {
       body: { state: 'closed' },
-    }
+    },
   );
 }
 
@@ -554,11 +554,11 @@ export function massageMarkdown(input: string): string {
   return smartTruncate(input, 50000)
     .replace(
       'you tick the rebase/retry checkbox',
-      'by renaming this PR to start with "rebase!"'
+      'by renaming this PR to start with "rebase!"',
     )
     .replace(
       'checking the rebase/retry box above',
-      'renaming the PR to start with "rebase!"'
+      'renaming the PR to start with "rebase!"',
     )
     .replace(regEx(/<\/?summary>/g), '**')
     .replace(regEx(/<\/?(details|blockquote)>/g), '')
@@ -607,7 +607,7 @@ export async function ensureIssue({
                 markup: 'markdown',
               },
             },
-          }
+          },
         );
         return 'updated';
       }
@@ -623,7 +623,7 @@ export async function ensureIssue({
               markup: 'markdown',
             },
           },
-        }
+        },
       );
       return 'created';
     }
@@ -654,7 +654,7 @@ export async function getIssueList(): Promise<Issue[]> {
     return (
       (
         await bitbucketHttp.getJson<{ values: Issue[] }>(
-          `/2.0/repositories/${config.repository}/issues?q=${filter}`
+          `/2.0/repositories/${config.repository}/issues?q=${filter}`,
         )
       ).body.values || []
     );
@@ -678,7 +678,7 @@ export async function ensureIssueClosing(title: string): Promise<void> {
 
 export function addAssignees(
   _prNr: number,
-  _assignees: string[]
+  _assignees: string[],
 ): Promise<void> {
   // Bitbucket supports "participants" and "reviewers" so does not seem to have the concept of "assignee"
   logger.warn('Cannot add assignees');
@@ -687,7 +687,7 @@ export function addAssignees(
 
 export async function addReviewers(
   prId: number,
-  reviewers: string[]
+  reviewers: string[],
 ): Promise<void> {
   logger.debug(`Adding reviewers '${reviewers.join(', ')}' to #${prId}`);
 
@@ -708,7 +708,7 @@ export async function addReviewers(
     `/2.0/repositories/${config.repository}/pullrequests/${prId}`,
     {
       body,
-    }
+    },
   );
 }
 
@@ -732,14 +732,14 @@ export function ensureComment({
 }
 
 export function ensureCommentRemoval(
-  deleteConfig: EnsureCommentRemovalConfig
+  deleteConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   return comments.ensureCommentRemoval(config, deleteConfig);
 }
 
 async function sanitizeReviewers(
   reviewers: Account[],
-  err: any
+  err: any,
 ): Promise<Account[] | undefined> {
   if (err.statusCode === 400 && err.body?.error?.fields?.reviewers) {
     const sanitizedReviewers: Account[] = [];
@@ -755,7 +755,7 @@ async function sanitizeReviewers(
       if (msg === MSG_MALFORMED_REVIEWERS_LIST) {
         logger.debug(
           { err },
-          'PR contains reviewers that may be either inactive or no longer a member of this workspace. Will try setting only active reviewers'
+          'PR contains reviewers that may be either inactive or no longer a member of this workspace. Will try setting only active reviewers',
         );
 
         // Validate that each previous PR reviewer account is still active
@@ -775,7 +775,7 @@ async function sanitizeReviewers(
       } else if (msg.endsWith(MSG_NOT_WORKSPACE_MEMBER)) {
         logger.debug(
           { err },
-          'PR contains reviewer accounts which are no longer member of this workspace. Will try setting only member reviewers'
+          'PR contains reviewer accounts which are no longer member of this workspace. Will try setting only member reviewers',
         );
 
         // Validate that each previous PR reviewer account is still a member of this workspace
@@ -787,7 +787,7 @@ async function sanitizeReviewers(
       } else if (msg.endsWith(MSG_AUTHOR_AND_REVIEWER)) {
         logger.debug(
           { err },
-          'PR contains reviewer accounts which are also the author. Will try setting only non-author reviewers'
+          'PR contains reviewer accounts which are also the author. Will try setting only non-author reviewers',
         );
         const author = msg.replace(MSG_AUTHOR_AND_REVIEWER, '').trim();
         for (const reviewer of reviewers) {
@@ -808,13 +808,13 @@ async function sanitizeReviewers(
 
 async function isAccountMemberOfWorkspace(
   reviewer: Account,
-  repository: string
+  repository: string,
 ): Promise<boolean> {
   const workspace = repository.split('/')[0];
 
   try {
     await bitbucketHttp.get(
-      `/2.0/workspaces/${workspace}/members/${reviewer.uuid}`
+      `/2.0/workspaces/${workspace}/members/${reviewer.uuid}`,
     );
 
     return true;
@@ -823,7 +823,7 @@ async function isAccountMemberOfWorkspace(
     if (err.statusCode === 404) {
       logger.debug(
         { err },
-        `User ${reviewer.display_name} is not a member of the workspace ${workspace}. Will be removed from the PR`
+        `User ${reviewer.display_name} is not a member of the workspace ${workspace}. Will be removed from the PR`,
       );
 
       return false;
@@ -854,7 +854,7 @@ export async function createPr({
         `/2.0/repositories/${config.repository}/effective-default-reviewers`,
         {
           paginate: true,
-        }
+        },
       )
     ).body;
     reviewers = reviewersResponse.values.map((reviewer: EffectiveReviewer) => ({
@@ -886,7 +886,7 @@ export async function createPr({
         `/2.0/repositories/${config.repository}/pullrequests`,
         {
           body,
-        }
+        },
       )
     ).body;
     const pr = utils.prInfo(prRes);
@@ -911,7 +911,7 @@ export async function createPr({
               ...body,
               reviewers: sanitizedReviewers,
             },
-          }
+          },
         )
       ).body;
       const pr = utils.prInfo(prRes);
@@ -935,7 +935,7 @@ export async function updatePr({
   // Updating a PR in Bitbucket will clear the reviewers if reviewers is not present
   const pr = (
     await bitbucketHttp.getJson<PrResponse>(
-      `/2.0/repositories/${config.repository}/pullrequests/${prNo}`
+      `/2.0/repositories/${config.repository}/pullrequests/${prNo}`,
     )
   ).body;
 
@@ -955,7 +955,7 @@ export async function updatePr({
 
     await bitbucketHttp.putJson(
       `/2.0/repositories/${config.repository}/pullrequests/${prNo}`,
-      { body }
+      { body },
     );
   } catch (err) {
     // Try sanitizing reviewers
@@ -972,14 +972,14 @@ export async function updatePr({
             description: sanitize(description),
             reviewers: sanitizedReviewers,
           },
-        }
+        },
       );
     }
   }
 
   if (state === 'closed' && pr) {
     await bitbucketHttp.postJson(
-      `/2.0/repositories/${config.repository}/pullrequests/${prNo}/decline`
+      `/2.0/repositories/${config.repository}/pullrequests/${prNo}/decline`,
     );
   }
 }
@@ -1002,7 +1002,7 @@ export async function mergePr({
       `/2.0/repositories/${config.repository}/pullrequests/${prNo}/merge`,
       {
         body: mergeBodyTransformer(mergeStrategy),
-      }
+      },
     );
     logger.debug('Automerging succeeded');
   } catch (err) /* istanbul ignore next */ {
diff --git a/lib/modules/platform/bitbucket/utils.ts b/lib/modules/platform/bitbucket/utils.ts
index d4a1e6c147c76a2373b9d86f040ca012adc3ed35..06d72dacd31b2315603fdbbcef5ecf24ba34c8a7 100644
--- a/lib/modules/platform/bitbucket/utils.ts
+++ b/lib/modules/platform/bitbucket/utils.ts
@@ -31,7 +31,7 @@ const bitbucketMergeStrategies: Map<MergeStrategy, BitbucketMergeStrategy> =
   ]);
 
 export function mergeBodyTransformer(
-  mergeStrategy: MergeStrategy | undefined
+  mergeStrategy: MergeStrategy | undefined,
 ): MergeRequestBody {
   const body: MergeRequestBody = {
     close_source_branch: true,
diff --git a/lib/modules/platform/codecommit/codecommit-client.ts b/lib/modules/platform/codecommit/codecommit-client.ts
index 42a5af5d670e99c80dea03f948f6185f8f9bc80d..4880d4d11dbe93a482c6a0b75ad5a54df933f5e9 100644
--- a/lib/modules/platform/codecommit/codecommit-client.ts
+++ b/lib/modules/platform/codecommit/codecommit-client.ts
@@ -64,7 +64,7 @@ export function buildCodeCommitClient(): void {
 }
 
 export async function deleteComment(
-  commentId: string
+  commentId: string,
 ): Promise<DeleteCommentContentOutput> {
   const input: DeleteCommentContentInput = {
     commentId,
@@ -74,7 +74,7 @@ export async function deleteComment(
 }
 
 export async function getPrComments(
-  pullRequestId: string
+  pullRequestId: string,
 ): Promise<GetCommentsForPullRequestOutput> {
   const input: GetCommentsForPullRequestInput = {
     pullRequestId,
@@ -85,7 +85,7 @@ export async function getPrComments(
 
 export async function updateComment(
   commentId: string,
-  content: string
+  content: string,
 ): Promise<UpdateCommentOutput> {
   const input: UpdateCommentInput = {
     commentId,
@@ -100,7 +100,7 @@ export async function createPrComment(
   repositoryName: string | undefined,
   content: string,
   beforeCommitId: string,
-  afterCommitId: string
+  afterCommitId: string,
 ): Promise<PostCommentForPullRequestOutput> {
   const input: PostCommentForPullRequestInput = {
     pullRequestId,
@@ -147,7 +147,7 @@ export async function createPrComment(
 
 export async function updatePrStatus(
   pullRequestId: string,
-  pullRequestStatus: PullRequestStatusEnum
+  pullRequestStatus: PullRequestStatusEnum,
 ): Promise<UpdatePullRequestStatusOutput> {
   const input: UpdatePullRequestStatusInput = {
     pullRequestId,
@@ -159,7 +159,7 @@ export async function updatePrStatus(
 
 export async function updatePrTitle(
   prNo: string,
-  title: string
+  title: string,
 ): Promise<UpdatePullRequestTitleOutput> {
   const input: UpdatePullRequestTitleInput = {
     pullRequestId: `${prNo}`,
@@ -171,7 +171,7 @@ export async function updatePrTitle(
 
 export async function updatePrDescription(
   pullRequestId: string,
-  description: string
+  description: string,
 ): Promise<UpdatePullRequestDescriptionOutput> {
   const input: UpdatePullRequestDescriptionInput = {
     pullRequestId,
@@ -186,7 +186,7 @@ export async function createPr(
   description: string,
   sourceReference: string,
   destinationReference: string,
-  repositoryName: string | undefined
+  repositoryName: string | undefined,
 ): Promise<CreatePullRequestOutput> {
   const input: CreatePullRequestInput = {
     title,
@@ -206,7 +206,7 @@ export async function createPr(
 export async function getFile(
   repositoryName: string | undefined,
   filePath: string,
-  commitSpecifier: string | undefined
+  commitSpecifier: string | undefined,
 ): Promise<GetFileOutput> {
   const input: GetFileInput = {
     repositoryName,
@@ -218,7 +218,7 @@ export async function getFile(
 }
 
 export async function listPullRequests(
-  repositoryName: string
+  repositoryName: string,
 ): Promise<ListPullRequestsOutput> {
   const input: ListPullRequestsInput = {
     repositoryName,
@@ -230,7 +230,7 @@ export async function listPullRequests(
 }
 
 export async function getRepositoryInfo(
-  repository: string
+  repository: string,
 ): Promise<GetRepositoryOutput> {
   const input: GetRepositoryInput = {
     repositoryName: `${repository}`,
@@ -240,7 +240,7 @@ export async function getRepositoryInfo(
 }
 
 export async function getPr(
-  pullRequestId: string
+  pullRequestId: string,
 ): Promise<GetPullRequestOutput | undefined> {
   const input: GetPullRequestInput = {
     pullRequestId,
@@ -263,7 +263,7 @@ export async function listRepositories(): Promise<ListRepositoriesOutput> {
 
 export async function createPrApprovalRule(
   pullRequestId: string,
-  approvalRuleContent: string
+  approvalRuleContent: string,
 ): Promise<CreatePullRequestApprovalRuleOutput> {
   const input: CreatePullRequestApprovalRuleInput = {
     approvalRuleContent,
@@ -276,7 +276,7 @@ export async function createPrApprovalRule(
 
 export function getCodeCommitUrl(
   repoMetadata: RepositoryMetadata,
-  repoName: string
+  repoName: string,
 ): string {
   logger.debug('get code commit url');
   if (!process.env.AWS_ACCESS_KEY_ID || !process.env.AWS_SECRET_ACCESS_KEY) {
diff --git a/lib/modules/platform/codecommit/index.spec.ts b/lib/modules/platform/codecommit/index.spec.ts
index f8fd754ccf15f0d2dabbf46dacfd69bcc21c30d3..d56363fe16e3c979c238f6cceed6ed823473a7e4 100644
--- a/lib/modules/platform/codecommit/index.spec.ts
+++ b/lib/modules/platform/codecommit/index.spec.ts
@@ -54,10 +54,10 @@ describe('modules/platform/codecommit/index', () => {
 
   it('validates massageMarkdown functionality', () => {
     const newStr = codeCommit.massageMarkdown(
-      '<details><summary>foo</summary>bar</details>text<details>\n<!--renovate-debug:hiddenmessage123-->'
+      '<details><summary>foo</summary>bar</details>text<details>\n<!--renovate-debug:hiddenmessage123-->',
     );
     expect(newStr).toBe(
-      '**foo**bartext\n[//]: # (<!--renovate-debug:hiddenmessage123-->)'
+      '**foo**bartext\n[//]: # (<!--renovate-debug:hiddenmessage123-->)',
     );
   });
 
@@ -68,7 +68,7 @@ describe('modules/platform/codecommit/index', () => {
           endpoint: 'https://git-codecommit.REGION.amazonaws.com/',
           username: 'abc',
           password: '123',
-        })
+        }),
       ).toEqual({
         endpoint: 'https://git-codecommit.REGION.amazonaws.com/',
       });
@@ -80,7 +80,7 @@ describe('modules/platform/codecommit/index', () => {
         codeCommit.initPlatform({
           username: 'abc',
           password: '123',
-        })
+        }),
       ).resolves.toEqual({
         endpoint: 'https://git-codecommit.REGION.amazonaws.com/',
       });
@@ -88,7 +88,7 @@ describe('modules/platform/codecommit/index', () => {
 
     it('should', async () => {
       await expect(
-        codeCommit.initPlatform({ endpoint: 'non://parsable.url' })
+        codeCommit.initPlatform({ endpoint: 'non://parsable.url' }),
       ).resolves.toEqual({
         endpoint: 'non://parsable.url',
       });
@@ -114,7 +114,7 @@ describe('modules/platform/codecommit/index', () => {
       });
 
       await expect(
-        codeCommit.initRepo({ repository: 'repositoryName' })
+        codeCommit.initRepo({ repository: 'repositoryName' }),
       ).rejects.toThrow(new Error(PLATFORM_BAD_CREDENTIALS));
     });
 
@@ -124,7 +124,7 @@ describe('modules/platform/codecommit/index', () => {
         .on(GetRepositoryCommand)
         .rejectsOnce(new Error('Could not find repository'));
       await expect(
-        codeCommit.initRepo({ repository: 'repositoryName' })
+        codeCommit.initRepo({ repository: 'repositoryName' }),
       ).rejects.toThrow(new Error(REPOSITORY_NOT_FOUND));
     });
 
@@ -132,7 +132,7 @@ describe('modules/platform/codecommit/index', () => {
       jest.spyOn(git, 'initRepo').mockReturnValueOnce(Promise.resolve());
       codeCommitClient.on(GetRepositoryCommand).resolvesOnce({});
       await expect(
-        codeCommit.initRepo({ repository: 'repositoryName' })
+        codeCommit.initRepo({ repository: 'repositoryName' }),
       ).rejects.toThrow(new Error(REPOSITORY_NOT_FOUND));
     });
 
@@ -144,7 +144,7 @@ describe('modules/platform/codecommit/index', () => {
         },
       });
       await expect(
-        codeCommit.initRepo({ repository: 'repositoryName' })
+        codeCommit.initRepo({ repository: 'repositoryName' }),
       ).rejects.toThrow(new Error(REPOSITORY_EMPTY));
     });
 
@@ -159,7 +159,7 @@ describe('modules/platform/codecommit/index', () => {
       process.env.AWS_ACCESS_KEY_ID = 'something';
       process.env.AWS_SECRET_ACCESS_KEY = 'something';
       await expect(
-        codeCommit.initRepo({ repository: 'repositoryName' })
+        codeCommit.initRepo({ repository: 'repositoryName' }),
       ).resolves.toEqual({
         repoFingerprint:
           'f0bcfd81abefcdf9ae5e5de58d1a868317503ea76422309bc212d1ef25a1e67789d0bfa752a7e2abd4510f4f3e4f60cdaf6202a42883fb97bb7110ab3600785e',
@@ -179,8 +179,8 @@ describe('modules/platform/codecommit/index', () => {
             cloneUrlHttp:
               'https://git-codecommit.us-east-1.amazonaws.com/v1/repos/name',
           },
-          'name'
-        )
+          'name',
+        ),
       ).toBe('https://git-codecommit.us-east-1.amazonaws.com/v1/repos/name');
     });
 
@@ -194,8 +194,8 @@ describe('modules/platform/codecommit/index', () => {
             defaultBranch: 'main',
             repositoryId: 'id',
           },
-          'name'
-        )
+          'name',
+        ),
       ).toBe('https://git-codecommit.eu-central-1.amazonaws.com/v1/repos/name');
     });
 
@@ -219,10 +219,10 @@ describe('modules/platform/codecommit/index', () => {
             defaultBranch: 'main',
             repositoryId: 'id',
           },
-          'name'
-        )
+          'name',
+        ),
       ).toBe(
-        `https://access-key-id:${token}@git-codecommit.eu-central-1.amazonaws.com/v1/repos/name`
+        `https://access-key-id:${token}@git-codecommit.eu-central-1.amazonaws.com/v1/repos/name`,
       );
     });
   });
@@ -726,7 +726,7 @@ describe('modules/platform/codecommit/index', () => {
           targetBranch: 'targetBranch',
           prTitle: 'mytitle',
           prBody: 'mybody',
-        })
+        }),
       ).rejects.toThrow(new Error('Could not create pr, missing PR info'));
     });
   });
@@ -742,7 +742,7 @@ describe('modules/platform/codecommit/index', () => {
           prTitle: 'title',
           prBody: 'body',
           state: 'open',
-        })
+        }),
       ).toResolve();
     });
 
@@ -769,7 +769,7 @@ describe('modules/platform/codecommit/index', () => {
           prTitle: 'title',
           prBody: 'new description',
           state: 'open',
-        })
+        }),
       ).toResolve();
     });
 
@@ -795,7 +795,7 @@ describe('modules/platform/codecommit/index', () => {
           prTitle: 'title',
           prBody: 'new description',
           state: 'open',
-        })
+        }),
       ).toResolve();
     });
 
@@ -811,7 +811,7 @@ describe('modules/platform/codecommit/index', () => {
           prTitle: 'title',
           prBody: 'body',
           state: 'open',
-        })
+        }),
       ).toResolve();
     });
 
@@ -825,7 +825,7 @@ describe('modules/platform/codecommit/index', () => {
           prTitle: 'title',
           prBody: 'body',
           state: 'closed',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -1017,7 +1017,7 @@ describe('modules/platform/codecommit/index', () => {
       expect(res).toBeTrue();
       expect(logger.logger.info).toHaveBeenCalledWith(
         { repository: undefined, prNo: 42, topic: 'some-subject' },
-        'Comment added'
+        'Comment added',
       );
     });
 
@@ -1050,7 +1050,7 @@ describe('modules/platform/codecommit/index', () => {
       expect(res).toBeTrue();
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { repository: undefined, prNo: 42, topic: 'some-subject' },
-        'Comment updated'
+        'Comment updated',
       );
     });
 
@@ -1082,7 +1082,7 @@ describe('modules/platform/codecommit/index', () => {
       expect(res).toBeTrue();
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { repository: undefined, prNo: 42, topic: 'some-subject' },
-        'Comment is already update-to-date'
+        'Comment is already update-to-date',
       );
     });
 
@@ -1114,7 +1114,7 @@ describe('modules/platform/codecommit/index', () => {
       expect(res).toBeTrue();
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { repository: undefined, prNo: 42, topic: null },
-        'Comment is already update-to-date'
+        'Comment is already update-to-date',
       );
     });
 
@@ -1129,7 +1129,7 @@ describe('modules/platform/codecommit/index', () => {
       expect(res).toBeFalse();
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { err },
-        'Unable to retrieve pr comments'
+        'Unable to retrieve pr comments',
       );
     });
 
@@ -1211,7 +1211,7 @@ describe('modules/platform/codecommit/index', () => {
         topic: 'some-subject',
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'comment "some-subject" in PR #42 was removed'
+        'comment "some-subject" in PR #42 was removed',
       );
     });
 
@@ -1224,7 +1224,7 @@ describe('modules/platform/codecommit/index', () => {
         topic: 'some-subject',
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'commentsForPullRequestData not found'
+        'commentsForPullRequestData not found',
       );
     });
 
@@ -1249,7 +1249,7 @@ describe('modules/platform/codecommit/index', () => {
         topic: 'some-subject',
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'comments object not found under commentsForPullRequestData'
+        'comments object not found under commentsForPullRequestData',
       );
     });
 
@@ -1280,7 +1280,7 @@ describe('modules/platform/codecommit/index', () => {
         content: 'my comment content',
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'comment "my comment content" in PR #42 was removed'
+        'comment "my comment content" in PR #42 was removed',
       );
     });
 
@@ -1294,7 +1294,7 @@ describe('modules/platform/codecommit/index', () => {
       });
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { err },
-        'Unable to retrieve pr comments'
+        'Unable to retrieve pr comments',
       );
     });
   });
@@ -1317,11 +1317,13 @@ describe('modules/platform/codecommit/index', () => {
         .on(CreatePullRequestApprovalRuleCommand)
         .resolvesOnce(res);
       await expect(
-        codeCommit.addReviewers(13, ['arn:aws:iam::someUser:user/ReviewerUser'])
+        codeCommit.addReviewers(13, [
+          'arn:aws:iam::someUser:user/ReviewerUser',
+        ]),
       ).toResolve();
       expect(logger.logger.debug).toHaveBeenCalledWith(
         res,
-        'Approval Rule Added to PR #13:'
+        'Approval Rule Added to PR #13:',
       );
     });
   });
diff --git a/lib/modules/platform/codecommit/index.ts b/lib/modules/platform/codecommit/index.ts
index 7b7ddcc449396e048cbb2af3973df0535fd790e2..6a925666cfb45f27b03f27eddfa8169d3002bbbc 100644
--- a/lib/modules/platform/codecommit/index.ts
+++ b/lib/modules/platform/codecommit/index.ts
@@ -204,12 +204,12 @@ export async function findPr({
     const prs = await getPrList();
     const refsHeadBranchName = getNewBranchName(branchName);
     prsFiltered = prs.filter(
-      (item) => item.sourceBranch === refsHeadBranchName
+      (item) => item.sourceBranch === refsHeadBranchName,
     );
 
     if (prTitle) {
       prsFiltered = prsFiltered.filter(
-        (item) => item.title.toUpperCase() === prTitle.toUpperCase()
+        (item) => item.title.toUpperCase() === prTitle.toUpperCase(),
       );
     }
 
@@ -233,7 +233,7 @@ export async function findPr({
 }
 
 export async function getBranchPr(
-  branchName: string
+  branchName: string,
 ): Promise<CodeCommitPr | null> {
   logger.debug(`getBranchPr(${branchName})`);
   const existingPr = await findPr({
@@ -244,7 +244,7 @@ export async function getBranchPr(
 }
 
 export async function getPr(
-  pullRequestId: number
+  pullRequestId: number,
 ): Promise<CodeCommitPr | null> {
   logger.debug(`getPr(${pullRequestId})`);
   const prRes = await client.getPr(`${pullRequestId}`);
@@ -306,11 +306,11 @@ export function massageMarkdown(input: string): string {
   return input
     .replace(
       'you tick the rebase/retry checkbox',
-      'rename PR to start with "rebase!"'
+      'rename PR to start with "rebase!"',
     )
     .replace(
       'checking the rebase/retry box above',
-      'renaming the PR to start with "rebase!"'
+      'renaming the PR to start with "rebase!"',
     )
     .replace(regEx(/<\/?summary>/g), '**')
     .replace(regEx(/<\/?details>/g), '')
@@ -318,14 +318,14 @@ export function massageMarkdown(input: string): string {
     .replace(regEx(/\]\(\.\.\/pull\//g), '](../../pull-requests/')
     .replace(
       regEx(/(?<hiddenComment><!--renovate-(?:debug|config-hash):.*?-->)/g),
-      '[//]: # ($<hiddenComment>)'
+      '[//]: # ($<hiddenComment>)',
     );
 }
 
 export async function getJsonFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<any> {
   const raw = await getRawFile(fileName, repoName, branchOrTag);
   return parseJson(raw, fileName);
@@ -334,12 +334,12 @@ export async function getJsonFile(
 export async function getRawFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<string | null> {
   const fileRes = await client.getFile(
     repoName ?? config.repository,
     fileName,
-    branchOrTag
+    branchOrTag,
   );
   if (!fileRes?.fileContent) {
     return null;
@@ -368,7 +368,7 @@ export async function createPr({
     sanitize(description),
     sourceBranch,
     targetBranch,
-    config.repository
+    config.repository,
   );
 
   if (
@@ -413,7 +413,7 @@ export async function updatePr({
   if (body && cachedPr?.body !== body) {
     await client.updatePrDescription(
       `${prNo}`,
-      smartTruncate(sanitize(body), AMAZON_MAX_BODY_LENGTH)
+      smartTruncate(sanitize(body), AMAZON_MAX_BODY_LENGTH),
     );
   }
 
@@ -515,15 +515,15 @@ export async function mergePr({
 
 export async function addReviewers(
   prNo: number,
-  reviewers: string[]
+  reviewers: string[],
 ): Promise<void> {
   const numberOfApprovers = reviewers.length;
   const approvalRuleContents = `{"Version":"2018-11-08","Statements": [{"Type": "Approvers","NumberOfApprovalsNeeded":${numberOfApprovers},"ApprovalPoolMembers": ${JSON.stringify(
-    reviewers
+    reviewers,
   )}}]}`;
   const res = await client.createPrApprovalRule(
     `${prNo}`,
-    approvalRuleContents
+    approvalRuleContents,
   );
   if (res) {
     const approvalRule = res.approvalRule;
@@ -573,7 +573,7 @@ export function deleteLabel(prNumber: number, label: string): Promise<void> {
 export function getBranchStatus(branchName: string): Promise<BranchStatus> {
   logger.debug(`getBranchStatus(${branchName})`);
   logger.debug(
-    'returning branch status yellow, because getBranchStatus isnt supported on aws yet'
+    'returning branch status yellow, because getBranchStatus isnt supported on aws yet',
   );
   return Promise.resolve('yellow');
 }
@@ -581,11 +581,11 @@ export function getBranchStatus(branchName: string): Promise<BranchStatus> {
 /* istanbul ignore next */
 export function getBranchStatusCheck(
   branchName: string,
-  context: string
+  context: string,
 ): Promise<BranchStatus | null> {
   logger.debug(`getBranchStatusCheck(${branchName}, context=${context})`);
   logger.debug(
-    'returning null, because getBranchStatusCheck is not supported on aws yet'
+    'returning null, because getBranchStatusCheck is not supported on aws yet',
   );
   return Promise.resolve(null);
 }
@@ -652,23 +652,23 @@ export async function ensureComment({
       config.repository,
       body,
       thisPr[0].destinationCommit,
-      thisPr[0].sourceCommit
+      thisPr[0].sourceCommit,
     );
     logger.info(
       { repository: config.repository, prNo: number, topic },
-      'Comment added'
+      'Comment added',
     );
   } else if (commentNeedsUpdating && commentId) {
     await client.updateComment(commentId, body);
 
     logger.debug(
       { repository: config.repository, prNo: number, topic },
-      'Comment updated'
+      'Comment updated',
     );
   } else {
     logger.debug(
       { repository: config.repository, prNo: number, topic },
-      'Comment is already update-to-date'
+      'Comment is already update-to-date',
     );
   }
 
@@ -676,7 +676,7 @@ export async function ensureComment({
 }
 
 export async function ensureCommentRemoval(
-  removeConfig: EnsureCommentRemovalConfig
+  removeConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   const { number: prNo } = removeConfig;
   const key =
@@ -702,7 +702,7 @@ export async function ensureCommentRemoval(
   for (const commentObj of prCommentsResponse.commentsForPullRequestData) {
     if (!commentObj?.comments) {
       logger.debug(
-        'comments object not found under commentsForPullRequestData'
+        'comments object not found under commentsForPullRequestData',
       );
       continue;
     }
diff --git a/lib/modules/platform/comment.ts b/lib/modules/platform/comment.ts
index 213d8eb6c8cb89f377b17286120c9af25b11ccbc..33f2e6e8f96df66234528d9a2cd9a070a589d5c1 100644
--- a/lib/modules/platform/comment.ts
+++ b/lib/modules/platform/comment.ts
@@ -4,7 +4,7 @@ import type { EnsureCommentConfig, EnsureCommentRemovalConfig } from './types';
 import { platform } from '.';
 
 export async function ensureComment(
-  commentConfig: EnsureCommentConfig
+  commentConfig: EnsureCommentConfig,
 ): Promise<boolean> {
   const { number, content } = commentConfig;
   const topic = commentConfig.topic ?? '';
@@ -26,7 +26,7 @@ export async function ensureComment(
 }
 
 export async function ensureCommentRemoval(
-  config: EnsureCommentRemovalConfig
+  config: EnsureCommentRemovalConfig,
 ): Promise<void> {
   await platform.ensureCommentRemoval(config);
 
@@ -39,7 +39,7 @@ export async function ensureCommentRemoval(
     } else if (type === 'by-content') {
       const contentHash = hash(config.content);
       for (const [cachedTopic, cachedContentHash] of Object.entries(
-        repoCache.prComments?.[number]
+        repoCache.prComments?.[number],
       )) {
         if (cachedContentHash === contentHash) {
           delete repoCache.prComments?.[number]?.[cachedTopic];
diff --git a/lib/modules/platform/gitea/gitea-helper.spec.ts b/lib/modules/platform/gitea/gitea-helper.spec.ts
index d1cd25298332ecc926332611304edb2bbb4582d3..77409d59bb772def33ed418983b49e21ac7a1c7c 100644
--- a/lib/modules/platform/gitea/gitea-helper.spec.ts
+++ b/lib/modules/platform/gitea/gitea-helper.spec.ts
@@ -276,14 +276,14 @@ describe('modules/platform/gitea/gitea-helper', () => {
       httpMock
         .scope(baseUrl)
         .get(
-          `/repos/${mockRepo.full_name}/contents/${mockContents.path}?ref=${mockCommitHash}`
+          `/repos/${mockRepo.full_name}/contents/${mockContents.path}?ref=${mockCommitHash}`,
         )
         .reply(200, { ...mockContents, contentString: undefined });
 
       const res = await getRepoContents(
         mockRepo.full_name,
         mockContents.path,
-        mockCommitHash
+        mockCommitHash,
       );
       expect(res).toEqual(mockContents);
     });
@@ -298,7 +298,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
 
       const res = await getRepoContents(
         mockRepo.full_name,
-        otherMockContents.path
+        otherMockContents.path,
       );
       expect(res).toEqual(otherMockContents);
     });
@@ -410,12 +410,12 @@ describe('modules/platform/gitea/gitea-helper', () => {
       httpMock
         .scope(baseUrl)
         .post(
-          `/repos/${mockRepo.full_name}/pulls/${mockPR.number}/requested_reviewers`
+          `/repos/${mockRepo.full_name}/pulls/${mockPR.number}/requested_reviewers`,
         )
         .reply(200);
 
       await expect(
-        requestPrReviewers(mockRepo.full_name, mockPR.number, {})
+        requestPrReviewers(mockRepo.full_name, mockPR.number, {}),
       ).toResolve();
     });
   });
@@ -435,7 +435,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
       httpMock
         .scope(baseUrl)
         .get(
-          `/repos/${mockRepo.full_name}/pulls?state=open&labels=${mockLabel.id}&labels=${otherMockLabel.id}`
+          `/repos/${mockRepo.full_name}/pulls?state=open&labels=${mockLabel.id}&labels=${otherMockLabel.id}`,
         )
         .reply(200, [mockPR]);
 
@@ -506,7 +506,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
         mockIssue.number,
         {
           labels: [1, 3],
-        }
+        },
       );
       expect(res).toEqual(updatedMockLabels);
     });
@@ -589,14 +589,14 @@ describe('modules/platform/gitea/gitea-helper', () => {
       httpMock
         .scope(baseUrl)
         .delete(
-          `/repos/${mockRepo.full_name}/issues/${mockIssue.number}/labels/${mockLabel.id}`
+          `/repos/${mockRepo.full_name}/issues/${mockIssue.number}/labels/${mockLabel.id}`,
         )
         .reply(200);
 
       const res = await unassignLabel(
         mockRepo.full_name,
         mockIssue.number,
-        mockLabel.id
+        mockLabel.id,
       );
       expect(res).toBeUndefined();
     });
@@ -607,14 +607,14 @@ describe('modules/platform/gitea/gitea-helper', () => {
       httpMock
         .scope(baseUrl)
         .post(
-          `/repos/${mockRepo.full_name}/issues/${mockIssue.number}/comments`
+          `/repos/${mockRepo.full_name}/issues/${mockIssue.number}/comments`,
         )
         .reply(200, mockComment);
 
       const res = await createComment(
         mockRepo.full_name,
         mockIssue.number,
-        mockComment.body
+        mockComment.body,
       );
       expect(res).toEqual(mockComment);
     });
@@ -635,7 +635,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
       const res = await updateComment(
         mockRepo.full_name,
         mockComment.id,
-        'new-body'
+        'new-body',
       );
       expect(res).toEqual(updatedMockComment);
     });
@@ -646,7 +646,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
       httpMock
         .scope(baseUrl)
         .delete(
-          `/repos/${mockRepo.full_name}/issues/comments/${mockComment.id}`
+          `/repos/${mockRepo.full_name}/issues/comments/${mockComment.id}`,
         )
         .reply(200);
 
@@ -693,7 +693,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
 
       const res = await getCombinedCommitStatus(
         mockRepo.full_name,
-        mockBranch.name
+        mockBranch.name,
       );
       expect(res.worstStatus).not.toBe('unknown');
       expect(res.statuses).toEqual([mockCommitStatus, otherMockCommitStatus]);
@@ -752,7 +752,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
         httpMock
           .scope(baseUrl)
           .get(
-            `/repos/${mockRepo.full_name}/commits/${mockBranch.name}/statuses`
+            `/repos/${mockRepo.full_name}/commits/${mockBranch.name}/statuses`,
           )
           .reply(200, commitStatuses);
 
@@ -760,7 +760,7 @@ describe('modules/platform/gitea/gitea-helper', () => {
         // should be less important than the one which just got added
         const res = await getCombinedCommitStatus(
           mockRepo.full_name,
-          mockBranch.name
+          mockBranch.name,
         );
         expect(res.worstStatus).toEqual(expected);
       }
diff --git a/lib/modules/platform/gitea/gitea-helper.ts b/lib/modules/platform/gitea/gitea-helper.ts
index 188211d6028685b48d0a5033e5800fc5a0687133..7dbc6594a5dc90be4a8a59705aee2001fbbf98b9 100644
--- a/lib/modules/platform/gitea/gitea-helper.ts
+++ b/lib/modules/platform/gitea/gitea-helper.ts
@@ -44,7 +44,7 @@ const commitStatusStates: CommitStatusType[] = [
 ];
 
 export async function getCurrentUser(
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<User> {
   const url = `${API_PATH}/user`;
   const res = await giteaHttp.getJson<User>(url, options);
@@ -59,7 +59,7 @@ export async function getVersion(options?: GiteaHttpOptions): Promise<string> {
 
 export async function searchRepos(
   params: RepoSearchParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Repo[]> {
   const query = getQueryString(params);
   const url = `${API_PATH}/repos/search?${query}`;
@@ -70,7 +70,7 @@ export async function searchRepos(
 
   if (!res.body.ok) {
     throw new Error(
-      'Unable to search for repositories, ok flag has not been set'
+      'Unable to search for repositories, ok flag has not been set',
     );
   }
 
@@ -79,7 +79,7 @@ export async function searchRepos(
 
 export async function getRepo(
   repoPath: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Repo> {
   const url = `${API_PATH}/repos/${repoPath}`;
   const res = await giteaHttp.getJson<Repo>(url, options);
@@ -90,11 +90,11 @@ export async function getRepoContents(
   repoPath: string,
   filePath: string,
   ref?: string | null,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<RepoContents> {
   const query = getQueryString(ref ? { ref } : {});
   const url = `${API_PATH}/repos/${repoPath}/contents/${urlEscape(
-    filePath
+    filePath,
   )}?${query}`;
   const res = await giteaHttp.getJson<RepoContents>(url, options);
 
@@ -108,7 +108,7 @@ export async function getRepoContents(
 export async function createPR(
   repoPath: string,
   params: PRCreateParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<PR> {
   const url = `${API_PATH}/repos/${repoPath}/pulls`;
   const res = await giteaHttp.postJson<PR>(url, {
@@ -123,7 +123,7 @@ export async function updatePR(
   repoPath: string,
   idx: number,
   params: PRUpdateParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<PR> {
   const url = `${API_PATH}/repos/${repoPath}/pulls/${idx}`;
   const res = await giteaHttp.patchJson<PR>(url, {
@@ -137,7 +137,7 @@ export async function updatePR(
 export async function closePR(
   repoPath: string,
   idx: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<void> {
   await updatePR(repoPath, idx, {
     ...options,
@@ -149,7 +149,7 @@ export async function mergePR(
   repoPath: string,
   idx: number,
   params: PRMergeParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<void> {
   const url = `${API_PATH}/repos/${repoPath}/pulls/${idx}/merge`;
   await giteaHttp.postJson(url, {
@@ -161,7 +161,7 @@ export async function mergePR(
 export async function getPR(
   repoPath: string,
   idx: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<PR> {
   const url = `${API_PATH}/repos/${repoPath}/pulls/${idx}`;
   const res = await giteaHttp.getJson<PR>(url, options);
@@ -172,7 +172,7 @@ export async function requestPrReviewers(
   repoPath: string,
   idx: number,
   params: PrReviewersParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<void> {
   const url = `${API_PATH}/repos/${repoPath}/pulls/${idx}/requested_reviewers`;
   await giteaHttp.postJson(url, {
@@ -184,7 +184,7 @@ export async function requestPrReviewers(
 export async function searchPRs(
   repoPath: string,
   params: PRSearchParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<PR[]> {
   const query = getQueryString(params);
   const url = `${API_PATH}/repos/${repoPath}/pulls?${query}`;
@@ -199,7 +199,7 @@ export async function searchPRs(
 export async function createIssue(
   repoPath: string,
   params: IssueCreateParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Issue> {
   const url = `${API_PATH}/repos/${repoPath}/issues`;
   const res = await giteaHttp.postJson<Issue>(url, {
@@ -214,7 +214,7 @@ export async function updateIssue(
   repoPath: string,
   idx: number,
   params: IssueUpdateParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Issue> {
   const url = `${API_PATH}/repos/${repoPath}/issues/${idx}`;
   const res = await giteaHttp.patchJson<Issue>(url, {
@@ -229,7 +229,7 @@ export async function updateIssueLabels(
   repoPath: string,
   idx: number,
   params: IssueUpdateLabelsParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Label[]> {
   const url = `${API_PATH}/repos/${repoPath}/issues/${idx}/labels`;
   const res = await giteaHttp.putJson<Label[]>(url, {
@@ -243,7 +243,7 @@ export async function updateIssueLabels(
 export async function closeIssue(
   repoPath: string,
   idx: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<void> {
   await updateIssue(repoPath, idx, {
     ...options,
@@ -254,7 +254,7 @@ export async function closeIssue(
 export async function searchIssues(
   repoPath: string,
   params: IssueSearchParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Issue[]> {
   const query = getQueryString({ ...params, type: 'issues' });
   const url = `${API_PATH}/repos/${repoPath}/issues?${query}`;
@@ -269,7 +269,7 @@ export async function searchIssues(
 export async function getIssue(
   repoPath: string,
   idx: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Issue> {
   const url = `${API_PATH}/repos/${repoPath}/issues/${idx}`;
   const res = await giteaHttp.getJson<Issue>(url, options);
@@ -278,7 +278,7 @@ export async function getIssue(
 
 export async function getRepoLabels(
   repoPath: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Label[]> {
   const url = `${API_PATH}/repos/${repoPath}/labels`;
   const res = await giteaHttp.getJson<Label[]>(url, options);
@@ -288,7 +288,7 @@ export async function getRepoLabels(
 
 export async function getOrgLabels(
   orgName: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Label[]> {
   const url = `${API_PATH}/orgs/${orgName}/labels`;
   const res = await giteaHttp.getJson<Label[]>(url, options);
@@ -300,7 +300,7 @@ export async function unassignLabel(
   repoPath: string,
   issue: number,
   label: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<void> {
   const url = `${API_PATH}/repos/${repoPath}/issues/${issue}/labels/${label}`;
   await giteaHttp.deleteJson(url, options);
@@ -310,7 +310,7 @@ export async function createComment(
   repoPath: string,
   issue: number,
   body: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Comment> {
   const params: CommentCreateParams = { body };
   const url = `${API_PATH}/repos/${repoPath}/issues/${issue}/comments`;
@@ -326,7 +326,7 @@ export async function updateComment(
   repoPath: string,
   idx: number,
   body: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Comment> {
   const params: CommentUpdateParams = { body };
   const url = `${API_PATH}/repos/${repoPath}/issues/comments/${idx}`;
@@ -341,7 +341,7 @@ export async function updateComment(
 export async function deleteComment(
   repoPath: string,
   idx: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<void> {
   const url = `${API_PATH}/repos/${repoPath}/issues/comments/${idx}`;
   await giteaHttp.deleteJson(url, options);
@@ -350,7 +350,7 @@ export async function deleteComment(
 export async function getComments(
   repoPath: string,
   issue: number,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Comment[]> {
   const url = `${API_PATH}/repos/${repoPath}/issues/${issue}/comments`;
   const res = await giteaHttp.getJson<Comment[]>(url, options);
@@ -362,7 +362,7 @@ export async function createCommitStatus(
   repoPath: string,
   branchCommit: string,
   params: CommitStatusCreateParams,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<CommitStatus> {
   const url = `${API_PATH}/repos/${repoPath}/statuses/${branchCommit}`;
   const res = await giteaHttp.postJson<CommitStatus>(url, {
@@ -410,10 +410,10 @@ function filterStatus(data: CommitStatus[]): CommitStatus[] {
 export async function getCombinedCommitStatus(
   repoPath: string,
   branchName: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<CombinedCommitStatus> {
   const url = `${API_PATH}/repos/${repoPath}/commits/${urlEscape(
-    branchName
+    branchName,
   )}/statuses`;
   const res = await giteaHttp.getJson<CommitStatus[]>(url, {
     ...options,
@@ -434,7 +434,7 @@ export async function getCombinedCommitStatus(
 export async function getBranch(
   repoPath: string,
   branchName: string,
-  options?: GiteaHttpOptions
+  options?: GiteaHttpOptions,
 ): Promise<Branch> {
   const url = `${API_PATH}/repos/${repoPath}/branches/${urlEscape(branchName)}`;
   const res = await giteaHttp.getJson<Branch>(url, options);
diff --git a/lib/modules/platform/gitea/index.spec.ts b/lib/modules/platform/gitea/index.spec.ts
index 140925dec6e1cd93a79664e742b5deeee462968e..20f07b43b148ab320824e6da186897e8673ecdcb 100644
--- a/lib/modules/platform/gitea/index.spec.ts
+++ b/lib/modules/platform/gitea/index.spec.ts
@@ -226,7 +226,7 @@ describe('modules/platform/gitea/index', () => {
 
   function initFakeRepo(
     repo?: Partial<Repo>,
-    config?: Partial<RepoParams>
+    config?: Partial<RepoParams>,
   ): Promise<RepoResult> {
     helper.getRepo.mockResolvedValueOnce({ ...mockRepo, ...repo });
 
@@ -245,7 +245,7 @@ describe('modules/platform/gitea/index', () => {
       helper.getCurrentUser.mockRejectedValueOnce(new Error());
 
       await expect(
-        gitea.initPlatform({ token: 'some-token' })
+        gitea.initPlatform({ token: 'some-token' }),
       ).rejects.toThrow();
     });
 
@@ -253,7 +253,7 @@ describe('modules/platform/gitea/index', () => {
       helper.getCurrentUser.mockResolvedValueOnce(mockUser);
 
       expect(
-        await gitea.initPlatform({ token: 'some-token' })
+        await gitea.initPlatform({ token: 'some-token' }),
       ).toMatchSnapshot();
     });
 
@@ -264,7 +264,7 @@ describe('modules/platform/gitea/index', () => {
         await gitea.initPlatform({
           token: 'some-token',
           endpoint: 'https://gitea.renovatebot.com',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -275,7 +275,7 @@ describe('modules/platform/gitea/index', () => {
         await gitea.initPlatform({
           token: 'some-token',
           endpoint: 'https://gitea.renovatebot.com/api/v1',
-        })
+        }),
       ).toMatchObject({
         endpoint: 'https://gitea.renovatebot.com/',
       });
@@ -288,7 +288,7 @@ describe('modules/platform/gitea/index', () => {
       });
 
       expect(
-        await gitea.initPlatform({ token: 'some-token' })
+        await gitea.initPlatform({ token: 'some-token' }),
       ).toMatchSnapshot();
     });
   });
@@ -341,19 +341,19 @@ describe('modules/platform/gitea/index', () => {
 
     it('should abort when repo is archived', async () => {
       await expect(initFakeRepo({ archived: true })).rejects.toThrow(
-        REPOSITORY_ARCHIVED
+        REPOSITORY_ARCHIVED,
       );
     });
 
     it('should abort when repo is mirrored', async () => {
       await expect(initFakeRepo({ mirror: true })).rejects.toThrow(
-        REPOSITORY_MIRRORED
+        REPOSITORY_MIRRORED,
       );
     });
 
     it('should abort when repo is empty', async () => {
       await expect(initFakeRepo({ empty: true })).rejects.toThrow(
-        REPOSITORY_EMPTY
+        REPOSITORY_EMPTY,
       );
     });
 
@@ -365,25 +365,28 @@ describe('modules/platform/gitea/index', () => {
             push: false,
             admin: false,
           },
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_ACCESS_FORBIDDEN);
     });
 
     it('should abort when repo has no available merge methods', async () => {
       await expect(initFakeRepo({ allow_rebase: false })).rejects.toThrow(
-        REPOSITORY_BLOCKED
+        REPOSITORY_BLOCKED,
       );
     });
 
     it('should fall back to merge method "rebase-merge"', async () => {
       expect(
-        await initFakeRepo({ allow_rebase: false, allow_rebase_explicit: true })
+        await initFakeRepo({
+          allow_rebase: false,
+          allow_rebase_explicit: true,
+        }),
       ).toMatchSnapshot();
     });
 
     it('should fall back to merge method "squash"', async () => {
       expect(
-        await initFakeRepo({ allow_rebase: false, allow_squash_merge: true })
+        await initFakeRepo({ allow_rebase: false, allow_squash_merge: true }),
       ).toMatchSnapshot();
     });
 
@@ -392,7 +395,7 @@ describe('modules/platform/gitea/index', () => {
         await initFakeRepo({
           allow_rebase: false,
           allow_merge_commits: true,
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -406,7 +409,7 @@ describe('modules/platform/gitea/index', () => {
       await gitea.initRepo(repoCfg);
 
       expect(gitvcs.initRepo).toHaveBeenCalledWith(
-        expect.objectContaining({ url: mockRepo.clone_url })
+        expect.objectContaining({ url: mockRepo.clone_url }),
       );
     });
 
@@ -421,7 +424,7 @@ describe('modules/platform/gitea/index', () => {
       await gitea.initRepo(repoCfg);
 
       expect(gitvcs.initRepo).toHaveBeenCalledWith(
-        expect.objectContaining({ url: mockRepo.clone_url })
+        expect.objectContaining({ url: mockRepo.clone_url }),
       );
     });
 
@@ -436,7 +439,7 @@ describe('modules/platform/gitea/index', () => {
       await gitea.initRepo(repoCfg);
 
       expect(gitvcs.initRepo).toHaveBeenCalledWith(
-        expect.objectContaining({ url: mockRepo.ssh_url })
+        expect.objectContaining({ url: mockRepo.ssh_url }),
       );
     });
 
@@ -450,7 +453,7 @@ describe('modules/platform/gitea/index', () => {
       };
 
       await expect(gitea.initRepo(repoCfg)).rejects.toThrow(
-        CONFIG_GIT_URL_UNAVAILABLE
+        CONFIG_GIT_URL_UNAVAILABLE,
       );
     });
 
@@ -467,7 +470,7 @@ describe('modules/platform/gitea/index', () => {
       expect(gitvcs.initRepo).toHaveBeenCalledWith(
         expect.objectContaining({
           url: `https://gitea.com/${mockRepo.full_name}.git`,
-        })
+        }),
       );
     });
 
@@ -483,7 +486,7 @@ describe('modules/platform/gitea/index', () => {
       };
 
       await expect(gitea.initRepo(repoCfg)).rejects.toThrow(
-        CONFIG_GIT_URL_UNAVAILABLE
+        CONFIG_GIT_URL_UNAVAILABLE,
       );
     });
 
@@ -509,7 +512,7 @@ describe('modules/platform/gitea/index', () => {
       expect(gitvcs.initRepo).toHaveBeenCalledWith(
         expect.objectContaining({
           url: `https://${token}@gitea.com/${mockRepo.full_name}.git`,
-        })
+        }),
       );
     });
 
@@ -532,7 +535,7 @@ describe('modules/platform/gitea/index', () => {
       const url = new URL(`${mockRepo.clone_url}`);
       url.username = token;
       expect(gitvcs.initRepo).toHaveBeenCalledWith(
-        expect.objectContaining({ url: url.toString() })
+        expect.objectContaining({ url: url.toString() }),
       );
     });
 
@@ -548,7 +551,7 @@ describe('modules/platform/gitea/index', () => {
       };
 
       await expect(gitea.initRepo(repoCfg)).rejects.toThrow(
-        CONFIG_GIT_URL_UNAVAILABLE
+        CONFIG_GIT_URL_UNAVAILABLE,
       );
     });
   });
@@ -576,7 +579,7 @@ describe('modules/platform/gitea/index', () => {
           state: 'success',
           context: 'some-context',
           description: 'some-description',
-        }
+        },
       );
     });
 
@@ -591,7 +594,7 @@ describe('modules/platform/gitea/index', () => {
           state: 'pending',
           context: 'some-context',
           description: 'some-description',
-        }
+        },
       );
     });
 
@@ -607,7 +610,7 @@ describe('modules/platform/gitea/index', () => {
           context: 'some-context',
           description: 'some-description',
           target_url: 'some-url',
-        }
+        },
       );
     });
 
@@ -625,7 +628,7 @@ describe('modules/platform/gitea/index', () => {
       helper.getCombinedCommitStatus.mockResolvedValueOnce(
         partial<CombinedCommitStatus>({
           worstStatus: state as CommitStatusType,
-        })
+        }),
       );
 
       return gitea.getBranchStatus('some-branch', true);
@@ -651,17 +654,17 @@ describe('modules/platform/gitea/index', () => {
       helper.getCombinedCommitStatus.mockRejectedValueOnce({ statusCode: 404 });
 
       await expect(gitea.getBranchStatus('some-branch', true)).rejects.toThrow(
-        REPOSITORY_CHANGED
+        REPOSITORY_CHANGED,
       );
     });
 
     it('should propagate any other errors', async () => {
       helper.getCombinedCommitStatus.mockRejectedValueOnce(
-        new Error('getCombinedCommitStatus()')
+        new Error('getCombinedCommitStatus()'),
       );
 
       await expect(gitea.getBranchStatus('some-branch', true)).rejects.toThrow(
-        'getCombinedCommitStatus()'
+        'getCombinedCommitStatus()',
       );
     });
 
@@ -697,7 +700,7 @@ describe('modules/platform/gitea/index', () => {
               created_at: '',
             },
           ],
-        })
+        }),
       );
       expect(await gitea.getBranchStatus('some-branch', false)).toBe('yellow');
     });
@@ -708,11 +711,11 @@ describe('modules/platform/gitea/index', () => {
       helper.getCombinedCommitStatus.mockResolvedValueOnce(
         partial<CombinedCommitStatus>({
           statuses: [],
-        })
+        }),
       );
 
       expect(
-        await gitea.getBranchStatusCheck('some-branch', 'some-context')
+        await gitea.getBranchStatusCheck('some-branch', 'some-context'),
       ).toBeNull();
     });
 
@@ -720,11 +723,11 @@ describe('modules/platform/gitea/index', () => {
       helper.getCombinedCommitStatus.mockResolvedValueOnce(
         partial<CombinedCommitStatus>({
           statuses: [partial<CommitStatus>({ context: 'other-context' })],
-        })
+        }),
       );
 
       expect(
-        await gitea.getBranchStatusCheck('some-branch', 'some-context')
+        await gitea.getBranchStatusCheck('some-branch', 'some-context'),
       ).toBeNull();
     });
 
@@ -736,11 +739,11 @@ describe('modules/platform/gitea/index', () => {
               context: 'some-context',
             }),
           ],
-        })
+        }),
       );
 
       expect(
-        await gitea.getBranchStatusCheck('some-branch', 'some-context')
+        await gitea.getBranchStatusCheck('some-branch', 'some-context'),
       ).toBe('yellow');
     });
 
@@ -753,11 +756,11 @@ describe('modules/platform/gitea/index', () => {
               context: 'some-context',
             }),
           ],
-        })
+        }),
       );
 
       expect(
-        await gitea.getBranchStatusCheck('some-branch', 'some-context')
+        await gitea.getBranchStatusCheck('some-branch', 'some-context'),
       ).toBe('green');
     });
   });
@@ -776,7 +779,7 @@ describe('modules/platform/gitea/index', () => {
       helper.getCurrentUser.mockResolvedValueOnce(mockUser);
 
       expect(
-        await gitea.initPlatform({ token: 'some-token' })
+        await gitea.initPlatform({ token: 'some-token' }),
       ).toMatchSnapshot();
 
       await initFakeRepo();
@@ -830,7 +833,7 @@ describe('modules/platform/gitea/index', () => {
               email: 'renovate@whitesourcesoftware.com',
             }),
           },
-        })
+        }),
       );
       await initFakeRepo();
 
@@ -913,7 +916,7 @@ describe('modules/platform/gitea/index', () => {
         await gitea.findPr({
           branchName: mockPR.head.label,
           state: `!${mockPR.state as PrState}` as never, // wrong argument being passed intentionally
-        })
+        }),
       ).toBeNull();
     });
 
@@ -1103,7 +1106,7 @@ describe('modules/platform/gitea/index', () => {
       expect(helper.updatePR).toHaveBeenCalledWith(
         mockRepo.full_name,
         mockNewPR.number,
-        { title: 'new-title', body: 'new-body' }
+        { title: 'new-title', body: 'new-body' },
       );
     });
 
@@ -1117,7 +1120,7 @@ describe('modules/platform/gitea/index', () => {
           targetBranch: 'master',
           prTitle: mockNewPR.title,
           prBody: mockNewPR.body,
-        })
+        }),
       ).rejects.toThrow();
     });
 
@@ -1150,7 +1153,7 @@ describe('modules/platform/gitea/index', () => {
         {
           Do: 'rebase',
           merge_when_checks_succeed: true,
-        }
+        },
       );
     });
 
@@ -1184,7 +1187,7 @@ describe('modules/platform/gitea/index', () => {
         {
           Do: 'rebase',
           merge_when_checks_succeed: true,
-        }
+        },
       );
     });
 
@@ -1300,7 +1303,7 @@ describe('modules/platform/gitea/index', () => {
         await gitea.mergePr({
           branchName: 'some-branch',
           id: 1,
-        })
+        }),
       ).toBe(true);
       expect(helper.mergePR).toHaveBeenCalledTimes(1);
       expect(helper.mergePR).toHaveBeenCalledWith(mockRepo.full_name, 1, {
@@ -1317,7 +1320,7 @@ describe('modules/platform/gitea/index', () => {
           branchName: 'some-branch',
           id: 1,
           strategy: 'squash',
-        })
+        }),
       ).toBe(false);
     });
   });
@@ -1330,7 +1333,7 @@ describe('modules/platform/gitea/index', () => {
 
       expect(await gitea.getIssue?.(mockIssue.number)).toHaveProperty(
         'number',
-        mockIssue.number
+        mockIssue.number,
       );
     });
   });
@@ -1344,7 +1347,7 @@ describe('modules/platform/gitea/index', () => {
 
       expect(await gitea.findIssue(mockIssue.title)).toHaveProperty(
         'number',
-        mockIssue.number
+        mockIssue.number,
       );
     });
 
@@ -1440,7 +1443,7 @@ describe('modules/platform/gitea/index', () => {
           body: closedIssue.body,
           state: closedIssue.state,
           title: 'closed-issue',
-        }
+        },
       );
     });
 
@@ -1509,7 +1512,7 @@ describe('modules/platform/gitea/index', () => {
         mockIssue.number,
         {
           labels: [1, 3],
-        }
+        },
       );
     });
 
@@ -1548,7 +1551,7 @@ describe('modules/platform/gitea/index', () => {
         mockIssue.number,
         {
           labels: [1, 3],
-        }
+        },
       );
     });
 
@@ -1574,7 +1577,7 @@ describe('modules/platform/gitea/index', () => {
           body: closedIssue.body,
           state: 'open',
           title: 'closed-issue',
-        }
+        },
       );
     });
 
@@ -1596,7 +1599,7 @@ describe('modules/platform/gitea/index', () => {
 
     it('should close all open duplicate issues except first one when updating', async () => {
       const duplicates = mockIssues.filter(
-        (i) => i.title === 'duplicate-issue'
+        (i) => i.title === 'duplicate-issue',
       );
       const firstDuplicate = duplicates[0];
       helper.searchIssues.mockResolvedValueOnce(duplicates);
@@ -1616,7 +1619,7 @@ describe('modules/platform/gitea/index', () => {
           // eslint-disable-next-line jest/no-conditional-expect
           expect(helper.closeIssue).toHaveBeenCalledWith(
             mockRepo.full_name,
-            issue.number
+            issue.number,
           );
         }
       }
@@ -1664,7 +1667,7 @@ describe('modules/platform/gitea/index', () => {
       expect(helper.closeIssue).toHaveBeenCalledTimes(1);
       expect(helper.closeIssue).toHaveBeenCalledWith(
         mockRepo.full_name,
-        mockIssue.number
+        mockIssue.number,
       );
     });
   });
@@ -1681,7 +1684,7 @@ describe('modules/platform/gitea/index', () => {
       expect(helper.unassignLabel).toHaveBeenCalledWith(
         mockRepo.full_name,
         42,
-        mockLabel.id
+        mockLabel.id,
       );
     });
 
@@ -1721,7 +1724,7 @@ describe('modules/platform/gitea/index', () => {
       expect(helper.createComment).toHaveBeenCalledWith(
         mockRepo.full_name,
         1,
-        body
+        body,
       );
     });
 
@@ -1742,7 +1745,7 @@ describe('modules/platform/gitea/index', () => {
       expect(helper.createComment).toHaveBeenCalledWith(
         mockRepo.full_name,
         1,
-        'other-content'
+        'other-content',
       );
     });
 
@@ -1764,7 +1767,7 @@ describe('modules/platform/gitea/index', () => {
       expect(helper.updateComment).toHaveBeenCalledWith(
         mockRepo.full_name,
         13,
-        body
+        body,
       );
     });
 
@@ -1857,7 +1860,7 @@ describe('modules/platform/gitea/index', () => {
 
       expect(await gitea.getBranchPr(mockPR.head.label)).toHaveProperty(
         'number',
-        mockPR.number
+        mockPR.number,
       );
     });
 
@@ -1887,7 +1890,7 @@ describe('modules/platform/gitea/index', () => {
       await initFakePlatform();
       const mockPR = mockPRs[0];
       await expect(
-        gitea.addReviewers(mockPR.number, ['me', 'you'])
+        gitea.addReviewers(mockPR.number, ['me', 'you']),
       ).resolves.not.toThrow();
 
       expect(helper.requestPrReviewers).toHaveBeenCalledTimes(1);
@@ -1898,7 +1901,7 @@ describe('modules/platform/gitea/index', () => {
       expect.assertions(3);
       const mockPR = mockPRs[0];
       await expect(
-        gitea.addReviewers(mockPR.number, ['me', 'you'])
+        gitea.addReviewers(mockPR.number, ['me', 'you']),
       ).resolves.not.toThrow();
 
       expect(helper.requestPrReviewers).not.toHaveBeenCalled();
@@ -1911,7 +1914,7 @@ describe('modules/platform/gitea/index', () => {
       await initFakePlatform();
       helper.requestPrReviewers.mockRejectedValueOnce(null);
       await expect(
-        gitea.addReviewers(mockPR.number, ['me', 'you'])
+        gitea.addReviewers(mockPR.number, ['me', 'you']),
       ).resolves.not.toThrow();
       expect(logger.warn).toHaveBeenCalled();
     });
@@ -1923,7 +1926,7 @@ describe('modules/platform/gitea/index', () => {
         '[#123](../pull/123) [#124](../pull/124) [#125](../pull/125)';
 
       expect(gitea.massageMarkdown(body)).toBe(
-        '[#123](pulls/123) [#124](pulls/124) [#125](pulls/125)'
+        '[#123](pulls/123) [#124](pulls/124) [#125](pulls/125)',
       );
     });
   });
diff --git a/lib/modules/platform/gitea/index.ts b/lib/modules/platform/gitea/index.ts
index 89729341863b109a58df461cae26c51644ad8e43..419146cb85b055b0d6b9a56de2b2672a511fda4a 100644
--- a/lib/modules/platform/gitea/index.ts
+++ b/lib/modules/platform/gitea/index.ts
@@ -102,7 +102,7 @@ function toRenovatePR(data: PR): Pr | null {
     !data.head?.repo?.full_name
   ) {
     logger.trace(
-      `Skipping Pull Request #${data.number} due to missing base and/or head branch`
+      `Skipping Pull Request #${data.number} due to missing base and/or head branch`,
     );
     return null;
   }
@@ -150,14 +150,14 @@ function matchesState(actual: string, expected: string): boolean {
 
 function findCommentByTopic(
   comments: Comment[],
-  topic: string
+  topic: string,
 ): Comment | null {
   return comments.find((c) => c.body.startsWith(`### ${topic}\n\n`)) ?? null;
 }
 
 function findCommentByContent(
   comments: Comment[],
-  content: string
+  content: string,
 ): Comment | null {
   return comments.find((c) => c.body.trim() === content) ?? null;
 }
@@ -188,7 +188,7 @@ function getLabelList(): Promise<Label[]> {
       });
 
     config.labelList = Promise.all([repoLabels, orgLabels]).then((labels) =>
-      ([] as Label[]).concat(...labels)
+      ([] as Label[]).concat(...labels),
     );
   }
 
@@ -229,7 +229,7 @@ const platform: Platform = {
     } catch (err) {
       logger.debug(
         { err },
-        'Error authenticating with Gitea. Check your token'
+        'Error authenticating with Gitea. Check your token',
       );
       throw new Error('Init: Authentication failure');
     }
@@ -243,7 +243,7 @@ const platform: Platform = {
   async getRawFile(
     fileName: string,
     repoName?: string,
-    branchOrTag?: string
+    branchOrTag?: string,
   ): Promise<string | null> {
     const repo = repoName ?? config.repository;
     const contents = await helper.getRepoContents(repo, fileName, branchOrTag);
@@ -253,7 +253,7 @@ const platform: Platform = {
   async getJsonFile(
     fileName: string,
     repoName?: string,
-    branchOrTag?: string
+    branchOrTag?: string,
   ): Promise<any> {
     // TODO #22198
     const raw = await platform.getRawFile(fileName, repoName, branchOrTag);
@@ -282,19 +282,19 @@ const platform: Platform = {
     // Ensure appropriate repository state and permissions
     if (repo.archived) {
       logger.debug(
-        'Repository is archived - throwing error to abort renovation'
+        'Repository is archived - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_ARCHIVED);
     }
     if (repo.mirror) {
       logger.debug(
-        'Repository is a mirror - throwing error to abort renovation'
+        'Repository is a mirror - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_MIRRORED);
     }
     if (!repo.permissions.pull || !repo.permissions.push) {
       logger.debug(
-        'Repository does not permit pull and push - throwing error to abort renovation'
+        'Repository does not permit pull and push - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_ACCESS_FORBIDDEN);
     }
@@ -313,7 +313,7 @@ const platform: Platform = {
       config.mergeMethod = 'merge';
     } else {
       logger.debug(
-        'Repository has no allowed merge methods - throwing error to abort renovation'
+        'Repository has no allowed merge methods - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_BLOCKED);
     }
@@ -392,7 +392,7 @@ const platform: Platform = {
 
   async getBranchStatus(
     branchName: string,
-    internalChecksAsSuccess: boolean
+    internalChecksAsSuccess: boolean,
   ): Promise<BranchStatus> {
     let ccs: CombinedCommitStatus;
     try {
@@ -400,7 +400,7 @@ const platform: Platform = {
     } catch (err) {
       if (err.statusCode === 404) {
         logger.debug(
-          'Received 404 when checking branch status, assuming branch deletion'
+          'Received 404 when checking branch status, assuming branch deletion',
         );
         throw new Error(REPOSITORY_CHANGED);
       }
@@ -416,7 +416,7 @@ const platform: Platform = {
       ccs.statuses.every((status) => status.context?.startsWith('renovate/'))
     ) {
       logger.debug(
-        'Successful checks are all internal renovate/ checks, so returning "pending" branch status'
+        'Successful checks are all internal renovate/ checks, so returning "pending" branch status',
       );
       return 'yellow';
     }
@@ -426,11 +426,11 @@ const platform: Platform = {
 
   async getBranchStatusCheck(
     branchName: string,
-    context: string
+    context: string,
   ): Promise<BranchStatus | null> {
     const ccs = await helper.getCombinedCommitStatus(
       config.repository,
-      branchName
+      branchName,
     );
     const cs = ccs.statuses.find((s) => s.context === context);
     if (!cs) {
@@ -442,7 +442,7 @@ const platform: Platform = {
     }
     logger.warn(
       { check: cs },
-      'Could not map Gitea status value to Renovate status'
+      'Could not map Gitea status value to Renovate status',
     );
     return 'yellow';
   },
@@ -499,7 +499,7 @@ const platform: Platform = {
         p.sourceRepo === config.repository &&
         p.sourceBranch === branchName &&
         matchesState(p.state, state) &&
-        (!title || p.title === title)
+        (!title || p.title === title),
     );
 
     if (pr) {
@@ -549,18 +549,18 @@ const platform: Platform = {
 
             logger.debug(
               { prNumber: gpr.number },
-              'Gitea-native automerge: success'
+              'Gitea-native automerge: success',
             );
           } catch (err) {
             logger.warn(
               { err, prNumber: gpr.number },
-              'Gitea-native automerge: fail'
+              'Gitea-native automerge: fail',
             );
           }
         } else {
           logger.debug(
             { prNumber: gpr.number },
-            'Gitea-native automerge: not supported on this version of Gitea. Use 1.17.0 or newer.'
+            'Gitea-native automerge: not supported on this version of Gitea. Use 1.17.0 or newer.',
           );
         }
       }
@@ -581,7 +581,7 @@ const platform: Platform = {
       // would cause a HTTP 409 conflict error, which we hereby gracefully handle.
       if (err.statusCode === 409) {
         logger.warn(
-          `Attempting to gracefully recover from 409 Conflict response in createPr(${title}, ${sourceBranch})`
+          `Attempting to gracefully recover from 409 Conflict response in createPr(${title}, ${sourceBranch})`,
         );
 
         // Refresh cached PR list and search for pull request with matching information
@@ -595,7 +595,7 @@ const platform: Platform = {
         if (pr?.bodyStruct) {
           if (pr.title !== title || pr.bodyStruct.hash !== hashBody(body)) {
             logger.debug(
-              `Recovered from 409 Conflict, but PR for ${sourceBranch} is outdated. Updating...`
+              `Recovered from 409 Conflict, but PR for ${sourceBranch} is outdated. Updating...`,
             );
             await platform.updatePr({
               number: pr.number,
@@ -606,7 +606,7 @@ const platform: Platform = {
             pr.bodyStruct = getPrBodyStruct(body);
           } else {
             logger.debug(
-              `Recovered from 409 Conflict and PR for ${sourceBranch} is up-to-date`
+              `Recovered from 409 Conflict and PR for ${sourceBranch} is up-to-date`,
             );
           }
 
@@ -686,7 +686,7 @@ const platform: Platform = {
   async findIssue(title: string): Promise<Issue | null> {
     const issueList = await platform.getIssueList();
     const issue = issueList.find(
-      (i) => i.state === 'open' && i.title === title
+      (i) => i.state === 'open' && i.title === title,
     );
 
     if (!issue) {
@@ -718,7 +718,7 @@ const platform: Platform = {
 
       const labels = Array.isArray(labelNames)
         ? (await Promise.all(labelNames.map(lookupLabelByName))).filter(
-            is.number
+            is.number,
           )
         : undefined;
 
@@ -758,7 +758,7 @@ const platform: Platform = {
         ) {
           logger.debug(
             // TODO: types (#22198)
-            `Issue #${activeIssue.number!} is open and up to date - nothing to do`
+            `Issue #${activeIssue.number!} is open and up to date - nothing to do`,
           );
           return null;
         }
@@ -774,12 +774,12 @@ const platform: Platform = {
             body,
             title,
             state: shouldReOpen ? 'open' : (activeIssue.state as IssueState),
-          }
+          },
         );
 
         // Test whether the issues need to be updated
         const existingLabelIds = (existingIssue.labels ?? []).map(
-          (label) => label.id
+          (label) => label.id,
         );
         if (
           labels &&
@@ -793,7 +793,7 @@ const platform: Platform = {
             activeIssue.number!,
             {
               labels,
-            }
+            },
           );
         }
 
@@ -866,7 +866,7 @@ const platform: Platform = {
         comment = await helper.createComment(config.repository, issue, body);
         logger.info(
           { repository: config.repository, issue, comment: comment.id },
-          'Comment added'
+          'Comment added',
         );
       } else if (comment.body === body) {
         logger.debug(`Comment #${comment.id} is already up-to-date`);
@@ -874,7 +874,7 @@ const platform: Platform = {
         await helper.updateComment(config.repository, comment.id, body);
         logger.debug(
           { repository: config.repository, issue, comment: comment.id },
-          'Comment updated'
+          'Comment updated',
         );
       }
 
@@ -886,7 +886,7 @@ const platform: Platform = {
   },
 
   async ensureCommentRemoval(
-    deleteConfig: EnsureCommentRemovalConfig
+    deleteConfig: EnsureCommentRemovalConfig,
   ): Promise<void> {
     const { number: issue } = deleteConfig;
     const key =
@@ -915,7 +915,7 @@ const platform: Platform = {
     } catch (err) {
       logger.warn(
         { err, issue, config: deleteConfig },
-        'Error deleting comment'
+        'Error deleting comment',
       );
     }
   },
@@ -928,7 +928,7 @@ const platform: Platform = {
 
   async addAssignees(number: number, assignees: string[]): Promise<void> {
     logger.debug(
-      `Updating assignees '${assignees?.join(', ')}' on Issue #${number}`
+      `Updating assignees '${assignees?.join(', ')}' on Issue #${number}`,
     );
     await helper.updateIssue(config.repository, number, {
       assignees,
@@ -940,7 +940,7 @@ const platform: Platform = {
     if (semver.lt(defaults.version, '1.14.0')) {
       logger.debug(
         { version: defaults.version },
-        'Adding reviewer not yet supported.'
+        'Adding reviewer not yet supported.',
       );
       return;
     }
diff --git a/lib/modules/platform/gitea/utils.spec.ts b/lib/modules/platform/gitea/utils.spec.ts
index 3be58ccf579cef6b17e1f926171885174174dd59..60806698f26296ddddf2671de0ac7afdd096dc59 100644
--- a/lib/modules/platform/gitea/utils.spec.ts
+++ b/lib/modules/platform/gitea/utils.spec.ts
@@ -19,19 +19,19 @@ describe('modules/platform/gitea/utils', () => {
 
   it('trimTrailingApiPath', () => {
     expect(trimTrailingApiPath('https://gitea.renovatebot.com/api/v1')).toBe(
-      'https://gitea.renovatebot.com/'
+      'https://gitea.renovatebot.com/',
     );
     expect(trimTrailingApiPath('https://gitea.renovatebot.com/api/v1/')).toBe(
-      'https://gitea.renovatebot.com/'
+      'https://gitea.renovatebot.com/',
     );
     expect(trimTrailingApiPath('https://gitea.renovatebot.com/')).toBe(
-      'https://gitea.renovatebot.com/'
+      'https://gitea.renovatebot.com/',
     );
     expect(trimTrailingApiPath('https://gitea.renovatebot.com')).toBe(
-      'https://gitea.renovatebot.com'
+      'https://gitea.renovatebot.com',
     );
     expect(
-      trimTrailingApiPath('https://gitea.renovatebot.com/api/gitea/api/v1')
+      trimTrailingApiPath('https://gitea.renovatebot.com/api/gitea/api/v1'),
     ).toBe('https://gitea.renovatebot.com/api/gitea/');
   });
 
@@ -39,7 +39,7 @@ describe('modules/platform/gitea/utils', () => {
     it('should abort when endpoint is not valid', () => {
       expect.assertions(1);
       expect(() => getRepoUrl(mockRepo, 'endpoint', 'abc')).toThrow(
-        CONFIG_GIT_URL_UNAVAILABLE
+        CONFIG_GIT_URL_UNAVAILABLE,
       );
     });
   });
diff --git a/lib/modules/platform/gitea/utils.ts b/lib/modules/platform/gitea/utils.ts
index c2bc7af0065c8a046cd4f64fe8cb0226c5622eeb..1dedc3d206a1c59ac616db80b814cd1174eb0866 100644
--- a/lib/modules/platform/gitea/utils.ts
+++ b/lib/modules/platform/gitea/utils.ts
@@ -18,7 +18,7 @@ export function trimTrailingApiPath(url: string): string {
 export function getRepoUrl(
   repo: Repo,
   gitUrl: GitUrlOption | undefined,
-  endpoint: string
+  endpoint: string,
 ): string {
   if (gitUrl === 'ssh') {
     if (!repo.ssh_url) {
@@ -43,7 +43,7 @@ export function getRepoUrl(
     url.pathname = `${url.pathname}${repo.full_name}.git`;
     logger.debug(
       { url: url.toString() },
-      'using URL based on configured endpoint'
+      'using URL based on configured endpoint',
     );
     return url.toString();
   }
@@ -62,7 +62,7 @@ export function getRepoUrl(
 }
 
 export function getMergeMethod(
-  strategy: MergeStrategy | undefined
+  strategy: MergeStrategy | undefined,
 ): PRMergeMethod | null {
   switch (strategy) {
     case 'fast-forward':
diff --git a/lib/modules/platform/github/branch.spec.ts b/lib/modules/platform/github/branch.spec.ts
index 3d45c7bccfa3c52b1483147f293495d9c53edd48..ce547c752d9cd1f513cecf7e17034865fc9864ed 100644
--- a/lib/modules/platform/github/branch.spec.ts
+++ b/lib/modules/platform/github/branch.spec.ts
@@ -35,9 +35,9 @@ describe('modules/platform/github/branch', () => {
       .reply(200);
 
     await expect(
-      remoteBranchExists('my/repo', 'renovate/foobar')
+      remoteBranchExists('my/repo', 'renovate/foobar'),
     ).rejects.toThrow(
-      `Trying to create a branch 'renovate/foobar' while it's the part of nested branch`
+      `Trying to create a branch 'renovate/foobar' while it's the part of nested branch`,
     );
   });
 
@@ -48,7 +48,7 @@ describe('modules/platform/github/branch', () => {
       .reply(500, { message: 'Something went wrong' });
 
     await expect(
-      remoteBranchExists('my/repo', 'renovate/foobar')
+      remoteBranchExists('my/repo', 'renovate/foobar'),
     ).rejects.toThrow('external-host-error');
   });
 });
diff --git a/lib/modules/platform/github/branch.ts b/lib/modules/platform/github/branch.ts
index 8f34b58a3c9e17754a141bf781bba7089e928cc0..b8fff7d695729770d24ab914721909d04fbfa710 100644
--- a/lib/modules/platform/github/branch.ts
+++ b/lib/modules/platform/github/branch.ts
@@ -6,7 +6,7 @@ function headRef(repo: string, branchName: string): Promise<boolean> {
   return Result.wrap(
     githubApi.headJson(`/repos/${repo}/git/refs/heads/${branchName}`, {
       memCache: false,
-    })
+    }),
   )
     .transform(() => true)
     .catch((err) => {
@@ -21,7 +21,7 @@ function headRef(repo: string, branchName: string): Promise<boolean> {
 
 export async function remoteBranchExists(
   repo: string,
-  branchName: string
+  branchName: string,
 ): Promise<boolean> {
   const refNested = `${branchName}/`;
   const isNested = await headRef(repo, refNested);
diff --git a/lib/modules/platform/github/index.spec.ts b/lib/modules/platform/github/index.spec.ts
index 196168cbc234bce09d5ee2bf24bd9d03f55d701e..ab715d2865f706a5c265d758c1c841e1b4ecb93a 100644
--- a/lib/modules/platform/github/index.spec.ts
+++ b/lib/modules/platform/github/index.spec.ts
@@ -40,7 +40,7 @@ describe('modules/platform/github/index', () => {
 
     git.isBranchBehindBase.mockResolvedValue(true);
     git.getBranchCommit.mockReturnValue(
-      '0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+      '0d9c7726c3d628b7e28af234595cfd20febdbf8e',
     );
     hostRules.find.mockReturnValue({
       token: '123test',
@@ -54,7 +54,7 @@ describe('modules/platform/github/index', () => {
   describe('initPlatform()', () => {
     it('should throw if no token', async () => {
       await expect(github.initPlatform({})).rejects.toThrow(
-        'Init: You must configure a GitHub token'
+        'Init: You must configure a GitHub token',
       );
     });
 
@@ -67,9 +67,9 @@ describe('modules/platform/github/index', () => {
         github.initPlatform({
           endpoint: 'https://ghe.renovatebot.com',
           token: 'github_pat_XXXXXX',
-        })
+        }),
       ).rejects.toThrow(
-        'Init: Fine-grained Personal Access Tokens do not support GitHub Enterprise Server API version <3.10 and cannot be used with Renovate.'
+        'Init: Fine-grained Personal Access Tokens do not support GitHub Enterprise Server API version <3.10 and cannot be used with Renovate.',
       );
     });
 
@@ -79,9 +79,9 @@ describe('modules/platform/github/index', () => {
         github.initPlatform({
           endpoint: 'https://ghe.renovatebot.com',
           token: 'github_pat_XXXXXX',
-        })
+        }),
       ).rejects.toThrow(
-        'Init: Fine-grained Personal Access Tokens do not support GitHub Enterprise Server API version <3.10 and cannot be used with Renovate.'
+        'Init: Fine-grained Personal Access Tokens do not support GitHub Enterprise Server API version <3.10 and cannot be used with Renovate.',
       );
     });
 
@@ -98,7 +98,7 @@ describe('modules/platform/github/index', () => {
         await github.initPlatform({
           endpoint: 'https://ghe.renovatebot.com',
           token: 'github_pat_XXXXXX',
-        })
+        }),
       ).toEqual({
         endpoint: 'https://ghe.renovatebot.com/',
         gitAuthor: 'undefined <user@domain.com>',
@@ -142,7 +142,7 @@ describe('modules/platform/github/index', () => {
           token: '123test',
           username: 'renovate-bot',
           gitAuthor: 'renovate@whitesourcesoftware.com',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -175,7 +175,7 @@ describe('modules/platform/github/index', () => {
           data: { viewer: { login: 'my-app[bot]', databaseId: 12345 } },
         });
       expect(
-        await github.initPlatform({ token: 'x-access-token:ghs_123test' })
+        await github.initPlatform({ token: 'x-access-token:ghs_123test' }),
       ).toEqual({
         endpoint: 'https://api.github.com/',
         gitAuthor: 'my-app[bot] <12345+my-app[bot]@users.noreply.github.com>',
@@ -255,7 +255,7 @@ describe('modules/platform/github/index', () => {
     it('should throw error when cant request App information on default endpoint with GitHub App', async () => {
       httpMock.scope(githubApiHost).post('/graphql').reply(200, {});
       await expect(
-        github.initPlatform({ token: 'x-access-token:ghs_123test' })
+        github.initPlatform({ token: 'x-access-token:ghs_123test' }),
       ).rejects.toThrowWithMessage(Error, 'Init: Authentication failure');
     });
 
@@ -276,7 +276,7 @@ describe('modules/platform/github/index', () => {
         await github.initPlatform({
           endpoint: 'https://ghe.renovatebot.com',
           token: 'x-access-token:ghs_123test',
-        })
+        }),
       ).toEqual({
         endpoint: 'https://ghe.renovatebot.com/',
         gitAuthor:
@@ -306,7 +306,7 @@ describe('modules/platform/github/index', () => {
         await github.initPlatform({
           endpoint: 'https://ghe.renovatebot.com',
           token: '123test',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -330,7 +330,7 @@ describe('modules/platform/github/index', () => {
         await github.initPlatform({
           endpoint: 'https://ghe.renovatebot.com',
           token: '123test',
-        })
+        }),
       ).toMatchSnapshot();
     });
   });
@@ -451,7 +451,7 @@ describe('modules/platform/github/index', () => {
   function initRepoMock(
     scope: httpMock.Scope,
     repository: string,
-    other: any = {}
+    other: any = {},
   ): void {
     scope.post(`/graphql`).reply(200, {
       data: {
@@ -481,7 +481,7 @@ describe('modules/platform/github/index', () => {
     repository: string,
     forkExisted: boolean,
     forkResult = 200,
-    forkDefaultBranch = 'master'
+    forkDefaultBranch = 'master',
   ): void {
     scope
       // repo info
@@ -517,7 +517,7 @@ describe('modules/platform/github/index', () => {
                 default_branch: forkDefaultBranch,
               },
             ]
-          : []
+          : [],
       );
   }
 
@@ -556,7 +556,7 @@ describe('modules/platform/github/index', () => {
         github.initRepo({
           repository: 'some/repo',
           forkToken: 'true',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_CANNOT_FORK);
     });
 
@@ -568,7 +568,7 @@ describe('modules/platform/github/index', () => {
         github.initRepo({
           repository: 'some/repo',
           forkToken: 'true',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_CANNOT_FORK);
     });
 
@@ -580,7 +580,7 @@ describe('modules/platform/github/index', () => {
         github.initRepo({
           repository: 'some/repo',
           forkToken: 'true',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_CANNOT_FORK);
     });
 
@@ -597,7 +597,7 @@ describe('modules/platform/github/index', () => {
           repository: 'some/repo',
           forkToken: 'true',
           forkOrg: 'forked',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_CANNOT_FORK);
     });
 
@@ -725,14 +725,14 @@ describe('modules/platform/github/index', () => {
           },
         });
       await expect(
-        github.initRepo({ repository: 'some/repo' })
+        github.initRepo({ repository: 'some/repo' }),
       ).rejects.toThrow();
     });
 
     it('throws not-found', async () => {
       httpMock.scope(githubApiHost).post(`/graphql`).reply(404);
       await expect(
-        github.initRepo({ repository: 'some/repo' })
+        github.initRepo({ repository: 'some/repo' }),
       ).rejects.toThrow(REPOSITORY_NOT_FOUND);
     });
 
@@ -749,7 +749,7 @@ describe('modules/platform/github/index', () => {
           ],
         });
       await expect(
-        github.initRepo({ repository: 'some/repo' })
+        github.initRepo({ repository: 'some/repo' }),
       ).rejects.toThrow(PLATFORM_UNKNOWN_ERROR);
     });
 
@@ -766,7 +766,7 @@ describe('modules/platform/github/index', () => {
           ],
         });
       await expect(
-        github.initRepo({ repository: 'some/repo' })
+        github.initRepo({ repository: 'some/repo' }),
       ).rejects.toThrow(PLATFORM_RATE_LIMIT_EXCEEDED);
     });
 
@@ -789,7 +789,7 @@ describe('modules/platform/github/index', () => {
           },
         });
       await expect(
-        github.initRepo({ repository: 'some/repo' })
+        github.initRepo({ repository: 'some/repo' }),
       ).rejects.toThrow(REPOSITORY_RENAMED);
     });
 
@@ -874,7 +874,7 @@ describe('modules/platform/github/index', () => {
         .get('/repos/undefined/branches/undefined/protection')
         .reply(401);
       await expect(
-        github.getRepoForceRebase()
+        github.getRepoForceRebase(),
       ).rejects.toThrowErrorMatchingSnapshot();
     });
   });
@@ -1035,7 +1035,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, []);
 
@@ -1049,7 +1049,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -1082,7 +1082,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -1133,7 +1133,7 @@ describe('modules/platform/github/index', () => {
       GlobalConfig.set({ dryRun: 'full' });
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -1149,7 +1149,7 @@ describe('modules/platform/github/index', () => {
 
       await expect(github.getBranchPr('somebranch')).resolves.toBeNull();
       expect(logger.logger.info).toHaveBeenCalledWith(
-        'DRY-RUN: Would try to reopen autoclosed PR'
+        'DRY-RUN: Would try to reopen autoclosed PR',
       );
     });
 
@@ -1158,7 +1158,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -1185,7 +1185,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -1218,7 +1218,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -1421,7 +1421,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           {
@@ -1440,7 +1440,7 @@ describe('modules/platform/github/index', () => {
       await github.initRepo({ repository: 'some/repo' });
       const res = await github.getBranchStatusCheck(
         'renovate/future_branch',
-        'context-2'
+        'context-2',
       );
       expect(res).toBe('yellow');
     });
@@ -1450,7 +1450,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           {
@@ -1476,7 +1476,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           {
@@ -1495,7 +1495,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           {
@@ -1511,7 +1511,7 @@ describe('modules/platform/github/index', () => {
           description: 'some-description',
           state: 'yellow',
           url: 'some-url',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1520,7 +1520,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           {
@@ -1537,13 +1537,13 @@ describe('modules/platform/github/index', () => {
           },
         ])
         .post(
-          '/repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/repos/some/repo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200)
         .get('/repos/some/repo/commits/some-branch/status')
         .reply(200, {})
         .get(
-          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/repos/some/repo/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, {});
 
@@ -1555,7 +1555,7 @@ describe('modules/platform/github/index', () => {
           description: 'some-description',
           state: 'green',
           url: 'some-url',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -2125,7 +2125,7 @@ describe('modules/platform/github/index', () => {
       scope.post('/repos/some/repo/issues/42/assignees').reply(200);
       await github.initRepo({ repository: 'some/repo' });
       await expect(
-        github.addAssignees(42, ['someuser', 'someotheruser'])
+        github.addAssignees(42, ['someuser', 'someotheruser']),
       ).toResolve();
     });
   });
@@ -2137,7 +2137,7 @@ describe('modules/platform/github/index', () => {
       scope.post('/repos/some/repo/pulls/42/requested_reviewers').reply(200);
       await github.initRepo({ repository: 'some/repo' });
       await expect(
-        github.addReviewers(42, ['someuser', 'someotheruser', 'team:someteam'])
+        github.addReviewers(42, ['someuser', 'someotheruser', 'team:someteam']),
       ).toResolve();
     });
   });
@@ -2158,7 +2158,7 @@ describe('modules/platform/github/index', () => {
           number: 42,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -2186,7 +2186,7 @@ describe('modules/platform/github/index', () => {
           number: 2499,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -2205,7 +2205,7 @@ describe('modules/platform/github/index', () => {
           number: 42,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -2222,7 +2222,7 @@ describe('modules/platform/github/index', () => {
           number: 42,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -2239,7 +2239,7 @@ describe('modules/platform/github/index', () => {
           number: 42,
           topic: null,
           content: '!merge',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -2260,7 +2260,7 @@ describe('modules/platform/github/index', () => {
           type: 'by-topic',
           number: 42,
           topic: 'some-subject',
-        })
+        }),
       ).toResolve();
     });
 
@@ -2279,7 +2279,7 @@ describe('modules/platform/github/index', () => {
           type: 'by-content',
           number: 42,
           content: 'some-content',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -2289,7 +2289,7 @@ describe('modules/platform/github/index', () => {
       const scope = httpMock
         .scope(githubApiHost)
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2332,7 +2332,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2361,7 +2361,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2386,7 +2386,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2502,7 +2502,7 @@ describe('modules/platform/github/index', () => {
           .post(
             '/repos/some/repo/pulls',
             // Ensure the `maintainer_can_modify` option is set in the REST API request.
-            (body) => body.maintainer_can_modify === true
+            (body) => body.maintainer_can_modify === true,
           )
           .reply(200, {
             number: 123,
@@ -2526,7 +2526,7 @@ describe('modules/platform/github/index', () => {
           .post(
             '/repos/some/repo/pulls',
             // Ensure the `maintainer_can_modify` option is `false` in the REST API request.
-            (body) => body.maintainer_can_modify === true
+            (body) => body.maintainer_can_modify === true,
           )
           .reply(200, {
             number: 123,
@@ -2547,7 +2547,7 @@ describe('modules/platform/github/index', () => {
           .post(
             '/repos/some/repo/pulls',
             // Ensure the `maintainer_can_modify` option is `false` in the REST API request.
-            (body) => body.maintainer_can_modify === false
+            (body) => body.maintainer_can_modify === false,
           )
           .reply(200, {
             number: 123,
@@ -2702,7 +2702,7 @@ describe('modules/platform/github/index', () => {
         expect(logger.logger.debug).toHaveBeenNthCalledWith(
           10,
           { prNumber: 123 },
-          'GitHub-native automerge: not supported on this version of GHE. Use 3.3.0 or newer.'
+          'GitHub-native automerge: not supported on this version of GHE. Use 3.3.0 or newer.',
         );
       });
 
@@ -2749,7 +2749,7 @@ describe('modules/platform/github/index', () => {
 
         expect(logger.logger.debug).toHaveBeenNthCalledWith(
           11,
-          'GitHub-native automerge: success...PrNo: 123'
+          'GitHub-native automerge: success...PrNo: 123',
         );
       });
 
@@ -2807,7 +2807,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2850,7 +2850,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2875,7 +2875,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -2901,7 +2901,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [])
         .get('/repos/some/repo/pulls/1234')
@@ -2916,7 +2916,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [])
         .get('/repos/some/repo/pulls/1234')
@@ -2942,7 +2942,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [])
         .get('/repos/some/repo/pulls/1234')
@@ -2978,7 +2978,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [])
         .get('/repos/some/repo/pulls/1234')
@@ -3059,7 +3059,7 @@ describe('modules/platform/github/index', () => {
       initRepoMock(scope, 'some/repo');
       scope
         .get(
-          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1'
+          '/repos/some/repo/pulls?per_page=100&state=all&sort=updated&direction=desc&page=1',
         )
         .reply(200, [
           {
@@ -3103,7 +3103,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeFalse();
     });
 
@@ -3124,7 +3124,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeFalse();
     });
 
@@ -3145,7 +3145,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeFalse();
     });
   });
@@ -3199,7 +3199,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeTrue();
     });
 
@@ -3220,7 +3220,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeFalse();
     });
 
@@ -3245,7 +3245,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeTrue();
     });
 
@@ -3272,7 +3272,7 @@ describe('modules/platform/github/index', () => {
         await github.mergePr({
           branchName: '',
           id: pr.number,
-        })
+        }),
       ).toBeFalse();
     });
   });
@@ -3423,7 +3423,7 @@ describe('modules/platform/github/index', () => {
       await github.getVulnerabilityAlerts();
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { alerts: { 'npm/left-pad': { '0.0.2': '0.0.3' } } },
-        'GitHub vulnerability details'
+        'GitHub vulnerability details',
       );
       expect(logger.logger.error).not.toHaveBeenCalled();
     });
@@ -3523,7 +3523,7 @@ describe('modules/platform/github/index', () => {
           parentCommitSha: '1234567',
           commitSha: '7654321',
           files,
-        })
+        }),
       );
       git.fetchBranch.mockImplementation(() => Promise.resolve('0abcdef'));
     });
diff --git a/lib/modules/platform/github/index.ts b/lib/modules/platform/github/index.ts
index 0df999996ff009f027a8931f6b9e8f4567440562..68034f9319dfd5489c8adfc06c6120bfba446940 100644
--- a/lib/modules/platform/github/index.ts
+++ b/lib/modules/platform/github/index.ts
@@ -117,11 +117,11 @@ export async function detectGhe(token: string): Promise<void> {
     const gheHeaders = coerceObject(gheQueryRes?.headers);
     const [, gheVersion] =
       Object.entries(gheHeaders).find(
-        ([k]) => k.toLowerCase() === gheHeaderKey
+        ([k]) => k.toLowerCase() === gheHeaderKey,
       ) ?? [];
     platformConfig.gheVersion = semver.valid(gheVersion as string) ?? null;
     logger.debug(
-      `Detected GitHub Enterprise Server, version: ${platformConfig.gheVersion}`
+      `Detected GitHub Enterprise Server, version: ${platformConfig.gheVersion}`,
     );
   }
 }
@@ -158,7 +158,7 @@ export async function initPlatform({
       semver.lt(platformConfig.gheVersion, '3.10.0'))
   ) {
     throw new Error(
-      'Init: Fine-grained Personal Access Tokens do not support GitHub Enterprise Server API version <3.10 and cannot be used with Renovate.'
+      'Init: Fine-grained Personal Access Tokens do not support GitHub Enterprise Server API version <3.10 and cannot be used with Renovate.',
     );
   }
 
@@ -171,7 +171,7 @@ export async function initPlatform({
   } else {
     platformConfig.userDetails ??= await getUserDetails(
       platformConfig.endpoint,
-      token
+      token,
     );
     renovateUsername = platformConfig.userDetails.username;
   }
@@ -186,11 +186,11 @@ export async function initPlatform({
     } else {
       platformConfig.userDetails ??= await getUserDetails(
         platformConfig.endpoint,
-        token
+        token,
       );
       platformConfig.userEmail ??= await getUserEmail(
         platformConfig.endpoint,
-        token
+        token,
       );
       if (platformConfig.userEmail) {
         discoveredGitAuthor = `${platformConfig.userDetails.name} <${platformConfig.userEmail}>`;
@@ -226,7 +226,7 @@ export async function initPlatform({
     const usernamePasswordHostTypes = ['rubygems', 'maven', 'nuget'];
     for (const hostType of usernamePasswordHostTypes) {
       logger.debug(
-        `Adding GitHub token as ${hostType}.pkg.github.com password`
+        `Adding GitHub token as ${hostType}.pkg.github.com password`,
       );
       platformResult.hostRules.push({
         hostType,
@@ -252,7 +252,7 @@ async function fetchRepositories(): Promise<GhRestRepo[]> {
     } else {
       const res = await githubApi.getJson<GhRestRepo[]>(
         `user/repos?per_page=100`,
-        { paginate: 'all' }
+        { paginate: 'all' },
       );
       return res.body;
     }
@@ -266,16 +266,16 @@ async function fetchRepositories(): Promise<GhRestRepo[]> {
 export async function getRepos(config?: AutodiscoverConfig): Promise<string[]> {
   logger.debug('Autodiscovering GitHub repositories');
   const nonEmptyRepositories = (await fetchRepositories()).filter(
-    is.nonEmptyObject
+    is.nonEmptyObject,
   );
   const nonArchivedRepositories = nonEmptyRepositories.filter(
-    (repo) => !repo.archived
+    (repo) => !repo.archived,
   );
   if (nonArchivedRepositories.length < nonEmptyRepositories.length) {
     logger.debug(
       `Filtered out ${
         nonEmptyRepositories.length - nonArchivedRepositories.length
-      } archived repositories`
+      } archived repositories`,
     );
   }
   if (!config?.topics) {
@@ -284,28 +284,28 @@ export async function getRepos(config?: AutodiscoverConfig): Promise<string[]> {
 
   logger.debug({ topics: config.topics }, 'Filtering by topics');
   const topicRepositories = nonArchivedRepositories.filter((repo) =>
-    repo.topics?.some((topic) => config?.topics?.includes(topic))
+    repo.topics?.some((topic) => config?.topics?.includes(topic)),
   );
 
   if (topicRepositories.length < nonArchivedRepositories.length) {
     logger.debug(
       `Filtered out ${
         nonArchivedRepositories.length - topicRepositories.length
-      } repositories not matching topic filters`
+      } repositories not matching topic filters`,
     );
   }
   return topicRepositories.map((repo) => repo.full_name);
 }
 
 async function getBranchProtection(
-  branchName: string
+  branchName: string,
 ): Promise<BranchProtection> {
   // istanbul ignore if
   if (config.parentRepo) {
     return {};
   }
   const res = await githubApi.getJson<BranchProtection>(
-    `repos/${config.repository}/branches/${escapeHash(branchName)}/protection`
+    `repos/${config.repository}/branches/${escapeHash(branchName)}/protection`,
   );
   return res.body;
 }
@@ -313,7 +313,7 @@ async function getBranchProtection(
 export async function getRawFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<string | null> {
   const repo = repoName ?? config.repository;
   let url = `repos/${repo}/contents/${fileName}`;
@@ -329,7 +329,7 @@ export async function getRawFile(
 export async function getJsonFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<any> {
   const raw = await getRawFile(fileName, repoName, branchOrTag);
   return parseJson(raw, fileName);
@@ -337,7 +337,7 @@ export async function getJsonFile(
 
 export async function listForks(
   token: string,
-  repository: string
+  repository: string,
 ): Promise<GhRestRepo[]> {
   try {
     // Get list of existing repos
@@ -364,7 +364,7 @@ export async function listForks(
 export async function findFork(
   token: string,
   repository: string,
-  forkOrg?: string
+  forkOrg?: string,
 ): Promise<GhRestRepo | null> {
   const forks = await listForks(token, repository);
   if (forkOrg) {
@@ -394,7 +394,7 @@ export async function findFork(
 export async function createFork(
   token: string,
   repository: string,
-  forkOrg?: string
+  forkOrg?: string,
 ): Promise<GhRestRepo> {
   let forkedRepo: GhRestRepo | undefined;
   try {
@@ -499,13 +499,13 @@ export async function initRepo({
     ) {
       logger.debug(
         { desiredRepo: repository, foundRepo: repo.nameWithOwner },
-        'Repository has been renamed'
+        'Repository has been renamed',
       );
       throw new Error(REPOSITORY_RENAMED);
     }
     if (repo.isArchived) {
       logger.debug(
-        'Repository is archived - throwing error to abort renovation'
+        'Repository is archived - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_ARCHIVED);
     }
@@ -582,7 +582,7 @@ export async function initRepo({
             forkDefaultBranch,
             body,
           },
-          'Fork has different default branch to parent, attempting to create branch'
+          'Fork has different default branch to parent, attempting to create branch',
         );
         try {
           await githubApi.postJson(`repos/${config.repository}/git/refs`, {
@@ -593,17 +593,17 @@ export async function initRepo({
         } catch (err) /* istanbul ignore next */ {
           if (err.response?.body?.message === 'Reference already exists') {
             logger.debug(
-              `Branch ${config.defaultBranch} already exists in the fork`
+              `Branch ${config.defaultBranch} already exists in the fork`,
             );
           } else {
             logger.warn(
               { err, body: err.response?.body },
-              'Could not create parent defaultBranch in fork'
+              'Could not create parent defaultBranch in fork',
             );
           }
         }
         logger.debug(
-          `Setting ${config.defaultBranch} as default branch for ${config.repository}`
+          `Setting ${config.defaultBranch} as default branch for ${config.repository}`,
         );
         try {
           await githubApi.patchJson(`repos/${config.repository}`, {
@@ -624,7 +624,7 @@ export async function initRepo({
       const sha = repo.defaultBranchRef.target.oid;
       try {
         logger.debug(
-          `Updating forked repository default sha ${sha} to match upstream`
+          `Updating forked repository default sha ${sha} to match upstream`,
         );
         await githubApi.patchJson(url, {
           body: {
@@ -636,7 +636,7 @@ export async function initRepo({
       } catch (err) /* istanbul ignore next */ {
         logger.warn(
           { url, sha, err: err.err || err },
-          'Error updating fork from upstream - cannot continue'
+          'Error updating fork from upstream - cannot continue',
         );
         if (err instanceof ExternalHostError) {
           throw err;
@@ -665,7 +665,7 @@ export async function initRepo({
   // TODO: null checks (#22198)
   parsedEndpoint.host = parsedEndpoint.host!.replace(
     'api.github.com',
-    'github.com'
+    'github.com',
   );
   parsedEndpoint.pathname = `${config.repository}.git`;
   const url = URL.format(parsedEndpoint);
@@ -690,7 +690,7 @@ export async function getRepoForceRebase(): Promise<boolean> {
       if (branchProtection.required_status_checks) {
         if (branchProtection.required_status_checks.strict) {
           logger.debug(
-            'Branch protection: PRs must be up-to-date before merging'
+            'Branch protection: PRs must be up-to-date before merging',
           );
           config.repoForceRebase = true;
         }
@@ -701,7 +701,7 @@ export async function getRepoForceRebase(): Promise<boolean> {
             users: branchProtection.restrictions.users,
             teams: branchProtection.restrictions.teams,
           },
-          'Branch protection: Pushing to branch is restricted'
+          'Branch protection: Pushing to branch is restricted',
         );
         config.pushProtection = true;
       }
@@ -713,7 +713,7 @@ export async function getRepoForceRebase(): Promise<boolean> {
         err.statusCode === 403
       ) {
         logger.debug(
-          'Branch protection: Do not have permissions to detect branch protection'
+          'Branch protection: Do not have permissions to detect branch protection',
         );
       } else {
         throw err;
@@ -742,7 +742,7 @@ function cachePr(pr?: GhPr | null): void {
 async function fetchPr(prNo: number): Promise<GhPr | null> {
   try {
     const { body: ghRestPr } = await githubApi.getJson<GhRestPr>(
-      `repos/${config.parentRepo ?? config.repository}/pulls/${prNo}`
+      `repos/${config.parentRepo ?? config.repository}/pulls/${prNo}`,
     );
     const result = coerceRestPr(ghRestPr);
     cachePr(result);
@@ -787,7 +787,7 @@ export async function getPrList(): Promise<GhPr[]> {
     // TODO: check null `repo` (#22198)
     const prCache = await getPrCache(githubApi, repo!, username);
     config.prList = Object.values(prCache).sort(
-      ({ number: a }, { number: b }) => b - a
+      ({ number: a }, { number: b }) => b - a,
     );
   }
 
@@ -849,7 +849,7 @@ async function ensureBranchSha(branchName: string, sha: string): Promise<void> {
       if (err.err?.response?.statusCode === 422) {
         logger.debug(
           { err },
-          'Branch update failed due to reference not existing - will try to create'
+          'Branch update failed due to reference not existing - will try to create',
         );
       } else {
         logger.warn({ refUrl, err }, 'Error updating branch');
@@ -902,7 +902,7 @@ export async function getBranchPr(branchName: string): Promise<GhPr | null> {
     } catch (err) {
       logger.debug(
         { err, branchName, sha, autoclosedPr },
-        'Could not recreate autoclosed branch - skipping reopen'
+        'Could not recreate autoclosed branch - skipping reopen',
       );
       return null;
     }
@@ -915,11 +915,11 @@ export async function getBranchPr(branchName: string): Promise<GhPr | null> {
             state: 'open',
             title,
           },
-        }
+        },
       );
       logger.info(
         { branchName, title, number },
-        'Successfully reopened autoclosed PR'
+        'Successfully reopened autoclosed PR',
       );
       const result = coerceRestPr(ghPr);
       cachePr(result);
@@ -934,10 +934,10 @@ export async function getBranchPr(branchName: string): Promise<GhPr | null> {
 
 async function getStatus(
   branchName: string,
-  useCache = true
+  useCache = true,
 ): Promise<CombinedBranchStatus> {
   const commitStatusUrl = `repos/${config.repository}/commits/${escapeHash(
-    branchName
+    branchName,
   )}/status`;
 
   return (
@@ -950,7 +950,7 @@ async function getStatus(
 // Returns the combined status for a branch.
 export async function getBranchStatus(
   branchName: string,
-  internalChecksAsSuccess: boolean
+  internalChecksAsSuccess: boolean,
 ): Promise<BranchStatus> {
   logger.debug(`getBranchStatus(${branchName})`);
   let commitStatus: CombinedBranchStatus;
@@ -959,7 +959,7 @@ export async function getBranchStatus(
   } catch (err) /* istanbul ignore next */ {
     if (err.statusCode === 404) {
       logger.debug(
-        'Received 404 when checking branch status, assuming that branch has been deleted'
+        'Received 404 when checking branch status, assuming that branch has been deleted',
       );
       throw new Error(REPOSITORY_CHANGED);
     }
@@ -968,16 +968,16 @@ export async function getBranchStatus(
   }
   logger.debug(
     { state: commitStatus.state, statuses: commitStatus.statuses },
-    'branch status check result'
+    'branch status check result',
   );
   if (commitStatus.statuses && !internalChecksAsSuccess) {
     commitStatus.statuses = commitStatus.statuses.filter(
       (status) =>
-        status.state !== 'success' || !status.context?.startsWith('renovate/')
+        status.state !== 'success' || !status.context?.startsWith('renovate/'),
     );
     if (!commitStatus.statuses.length) {
       logger.debug(
-        'Successful checks are all internal renovate/ checks, so returning "pending" branch status'
+        'Successful checks are all internal renovate/ checks, so returning "pending" branch status',
       );
       commitStatus.state = 'pending';
     }
@@ -986,7 +986,7 @@ export async function getBranchStatus(
   // API is supported in oldest available GHE version 2.19
   try {
     const checkRunsUrl = `repos/${config.repository}/commits/${escapeHash(
-      branchName
+      branchName,
     )}/check-runs?per_page=100`;
     const opts = {
       headers: {
@@ -1042,7 +1042,7 @@ export async function getBranchStatus(
   if (
     (commitStatus.state === 'success' || commitStatus.statuses.length === 0) &&
     checkRuns.every((run) =>
-      ['skipped', 'neutral', 'success'].includes(run.conclusion)
+      ['skipped', 'neutral', 'success'].includes(run.conclusion),
     )
   ) {
     return 'green';
@@ -1052,7 +1052,7 @@ export async function getBranchStatus(
 
 async function getStatusCheck(
   branchName: string,
-  useCache = true
+  useCache = true,
 ): Promise<GhBranchStatus[]> {
   const branchCommit = git.getBranchCommit(branchName);
 
@@ -1075,7 +1075,7 @@ const githubToRenovateStatusMapping: GithubToRenovateStatusMapping = {
 
 export async function getBranchStatusCheck(
   branchName: string,
-  context: string
+  context: string,
 ): Promise<BranchStatus | null> {
   try {
     const res = await getStatusCheck(branchName);
@@ -1152,7 +1152,7 @@ async function getIssues(): Promise<Issue[]> {
         name: config.repositoryName,
         user: config.renovateUsername,
       },
-    }
+    },
   );
 
   logger.debug(`Retrieved ${result.length} issues`);
@@ -1176,7 +1176,7 @@ export async function getIssueList(): Promise<Issue[]> {
 
 export async function getIssue(
   number: number,
-  useCache = true
+  useCache = true,
 ): Promise<Issue | null> {
   // istanbul ignore if
   if (config.hasIssuesEnabled === false) {
@@ -1186,7 +1186,7 @@ export async function getIssue(
     const issueBody = (
       await githubApi.getJson<{ body: string }>(
         `repos/${config.parentRepo ?? config.repository}/issues/${number}`,
-        { memCache: useCache }
+        { memCache: useCache },
       )
     ).body.body;
     return {
@@ -1202,7 +1202,7 @@ export async function getIssue(
 export async function findIssue(title: string): Promise<Issue | null> {
   logger.debug(`findIssue(${title})`);
   const [issue] = (await getIssueList()).filter(
-    (i) => i.state === 'open' && i.title === title
+    (i) => i.state === 'open' && i.title === title,
   );
   if (!issue) {
     return null;
@@ -1218,7 +1218,7 @@ async function closeIssue(issueNumber: number): Promise<void> {
     `repos/${config.parentRepo ?? config.repository}/issues/${issueNumber}`,
     {
       body: { state: 'closed' },
-    }
+    },
   );
 }
 
@@ -1234,7 +1234,7 @@ export async function ensureIssue({
   // istanbul ignore if
   if (config.hasIssuesEnabled === false) {
     logger.info(
-      'Cannot ensure issue because issues are disabled in this repository'
+      'Cannot ensure issue because issues are disabled in this repository',
     );
     return null;
   }
@@ -1271,7 +1271,7 @@ export async function ensureIssue({
         await githubApi.getJson<{ body: string }>(
           `repos/${config.parentRepo ?? config.repository}/issues/${
             issue.number
-          }`
+          }`,
         )
       ).body.body;
       if (
@@ -1294,7 +1294,7 @@ export async function ensureIssue({
           }`,
           {
             body: data,
-          }
+          },
         );
         logger.debug('Issue updated');
         return 'updated';
@@ -1308,7 +1308,7 @@ export async function ensureIssue({
           body,
           labels: labels ?? [],
         },
-      }
+      },
     );
     logger.info('Issue created');
     // reset issueList so that it will be fetched again as-needed
@@ -1329,7 +1329,7 @@ export async function ensureIssueClosing(title: string): Promise<void> {
   // istanbul ignore if
   if (config.hasIssuesEnabled === false) {
     logger.info(
-      'Cannot ensure issue because issues are disabled in this repository'
+      'Cannot ensure issue because issues are disabled in this repository',
     );
     return;
   }
@@ -1345,7 +1345,7 @@ export async function ensureIssueClosing(title: string): Promise<void> {
 
 export async function addAssignees(
   issueNo: number,
-  assignees: string[]
+  assignees: string[],
 ): Promise<void> {
   logger.debug(`Adding assignees '${assignees.join(', ')}' to #${issueNo}`);
   const repository = config.parentRepo ?? config.repository;
@@ -1358,7 +1358,7 @@ export async function addAssignees(
 
 export async function addReviewers(
   prNo: number,
-  reviewers: string[]
+  reviewers: string[],
 ): Promise<void> {
   logger.debug(`Adding reviewers '${reviewers.join(', ')}' to #${prNo}`);
 
@@ -1376,7 +1376,7 @@ export async function addReviewers(
           reviewers: userReviewers,
           team_reviewers: teamReviewers,
         },
-      }
+      },
     );
   } catch (err) /* istanbul ignore next */ {
     logger.warn({ err }, 'Failed to assign reviewer');
@@ -1385,7 +1385,7 @@ export async function addReviewers(
 
 async function addLabels(
   issueNo: number,
-  labels: string[] | null | undefined
+  labels: string[] | null | undefined,
 ): Promise<void> {
   logger.debug(`Adding labels '${labels?.join(', ')}' to #${issueNo}`);
   const repository = config.parentRepo ?? config.repository;
@@ -1398,13 +1398,13 @@ async function addLabels(
 
 export async function deleteLabel(
   issueNo: number,
-  label: string
+  label: string,
 ): Promise<void> {
   logger.debug(`Deleting label ${label} from #${issueNo}`);
   const repository = config.parentRepo ?? config.repository;
   try {
     await githubApi.deleteJson(
-      `repos/${repository}/issues/${issueNo}/labels/${label}`
+      `repos/${repository}/issues/${issueNo}/labels/${label}`,
     );
   } catch (err) /* istanbul ignore next */ {
     logger.warn({ err, issueNo, label }, 'Failed to delete label');
@@ -1419,7 +1419,7 @@ async function addComment(issueNo: number, body: string): Promise<void> {
     }/issues/${issueNo}/comments`,
     {
       body: { body },
-    }
+    },
   );
 }
 
@@ -1431,7 +1431,7 @@ async function editComment(commentId: number, body: string): Promise<void> {
     }/issues/comments/${commentId}`,
     {
       body: { body },
-    }
+    },
   );
 }
 
@@ -1440,7 +1440,7 @@ async function deleteComment(commentId: number): Promise<void> {
   await githubApi.deleteJson(
     `repos/${
       config.parentRepo ?? config.repository
-    }/issues/comments/${commentId}`
+    }/issues/comments/${commentId}`,
   );
 }
 
@@ -1501,13 +1501,13 @@ export async function ensureComment({
       await addComment(number, body);
       logger.info(
         { repository: config.repository, issueNo: number, topic },
-        'Comment added'
+        'Comment added',
       );
     } else if (commentNeedsUpdating) {
       await editComment(commentId, body);
       logger.debug(
         { repository: config.repository, issueNo: number },
-        'Comment updated'
+        'Comment updated',
       );
     } else {
       logger.debug('Comment is already update-to-date');
@@ -1527,7 +1527,7 @@ export async function ensureComment({
 }
 
 export async function ensureCommentRemoval(
-  deleteConfig: EnsureCommentRemovalConfig
+  deleteConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   const { number: issueNo } = deleteConfig;
   const key =
@@ -1563,7 +1563,7 @@ export async function ensureCommentRemoval(
 async function tryPrAutomerge(
   prNumber: number,
   prNodeId: string,
-  platformOptions: PlatformPrOptions | undefined
+  platformOptions: PlatformPrOptions | undefined,
 ): Promise<void> {
   if (!platformOptions?.usePlatformAutomerge) {
     return;
@@ -1576,7 +1576,7 @@ async function tryPrAutomerge(
     if (semver.satisfies(platformConfig.gheVersion!, '<3.3.0')) {
       logger.debug(
         { prNumber },
-        'GitHub-native automerge: not supported on this version of GHE. Use 3.3.0 or newer.'
+        'GitHub-native automerge: not supported on this version of GHE. Use 3.3.0 or newer.',
       );
       return;
     }
@@ -1585,7 +1585,7 @@ async function tryPrAutomerge(
   if (!config.autoMergeAllowed) {
     logger.debug(
       { prNumber },
-      'GitHub-native automerge: not enabled in repo settings'
+      'GitHub-native automerge: not enabled in repo settings',
     );
     return;
   }
@@ -1597,13 +1597,13 @@ async function tryPrAutomerge(
 
     const res = await githubApi.requestGraphql<GhAutomergeResponse>(
       enableAutoMergeMutation,
-      queryOptions
+      queryOptions,
     );
 
     if (res?.errors) {
       logger.debug(
         { prNumber, errors: res.errors },
-        'GitHub-native automerge: fail'
+        'GitHub-native automerge: fail',
       );
       return;
     }
@@ -1650,12 +1650,12 @@ export async function createPr({
   const ghPr = (
     await githubApi.postJson<GhRestPr>(
       `repos/${config.parentRepo ?? config.repository}/pulls`,
-      options
+      options,
     )
   ).body;
   logger.debug(
     { branch: sourceBranch, pr: ghPr.number, draft: draftPR },
-    'PR created'
+    'PR created',
   );
 
   const result = coerceRestPr(ghPr);
@@ -1697,7 +1697,7 @@ export async function updatePr({
   try {
     const { body: ghPr } = await githubApi.patchJson<GhRestPr>(
       `repos/${config.parentRepo ?? config.repository}/pulls/${prNo}`,
-      options
+      options,
     );
     const result = coerceRestPr(ghPr);
     cachePr(result);
@@ -1743,7 +1743,7 @@ export async function mergePr({
         ) {
           logger.debug(
             { response: body },
-            `GitHub blocking PR merge -- Missing required status check(s)`
+            `GitHub blocking PR merge -- Missing required status check(s)`,
           );
           return false;
         }
@@ -1754,18 +1754,18 @@ export async function mergePr({
         ) {
           logger.debug(
             { response: body },
-            `GitHub blocking PR merge -- Needs approving review(s)`
+            `GitHub blocking PR merge -- Needs approving review(s)`,
           );
           return false;
         }
         logger.debug(
           { response: body },
-          'GitHub blocking PR merge -- will keep trying'
+          'GitHub blocking PR merge -- will keep trying',
         );
       } else {
         logger.warn(
           { mergeMethod: config.mergeMethod, err },
-          'Failed to merge PR'
+          'Failed to merge PR',
         );
         return false;
       }
@@ -1799,7 +1799,7 @@ export async function mergePr({
   }
   logger.debug(
     { automergeResult: automergeResult!.body, pr: prNo },
-    'PR merged'
+    'PR merged',
   );
   const cachedPr = config.prList?.find(({ number }) => number === prNo);
   if (cachedPr) {
@@ -1816,7 +1816,7 @@ export function massageMarkdown(input: string): string {
     // to be safe, replace all github.com links with renovatebot redirector
     .replace(
       regEx(/href="https?:\/\/github.com\//g),
-      'href="https://togithub.com/'
+      'href="https://togithub.com/',
     )
     .replace(regEx(/]\(https:\/\/github\.com\//g), '](https://togithub.com/')
     .replace(regEx(/]: https:\/\/github\.com\//g), ']: https://togithub.com/')
@@ -1833,7 +1833,7 @@ export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
     // semver not null safe, accepts null and undefined
 
     platformConfig.gheVersion!,
-    '>=3.5'
+    '>=3.5',
   );
   const filterByState = !platformConfig.isGhe || gheSupportsStateFilter;
   const query = vulnerabilityAlertsQuery(filterByState);
@@ -1852,7 +1852,7 @@ export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
       {
         url: 'https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts',
       },
-      'Cannot access vulnerability alerts. Please ensure permissions have been granted.'
+      'Cannot access vulnerability alerts. Please ensure permissions have been granted.',
     );
   }
   let alerts: VulnerabilityAlert[] = [];
@@ -1895,7 +1895,7 @@ export async function getVulnerabilityAlerts(): Promise<VulnerabilityAlert[]> {
 
 async function pushFiles(
   { branchName, message }: CommitFilesConfig,
-  { parentCommitSha, commitSha }: CommitResult
+  { parentCommitSha, commitSha }: CommitResult,
 ): Promise<CommitSha | null> {
   try {
     // Push the commit to GitHub using a custom ref
@@ -1909,14 +1909,14 @@ async function pushFiles(
     // Attempting to reuse the tree or commit SHA we pushed does not work
     const treeRes = await githubApi.postJson<{ sha: string }>(
       `/repos/${config.repository}/git/trees`,
-      { body: { tree: treeItems } }
+      { body: { tree: treeItems } },
     );
     const treeSha = treeRes.body.sha;
 
     // Now we recreate the commit using the tree we recreated the step before
     const commitRes = await githubApi.postJson<{ sha: string }>(
       `/repos/${config.repository}/git/commits`,
-      { body: { message, tree: treeSha, parents: [parentCommitSha] } }
+      { body: { message, tree: treeSha, parents: [parentCommitSha] } },
     );
     const remoteCommitSha = commitRes.body.sha;
     await ensureBranchSha(branchName, remoteCommitSha);
@@ -1928,14 +1928,14 @@ async function pushFiles(
 }
 
 export async function commitFiles(
-  config: CommitFilesConfig
+  config: CommitFilesConfig,
 ): Promise<CommitSha | null> {
   const commitResult = await git.prepareCommit(config); // Commit locally and don't push
   const { branchName, files } = config;
   if (!commitResult) {
     logger.debug(
       { branchName, files: files.map(({ path }) => path) },
-      `Platform-native commit: unable to prepare for commit`
+      `Platform-native commit: unable to prepare for commit`,
     );
     return null;
   }
diff --git a/lib/modules/platform/github/massage-markdown-links.spec.ts b/lib/modules/platform/github/massage-markdown-links.spec.ts
index 880144c6cf60153e21db22203ba5147874589eaa..c8faf90feae9902f03f0940f6c5570e0d8b955e2 100644
--- a/lib/modules/platform/github/massage-markdown-links.spec.ts
+++ b/lib/modules/platform/github/massage-markdown-links.spec.ts
@@ -11,7 +11,7 @@ describe('modules/platform/github/massage-markdown-links', () => {
       [
         'Link [foo/bar#1](https://togithub.com/foo/bar/pull/1) points to [https://github.com/foo/bar/pull/1](https://togithub.com/foo/bar/pull/1).',
         'URL [https://github.com/foo/bar/pull/1](https://togithub.com/foo/bar/pull/1) becomes [foo/bar#1](https://togithub.com/foo/bar/pull/1).',
-      ].join('\n')
+      ].join('\n'),
     );
   });
 
@@ -87,6 +87,6 @@ describe('modules/platform/github/massage-markdown-links', () => {
     '$input -> $output',
     ({ input, output }: { input: string; output: string }) => {
       expect(massageMarkdownLinks(input)).toEqual(output);
-    }
+    },
   );
 });
diff --git a/lib/modules/platform/github/pr.ts b/lib/modules/platform/github/pr.ts
index b876953447419d98e32201f7182818c82538e204..432abe996515c2df911236da6bbb34b087c62359 100644
--- a/lib/modules/platform/github/pr.ts
+++ b/lib/modules/platform/github/pr.ts
@@ -15,7 +15,7 @@ function getPrApiCache(): ApiCache<GhPr> {
   delete repoCache.platform.github.prCache;
   repoCache.platform.github.pullRequestsCache ??= { items: {} };
   const prApiCache = new ApiCache<GhPr>(
-    repoCache.platform.github.pullRequestsCache as ApiPageCache<GhPr>
+    repoCache.platform.github.pullRequestsCache as ApiPageCache<GhPr>,
   );
   return prApiCache;
 }
@@ -50,7 +50,7 @@ function getPrApiCache(): ApiCache<GhPr> {
 export async function getPrCache(
   http: GithubHttp,
   repo: string,
-  username: string | null
+  username: string | null,
 ): Promise<Record<number, GhPr>> {
   const prApiCache = getPrApiCache();
   const isInitial = is.emptyArray(prApiCache.getItems());
@@ -84,7 +84,7 @@ export async function getPrCache(
 
       if (username) {
         page = page.filter(
-          (ghPr) => ghPr?.user?.login && ghPr.user.login === username
+          (ghPr) => ghPr?.user?.login && ghPr.user.login === username,
         );
       }
 
@@ -106,7 +106,7 @@ export async function getPrCache(
         requestsTotal,
         apiQuotaAffected,
       },
-      `getPrList success`
+      `getPrList success`,
     );
   } catch (err) /* istanbul ignore next */ {
     logger.debug({ err }, 'getPrList err');
diff --git a/lib/modules/platform/github/scm.ts b/lib/modules/platform/github/scm.ts
index e222fa9df1992564ab900701fa9e61c9970212f3..6cd5cee34b061014559ec24b44f8060ab8161506 100644
--- a/lib/modules/platform/github/scm.ts
+++ b/lib/modules/platform/github/scm.ts
@@ -5,7 +5,7 @@ import { commitFiles } from './';
 
 export class GithubScm extends DefaultGitScm {
   override commitAndPush(
-    commitConfig: CommitFilesConfig
+    commitConfig: CommitFilesConfig,
   ): Promise<CommitSha | null> {
     return commitConfig.platformCommit
       ? commitFiles(commitConfig)
diff --git a/lib/modules/platform/github/user.ts b/lib/modules/platform/github/user.ts
index fd91f61521728e90f6066a668a6f404058a3cbe6..63171a798e53383b9336b7d9c1fd4556d50bf20f 100644
--- a/lib/modules/platform/github/user.ts
+++ b/lib/modules/platform/github/user.ts
@@ -28,7 +28,7 @@ export async function getAppDetails(token: string): Promise<UserDetails> {
 
 export async function getUserDetails(
   endpoint: string,
-  token: string
+  token: string,
 ): Promise<UserDetails> {
   try {
     const userData = (
@@ -36,7 +36,7 @@ export async function getUserDetails(
         endpoint + 'user',
         {
           token,
-        }
+        },
       )
     ).body;
     return {
@@ -52,7 +52,7 @@ export async function getUserDetails(
 
 export async function getUserEmail(
   endpoint: string,
-  token: string
+  token: string,
 ): Promise<string | null> {
   try {
     const emails = (
@@ -63,7 +63,7 @@ export async function getUserEmail(
     return emails?.[0].email ?? null;
   } catch (err) {
     logger.debug(
-      'Cannot read user/emails endpoint on GitHub to retrieve gitAuthor'
+      'Cannot read user/emails endpoint on GitHub to retrieve gitAuthor',
     );
     return null;
   }
diff --git a/lib/modules/platform/gitlab/index.spec.ts b/lib/modules/platform/gitlab/index.spec.ts
index 31d0cb40282a7d855cde53c1785390164feff673..e8663a2154119b0b553b882d26ea998d47f4c67e 100644
--- a/lib/modules/platform/gitlab/index.spec.ts
+++ b/lib/modules/platform/gitlab/index.spec.ts
@@ -43,7 +43,7 @@ describe('modules/platform/gitlab/index', () => {
     git.branchExists.mockReturnValue(true);
     git.isBranchBehindBase.mockResolvedValue(true);
     git.getBranchCommit.mockReturnValue(
-      '0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+      '0d9c7726c3d628b7e28af234595cfd20febdbf8e',
     );
     hostRules.find.mockReturnValue({
       token: '123test',
@@ -98,7 +98,7 @@ describe('modules/platform/gitlab/index', () => {
         await gitlab.initPlatform({
           token: 'some-token',
           endpoint: undefined,
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -119,7 +119,7 @@ describe('modules/platform/gitlab/index', () => {
         await gitlab.initPlatform({
           endpoint,
           token: 'some-token',
-        })
+        }),
       ).toMatchSnapshot();
     });
 
@@ -132,7 +132,7 @@ describe('modules/platform/gitlab/index', () => {
           token: 'some-token',
           endpoint: undefined,
           gitAuthor: 'somebody',
-        })
+        }),
       ).toEqual({ endpoint: 'https://gitlab.com/api/v4/' });
     });
   });
@@ -142,7 +142,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false'
+          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false',
         )
         .replyWithError('getRepos error');
       await expect(gitlab.getRepos()).rejects.toThrow('getRepos error');
@@ -152,7 +152,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false'
+          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false',
         )
         .reply(200, [
           {
@@ -174,7 +174,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false'
+          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false',
         )
         .reply(200, [
           {
@@ -196,7 +196,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&topic=one%2Ctwo'
+          '/api/v4/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&topic=one%2Ctwo',
         )
         .reply(200, [
           {
@@ -214,7 +214,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/groups/a/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&include_subgroups=true&with_shared=false'
+          '/api/v4/groups/a/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&include_subgroups=true&with_shared=false',
         )
         .reply(200, [
           {
@@ -222,7 +222,7 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .get(
-          '/api/v4/groups/c%2Fd/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&include_subgroups=true&with_shared=false'
+          '/api/v4/groups/c%2Fd/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&include_subgroups=true&with_shared=false',
         )
         .reply(200, [
           {
@@ -240,7 +240,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/groups/a/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&include_subgroups=true&with_shared=false&topic=one%2Ctwo'
+          '/api/v4/groups/a/projects?membership=true&per_page=100&with_merge_requests_enabled=true&min_access_level=30&archived=false&include_subgroups=true&with_shared=false&topic=one%2Ctwo',
         )
         .reply(200, [
           {
@@ -263,7 +263,7 @@ describe('modules/platform/gitlab/index', () => {
       repository: 'some/repo',
     },
     repoResp: httpMock.Body | null = null,
-    scope = httpMock.scope(gitlabApiHost)
+    scope = httpMock.scope(gitlabApiHost),
   ): Promise<httpMock.Scope> {
     const repo = repoParams.repository;
     const justRepo = repo.split('/').slice(0, 2).join('/');
@@ -272,7 +272,7 @@ describe('modules/platform/gitlab/index', () => {
       repoResp ?? {
         default_branch: 'master',
         http_url_to_repo: `https://gitlab.com/${justRepo}.git`,
-      }
+      },
     );
     await gitlab.initRepo(repoParams);
     return scope;
@@ -289,7 +289,7 @@ describe('modules/platform/gitlab/index', () => {
       expect(
         await gitlab.initRepo({
           repository: 'some/repo/project',
-        })
+        }),
       ).toEqual({
         defaultBranch: 'master',
         isFork: false,
@@ -305,7 +305,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow('always error');
     });
 
@@ -317,7 +317,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_ARCHIVED);
     });
 
@@ -329,7 +329,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_MIRRORED);
     });
 
@@ -345,7 +345,7 @@ describe('modules/platform/gitlab/index', () => {
         await gitlab.initRepo({
           repository: 'some/repo',
           includeMirrors: true,
-        })
+        }),
       ).toEqual({
         defaultBranch: 'master',
         isFork: false,
@@ -361,7 +361,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_DISABLED);
     });
 
@@ -373,7 +373,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_DISABLED);
     });
 
@@ -385,7 +385,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_EMPTY);
     });
 
@@ -397,7 +397,7 @@ describe('modules/platform/gitlab/index', () => {
       await expect(
         gitlab.initRepo({
           repository: 'some/repo',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_EMPTY);
     });
 
@@ -412,7 +412,7 @@ describe('modules/platform/gitlab/index', () => {
       expect(
         await gitlab.initRepo({
           repository: 'some/repo/project',
-        })
+        }),
       ).toEqual({
         defaultBranch: 'master',
         isFork: false,
@@ -449,7 +449,7 @@ describe('modules/platform/gitlab/index', () => {
         gitlab.initRepo({
           repository: 'some/repo/project',
           gitUrl: 'ssh',
-        })
+        }),
       ).rejects.toThrow(CONFIG_GIT_URL_UNAVAILABLE);
     });
 
@@ -495,7 +495,7 @@ describe('modules/platform/gitlab/index', () => {
           default_branch: 'master',
           http_url_to_repo: null,
           merge_method: 'merge',
-        }
+        },
       );
       expect(await gitlab.getRepoForceRebase()).toBeFalse();
     });
@@ -509,7 +509,7 @@ describe('modules/platform/gitlab/index', () => {
           default_branch: 'master',
           http_url_to_repo: null,
           merge_method: 'ff',
-        }
+        },
       );
       expect(await gitlab.getRepoForceRebase()).toBeTrue();
     });
@@ -520,7 +520,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const pr = await gitlab.getBranchPr('some-branch');
@@ -531,7 +531,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -543,7 +543,7 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1'
+          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1',
         )
         .reply(200, {
           iid: 91,
@@ -566,7 +566,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -578,7 +578,7 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1'
+          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1',
         )
         .reply(200, {
           iid: 91,
@@ -601,7 +601,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -613,7 +613,7 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1'
+          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1',
         )
         .reply(200, {
           iid: 91,
@@ -638,11 +638,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -653,11 +653,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -669,7 +669,7 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1'
+          '/api/v4/projects/some%2Frepo/merge_requests/91?include_diverged_commits_count=1',
         )
         .reply(200, {
           iid: 91,
@@ -695,14 +695,14 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           { context: 'renovate/stability-days', status: 'success' },
           { context: 'renovate/other', status: 'success' },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -713,14 +713,14 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           { name: 'renovate/stability-days', status: 'success' },
           { name: 'renovate/other', status: 'success' },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', false);
@@ -731,14 +731,14 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           { status: 'success' },
           { status: 'failed', allow_failure: true },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -749,11 +749,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [{ status: 'failed', allow_failure: true }])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -764,11 +764,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [{ status: 'success' }, { status: 'skipped' }])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -779,11 +779,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [{ status: 'skipped' }])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -794,11 +794,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [{ status: 'skipped' }, { status: 'failed' }])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -809,7 +809,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           { status: 'success' },
@@ -817,7 +817,7 @@ describe('modules/platform/gitlab/index', () => {
           { status: 'failed' },
         ])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -828,11 +828,11 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [{ status: 'success' }, { status: 'foo' }])
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/some%2Frepo/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatus('somebranch', true);
@@ -844,7 +844,7 @@ describe('modules/platform/gitlab/index', () => {
       git.branchExists.mockReturnValue(false);
       await initRepo();
       await expect(gitlab.getBranchStatus('somebranch', true)).rejects.toThrow(
-        REPOSITORY_CHANGED
+        REPOSITORY_CHANGED,
       );
     });
   });
@@ -854,12 +854,12 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, []);
       const res = await gitlab.getBranchStatusCheck(
         'somebranch',
-        'some-context'
+        'some-context',
       );
       expect(res).toBeNull();
     });
@@ -868,12 +868,12 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [{ name: 'context-1', status: 'pending' }]);
       const res = await gitlab.getBranchStatusCheck(
         'somebranch',
-        'some-context'
+        'some-context',
       );
       expect(res).toBeNull();
     });
@@ -882,7 +882,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           { name: 'context-1', status: 'pending' },
@@ -891,7 +891,7 @@ describe('modules/platform/gitlab/index', () => {
         ]);
       const res = await gitlab.getBranchStatusCheck(
         'somebranch',
-        'some-context'
+        'some-context',
       );
       expect(res).toBe('green');
     });
@@ -900,7 +900,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [
           { name: 'context-1', status: 'pending' },
@@ -909,7 +909,7 @@ describe('modules/platform/gitlab/index', () => {
         ]);
       const res = await gitlab.getBranchStatusCheck(
         'somebranch',
-        'some-context'
+        'some-context',
       );
       expect(res).toBe('yellow');
     });
@@ -922,15 +922,15 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .post(
-          '/api/v4/projects/some%2Frepo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, {})
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [])
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, []);
 
@@ -941,7 +941,7 @@ describe('modules/platform/gitlab/index', () => {
           description: 'some-description',
           state,
           url: 'some-url',
-        })
+        }),
       ).toResolve();
     });
 
@@ -949,15 +949,15 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .post(
-          '/api/v4/projects/some%2Frepo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, {})
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [])
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, []);
 
@@ -981,15 +981,15 @@ describe('modules/platform/gitlab/index', () => {
           (body: any): boolean => {
             expect(body.pipeline_id).toBe(123);
             return true;
-          }
+          },
         )
         .reply(200, {})
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [])
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, { last_pipeline: { id: 123 } });
 
@@ -1000,7 +1000,7 @@ describe('modules/platform/gitlab/index', () => {
           description: 'some-description',
           state: 'green',
           url: 'some-url',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1011,15 +1011,15 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .post(
-          '/api/v4/projects/some%2Frepo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/statuses/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, {})
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e/statuses',
         )
         .reply(200, [])
         .get(
-          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e'
+          '/api/v4/projects/some%2Frepo/repository/commits/0d9c7726c3d628b7e28af234595cfd20febdbf8e',
         )
         .reply(200, []);
 
@@ -1041,7 +1041,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1061,7 +1061,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1085,7 +1085,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1110,7 +1110,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [])
         .post('/api/v4/projects/undefined/issues')
@@ -1127,7 +1127,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1154,7 +1154,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1182,7 +1182,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1207,7 +1207,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1233,7 +1233,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1264,7 +1264,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened'
+          '/api/v4/projects/undefined/issues?per_page=100&scope=created_by_me&state=opened',
         )
         .reply(200, [
           {
@@ -1301,11 +1301,11 @@ describe('modules/platform/gitlab/index', () => {
         .get('/api/v4/users?username=someotheruser')
         .reply(200, [{ id: 124 }])
         .put(
-          '/api/v4/projects/undefined/merge_requests/42?assignee_ids[]=123&assignee_ids[]=124'
+          '/api/v4/projects/undefined/merge_requests/42?assignee_ids[]=123&assignee_ids[]=124',
         )
         .reply(200);
       await expect(
-        gitlab.addAssignees(42, ['someuser', 'someotheruser'])
+        gitlab.addAssignees(42, ['someuser', 'someotheruser']),
       ).toResolve();
     });
 
@@ -1315,7 +1315,7 @@ describe('modules/platform/gitlab/index', () => {
         .get('/api/v4/users?username=someuser')
         .replyWithError('some error');
       await expect(
-        gitlab.addAssignees(42, ['someuser', 'someotheruser'])
+        gitlab.addAssignees(42, ['someuser', 'someotheruser']),
       ).toResolve();
     });
   });
@@ -1327,7 +1327,7 @@ describe('modules/platform/gitlab/index', () => {
         await gitlab.addReviewers(42, ['someuser', 'foo', 'someotheruser']);
         expect(logger.warn).toHaveBeenCalledWith(
           { version: '13.8.0' },
-          'Adding reviewers is only available in GitLab 13.9 and onwards'
+          'Adding reviewers is only available in GitLab 13.9 and onwards',
         );
       });
     });
@@ -1346,7 +1346,7 @@ describe('modules/platform/gitlab/index', () => {
         const scope = httpMock
           .scope(gitlabApiHost)
           .get(
-            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
           )
           .reply(404);
 
@@ -1358,7 +1358,7 @@ describe('modules/platform/gitlab/index', () => {
         const scope = httpMock
           .scope(gitlabApiHost)
           .get(
-            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
           )
           .reply(200, { reviewers: existingReviewers })
           .get('/api/v4/users?username=someuser')
@@ -1376,7 +1376,7 @@ describe('modules/platform/gitlab/index', () => {
         const scope = httpMock
           .scope(gitlabApiHost)
           .get(
-            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
           )
           .reply(200, { reviewers: existingReviewers })
           .get('/api/v4/users?username=someuser')
@@ -1398,7 +1398,7 @@ describe('modules/platform/gitlab/index', () => {
         const scope = httpMock
           .scope(gitlabApiHost)
           .get(
-            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
           )
           .reply(200, { reviewers: existingReviewers })
           .get('/api/v4/users?username=someuser')
@@ -1418,7 +1418,7 @@ describe('modules/platform/gitlab/index', () => {
         const scope = httpMock
           .scope(gitlabApiHost)
           .get(
-            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
           )
           .reply(200, { reviewers: existingReviewers })
           .get('/api/v4/users?username=someuser')
@@ -1438,7 +1438,7 @@ describe('modules/platform/gitlab/index', () => {
         const scope = httpMock
           .scope(gitlabApiHost)
           .get(
-            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+            '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
           )
           .reply(200, { reviewers: existingReviewers })
           .get('/api/v4/users?username=someuser')
@@ -1467,7 +1467,7 @@ describe('modules/platform/gitlab/index', () => {
           number: 42,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1483,7 +1483,7 @@ describe('modules/platform/gitlab/index', () => {
           number: 42,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1497,7 +1497,7 @@ describe('modules/platform/gitlab/index', () => {
           number: 42,
           topic: 'some-subject',
           content: 'some\ncontent',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1511,7 +1511,7 @@ describe('modules/platform/gitlab/index', () => {
           number: 42,
           topic: null,
           content: '!merge',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -1529,7 +1529,7 @@ describe('modules/platform/gitlab/index', () => {
           type: 'by-topic',
           number: 42,
           topic: 'some-subject',
-        })
+        }),
       ).toResolve();
     });
 
@@ -1545,7 +1545,7 @@ describe('modules/platform/gitlab/index', () => {
           type: 'by-content',
           number: 42,
           content: 'some-body',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -1555,7 +1555,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -1575,7 +1575,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -1596,7 +1596,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -1618,7 +1618,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -1639,7 +1639,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -1660,7 +1660,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -1811,7 +1811,7 @@ describe('modules/platform/gitlab/index', () => {
           platformOptions: {
             usePlatformAutomerge: true,
           },
-        })
+        }),
       ).toMatchInlineSnapshot(`
         {
           "id": 1,
@@ -1926,7 +1926,7 @@ describe('modules/platform/gitlab/index', () => {
             usePlatformAutomerge: true,
             gitLabIgnoreApprovals: true,
           },
-        })
+        }),
       ).toMatchInlineSnapshot(`
         {
           "id": 1,
@@ -1971,7 +1971,7 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .put(
-          '/api/v4/projects/undefined/merge_requests/12345/approval_rules/50005'
+          '/api/v4/projects/undefined/merge_requests/12345/approval_rules/50005',
         )
         .reply(200);
       expect(
@@ -1985,7 +1985,7 @@ describe('modules/platform/gitlab/index', () => {
             usePlatformAutomerge: true,
             gitLabIgnoreApprovals: true,
           },
-        })
+        }),
       ).toStrictEqual({
         id: 1,
         iid: 12345,
@@ -2033,11 +2033,11 @@ describe('modules/platform/gitlab/index', () => {
           },
         ])
         .delete(
-          '/api/v4/projects/undefined/merge_requests/12345/approval_rules/50006'
+          '/api/v4/projects/undefined/merge_requests/12345/approval_rules/50006',
         )
         .reply(200)
         .delete(
-          '/api/v4/projects/undefined/merge_requests/12345/approval_rules/50007'
+          '/api/v4/projects/undefined/merge_requests/12345/approval_rules/50007',
         )
         .reply(200)
         .post('/api/v4/projects/undefined/merge_requests/12345/approval_rules')
@@ -2053,7 +2053,7 @@ describe('modules/platform/gitlab/index', () => {
             usePlatformAutomerge: true,
             gitLabIgnoreApprovals: true,
           },
-        })
+        }),
       ).toStrictEqual({
         id: 1,
         iid: 12345,
@@ -2102,7 +2102,7 @@ describe('modules/platform/gitlab/index', () => {
             usePlatformAutomerge: true,
             gitLabIgnoreApprovals: true,
           },
-        })
+        }),
       ).toMatchInlineSnapshot(`
         {
           "id": 1,
@@ -2153,7 +2153,7 @@ describe('modules/platform/gitlab/index', () => {
             usePlatformAutomerge: true,
             gitLabIgnoreApprovals: true,
           },
-        })
+        }),
       ).toMatchInlineSnapshot(`
         {
           "id": 1,
@@ -2187,7 +2187,7 @@ describe('modules/platform/gitlab/index', () => {
           platformOptions: {
             autoApprove: true,
           },
-        })
+        }),
       ).toStrictEqual({
         id: 1,
         iid: 12345,
@@ -2219,7 +2219,7 @@ describe('modules/platform/gitlab/index', () => {
           platformOptions: {
             autoApprove: true,
           },
-        })
+        }),
       ).toResolve();
     });
   });
@@ -2229,7 +2229,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1'
+          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2252,7 +2252,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1'
+          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2275,7 +2275,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1'
+          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2298,7 +2298,7 @@ describe('modules/platform/gitlab/index', () => {
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/merge_requests/12345?include_diverged_commits_count=1'
+          '/api/v4/projects/some%2Frepo/merge_requests/12345?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2322,7 +2322,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1'
+          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2349,7 +2349,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1'
+          '/api/v4/projects/undefined/merge_requests/12345?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2392,7 +2392,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -2405,7 +2405,7 @@ describe('modules/platform/gitlab/index', () => {
         .put('/api/v4/projects/undefined/merge_requests/1')
         .reply(200);
       await expect(
-        gitlab.updatePr({ number: 1, prTitle: 'title', prBody: 'body' })
+        gitlab.updatePr({ number: 1, prTitle: 'title', prBody: 'body' }),
       ).toResolve();
     });
 
@@ -2414,7 +2414,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -2427,7 +2427,7 @@ describe('modules/platform/gitlab/index', () => {
         .put('/api/v4/projects/undefined/merge_requests/1')
         .reply(200);
       await expect(
-        gitlab.updatePr({ number: 1, prTitle: 'title', prBody: 'body' })
+        gitlab.updatePr({ number: 1, prTitle: 'title', prBody: 'body' }),
       ).toResolve();
     });
 
@@ -2436,7 +2436,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -2449,7 +2449,7 @@ describe('modules/platform/gitlab/index', () => {
         .put('/api/v4/projects/undefined/merge_requests/1')
         .reply(200);
       await expect(
-        gitlab.updatePr({ number: 1, prTitle: 'title', prBody: 'body' })
+        gitlab.updatePr({ number: 1, prTitle: 'title', prBody: 'body' }),
       ).toResolve();
     });
 
@@ -2458,7 +2458,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -2478,7 +2478,7 @@ describe('modules/platform/gitlab/index', () => {
           prBody: 'body',
           state: 'closed',
           targetBranch: 'branch-b',
-        })
+        }),
       ).toResolve();
     });
 
@@ -2487,7 +2487,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -2509,7 +2509,7 @@ describe('modules/platform/gitlab/index', () => {
           platformOptions: {
             autoApprove: true,
           },
-        })
+        }),
       ).toResolve();
     });
 
@@ -2518,7 +2518,7 @@ describe('modules/platform/gitlab/index', () => {
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me'
+          '/api/v4/projects/undefined/merge_requests?per_page=100&scope=created_by_me',
         )
         .reply(200, [
           {
@@ -2536,7 +2536,7 @@ describe('modules/platform/gitlab/index', () => {
           prTitle: 'title',
           prBody: 'body',
           state: 'closed',
-        })
+        }),
       ).toResolve();
     });
   });
@@ -2550,7 +2550,7 @@ describe('modules/platform/gitlab/index', () => {
       expect(
         await gitlab.mergePr({
           id: 1,
-        })
+        }),
       ).toBeTrue();
     });
   });
@@ -2569,15 +2569,17 @@ These updates have all been created already. Click a checkbox below to force a r
   describe('massageMarkdown(input)', () => {
     it('strips invalid unicode null characters', () => {
       expect(
-        gitlab.massageMarkdown("The source contains 'Ruby\u0000' at: 2.7.6.219")
+        gitlab.massageMarkdown(
+          "The source contains 'Ruby\u0000' at: 2.7.6.219",
+        ),
       ).toBe("The source contains 'Ruby' at: 2.7.6.219");
     });
 
     it('replaces PR with MR including pluralization', () => {
       expect(
         gitlab.massageMarkdown(
-          'A Pull Request is a PR, multiple Pull Requests are PRs.'
-        )
+          'A Pull Request is a PR, multiple Pull Requests are PRs.',
+        ),
       ).toBe('A Merge Request is a MR, multiple Merge Requests are MRs.');
     });
 
@@ -2621,7 +2623,7 @@ These updates have all been created already. Click a checkbox below to force a r
       httpMock
         .scope(gitlabApiHost)
         .get(
-          '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1'
+          '/api/v4/projects/undefined/merge_requests/42?include_diverged_commits_count=1',
         )
         .reply(200, {
           id: 1,
@@ -2645,7 +2647,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD'
+          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD',
         )
         .reply(200, {
           content: '',
@@ -2659,7 +2661,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD'
+          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD',
         )
         .reply(200, {
           content: toBase64(JSON.stringify(data)),
@@ -2678,7 +2680,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json5?ref=HEAD'
+          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json5?ref=HEAD',
         )
         .reply(200, {
           content: toBase64(json5Data),
@@ -2692,7 +2694,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/different%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD'
+          '/api/v4/projects/different%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD',
         )
         .reply(200, {
           content: toBase64(JSON.stringify(data)),
@@ -2706,7 +2708,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=dev'
+          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=dev',
         )
         .reply(200, {
           content: toBase64(JSON.stringify(data)),
@@ -2714,7 +2716,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const res = await gitlab.getJsonFile(
         'dir/file.json',
         'some%2Frepo',
-        'dev'
+        'dev',
       );
       expect(res).toEqual(data);
     });
@@ -2723,7 +2725,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD'
+          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD',
         )
         .reply(200, {
           content: toBase64('!@#'),
@@ -2735,7 +2737,7 @@ These updates have all been created already. Click a checkbox below to force a r
       const scope = await initRepo();
       scope
         .get(
-          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD'
+          '/api/v4/projects/some%2Frepo/repository/files/dir%2Ffile.json?ref=HEAD',
         )
         .replyWithError('some error');
       await expect(gitlab.getJsonFile('dir/file.json')).rejects.toThrow();
@@ -2821,7 +2823,7 @@ These updates have all been created already. Click a checkbox below to force a r
       expect(expandedGroupMembers).toEqual(['group']);
       expect(logger.debug).toHaveBeenCalledWith(
         expect.any(Object),
-        'Unable to fetch group'
+        'Unable to fetch group',
       );
     });
 
diff --git a/lib/modules/platform/gitlab/index.ts b/lib/modules/platform/gitlab/index.ts
index ce4bd3ec534cf888431ef6816dfa32304fbcd7af..70db40a1c139832f1bf741ac8f28642721246f18 100644
--- a/lib/modules/platform/gitlab/index.ts
+++ b/lib/modules/platform/gitlab/index.ts
@@ -145,7 +145,7 @@ export async function initPlatform({
   } catch (err) {
     logger.debug(
       { err },
-      'Error authenticating with GitLab. Check that your token includes "api" permissions'
+      'Error authenticating with GitLab. Check that your token includes "api" permissions',
     );
     throw new Error('Init: Authentication failure');
   }
@@ -179,9 +179,9 @@ export async function getRepos(config?: AutodiscoverConfig): Promise<string[]> {
       ...config.namespaces.map(
         (namespace) =>
           `groups/${urlEscape(namespace)}/projects?${getQueryString(
-            queryParams
-          )}`
-      )
+            queryParams,
+          )}`,
+      ),
     );
   } else {
     urls.push('projects?' + getQueryString(queryParams));
@@ -197,7 +197,7 @@ export async function getRepos(config?: AutodiscoverConfig): Promise<string[]> {
           }),
         {
           concurrency: 2,
-        }
+        },
       )
     ).flatMap((response) => response.body);
 
@@ -220,7 +220,7 @@ function urlEscape(str: string | undefined): string | undefined {
 export async function getRawFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<string | null> {
   const escapedFileName = urlEscape(fileName);
   const repo = urlEscape(repoName) ?? config.repository;
@@ -236,7 +236,7 @@ export async function getRawFile(
 export async function getJsonFile(
   fileName: string,
   repoName?: string,
-  branchOrTag?: string
+  branchOrTag?: string,
 ): Promise<any> {
   const raw = await getRawFile(fileName, repoName, branchOrTag);
   return parseJson(raw, fileName);
@@ -245,7 +245,7 @@ export async function getJsonFile(
 function getRepoUrl(
   repository: string,
   gitUrl: GitUrlOption | undefined,
-  res: HttpResponse<RepoResponse>
+  res: HttpResponse<RepoResponse>,
 ): string {
   if (gitUrl === 'ssh') {
     if (!res.body.ssh_url_to_repo) {
@@ -270,7 +270,7 @@ function getRepoUrl(
     }
     if (process.env.GITLAB_IGNORE_REPO_URL) {
       logger.warn(
-        'GITLAB_IGNORE_REPO_URL environment variable is deprecated. Please use "gitUrl" option.'
+        'GITLAB_IGNORE_REPO_URL environment variable is deprecated. Please use "gitUrl" option.',
       );
     }
 
@@ -314,30 +314,30 @@ export async function initRepo({
   let res: HttpResponse<RepoResponse>;
   try {
     res = await gitlabApi.getJson<RepoResponse>(
-      `projects/${config.repository}`
+      `projects/${config.repository}`,
     );
     if (res.body.archived) {
       logger.debug(
-        'Repository is archived - throwing error to abort renovation'
+        'Repository is archived - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_ARCHIVED);
     }
 
     if (res.body.mirror && includeMirrors !== true) {
       logger.debug(
-        'Repository is a mirror - throwing error to abort renovation'
+        'Repository is a mirror - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_MIRRORED);
     }
     if (res.body.repository_access_level === 'disabled') {
       logger.debug(
-        'Repository portion of project is disabled - throwing error to abort renovation'
+        'Repository portion of project is disabled - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_DISABLED);
     }
     if (res.body.merge_requests_access_level === 'disabled') {
       logger.debug(
-        'MRs are disabled for the project - throwing error to abort renovation'
+        'MRs are disabled for the project - throwing error to abort renovation',
       );
       throw new Error(REPOSITORY_DISABLED);
     }
@@ -416,7 +416,7 @@ interface GitlabBranchStatus {
 
 async function getStatus(
   branchName: string,
-  useCache = true
+  useCache = true,
 ): Promise<GitlabBranchStatus[]> {
   const branchSha = git.getBranchCommit(branchName);
   try {
@@ -456,7 +456,7 @@ const gitlabToRenovateStatusMapping: Record<BranchState, BranchStatus> = {
 // Returns the combined status for a branch.
 export async function getBranchStatus(
   branchName: string,
-  internalChecksAsSuccess: boolean
+  internalChecksAsSuccess: boolean,
 ): Promise<BranchStatus> {
   logger.debug(`getBranchStatus(${branchName})`);
 
@@ -469,7 +469,7 @@ export async function getBranchStatus(
   if (!is.array(branchStatuses)) {
     logger.warn(
       { branchName, branchStatuses },
-      'Empty or unexpected branch statuses'
+      'Empty or unexpected branch statuses',
     );
     return 'yellow';
   }
@@ -493,11 +493,11 @@ export async function getBranchStatus(
     branchStatuses.every(
       (check) =>
         check.name?.startsWith('renovate/') &&
-        gitlabToRenovateStatusMapping[check.status] === 'green'
+        gitlabToRenovateStatusMapping[check.status] === 'green',
     )
   ) {
     logger.debug(
-      'Successful checks are all internal renovate/ checks, so returning "pending" branch status'
+      'Successful checks are all internal renovate/ checks, so returning "pending" branch status',
     );
     return 'yellow';
   }
@@ -512,7 +512,7 @@ export async function getBranchStatus(
         if (!mappedStatus) {
           logger.warn(
             { check },
-            'Could not map GitLab check.status to Renovate status'
+            'Could not map GitLab check.status to Renovate status',
           );
           mappedStatus = 'yellow';
         }
@@ -566,7 +566,7 @@ async function fetchPrList(): Promise<Pr[]> {
         title: pr.title,
         state: pr.state === 'opened' ? 'open' : pr.state,
         createdAt: pr.created_at,
-      })
+      }),
     );
   } catch (err) /* istanbul ignore next */ {
     logger.debug({ err }, 'Error fetching PR list');
@@ -598,17 +598,18 @@ async function ignoreApprovals(pr: number): Promise<void> {
     const ruleName = 'renovateIgnoreApprovals';
 
     const existingAnyApproverRule = rules?.find(
-      ({ rule_type }) => rule_type === 'any_approver'
+      ({ rule_type }) => rule_type === 'any_approver',
     );
     const existingRegularApproverRules = rules?.filter(
-      ({ rule_type, name }) => rule_type !== 'any_approver' && name !== ruleName
+      ({ rule_type, name }) =>
+        rule_type !== 'any_approver' && name !== ruleName,
     );
 
     if (existingRegularApproverRules?.length) {
       await p.all(
         existingRegularApproverRules.map((rule) => async (): Promise<void> => {
           await gitlabApi.deleteJson(`${url}/${rule.id}`);
-        })
+        }),
       );
     }
 
@@ -635,7 +636,7 @@ async function ignoreApprovals(pr: number): Promise<void> {
 
 async function tryPrAutomerge(
   pr: number,
-  platformOptions: PlatformPrOptions | undefined
+  platformOptions: PlatformPrOptions | undefined,
 ): Promise<void> {
   if (platformOptions?.usePlatformAutomerge) {
     try {
@@ -666,7 +667,7 @@ async function tryPrAutomerge(
             should_remove_source_branch: true,
             merge_when_pipeline_succeeds: true,
           },
-        }
+        },
       );
     } catch (err) /* istanbul ignore next */ {
       logger.debug({ err }, 'Automerge on PR creation failed');
@@ -677,7 +678,7 @@ async function tryPrAutomerge(
 async function approvePr(pr: number): Promise<void> {
   try {
     await gitlabApi.postJson(
-      `projects/${config.repository}/merge_requests/${pr}/approve`
+      `projects/${config.repository}/merge_requests/${pr}/approve`,
     );
   } catch (err) {
     logger.warn({ err }, 'GitLab: Error approving merge request');
@@ -711,7 +712,7 @@ export async function createPr({
         labels: (labels ?? []).join(','),
         squash: config.squash,
       },
-    }
+    },
   );
   const pr = res.body;
   pr.number = pr.iid;
@@ -781,7 +782,7 @@ export async function updatePr({
 
   await gitlabApi.putJson(
     `projects/${config.repository}/merge_requests/${iid}`,
-    { body }
+    { body },
   );
 
   if (platformOptions?.autoApprove) {
@@ -799,7 +800,7 @@ export async function mergePr({ id }: MergePRConfig): Promise<boolean> {
         body: {
           should_remove_source_branch: true,
         },
-      }
+      },
     );
     return true;
   } catch (err) /* istanbul ignore next */ {
@@ -829,7 +830,7 @@ export function massageMarkdown(input: string): string {
   if (semver.lt(defaults.version, '13.4.0')) {
     logger.debug(
       { version: defaults.version },
-      'GitLab versions earlier than 13.4 have issues with long descriptions, truncating to 25K characters'
+      'GitLab versions earlier than 13.4 have issues with long descriptions, truncating to 25K characters',
     );
 
     desc = smartTruncate(desc, 25000);
@@ -864,14 +865,14 @@ export async function findPr({
       (p: { sourceBranch: string; title: string; state: string }) =>
         p.sourceBranch === branchName &&
         (!prTitle || p.title.toUpperCase() === prTitle.toUpperCase()) &&
-        matchesState(p.state, state)
+        matchesState(p.state, state),
     ) ?? null
   );
 }
 
 // Returns the Pull Request for a branch. Null if not exists.
 export async function getBranchPr(
-  branchName: string
+  branchName: string,
 ): Promise<GitlabPr | null> {
   logger.debug(`getBranchPr(${branchName})`);
   const existingPr = await findPr({
@@ -883,7 +884,7 @@ export async function getBranchPr(
 
 export async function getBranchStatusCheck(
   branchName: string,
-  context: string
+  context: string,
 ): Promise<BranchStatus | null> {
   // cache-bust in case we have rebased
   const res = await getStatus(branchName, false);
@@ -938,7 +939,7 @@ export async function setBranchStatus({
     await setTimeout(
       process.env.RENOVATE_X_GITLAB_BRANCH_STATUS_DELAY
         ? parseInt(process.env.RENOVATE_X_GITLAB_BRANCH_STATUS_DELAY, 10)
-        : 1000
+        : 1000,
     );
 
     await gitlabApi.postJson(url, { body: options });
@@ -948,7 +949,7 @@ export async function setBranchStatus({
   } catch (err) /* istanbul ignore next */ {
     if (
       err.body?.message?.startsWith(
-        'Cannot transition status via :enqueue from :pending'
+        'Cannot transition status via :enqueue from :pending',
       )
     ) {
       // https://gitlab.com/gitlab-org/gitlab-foss/issues/25807
@@ -991,13 +992,13 @@ export async function getIssueList(): Promise<GitlabIssue[]> {
 
 export async function getIssue(
   number: number,
-  useCache = true
+  useCache = true,
 ): Promise<Issue | null> {
   try {
     const issueBody = (
       await gitlabApi.getJson<{ description: string }>(
         `projects/${config.repository}/issues/${number}`,
-        { memCache: useCache }
+        { memCache: useCache },
       )
     ).body.description;
     return {
@@ -1043,7 +1044,7 @@ export async function ensureIssue({
     if (issue) {
       const existingDescription = (
         await gitlabApi.getJson<{ description: string }>(
-          `projects/${config.repository}/issues/${issue.iid}`
+          `projects/${config.repository}/issues/${issue.iid}`,
         )
       ).body.description;
       if (issue.title !== title || existingDescription !== description) {
@@ -1057,7 +1058,7 @@ export async function ensureIssue({
               labels: (labels ?? issue.labels ?? []).join(','),
               confidential: confidential ?? false,
             },
-          }
+          },
         );
         return 'updated';
       }
@@ -1095,7 +1096,7 @@ export async function ensureIssueClosing(title: string): Promise<void> {
         `projects/${config.repository}/issues/${issue.iid}`,
         {
           body: { state_event: 'close' },
-        }
+        },
       );
     }
   }
@@ -1103,7 +1104,7 @@ export async function ensureIssueClosing(title: string): Promise<void> {
 
 export async function addAssignees(
   iid: number,
-  assignees: string[]
+  assignees: string[],
 ): Promise<void> {
   try {
     logger.debug(`Adding assignees '${assignees.join(', ')}' to #${iid}`);
@@ -1125,14 +1126,14 @@ export async function addAssignees(
 
 export async function addReviewers(
   iid: number,
-  reviewers: string[]
+  reviewers: string[],
 ): Promise<void> {
   logger.debug(`Adding reviewers '${reviewers.join(', ')}' to #${iid}`);
 
   if (semver.lt(defaults.version, '13.9.0')) {
     logger.warn(
       { version: defaults.version },
-      'Adding reviewers is only available in GitLab 13.9 and onwards'
+      'Adding reviewers is only available in GitLab 13.9 and onwards',
     );
     return;
   }
@@ -1164,7 +1165,7 @@ export async function addReviewers(
             // Unable to fetch userId, try resolve as a group
             return getMemberUserIDs(r);
           }
-        })
+        }),
       )
     ).flat();
   } catch (err) {
@@ -1187,7 +1188,7 @@ export async function addReviewers(
 
 export async function deleteLabel(
   issueNo: number,
-  label: string
+  label: string,
 ): Promise<void> {
   logger.debug(`Deleting label ${label} from #${issueNo}`);
   try {
@@ -1199,7 +1200,7 @@ export async function deleteLabel(
       `projects/${config.repository}/merge_requests/${issueNo}`,
       {
         body: { labels },
-      }
+      },
     );
   } catch (err) /* istanbul ignore next */ {
     logger.warn({ err, issueNo, label }, 'Failed to delete label');
@@ -1223,31 +1224,31 @@ async function addComment(issueNo: number, body: string): Promise<void> {
     `projects/${config.repository}/merge_requests/${issueNo}/notes`,
     {
       body: { body },
-    }
+    },
   );
 }
 
 async function editComment(
   issueNo: number,
   commentId: number,
-  body: string
+  body: string,
 ): Promise<void> {
   // PUT projects/:owner/:repo/merge_requests/:number/notes/:id
   await gitlabApi.putJson(
     `projects/${config.repository}/merge_requests/${issueNo}/notes/${commentId}`,
     {
       body: { body },
-    }
+    },
   );
 }
 
 async function deleteComment(
   issueNo: number,
-  commentId: number
+  commentId: number,
 ): Promise<void> {
   // DELETE projects/:owner/:repo/merge_requests/:number/notes/:id
   await gitlabApi.deleteJson(
-    `projects/${config.repository}/merge_requests/${issueNo}/notes/${commentId}`
+    `projects/${config.repository}/merge_requests/${issueNo}/notes/${commentId}`,
   );
 }
 
@@ -1293,13 +1294,13 @@ export async function ensureComment({
     await addComment(number, body);
     logger.debug(
       { repository: config.repository, issueNo: number },
-      'Added comment'
+      'Added comment',
     );
   } else if (commentNeedsUpdating) {
     await editComment(number, commentId, body);
     logger.debug(
       { repository: config.repository, issueNo: number },
-      'Updated comment'
+      'Updated comment',
     );
   } else {
     logger.debug('Comment is already update-to-date');
@@ -1308,7 +1309,7 @@ export async function ensureComment({
 }
 
 export async function ensureCommentRemoval(
-  deleteConfig: EnsureCommentRemovalConfig
+  deleteConfig: EnsureCommentRemovalConfig,
 ): Promise<void> {
   const { number: issueNo } = deleteConfig;
   const key =
@@ -1336,7 +1337,7 @@ export async function ensureCommentRemoval(
 }
 
 export async function filterUnavailableUsers(
-  users: string[]
+  users: string[],
 ): Promise<string[]> {
   const filteredUsers: string[] = [];
   for (const user of users) {
@@ -1348,7 +1349,7 @@ export async function filterUnavailableUsers(
 }
 
 export async function expandGroupMembers(
-  reviewersOrAssignees: string[]
+  reviewersOrAssignees: string[],
 ): Promise<string[]> {
   const expandedReviewersOrAssignees: string[] = [];
   const normalizedReviewersOrAssigneesWithoutEmails: string[] = [];
@@ -1362,7 +1363,7 @@ export async function expandGroupMembers(
 
     // Normalize the potential group names before passing to Gitlab API
     normalizedReviewersOrAssigneesWithoutEmails.push(
-      noLeadingAtSymbol(reviewerOrAssignee)
+      noLeadingAtSymbol(reviewerOrAssignee),
     );
   }
 
diff --git a/lib/modules/platform/gitlab/merge-request.ts b/lib/modules/platform/gitlab/merge-request.ts
index 23e3b0133b0d9ec008abb7a6490d35970e41f2e6..91eb6bf88d1a5d7696d49f9d6064f09e0d9941e7 100644
--- a/lib/modules/platform/gitlab/merge-request.ts
+++ b/lib/modules/platform/gitlab/merge-request.ts
@@ -4,7 +4,7 @@ import type { GitLabMergeRequest, UpdateMergeRequest } from './types';
 
 export async function getMR(
   repository: string,
-  iid: number
+  iid: number,
 ): Promise<GitLabMergeRequest> {
   logger.debug(`getMR(${iid})`);
 
@@ -15,7 +15,7 @@ export async function getMR(
 export async function updateMR(
   repository: string,
   iid: number,
-  data: UpdateMergeRequest
+  data: UpdateMergeRequest,
 ): Promise<void> {
   logger.debug(`updateMR(${iid})`);
 
diff --git a/lib/modules/platform/index.spec.ts b/lib/modules/platform/index.spec.ts
index fa5587877db6ce88ae1c8592612bf4957102cbdf..3b92123eabcad41b5bc4791ab273b6eca3c04f2d 100644
--- a/lib/modules/platform/index.spec.ts
+++ b/lib/modules/platform/index.spec.ts
@@ -27,7 +27,7 @@ describe('modules/platform/index', () => {
     const loadedMgr = loadModules(
       __dirname,
       undefined,
-      (m) => !['utils', 'git'].includes(m)
+      (m) => !['utils', 'git'].includes(m),
     );
     expect(Array.from(platforms.keys())).toEqual(Object.keys(loadedMgr));
 
@@ -39,7 +39,7 @@ describe('modules/platform/index', () => {
 
   it('throws if no platform', () => {
     expect(() => platform.platform.initPlatform({})).toThrow(
-      PLATFORM_NOT_FOUND
+      PLATFORM_NOT_FOUND,
     );
   });
 
diff --git a/lib/modules/platform/index.ts b/lib/modules/platform/index.ts
index d06cc3fdf4470830ccea79f4bd075f689f133d77..3260a5081d991b88fece822676dda51dfef3336b 100644
--- a/lib/modules/platform/index.ts
+++ b/lib/modules/platform/index.ts
@@ -32,8 +32,8 @@ export function setPlatformApi(name: PlatformId): void {
   if (!platforms.has(name)) {
     throw new Error(
       `Init: Platform "${name}" not found. Must be one of: ${getPlatformList().join(
-        ', '
-      )}`
+        ', ',
+      )}`,
     );
   }
   _platform = platforms.get(name);
diff --git a/lib/modules/platform/pr-body.spec.ts b/lib/modules/platform/pr-body.spec.ts
index f25cc491c908ab0496528f644c754f60c4f11a37..10eec4c043e1f302413f7a4c3fb9ac2cb76e2eb0 100644
--- a/lib/modules/platform/pr-body.spec.ts
+++ b/lib/modules/platform/pr-body.spec.ts
@@ -15,8 +15,8 @@ describe('modules/platform/pr-body', () => {
       });
       expect(
         getPrBodyStruct(
-          'something \n<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiAiMS4yLjEiLCJ1cGRhdGVkSW5WZXIiOiAiMS4yLjMifQ==-->'
-        )
+          'something \n<!--renovate-debug:eyJjcmVhdGVkSW5WZXIiOiAiMS4yLjEiLCJ1cGRhdGVkSW5WZXIiOiAiMS4yLjMifQ==-->',
+        ),
       ).toEqual({
         hash: '3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb',
         debugData: {
@@ -29,8 +29,8 @@ describe('modules/platform/pr-body', () => {
     it('checks if we reach warning', () => {
       expect(
         getPrBodyStruct(
-          'something \n<!--renovate-debug:some-wrong-data-ABCDEFGHIJKLMNOP-->'
-        )
+          'something \n<!--renovate-debug:some-wrong-data-ABCDEFGHIJKLMNOP-->',
+        ),
       ).toEqual({
         hash: '3fc9b689459d738f8c88a3a48aa9e33542016b7a4052e001aaa536fca74813cb',
       });
@@ -38,13 +38,13 @@ describe('modules/platform/pr-body', () => {
 
     it('hashes ignoring debug info', () => {
       expect(hashBody('foo\n<!--renovate-debug:123-->\n')).toEqual(
-        hashBody('foo')
+        hashBody('foo'),
       );
     });
 
     it('hashes ignoring reviewable section', () => {
       expect(hashBody('foo<!-- Reviewable:start -->bar')).toEqual(
-        hashBody('foo')
+        hashBody('foo'),
       );
     });
 
diff --git a/lib/modules/platform/pr-body.ts b/lib/modules/platform/pr-body.ts
index 97dc74b0282c440a5d9fd0e971b200d0e94064be..f50d1baa00909966d52696b925c86809778df604 100644
--- a/lib/modules/platform/pr-body.ts
+++ b/lib/modules/platform/pr-body.ts
@@ -7,11 +7,11 @@ import { fromBase64 } from '../../util/string';
 import type { PrBodyStruct } from './types';
 
 export const prDebugDataRe = regEx(
-  /\n?<!--renovate-debug:(?<payload>.*?)-->\n?/
+  /\n?<!--renovate-debug:(?<payload>.*?)-->\n?/,
 );
 
 const renovateConfigHashRe = regEx(
-  /\n?<!--renovate-config-hash:(?<payload>.*?)-->\n?/
+  /\n?<!--renovate-config-hash:(?<payload>.*?)-->\n?/,
 );
 
 const prCheckboxRe = regEx(/- (?<checkbox>\[[\sx]]) <!-- rebase-check -->/);
@@ -54,7 +54,7 @@ export function getRenovateConfigHashPayload(body: string): string | undefined {
 }
 
 export function getPrBodyStruct(
-  input: string | undefined | null
+  input: string | undefined | null,
 ): PrBodyStruct {
   const body = input ?? '';
   const hash = hashBody(body);
diff --git a/lib/modules/platform/scm.spec.ts b/lib/modules/platform/scm.spec.ts
index f93c49385e8c776a753037c3f94fedddf6d18d34..0d9ad3af4ba01918183f935208d687e8f96cb87a 100644
--- a/lib/modules/platform/scm.spec.ts
+++ b/lib/modules/platform/scm.spec.ts
@@ -13,7 +13,7 @@ describe('modules/platform/scm', () => {
 
   it('unknown platform', () => {
     expect(() => setPlatformScmApi('unknown' as PlatformId)).toThrow(
-      PLATFORM_NOT_FOUND
+      PLATFORM_NOT_FOUND,
     );
   });
 
@@ -33,6 +33,6 @@ describe('modules/platform/scm', () => {
       setPlatformScmApi(platform);
       await scm.isBranchBehindBase('abc', 'main');
       expect(git.isBranchBehindBase).toHaveBeenCalledTimes(1);
-    }
+    },
   );
 });
diff --git a/lib/modules/platform/types.ts b/lib/modules/platform/types.ts
index 28a1a699ef3bbad5ef26b14eac6e46654bb56040..9f8cc69cac145356b4540de87c6011a7ae182f4b 100644
--- a/lib/modules/platform/types.ts
+++ b/lib/modules/platform/types.ts
@@ -184,18 +184,18 @@ export interface Platform {
   getRawFile(
     fileName: string,
     repoName?: string,
-    branchOrTag?: string
+    branchOrTag?: string,
   ): Promise<string | null>;
   getJsonFile(
     fileName: string,
     repoName?: string,
-    branchOrTag?: string
+    branchOrTag?: string,
   ): Promise<any>;
   initRepo(config: RepoParams): Promise<RepoResult>;
   getPrList(): Promise<Pr[]>;
   ensureIssueClosing(title: string): Promise<void>;
   ensureIssue(
-    issueConfig: EnsureIssueConfig
+    issueConfig: EnsureIssueConfig,
   ): Promise<EnsureIssueResult | null>;
   massageMarkdown(prBody: string): string;
   updatePr(prConfig: UpdatePrConfig): Promise<void>;
@@ -210,12 +210,12 @@ export interface Platform {
   getBranchStatusCheck(
     branchName: string,
     // TODO: can be undefined or null ? #22198
-    context: string | null | undefined
+    context: string | null | undefined,
   ): Promise<BranchStatus | null>;
   ensureCommentRemoval(
     ensureCommentRemoval:
       | EnsureCommentRemovalConfigByTopic
-      | EnsureCommentRemovalConfigByContent
+      | EnsureCommentRemovalConfigByContent,
   ): Promise<void>;
   ensureComment(ensureComment: EnsureCommentConfig): Promise<boolean>;
   getPr(number: number): Promise<Pr | null>;
@@ -223,7 +223,7 @@ export interface Platform {
   refreshPr?(number: number): Promise<void>;
   getBranchStatus(
     branchName: string,
-    internalChecksAsSuccess: boolean
+    internalChecksAsSuccess: boolean,
   ): Promise<BranchStatus>;
   getBranchPr(branchName: string, targetBranch?: string): Promise<Pr | null>;
   initPlatform(config: PlatformParams): Promise<PlatformResult>;
diff --git a/lib/modules/platform/util.spec.ts b/lib/modules/platform/util.spec.ts
index 2299cf047af06e7775349c973e517045cdaa99e7..2e9e55bbaace09aa870f44c41c71f92e155bbd02 100644
--- a/lib/modules/platform/util.spec.ts
+++ b/lib/modules/platform/util.spec.ts
@@ -13,7 +13,7 @@ describe('modules/platform/util', () => {
       '("$repoId", "$endpoint") === $fingerprint',
       ({ repoId, endpoint, fingerprint }) => {
         expect(repoFingerprint(repoId, endpoint)).toBe(fingerprint);
-      }
+      },
     );
   });
 
diff --git a/lib/modules/platform/util.ts b/lib/modules/platform/util.ts
index 7cb29a951e5c36a6a19774f9347cafecec4ac0cc..c5f6be3ff5229ae6d71c9471407fb4479178e5ef 100644
--- a/lib/modules/platform/util.ts
+++ b/lib/modules/platform/util.ts
@@ -2,7 +2,7 @@ import { hash } from '../../util/hash';
 
 export function repoFingerprint(
   repoId: number | string,
-  endpoint: string | undefined
+  endpoint: string | undefined,
 ): string {
   const input = endpoint ? `${endpoint}::${repoId}` : `${repoId}`;
   const fingerprint = hash(input);
diff --git a/lib/modules/platform/utils/pr-body.ts b/lib/modules/platform/utils/pr-body.ts
index 4dcf7cc4c0b6498d1a128d30a36417d4e2bcfd9d..d6736bf9cddc72e6206606beee3b3ec2ccf992c7 100644
--- a/lib/modules/platform/utils/pr-body.ts
+++ b/lib/modules/platform/utils/pr-body.ts
@@ -2,7 +2,7 @@ import { regEx } from '../../../util/regex';
 
 const re = regEx(
   `(?<preNotes>.*### Release Notes)(?<releaseNotes>.*)### Configuration(?<postNotes>.*)`,
-  's'
+  's',
 );
 
 export function smartTruncate(input: string, len: number): string {
diff --git a/lib/modules/platform/utils/read-only-issue-body.spec.ts b/lib/modules/platform/utils/read-only-issue-body.spec.ts
index cf3db8457c9cc737db4511742696593cba1a1192..36043abc34b47cb5cb1afc300a7e104ae3563646 100644
--- a/lib/modules/platform/utils/read-only-issue-body.spec.ts
+++ b/lib/modules/platform/utils/read-only-issue-body.spec.ts
@@ -7,22 +7,22 @@ describe('modules/platform/utils/read-only-issue-body', () => {
   describe('.readOnlyIssueBody', () => {
     it('removes all checkbox formatting', () => {
       expect(readOnlyIssueBody(issueBody)).toEqual(
-        expect.not.stringContaining('[ ] <!--')
+        expect.not.stringContaining('[ ] <!--'),
       );
     });
 
     it('removes all checkbox-related instructions', () => {
       expect(readOnlyIssueBody(issueBody)).toEqual(
         expect.not.stringMatching(
-          /click (?:(?:on |)a|their|this) checkbox|check the box below/gi
-        )
+          /click (?:(?:on |)a|their|this) checkbox|check the box below/gi,
+        ),
       );
     });
 
     it('removes the create-all-rate-limited-prs', () => {
       const s = readOnlyIssueBody(issueBody);
       expect(s).toEqual(
-        expect.not.stringMatching('Create all rate-limited PRs at once')
+        expect.not.stringMatching('Create all rate-limited PRs at once'),
       );
     });
   });
diff --git a/lib/modules/versioning/aws-machine-image/index.spec.ts b/lib/modules/versioning/aws-machine-image/index.spec.ts
index 1f25f2b73f5d8eaeb8d1bd6240df89f8746a86a5..e6667a913fbd2059e6aaa894b1bc3338a40e50c3 100644
--- a/lib/modules/versioning/aws-machine-image/index.spec.ts
+++ b/lib/modules/versioning/aws-machine-image/index.spec.ts
@@ -42,7 +42,7 @@ describe('modules/versioning/aws-machine-image/index', () => {
   describe('isCompatible(version,range)', () => {
     it('should return true', () => {
       expect(
-        aws.isCompatible('ami-00e1b2c30011d4e5f', 'anything')
+        aws.isCompatible('ami-00e1b2c30011d4e5f', 'anything'),
       ).toBeTruthy();
     });
 
diff --git a/lib/modules/versioning/azure-rest-api/index.spec.ts b/lib/modules/versioning/azure-rest-api/index.spec.ts
index f556cae4e4f411493ca7974c17551c0b06ff18ee..df57edca2586a6cf81a45940957cdb3db2bd3b8a 100644
--- a/lib/modules/versioning/azure-rest-api/index.spec.ts
+++ b/lib/modules/versioning/azure-rest-api/index.spec.ts
@@ -98,7 +98,7 @@ describe('modules/versioning/azure-rest-api/index', () => {
     'equals("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(azureRestApi.equals(version, other)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -115,7 +115,7 @@ describe('modules/versioning/azure-rest-api/index', () => {
     'isGreaterThan("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(azureRestApi.isGreaterThan(version, other)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -132,6 +132,6 @@ describe('modules/versioning/azure-rest-api/index', () => {
     'sortVersions("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(azureRestApi.sortVersions(version, other)).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/azure-rest-api/index.ts b/lib/modules/versioning/azure-rest-api/index.ts
index 1d6436d0eebedf4bea592405062bcd65ba8ba9f8..9f36c3ecc020a5e9b36030930b7e5b0c7b274fde 100644
--- a/lib/modules/versioning/azure-rest-api/index.ts
+++ b/lib/modules/versioning/azure-rest-api/index.ts
@@ -12,7 +12,7 @@ export const urls = [
 export const supportsRanges = false;
 
 const AZURE_REST_API_VERSION_REGEX = regEx(
-  /^(?<year>\d{4})-(?<month>\d{2})-(?<day>\d{2})(?<prerelease>-[a-z]+)?$/
+  /^(?<year>\d{4})-(?<month>\d{2})-(?<day>\d{2})(?<prerelease>-[a-z]+)?$/,
 );
 
 class AzureRestApiVersioningApi extends GenericVersioningApi {
diff --git a/lib/modules/versioning/bazel-module/index.ts b/lib/modules/versioning/bazel-module/index.ts
index fbb7febbaa43ec280a502af0c8dc9068664e1531..51c389c46a0ecb087c51d63673622af49c0ab04c 100644
--- a/lib/modules/versioning/bazel-module/index.ts
+++ b/lib/modules/versioning/bazel-module/index.ts
@@ -59,7 +59,7 @@ function isLessThanRange(version: string, range: string): boolean {
  */
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const target = new BzlmodVersion(range);
   const result = versions.find((ver) => {
@@ -75,7 +75,7 @@ function getSatisfyingVersion(
  */
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return getSatisfyingVersion(versions, range);
 }
diff --git a/lib/modules/versioning/cargo/index.spec.ts b/lib/modules/versioning/cargo/index.spec.ts
index 0af20dad3485f806225284a5316d646a2f970ccb..7b474f42cd41773563c640df4e388a582836f5af 100644
--- a/lib/modules/versioning/cargo/index.spec.ts
+++ b/lib/modules/versioning/cargo/index.spec.ts
@@ -12,7 +12,7 @@ describe('modules/versioning/cargo/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(semver.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -23,7 +23,7 @@ describe('modules/versioning/cargo/index', () => {
     'getSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(semver.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -57,7 +57,7 @@ describe('modules/versioning/cargo/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(semver.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -71,7 +71,7 @@ describe('modules/versioning/cargo/index', () => {
     'minSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(semver.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -147,8 +147,8 @@ describe('modules/versioning/cargo/index', () => {
           rangeStrategy,
           currentVersion,
           newVersion,
-        })
+        }),
       ).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/cargo/index.ts b/lib/modules/versioning/cargo/index.ts
index 7508c5781be024ba0be0eb1b2d1669b8707e863f..8a2dfeb447d8198b1dc4d241d2a1242b8d54427d 100644
--- a/lib/modules/versioning/cargo/index.ts
+++ b/lib/modules/versioning/cargo/index.ts
@@ -70,14 +70,14 @@ const matches = (version: string, range: string): boolean =>
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.getSatisfyingVersion(versions, cargo2npm(range));
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.minSatisfyingVersion(versions, cargo2npm(range));
 }
@@ -116,7 +116,7 @@ function getNewValue({
   if (!newCargo) {
     logger.info(
       { currentValue, newSemver },
-      'Could not get cargo version from semver'
+      'Could not get cargo version from semver',
     );
     return currentValue;
   }
diff --git a/lib/modules/versioning/common.ts b/lib/modules/versioning/common.ts
index 241625833993292fe50895d608b1f9a48ece3773..4b037c809bcdc88eb716aeaa1264cd1c0bf93c9d 100644
--- a/lib/modules/versioning/common.ts
+++ b/lib/modules/versioning/common.ts
@@ -2,7 +2,7 @@ import { regEx } from '../../util/regex';
 import type { VersioningApi, VersioningApiConstructor } from './types';
 
 export function isVersioningApiConstructor(
-  obj: VersioningApi | VersioningApiConstructor
+  obj: VersioningApi | VersioningApiConstructor,
 ): obj is VersioningApiConstructor {
   return typeof obj === 'function';
 }
diff --git a/lib/modules/versioning/composer/index.spec.ts b/lib/modules/versioning/composer/index.spec.ts
index 2fbebc88ef45ab450e4027b362a7c7e2189eca15..56feb523462a647b172e5350cae81447f664b738 100644
--- a/lib/modules/versioning/composer/index.spec.ts
+++ b/lib/modules/versioning/composer/index.spec.ts
@@ -125,7 +125,7 @@ describe('modules/versioning/composer/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(semver.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -141,7 +141,7 @@ describe('modules/versioning/composer/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(semver.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -230,7 +230,7 @@ describe('modules/versioning/composer/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/composer/index.ts b/lib/modules/versioning/composer/index.ts
index a176856e35bcd46e7c9b39cd6789a2aefb14f072..d875bfa13ea5d561a3c340f7a05742d927c94e26 100644
--- a/lib/modules/versioning/composer/index.ts
+++ b/lib/modules/versioning/composer/index.ts
@@ -53,7 +53,7 @@ function convertStabilityModifier(input: string): string {
   // 1.0@beta2 to 1.0-beta.2
   const stability = versionParts[1].replace(
     regEx(/(?:^|\s)(beta|alpha|rc)([1-9][0-9]*)(?: |$)/gi),
-    '$1.$2'
+    '$1.$2',
   );
 
   // If there is a stability part, npm semver expects the version
@@ -76,7 +76,7 @@ function normalizeVersion(input: string): string {
 function calculateSatisfyingVersionIntenal(
   versions: string[],
   range: string,
-  minMode: boolean
+  minMode: boolean,
 ): string | null {
   // Because composer -p versions are considered stable, we have to remove the suffix for the npm.XXX functions.
   const versionsMapped = versions.map((x) => {
@@ -135,12 +135,12 @@ function composer2npm(input: string): string {
       // ~4 to ^4 and ~4.1 to ^4.1
       output = output.replace(
         regEx(/(?:^|\s)~([1-9][0-9]*(?:\.[0-9]*)?)(?: |$)/g),
-        '^$1'
+        '^$1',
       );
       // ~0.4 to >=0.4 <1
       output = output.replace(
         regEx(/(?:^|\s)~(0\.[1-9][0-9]*)(?: |$)/g),
-        '>=$1 <1'
+        '>=$1 <1',
       );
 
       // add extra digits to <8-DEV and <8.0-DEV
@@ -213,14 +213,14 @@ function matches(version: string, range: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return calculateSatisfyingVersionIntenal(versions, range, false);
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return calculateSatisfyingVersionIntenal(versions, range, true);
 }
@@ -337,7 +337,7 @@ function getNewValue({
   if (!newValue) {
     logger.warn(
       { currentValue, rangeStrategy, currentVersion, newVersion },
-      'Unsupported composer value'
+      'Unsupported composer value',
     );
     newValue = newVersion;
   }
diff --git a/lib/modules/versioning/conan/common.ts b/lib/modules/versioning/conan/common.ts
index 864a90f07b5999123d3d3d8a946d03bebd2b9b38..3b5bdb4df095caeacf24655d9b0925a5e9c05d0d 100644
--- a/lib/modules/versioning/conan/common.ts
+++ b/lib/modules/versioning/conan/common.ts
@@ -4,7 +4,7 @@ import { coerceString } from '../../../util/string';
 
 export function makeVersion(
   version: string,
-  options: semver.RangeOptions
+  options: semver.RangeOptions,
 ): string | boolean | null {
   const splitVersion = version.split('.');
   const prerelease = semver.prerelease(version, options);
@@ -58,7 +58,7 @@ export function containsOperators(input: string): boolean {
 export function matchesWithOptions(
   version: string,
   cleanRange: string,
-  options: semver.RangeOptions
+  options: semver.RangeOptions,
 ): boolean {
   let cleanedVersion = version;
   if (
@@ -75,7 +75,7 @@ export function matchesWithOptions(
 export function findSatisfyingVersion(
   versions: string[],
   range: string,
-  compareRt: number
+  compareRt: number,
 ): string | null {
   const options = getOptions(range);
   let cur: any = null;
diff --git a/lib/modules/versioning/conan/index.spec.ts b/lib/modules/versioning/conan/index.spec.ts
index 8d88c2a465060741e76bbe1ead71b52da5c737a1..a6ec27921bec9c8984be58d6a36a271207a18086 100644
--- a/lib/modules/versioning/conan/index.spec.ts
+++ b/lib/modules/versioning/conan/index.spec.ts
@@ -351,7 +351,7 @@ describe('modules/versioning/conan/index', () => {
     ({ version, range, result }) => {
       const res = !!conan.isCompatible(version, range);
       expect(res).toBe(result);
-    }
+    },
   );
 
   // matches(version: string, range: string | Range): string | boolean | null;
@@ -546,7 +546,7 @@ describe('modules/versioning/conan/index', () => {
     ({ version, range, result }) => {
       const res = !!conan.matches(version, range);
       expect(res).toBe(result);
-    }
+    },
   );
 
   // isStable(version: string): boolean;
@@ -643,7 +643,7 @@ describe('modules/versioning/conan/index', () => {
         newVersion,
       });
       expect(res).toEqual(result);
-    }
+    },
   );
 
   // getSatisfyingVersion(versions: string[], range: string): string | null;
@@ -701,7 +701,7 @@ describe('modules/versioning/conan/index', () => {
     ({ versions, range, result }) => {
       const res = conan.getSatisfyingVersion(versions, range);
       expect(res).toEqual(result);
-    }
+    },
   );
 
   // minSatisfyingVersion(versions: string[], range: string): string | null;
@@ -722,7 +722,7 @@ describe('modules/versioning/conan/index', () => {
     ({ versions, range, result }) => {
       const res = conan.minSatisfyingVersion(versions, range);
       expect(res).toEqual(result);
-    }
+    },
   );
 
   // test 4-digit
@@ -745,7 +745,7 @@ describe('modules/versioning/conan/index', () => {
       expect(conan.getMajor(version)).toBe(major);
       expect(conan.getMinor(version)).toBe(minor);
       expect(conan.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   // getMajor(version: string): null | number;
@@ -827,7 +827,7 @@ describe('modules/versioning/conan/index', () => {
     ({ version, other, result }) => {
       const res = conan.equals(version, other);
       expect(res).toEqual(result);
-    }
+    },
   );
 
   // isGreaterThan(version: string, other: string): boolean;
@@ -873,7 +873,7 @@ describe('modules/versioning/conan/index', () => {
     ({ version, other, result }) => {
       const res = conan.isGreaterThan(version, other);
       expect(res).toEqual(result);
-    }
+    },
   );
 
   // sortVersions(version: string, other: string): boolean;
@@ -888,7 +888,7 @@ describe('modules/versioning/conan/index', () => {
     ({ version, other, result }) => {
       const res = conan.sortVersions(version, other);
       expect(res).toEqual(result);
-    }
+    },
   );
 
   // isLessThanRange(version: string, range: string): boolean;
@@ -902,6 +902,6 @@ describe('modules/versioning/conan/index', () => {
     ({ version, range, result }) => {
       const res = conan.isLessThanRange?.(version, range);
       expect(res).toEqual(result);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/conan/index.ts b/lib/modules/versioning/conan/index.ts
index 99b7033537cec7f0d9f6588ca4082774ed937dbb..fe3b865aaf4b6aad317da4b55d63f9f4d65181f5 100644
--- a/lib/modules/versioning/conan/index.ts
+++ b/lib/modules/versioning/conan/index.ts
@@ -91,7 +91,7 @@ function isLessThanRange(version: string, range: string): boolean {
   const options = getOptions(range);
   const looseResult: any = looseAPI.isLessThanRange?.(
     cleanedVersion,
-    cleanRange
+    cleanRange,
   );
   try {
     return semver.ltr(cleanedVersion, cleanRange, options) || looseResult;
@@ -148,14 +148,14 @@ function isStable(version: string): boolean {
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return findSatisfyingVersion(versions, range, MIN);
 }
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return findSatisfyingVersion(versions, range, MAX);
 }
@@ -176,12 +176,12 @@ function getNewValue({
   if (rangeStrategy === 'widen') {
     newValue = widenRange(
       { currentValue: cleanRange, rangeStrategy, currentVersion, newVersion },
-      options
+      options,
     );
   } else if (rangeStrategy === 'bump') {
     newValue = bumpRange(
       { currentValue: cleanRange, rangeStrategy, currentVersion, newVersion },
-      options
+      options,
     );
   } else {
     newValue = replaceRange({
diff --git a/lib/modules/versioning/conan/range.ts b/lib/modules/versioning/conan/range.ts
index efda646033903382f6de898d00ea308952c4ae83..8984753168cccbf1c8a685769cf22b385c719bfd 100644
--- a/lib/modules/versioning/conan/range.ts
+++ b/lib/modules/versioning/conan/range.ts
@@ -45,7 +45,7 @@ export function getPatch(version: string): null | number {
   if (typeof cleanerVersion === 'string') {
     const newVersion = semver.valid(
       semver.coerce(cleanedVersion, options),
-      options
+      options,
     );
     return Number(newVersion?.split('.')[2]);
   }
@@ -212,7 +212,7 @@ export function replaceRange({
 
 export function widenRange(
   { currentValue, currentVersion, newVersion }: NewValueConfig,
-  options: semver.Options
+  options: semver.Options,
 ): string | null {
   const parsedRange = parseRange(currentValue);
   const element = parsedRange[parsedRange.length - 1];
@@ -248,7 +248,7 @@ export function widenRange(
 
 export function bumpRange(
   { currentValue, currentVersion, newVersion }: NewValueConfig,
-  options: semver.Options
+  options: semver.Options,
 ): string | null {
   if (!containsOperators(currentValue) && currentValue.includes('||')) {
     return widenRange(
@@ -258,7 +258,7 @@ export function bumpRange(
         currentVersion,
         newVersion,
       },
-      options
+      options,
     );
   }
   const parsedRange = parseRange(currentValue);
@@ -320,7 +320,7 @@ export function bumpRange(
             currentVersion,
             newVersion,
           },
-          options
+          options,
         );
         if (
           bumpedSubRange &&
@@ -340,7 +340,7 @@ export function bumpRange(
     return versions.filter((x: any) => x !== null && x !== '').join(' ');
   }
   logger.debug(
-    'Unsupported range type for rangeStrategy=bump: ' + currentValue
+    'Unsupported range type for rangeStrategy=bump: ' + currentValue,
   );
   return null;
 }
diff --git a/lib/modules/versioning/deb/index.ts b/lib/modules/versioning/deb/index.ts
index 30e9471633bf6d03e25fb449cc853def99e1c810..d96a70c4aa2f8e8fd2f4a501dde042a334e0871e 100644
--- a/lib/modules/versioning/deb/index.ts
+++ b/lib/modules/versioning/deb/index.ts
@@ -63,7 +63,7 @@ class DebVersioningApi extends GenericVersioningApi {
       return null;
     }
     const release = [...remainingVersion.matchAll(numericPattern)].map((m) =>
-      parseInt(m[0], 10)
+      parseInt(m[0], 10),
     );
     return {
       epoch: parseInt(epochStr, 10),
@@ -111,10 +111,10 @@ class DebVersioningApi extends GenericVersioningApi {
         // Lexicographical comparison
         // numeric character is treated like end of string (they are part of a new block)
         const aPriority = characterOrder.indexOf(
-          numericChars.includes(aChar) || aChar === '' ? ' ' : aChar
+          numericChars.includes(aChar) || aChar === '' ? ' ' : aChar,
         );
         const bPriority = characterOrder.indexOf(
-          numericChars.includes(bChar) || bChar === '' ? ' ' : bChar
+          numericChars.includes(bChar) || bChar === '' ? ' ' : bChar,
         );
         return Math.sign(aPriority - bPriority);
       }
@@ -134,7 +134,7 @@ class DebVersioningApi extends GenericVersioningApi {
     }
     const upstreamVersionDifference = this._compare_string(
       parsed1.upstreamVersion,
-      parsed2.upstreamVersion
+      parsed2.upstreamVersion,
     );
     if (upstreamVersionDifference !== 0) {
       return upstreamVersionDifference;
diff --git a/lib/modules/versioning/debian/index.spec.ts b/lib/modules/versioning/debian/index.spec.ts
index ce19fa9a1227e322de255f5cc28b27dc48f228ab..4c86f3d4d410ddd17736f76cb47ff6acacdf6615 100644
--- a/lib/modules/versioning/debian/index.spec.ts
+++ b/lib/modules/versioning/debian/index.spec.ts
@@ -83,7 +83,7 @@ describe('modules/versioning/debian/index', () => {
     'isCompatible("$version") === $expected',
     ({ version, range, expected }) => {
       expect(debian.isCompatible(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -164,7 +164,7 @@ describe('modules/versioning/debian/index', () => {
     ({ version, expected }) => {
       debian.isStable(version);
       expect(logger.debug).toHaveBeenCalledTimes(0);
-    }
+    },
   );
 
   it('checks runtime date handling & refresh rolling release data', () => {
@@ -176,7 +176,7 @@ describe('modules/versioning/debian/index', () => {
     expect(debian.isStable('buster')).toBeFalse();
     expect(logger.debug).toHaveBeenCalledTimes(1);
     expect(logger.debug).toHaveBeenCalledWith(
-      'RollingReleasesData - data written'
+      'RollingReleasesData - data written',
     );
   });
 
@@ -262,7 +262,7 @@ describe('modules/versioning/debian/index', () => {
       expect(debian.getMajor(version)).toBe(major);
       expect(debian.getMinor(version)).toBe(minor);
       expect(debian.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -329,7 +329,7 @@ describe('modules/versioning/debian/index', () => {
     'getSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(debian.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -351,7 +351,7 @@ describe('modules/versioning/debian/index', () => {
     'minSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(debian.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -385,9 +385,9 @@ describe('modules/versioning/debian/index', () => {
           rangeStrategy,
           currentVersion,
           newVersion,
-        })
+        }),
       ).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -419,6 +419,6 @@ describe('modules/versioning/debian/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(debian.matches(version, range)).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/debian/index.ts b/lib/modules/versioning/debian/index.ts
index 310df655bd5897820c7e52777ee7ea4544b4d188..f97ee8002420b8c8704a93748a326ce2e8eae52d 100644
--- a/lib/modules/versioning/debian/index.ts
+++ b/lib/modules/versioning/debian/index.ts
@@ -28,7 +28,7 @@ export class DebianVersioningApi extends GenericVersioningApi {
   override isValid(version: string): boolean {
     const isValid = super.isValid(version);
     const schedule = this._distroInfo.getSchedule(
-      this._rollingReleases.getVersionByLts(version)
+      this._rollingReleases.getVersionByLts(version),
     );
     return isValid && schedule !== null && RELEASE_PROP in schedule;
   }
diff --git a/lib/modules/versioning/distro.spec.ts b/lib/modules/versioning/distro.spec.ts
index c4e4c82d4694ffb325e4e3e5655cdfcd4bee8b79..a5bd69adf67b98fd9359b62c82529aa1cfc891b2 100644
--- a/lib/modules/versioning/distro.spec.ts
+++ b/lib/modules/versioning/distro.spec.ts
@@ -38,7 +38,7 @@ describe('modules/versioning/distro', () => {
     'getVersionByCodename("$version") === $expected',
     ({ version, expected }) => {
       expect(di.getVersionByCodename(version)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -55,7 +55,7 @@ describe('modules/versioning/distro', () => {
     'getCodenameByVersion("$version") === $expected',
     ({ version, expected }) => {
       expect(di.getCodenameByVersion(version)).toBe(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/distro.ts b/lib/modules/versioning/distro.ts
index bdc90f23eab3c41b513582b7f98f0cd35a040937..e434d4a8ae6436a91efbe0241f39bfc099ee3283 100644
--- a/lib/modules/versioning/distro.ts
+++ b/lib/modules/versioning/distro.ts
@@ -36,7 +36,7 @@ export class DistroInfo {
 
   constructor(distroJsonKey: DistroDataFile) {
     this._distroInfo = JSON.parse(
-      dataFiles.get(distroJsonKey as DataFile)!.replace(/v([\d.]+)\b/gm, '$1')
+      dataFiles.get(distroJsonKey as DataFile)!.replace(/v([\d.]+)\b/gm, '$1'),
     );
 
     for (const version of Object.keys(this._distroInfo)) {
@@ -45,7 +45,7 @@ export class DistroInfo {
     }
 
     const arr = Object.keys(this._distroInfo).sort(
-      (a, b) => parseFloat(a) - parseFloat(b)
+      (a, b) => parseFloat(a) - parseFloat(b),
     );
 
     for (const v of arr) {
diff --git a/lib/modules/versioning/docker/index.spec.ts b/lib/modules/versioning/docker/index.spec.ts
index 1eaf606278a3cd2c385048907bc1f83d9a04de2c..809a5418c13649b11c3032ad80d1973d6d22a96c 100644
--- a/lib/modules/versioning/docker/index.spec.ts
+++ b/lib/modules/versioning/docker/index.spec.ts
@@ -37,7 +37,7 @@ describe('modules/versioning/docker/index', () => {
       expect(docker.getMajor(version)).toBe(major);
       expect(docker.getMinor(version)).toBe(minor);
       expect(docker.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -62,7 +62,7 @@ describe('modules/versioning/docker/index', () => {
     'isLessThanRange($version, $range) === $expected',
     ({ version, range, expected }) => {
       expect(docker.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -117,7 +117,7 @@ describe('modules/versioning/docker/index', () => {
         const dockerSorted = docker.sortVersions(a, b);
         const semverSorted = semver.sortVersions(a, b);
         expect(dockerSorted).toBe(semverSorted);
-      }
+      },
     );
 
     it('sorts unstable', () => {
@@ -158,7 +158,7 @@ describe('modules/versioning/docker/index', () => {
         newVersion,
       });
       expect(res).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -193,7 +193,7 @@ describe('modules/versioning/docker/index', () => {
     ({ version, range, expected }) => {
       const res = docker.isCompatible(version, range);
       expect(!!res).toBe(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/generic.spec.ts b/lib/modules/versioning/generic.spec.ts
index d7946528f85fd5cd050c8256f75b3fc2a489713c..e47a44e40b8f3deb77b48d33d4b9e4ed43a8550a 100644
--- a/lib/modules/versioning/generic.spec.ts
+++ b/lib/modules/versioning/generic.spec.ts
@@ -38,7 +38,7 @@ describe('modules/versioning/generic', () => {
 
       protected _parse(_version: string): GenericVersion | null {
         const matchGroups = _version.match(
-          /^(?<major>\d)\.(?<minor>\d)\.(?<patch>\d)$/
+          /^(?<major>\d)\.(?<minor>\d)\.(?<patch>\d)$/,
         )?.groups;
         if (!matchGroups) {
           return null;
@@ -53,11 +53,11 @@ describe('modules/versioning/generic', () => {
     it('Scheme keys', () => {
       const schemeKeys = getAllPropertyNames(api)
         .filter(
-          (val) => !optionalFunctions.includes(val) && !val.startsWith('_')
+          (val) => !optionalFunctions.includes(val) && !val.startsWith('_'),
         )
         .filter(
           (val) =>
-            !['minSatisfyingVersion', 'getSatisfyingVersion'].includes(val)
+            !['minSatisfyingVersion', 'getSatisfyingVersion'].includes(val),
         )
         .sort();
       expect(schemeKeys).toEqual([
@@ -104,7 +104,7 @@ describe('modules/versioning/generic', () => {
           rangeStrategy: 'auto',
           currentVersion: '1.2.3',
           newVersion: '3.2.1',
-        })
+        }),
       ).toBe('3.2.1');
 
       expect(api.getNewValue(partial<NewValueConfig>({}))).toBeNull();
@@ -156,20 +156,20 @@ describe('modules/versioning/generic', () => {
     it('minSatisfyingVersion', () => {
       expect(api.minSatisfyingVersion(['1.2.3'], '1.2.3')).toBe('1.2.3');
       expect(
-        api.minSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '2.2.2')
+        api.minSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '2.2.2'),
       ).toBe('2.2.2');
       expect(
-        api.minSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '1.2.3')
+        api.minSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '1.2.3'),
       ).toBeNull();
     });
 
     it('getSatisfyingVersion', () => {
       expect(api.getSatisfyingVersion(['1.2.3'], '1.2.3')).toBe('1.2.3');
       expect(
-        api.getSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '2.2.2')
+        api.getSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '2.2.2'),
       ).toBe('2.2.2');
       expect(
-        api.getSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '1.2.3')
+        api.getSatisfyingVersion(['1.1.1', '2.2.2', '3.3.3'], '1.2.3'),
       ).toBeNull();
     });
   });
diff --git a/lib/modules/versioning/generic.ts b/lib/modules/versioning/generic.ts
index 1697ad6ab74bee98f418a8c1389455277d92e998..f87bba6ee4d5fbbf7c124d551f1faec731ff7864 100644
--- a/lib/modules/versioning/generic.ts
+++ b/lib/modules/versioning/generic.ts
@@ -16,7 +16,7 @@ export interface VersionComparator {
 }
 
 export abstract class GenericVersioningApi<
-  T extends GenericVersion = GenericVersion
+  T extends GenericVersion = GenericVersion,
 > implements VersioningApi
 {
   private _getSection(version: string, index: number): number | null {
diff --git a/lib/modules/versioning/git/index.spec.ts b/lib/modules/versioning/git/index.spec.ts
index ede7a0fe2a366f54000e43d6fc858e5c942476dc..21fc749a09de5cb150bb06414f13fd8ac0551281 100644
--- a/lib/modules/versioning/git/index.spec.ts
+++ b/lib/modules/versioning/git/index.spec.ts
@@ -26,7 +26,7 @@ describe('modules/versioning/git/index', () => {
     ({ version, range, expected }) => {
       const res = git.isCompatible(version, range);
       expect(!!res).toBe(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/go-mod-directive/index.spec.ts b/lib/modules/versioning/go-mod-directive/index.spec.ts
index 21a71df7007ea5aacf1ef4e12ab6b9597f8269b0..e133594d30b22a8da3453c1fa05029fb3bc20d72 100644
--- a/lib/modules/versioning/go-mod-directive/index.spec.ts
+++ b/lib/modules/versioning/go-mod-directive/index.spec.ts
@@ -12,7 +12,7 @@ describe('modules/versioning/go-mod-directive/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(semver.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -22,7 +22,7 @@ describe('modules/versioning/go-mod-directive/index', () => {
     'getSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(semver.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -51,7 +51,7 @@ describe('modules/versioning/go-mod-directive/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(semver.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -62,7 +62,7 @@ describe('modules/versioning/go-mod-directive/index', () => {
     'minSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(semver.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -83,8 +83,8 @@ describe('modules/versioning/go-mod-directive/index', () => {
           rangeStrategy,
           currentVersion,
           newVersion,
-        })
+        }),
       ).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/go-mod-directive/index.ts b/lib/modules/versioning/go-mod-directive/index.ts
index eaa7dbadb63e50a47bc3b304541171bf857ef383..820c4b62ddb8cd4ac390e1a8d9d1652c1896f747 100644
--- a/lib/modules/versioning/go-mod-directive/index.ts
+++ b/lib/modules/versioning/go-mod-directive/index.ts
@@ -41,7 +41,7 @@ function getNewValue({
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.getSatisfyingVersion(versions, toNpmRange(range));
 }
@@ -56,7 +56,7 @@ const matches = (version: string, range: string): boolean =>
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.minSatisfyingVersion(versions, toNpmRange(range));
 }
diff --git a/lib/modules/versioning/gradle/compare.ts b/lib/modules/versioning/gradle/compare.ts
index f9f2a7546ec0d37cc3f7902400a3577726d294c8..2497527108363a590977c027531769f0e6670201 100644
--- a/lib/modules/versioning/gradle/compare.ts
+++ b/lib/modules/versioning/gradle/compare.ts
@@ -13,7 +13,7 @@ type Token = {
 
 function iterateChars(
   str: string,
-  cb: (p: string | null, n: string | null) => void
+  cb: (p: string | null, n: string | null) => void,
 ): void {
   let prev = null;
   let next = null;
@@ -254,7 +254,7 @@ export function parsePrefixRange(input: string): PrefixRange | null {
 }
 
 const mavenBasedRangeRegex = regEx(
-  /^(?<leftBoundStr>[[\](]\s*)(?<leftVal>[-._+a-zA-Z0-9]*?)(?<separator>\s*,\s*)(?<rightVal>[-._+a-zA-Z0-9]*?)(?<rightBoundStr>\s*[[\])])$/
+  /^(?<leftBoundStr>[[\](]\s*)(?<leftVal>[-._+a-zA-Z0-9]*?)(?<separator>\s*,\s*)(?<rightVal>[-._+a-zA-Z0-9]*?)(?<rightBoundStr>\s*[[\])])$/,
 );
 
 export function parseMavenBasedRange(input: string): MavenBasedRange | null {
diff --git a/lib/modules/versioning/gradle/index.spec.ts b/lib/modules/versioning/gradle/index.spec.ts
index cef0b7c0d5425664e0542a78c7d4c3d73a06f6b7..59ec118e294f290125d01c02b648ee6d01e96ff2 100644
--- a/lib/modules/versioning/gradle/index.spec.ts
+++ b/lib/modules/versioning/gradle/index.spec.ts
@@ -205,7 +205,7 @@ describe('modules/versioning/gradle/index', () => {
       expect(api.getMajor(input)).toBe(major);
       expect(api.getMinor(input)).toBe(minor);
       expect(api.getPatch(input)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -233,7 +233,7 @@ describe('modules/versioning/gradle/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(api.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -250,7 +250,7 @@ describe('modules/versioning/gradle/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(api.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -260,7 +260,7 @@ describe('modules/versioning/gradle/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(api.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -295,6 +295,6 @@ describe('modules/versioning/gradle/index', () => {
         newVersion,
       });
       expect(res).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/gradle/index.ts b/lib/modules/versioning/gradle/index.ts
index 9b61742a39177a19a3b95258bfa1976d4b8323aa..09136f8f2e04b10d24d416bc553dab1ac752ed0c 100644
--- a/lib/modules/versioning/gradle/index.ts
+++ b/lib/modules/versioning/gradle/index.ts
@@ -155,7 +155,7 @@ const matches = (a: string, b: string): boolean => {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return versions.reduce((result: string | null, version) => {
     if (matches(version, range)) {
@@ -172,7 +172,7 @@ function getSatisfyingVersion(
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return versions.reduce((result: string | null, version) => {
     if (matches(version, range)) {
diff --git a/lib/modules/versioning/hashicorp/convertor.spec.ts b/lib/modules/versioning/hashicorp/convertor.spec.ts
index 54a6b24727a69ee8972b828feeb7b046f2ed0fff..e5495b70cebbbd0d743c58af80ca012c81bc6d93 100644
--- a/lib/modules/versioning/hashicorp/convertor.spec.ts
+++ b/lib/modules/versioning/hashicorp/convertor.spec.ts
@@ -25,7 +25,7 @@ describe('modules/versioning/hashicorp/convertor', () => {
     ({ hashicorp, npm }) => {
       expect(hashicorp2npm(hashicorp)).toBe(npm);
       expect(npm2hashicorp(npm)).toBe(hashicorp);
-    }
+    },
   );
 
   // These are non-reflective cases for hashicorp2npm
diff --git a/lib/modules/versioning/hashicorp/convertor.ts b/lib/modules/versioning/hashicorp/convertor.ts
index e558e0f7e577003cfadb1b97fdde14f1af961ee8..38834a9abd006d9579e3fd1ecb11de4146b34a7b 100644
--- a/lib/modules/versioning/hashicorp/convertor.ts
+++ b/lib/modules/versioning/hashicorp/convertor.ts
@@ -18,20 +18,20 @@ export function hashicorp2npm(input: string): string {
     .map((single) => {
       const r = single.match(
         regEx(
-          /^\s*(|=|!=|>|<|>=|<=|~>)\s*v?((\d+)(\.\d+){0,2}[\w-+]*(\.\d+)*)\s*$/
-        )
+          /^\s*(|=|!=|>|<|>=|<=|~>)\s*v?((\d+)(\.\d+){0,2}[\w-+]*(\.\d+)*)\s*$/,
+        ),
       );
       if (!r) {
         logger.warn(
           { constraint: input, element: single },
-          'Invalid hashicorp constraint'
+          'Invalid hashicorp constraint',
         );
         throw new Error('Invalid hashicorp constraint');
       }
       if (r[1] === '!=') {
         logger.warn(
           { constraint: input, element: single },
-          'Unsupported hashicorp constraint'
+          'Unsupported hashicorp constraint',
         );
         throw new Error('Unsupported hashicorp constraint');
       }
@@ -72,7 +72,7 @@ export function npm2hashicorp(input: string): string {
     .split(' ')
     .map((single) => {
       const r = single.match(
-        regEx(/^(|>|<|>=|<=|~|\^)v?((\d+)(\.\d+){0,2}[\w-]*(\.\d+)*)$/)
+        regEx(/^(|>|<|>=|<=|~|\^)v?((\d+)(\.\d+){0,2}[\w-]*(\.\d+)*)$/),
       );
       if (!r) {
         throw new Error('invalid npm constraint');
diff --git a/lib/modules/versioning/hashicorp/index.spec.ts b/lib/modules/versioning/hashicorp/index.spec.ts
index 2fc235a241f46e79fec71879a0c4856a0edeba1f..d43977fa29ec93413f4076b93c8e47afc7760eef 100644
--- a/lib/modules/versioning/hashicorp/index.spec.ts
+++ b/lib/modules/versioning/hashicorp/index.spec.ts
@@ -11,7 +11,7 @@ describe('modules/versioning/hashicorp/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(semver.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -23,7 +23,7 @@ describe('modules/versioning/hashicorp/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(semver.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -53,7 +53,7 @@ describe('modules/versioning/hashicorp/index', () => {
     'isLessThanRange($version, $range) === $expected',
     ({ version, range, expected }) => {
       expect(semver.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -66,7 +66,7 @@ describe('modules/versioning/hashicorp/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(semver.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -104,6 +104,6 @@ describe('modules/versioning/hashicorp/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/hashicorp/index.ts b/lib/modules/versioning/hashicorp/index.ts
index 0e8a2ad9388789bbabab7d053a601e274cc522b7..3407a1b524cbad1599bf2f835dbb5a5489ccedda 100644
--- a/lib/modules/versioning/hashicorp/index.ts
+++ b/lib/modules/versioning/hashicorp/index.ts
@@ -48,32 +48,32 @@ function matches(version: string, range: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const excludedVersions = getExcludedVersions(range);
   const filteredRange = getFilteredRange(range);
   const filteredVersions = versions.filter(
-    (version) => !excludedVersions.includes(version)
+    (version) => !excludedVersions.includes(version),
   );
 
   return npm.getSatisfyingVersion(
     filteredVersions,
-    hashicorp2npm(filteredRange)
+    hashicorp2npm(filteredRange),
   );
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const excludedVersions = getExcludedVersions(range);
   const filteredRange = getFilteredRange(range);
   const filteredVersions = versions.filter(
-    (version) => !excludedVersions.includes(version)
+    (version) => !excludedVersions.includes(version),
   );
   return npm.minSatisfyingVersion(
     filteredVersions,
-    hashicorp2npm(filteredRange)
+    hashicorp2npm(filteredRange),
   );
 }
 
diff --git a/lib/modules/versioning/helm/index.spec.ts b/lib/modules/versioning/helm/index.spec.ts
index 56bdd0b60ed352594d614c6071479550d075aa2c..748bac896033f034291e0eb91c2855335bbd1ebe 100644
--- a/lib/modules/versioning/helm/index.spec.ts
+++ b/lib/modules/versioning/helm/index.spec.ts
@@ -93,6 +93,6 @@ describe('modules/versioning/helm/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/hermit/index.spec.ts b/lib/modules/versioning/hermit/index.spec.ts
index d2bc903b41057f7ffa628cd9e05436d138fe58a6..af5d2b967b774716f3aee30e463d1924688fff83 100644
--- a/lib/modules/versioning/hermit/index.spec.ts
+++ b/lib/modules/versioning/hermit/index.spec.ts
@@ -59,7 +59,7 @@ describe('modules/versioning/hermit/index', () => {
       expect(versioning.getMajor(version)).toBe(major);
       expect(versioning.getMinor(version)).toBe(minor);
       expect(versioning.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -77,7 +77,7 @@ describe('modules/versioning/hermit/index', () => {
     'equals("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.equals(version, other)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -104,7 +104,7 @@ describe('modules/versioning/hermit/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(versioning.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -133,7 +133,7 @@ describe('modules/versioning/hermit/index', () => {
     'isGreaterThan("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.isGreaterThan(version, other)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -160,42 +160,42 @@ describe('modules/versioning/hermit/index', () => {
     'isLessThanRange("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.isLessThanRange(version, other)).toBe(expected);
-    }
+    },
   );
 
   it('getSatisfyingVersion', () => {
     expect(versioning.getSatisfyingVersion(['@1.1.1', '1.2.3'], '1.2.3')).toBe(
-      '1.2.3'
+      '1.2.3',
     );
     expect(
       versioning.getSatisfyingVersion(
         ['1.1.1', '@2.2.1', '2.2.2', '3.3.3'],
-        '2.2.2'
-      )
+        '2.2.2',
+      ),
     ).toBe('2.2.2');
     expect(
       versioning.getSatisfyingVersion(
         ['1.1.1', '@1.3.3', '2.2.2', '3.3.3'],
-        '1.2.3'
-      )
+        '1.2.3',
+      ),
     ).toBeNull();
   });
 
   it('minSatisfyingVersion', () => {
     expect(versioning.minSatisfyingVersion(['@1.1.1', '1.2.3'], '1.2.3')).toBe(
-      '1.2.3'
+      '1.2.3',
     );
     expect(
       versioning.minSatisfyingVersion(
         ['1.1.1', '@1.2.3', '2.2.2', '3.3.3'],
-        '2.2.2'
-      )
+        '2.2.2',
+      ),
     ).toBe('2.2.2');
     expect(
       versioning.minSatisfyingVersion(
         ['1.1.1', '@1.2.2', '2.2.2', '3.3.3'],
-        '1.2.3'
-      )
+        '1.2.3',
+      ),
     ).toBeNull();
   });
 
@@ -214,7 +214,7 @@ describe('modules/versioning/hermit/index', () => {
           '2.1',
           '@stable',
           '@latest',
-        ].sort((a, b) => versioning.sortVersions(a, b))
+        ].sort((a, b) => versioning.sortVersions(a, b)),
       ).toEqual([
         '@latest',
         '@stable',
diff --git a/lib/modules/versioning/hex/index.spec.ts b/lib/modules/versioning/hex/index.spec.ts
index 8a0b33b68dc7606a21f5fbb50af3cf372e7a4aa7..6ec94685bf8db531ce515b973e57f8087adb12e3 100644
--- a/lib/modules/versioning/hex/index.spec.ts
+++ b/lib/modules/versioning/hex/index.spec.ts
@@ -13,7 +13,7 @@ describe('modules/versioning/hex/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(hexScheme.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -24,7 +24,7 @@ describe('modules/versioning/hex/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(hexScheme.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -48,7 +48,7 @@ describe('modules/versioning/hex/index', () => {
     'isLessThanRange($version, $range) === $expected',
     ({ version, range, expected }) => {
       expect(hexScheme.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -59,7 +59,7 @@ describe('modules/versioning/hex/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(hexScheme.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -91,6 +91,6 @@ describe('modules/versioning/hex/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/hex/index.ts b/lib/modules/versioning/hex/index.ts
index 510be6104a683da116d6c4eb14c85290c7b59ff3..b84ff6189fb344afc7d4ad96c85debc36ca5ce75 100644
--- a/lib/modules/versioning/hex/index.ts
+++ b/lib/modules/versioning/hex/index.ts
@@ -59,14 +59,14 @@ const matches = (version: string, range: string): boolean =>
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.getSatisfyingVersion(versions.map(hex2npm), hex2npm(range));
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.minSatisfyingVersion(versions.map(hex2npm), hex2npm(range));
 }
@@ -89,12 +89,12 @@ function getNewValue({
     if (regEx(/~>\s*(\d+\.\d+\.\d+)$/).test(currentValue)) {
       newSemver = newSemver.replace(
         regEx(/[\^~]\s*(\d+\.\d+\.\d+)/),
-        (_str, p1: string) => `~> ${p1}`
+        (_str, p1: string) => `~> ${p1}`,
       );
     } else if (regEx(/~>\s*(\d+\.\d+)$/).test(currentValue)) {
       newSemver = newSemver.replace(
         regEx(/\^\s*(\d+\.\d+)(\.\d+)?/),
-        (_str, p1: string) => `~> ${p1}`
+        (_str, p1: string) => `~> ${p1}`,
       );
     } else {
       newSemver = newSemver.replace(regEx(/~\s*(\d+\.\d+\.\d)/), '~> $1');
diff --git a/lib/modules/versioning/index.spec.ts b/lib/modules/versioning/index.spec.ts
index a31f24ce94e6cab6bdb0cd142172166f4dbf9a2f..b826fdfd62302e49584a36a0da6b78f697a2855d 100644
--- a/lib/modules/versioning/index.spec.ts
+++ b/lib/modules/versioning/index.spec.ts
@@ -8,7 +8,7 @@ import type { VersioningApi, VersioningApiConstructor } from './types';
 import * as allVersioning from '.';
 
 const supportedSchemes = getOptions().find(
-  (option) => option.name === 'versioning'
+  (option) => option.name === 'versioning',
 )?.allowedValues;
 
 describe('modules/versioning/index', () => {
@@ -36,7 +36,7 @@ describe('modules/versioning/index', () => {
   it('validates', () => {
     function validate(
       module: VersioningApi | VersioningApiConstructor,
-      name: string
+      name: string,
     ): boolean {
       const mod = isVersioningApiConstructor(module) ? new module() : module;
 
@@ -60,10 +60,10 @@ describe('modules/versioning/index', () => {
 
   it('should fallback to semver-coerced', () => {
     expect(allVersioning.get(undefined)).toBe(
-      allVersioning.get(semverCoercedVersioning.id)
+      allVersioning.get(semverCoercedVersioning.id),
     );
     expect(allVersioning.get('unknown')).toBe(
-      allVersioning.get(semverCoercedVersioning.id)
+      allVersioning.get(semverCoercedVersioning.id),
     );
   });
 
@@ -108,10 +108,10 @@ describe('modules/versioning/index', () => {
     for (const supportedScheme of supportedSchemes ?? []) {
       it(supportedScheme, async () => {
         const schemeKeys = getAllPropertyNames(
-          allVersioning.get(supportedScheme)
+          allVersioning.get(supportedScheme),
         )
           .filter(
-            (val) => !optionalFunctions.includes(val) && !val.startsWith('_')
+            (val) => !optionalFunctions.includes(val) && !val.startsWith('_'),
           )
           .sort();
 
@@ -123,7 +123,7 @@ describe('modules/versioning/index', () => {
         }
 
         expect(Object.keys(apiOrCtor).sort()).toEqual(
-          Object.keys(allVersioning.get(supportedScheme)).sort()
+          Object.keys(allVersioning.get(supportedScheme)).sort(),
         );
       });
     }
@@ -142,7 +142,7 @@ describe('modules/versioning/index', () => {
       const api = new DummyScheme();
       const schemeKeys = getAllPropertyNames(api)
         .filter(
-          (val) => !optionalFunctions.includes(val) && !val.startsWith('_')
+          (val) => !optionalFunctions.includes(val) && !val.startsWith('_'),
         )
         .sort();
 
diff --git a/lib/modules/versioning/index.ts b/lib/modules/versioning/index.ts
index 06b02a0bd49021a5bab68d77ad6c5e2584c4acf3..b93c5e8740e812576cf38fbefb460cfeda9ec1a0 100644
--- a/lib/modules/versioning/index.ts
+++ b/lib/modules/versioning/index.ts
@@ -18,7 +18,7 @@ export const getVersionings = (): Map<
 
 export function get(versioning: string | null | undefined): VersioningApi {
   const res = Versioning.safeParse(
-    versioning ? versioning : defaultVersioning.id
+    versioning ? versioning : defaultVersioning.id,
   );
 
   if (!res.success) {
diff --git a/lib/modules/versioning/ivy/index.spec.ts b/lib/modules/versioning/ivy/index.spec.ts
index 09dab16ffbe5e2eb0e753728834b4291405f1607..29045f2a30658f9f35b27a4a63875c09060e3d8b 100644
--- a/lib/modules/versioning/ivy/index.spec.ts
+++ b/lib/modules/versioning/ivy/index.spec.ts
@@ -27,7 +27,7 @@ describe('modules/versioning/ivy/index', () => {
     'parseDynamicRevision("$input") === { type: "$type", value: "$value" }',
     ({ input, type, value }) => {
       expect(parseDynamicRevision(input)).toEqual({ type, value });
-    }
+    },
   );
 
   it.each`
@@ -137,7 +137,7 @@ describe('modules/versioning/ivy/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(ivy.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -167,7 +167,7 @@ describe('modules/versioning/ivy/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 
   it.each`
@@ -177,7 +177,7 @@ describe('modules/versioning/ivy/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(ivy.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/ivy/index.ts b/lib/modules/versioning/ivy/index.ts
index 79ff4641c11f70dfa51e361ea010ad9fcf70324b..b7bcd26fbe4dc2a8a459d5d6a439459793421c24 100644
--- a/lib/modules/versioning/ivy/index.ts
+++ b/lib/modules/versioning/ivy/index.ts
@@ -99,7 +99,7 @@ function matches(a: string, b: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return versions.reduce((result: string | null, version) => {
     if (matches(version, range)) {
diff --git a/lib/modules/versioning/kubernetes-api/index.spec.ts b/lib/modules/versioning/kubernetes-api/index.spec.ts
index 5f45041a1d440e76733ba06b3ee0183b8067ea80..1031fcc1bcfa83e82f0dfe8b316d296cce7c9037 100644
--- a/lib/modules/versioning/kubernetes-api/index.spec.ts
+++ b/lib/modules/versioning/kubernetes-api/index.spec.ts
@@ -48,7 +48,7 @@ describe('modules/versioning/kubernetes-api/index', () => {
       expect(versioning.getMajor(version)).toBe(major);
       expect(versioning.getMinor(version)).toBe(minor);
       expect(versioning.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -75,7 +75,7 @@ describe('modules/versioning/kubernetes-api/index', () => {
     'equals("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.equals(version, other)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -94,7 +94,7 @@ describe('modules/versioning/kubernetes-api/index', () => {
     'matches("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.matches(version, other)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -110,7 +110,7 @@ describe('modules/versioning/kubernetes-api/index', () => {
     'isGreaterThan("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.isGreaterThan(version, other)).toBe(expected);
-    }
+    },
   );
 
   it('sorts versions in an ascending order', () => {
@@ -127,7 +127,7 @@ describe('modules/versioning/kubernetes-api/index', () => {
         'v1beta1',
         'v1alpha2',
         'v1alpha1',
-      ].sort((a, b) => versioning.sortVersions(a, b))
+      ].sort((a, b) => versioning.sortVersions(a, b)),
     ).toEqual([
       'v1alpha1',
       'v1alpha2',
diff --git a/lib/modules/versioning/maven/compare.spec.ts b/lib/modules/versioning/maven/compare.spec.ts
index 1fe6a033292c3320468dee4ec9402dbfc0808ebf..ac24b1cb34ae49a17bd156a63b756c34b0d4cd0f 100644
--- a/lib/modules/versioning/maven/compare.spec.ts
+++ b/lib/modules/versioning/maven/compare.spec.ts
@@ -240,7 +240,7 @@ describe('modules/versioning/maven/compare', () => {
       'isSubversion("$majorVersion", "$minorVersion") === $expected',
       ({ majorVersion, minorVersion, expected }) => {
         expect(isSubversion(majorVersion, minorVersion)).toBe(expected);
-      }
+      },
     );
   });
 
@@ -506,9 +506,9 @@ describe('modules/versioning/maven/compare', () => {
         ];
         expect(parseRange(input)).toEqual(parseResult);
         expect(rangeToStr(parseResult as never)).toEqual(
-          input.replace(/\s*/g, '')
+          input.replace(/\s*/g, ''),
         );
-      }
+      },
     );
 
     it.each`
@@ -557,7 +557,7 @@ describe('modules/versioning/maven/compare', () => {
       'autoExtendMavenRange("$range", "$version") === $expected',
       ({ range, version, expected }) => {
         expect(autoExtendMavenRange(range, version)).toEqual(expected);
-      }
+      },
     );
   });
 });
diff --git a/lib/modules/versioning/maven/compare.ts b/lib/modules/versioning/maven/compare.ts
index f70d7965e70afe528fe9e6a9a8f9e165ff358e93..594cd7777c6890208713fd5c8e131a243233c8c4 100644
--- a/lib/modules/versioning/maven/compare.ts
+++ b/lib/modules/versioning/maven/compare.ts
@@ -27,7 +27,7 @@ export type Token = NumberToken | QualifierToken;
 
 function iterateChars(
   str: string,
-  cb: (p: string | null, n: string | null) => void
+  cb: (p: string | null, n: string | null) => void,
 ): void {
   let prev = null;
   let next = null;
@@ -443,7 +443,7 @@ function rangeToStr(fullRange: Range[] | null): string | null {
       ',',
       valToStr(val.rightValue),
       val.rightBracket,
-    ].join('')
+    ].join(''),
   );
   return intervals.join(',');
 }
@@ -480,7 +480,7 @@ function incrementRangeValue(value: string): string {
 
 function autoExtendMavenRange(
   currentRepresentation: string,
-  newValue: string
+  newValue: string,
 ): string | null {
   const range = parseRange(currentRepresentation);
   if (!range) {
@@ -535,7 +535,7 @@ function autoExtendMavenRange(
       }
     } else {
       interval.rightValue = incrementRangeValue(
-        coerceRangeValue(rightValue, newValue)
+        coerceRangeValue(rightValue, newValue),
       );
     }
   } else if (leftValue !== null) {
diff --git a/lib/modules/versioning/maven/index.spec.ts b/lib/modules/versioning/maven/index.spec.ts
index c0b23ac2eceb2d19b8acbf3a9321f1071ae50d2c..f23b1ce9abf682da0512e32e38d506117e691c36 100644
--- a/lib/modules/versioning/maven/index.spec.ts
+++ b/lib/modules/versioning/maven/index.spec.ts
@@ -94,7 +94,7 @@ describe('modules/versioning/maven/index', () => {
       expect(getMajor(input)).toBe(major);
       expect(getMinor(input)).toBe(minor);
       expect(getPatch(input)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -123,7 +123,7 @@ describe('modules/versioning/maven/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -140,7 +140,7 @@ describe('modules/versioning/maven/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(maven.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -150,7 +150,7 @@ describe('modules/versioning/maven/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(maven.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -178,6 +178,6 @@ describe('modules/versioning/maven/index', () => {
         newVersion,
       });
       expect(res).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/maven/index.ts b/lib/modules/versioning/maven/index.ts
index 67521565f70e405cc25cae5181dda1f0295adcb1..33c0fea85aba62a5e999c5482dcdb384c69f519d 100644
--- a/lib/modules/versioning/maven/index.ts
+++ b/lib/modules/versioning/maven/index.ts
@@ -131,7 +131,7 @@ const isStable = (version: string): boolean => {
 // istanbul ignore next
 const getSatisfyingVersion = (
   versions: string[],
-  range: string
+  range: string,
 ): string | null =>
   versions.reduce((result: string | null, version) => {
     if (matches(version, range)) {
@@ -155,7 +155,7 @@ function getNewValue({
   }
   return coerceString(
     autoExtendMavenRange(currentValue, newVersion),
-    currentValue
+    currentValue,
   );
 }
 
diff --git a/lib/modules/versioning/nixpkgs/index.spec.ts b/lib/modules/versioning/nixpkgs/index.spec.ts
index 965cf8c8d49ebc313fde27872577ab0776965a45..5686533558a56293861057bd096f25d628f9926c 100644
--- a/lib/modules/versioning/nixpkgs/index.spec.ts
+++ b/lib/modules/versioning/nixpkgs/index.spec.ts
@@ -64,9 +64,9 @@ describe('modules/versioning/nixpkgs/index', () => {
     '$versions -> sortVersions -> $expected ',
     ({ versions, expected }: { versions: string[]; expected: string[] }) => {
       expect(versions.sort((a, b) => versioning.sortVersions(a, b))).toEqual(
-        expected
+        expected,
       );
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/node/index.spec.ts b/lib/modules/versioning/node/index.spec.ts
index c0f7040c6b1e31bdff2ab54c1dda65ff42f630de..6154a4209eb1fc07a60950b4c3403fa4844c47e2 100644
--- a/lib/modules/versioning/node/index.spec.ts
+++ b/lib/modules/versioning/node/index.spec.ts
@@ -31,7 +31,7 @@ describe('modules/versioning/node/index', () => {
         newVersion,
       });
       expect(res).toBe(expected);
-    }
+    },
   );
 
   const t1 = DateTime.fromISO('2020-09-01');
@@ -78,9 +78,9 @@ describe('modules/versioning/node/index', () => {
     'matches("$version", "$range") === $expected',
     ({ version, range, expected }) => {
       expect(nodever.matches(version as string, range as string)).toBe(
-        expected
+        expected,
       );
-    }
+    },
   );
 
   it.each`
@@ -92,9 +92,9 @@ describe('modules/versioning/node/index', () => {
     'getSatisfyingVersion("$versions", "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(
-        nodever.getSatisfyingVersion(versions as string[], range as string)
+        nodever.getSatisfyingVersion(versions as string[], range as string),
       ).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -106,8 +106,8 @@ describe('modules/versioning/node/index', () => {
     'minSatisfyingVersion("$versions", "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(
-        nodever.minSatisfyingVersion(versions as string[], range as string)
+        nodever.minSatisfyingVersion(versions as string[], range as string),
       ).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/node/index.ts b/lib/modules/versioning/node/index.ts
index 35eb921153862552d0a94395406433264c518cda..370368b867ef365ae6e0c01cb1bc8e5de5f10df3 100644
--- a/lib/modules/versioning/node/index.ts
+++ b/lib/modules/versioning/node/index.ts
@@ -65,14 +65,14 @@ export function matches(version: string, range: string): boolean {
 
 export function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.getSatisfyingVersion(versions, normalizeValue(range));
 }
 
 export function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.minSatisfyingVersion(versions, normalizeValue(range));
 }
diff --git a/lib/modules/versioning/node/schedule.ts b/lib/modules/versioning/node/schedule.ts
index db9328a7ef9ec5dd4e91cc56208e5817d19f7692..eedaf86c7a211d08eedf477b1a2a79f81f1fbdcc 100644
--- a/lib/modules/versioning/node/schedule.ts
+++ b/lib/modules/versioning/node/schedule.ts
@@ -12,7 +12,7 @@ interface NodeJsSchedule {
 export type NodeJsData = Record<string, NodeJsSchedule>;
 
 const nodeSchedule: NodeJsData = JSON.parse(
-  dataFiles.get('data/node-js-schedule.json')!
+  dataFiles.get('data/node-js-schedule.json')!,
 );
 
 export type NodeJsScheduleWithVersion = { version: string } & NodeJsSchedule;
@@ -29,7 +29,7 @@ for (const version of Object.keys(nodeSchedule)) {
 }
 
 export function findScheduleForCodename(
-  codename: string
+  codename: string,
 ): NodeJsScheduleWithVersion | null {
   return nodeCodenames.get(codename?.toUpperCase()) ?? null;
 }
diff --git a/lib/modules/versioning/npm/index.spec.ts b/lib/modules/versioning/npm/index.spec.ts
index bc8c723797a81a1585dac5ee9655add4b3fbc009..c8f22b65a7e5e425a997c227772da2e95dacd4a3 100644
--- a/lib/modules/versioning/npm/index.spec.ts
+++ b/lib/modules/versioning/npm/index.spec.ts
@@ -40,7 +40,7 @@ describe('modules/versioning/npm/index', () => {
     'getSatisfyingVersion("$versions","$range") === $maxSatisfying',
     ({ versions, range, maxSatisfying }) => {
       expect(semver.getSatisfyingVersion(versions, range)).toBe(maxSatisfying);
-    }
+    },
   );
 
   it.each`
@@ -163,6 +163,6 @@ describe('modules/versioning/npm/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/npm/range.ts b/lib/modules/versioning/npm/range.ts
index 779b3308caf959530a8a90763159ff75450bd1f0..9481eaad2d7ba21b76d888db865748553cf8626c 100644
--- a/lib/modules/versioning/npm/range.ts
+++ b/lib/modules/versioning/npm/range.ts
@@ -183,7 +183,7 @@ export function getNewValue({
         .join(' ');
     }
     logger.debug(
-      'Unsupported range type for rangeStrategy=bump: ' + currentValue
+      'Unsupported range type for rangeStrategy=bump: ' + currentValue,
     );
     return null;
   }
diff --git a/lib/modules/versioning/pep440/index.spec.ts b/lib/modules/versioning/pep440/index.spec.ts
index 4a404d7378a82dde9d3a946f6713ea13cd1d7bdd..bb2ad28d78b03723b833f889f9361183f3710043 100644
--- a/lib/modules/versioning/pep440/index.spec.ts
+++ b/lib/modules/versioning/pep440/index.spec.ts
@@ -69,7 +69,7 @@ describe('modules/versioning/pep440/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ range, expected }) => {
       expect(pep440.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -80,7 +80,7 @@ describe('modules/versioning/pep440/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ range, expected }) => {
       expect(pep440.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -189,7 +189,7 @@ describe('modules/versioning/pep440/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 
   it.each`
@@ -306,7 +306,7 @@ describe('modules/versioning/pep440/index', () => {
         isReplacement,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 
   it.each`
@@ -337,6 +337,6 @@ describe('modules/versioning/pep440/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(pep440.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/pep440/index.ts b/lib/modules/versioning/pep440/index.ts
index 26c6f80ce70178842ff99408014421ca124a83d7..6de5a74ad4931bcef2829238715f44a16a318614 100644
--- a/lib/modules/versioning/pep440/index.ts
+++ b/lib/modules/versioning/pep440/index.ts
@@ -48,7 +48,7 @@ export function isValid(input: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const found = pep440.filter(versions, range).sort(sortVersions);
   return found.length === 0 ? null : found[found.length - 1];
@@ -56,7 +56,7 @@ function getSatisfyingVersion(
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const found = pep440.filter(versions, range).sort(sortVersions);
   return found.length === 0 ? null : found[0];
diff --git a/lib/modules/versioning/pep440/range.spec.ts b/lib/modules/versioning/pep440/range.spec.ts
index 75c69a681d4c005dd629ffcfbdb27d6f44d99296..193d558fd6749b58332743054c217323e6feae74 100644
--- a/lib/modules/versioning/pep440/range.spec.ts
+++ b/lib/modules/versioning/pep440/range.spec.ts
@@ -10,8 +10,8 @@ it.each`
   ({ rangeInput, newVersion, expected }) => {
     const res = checkRangeAndRemoveUnnecessaryRangeLimit(
       rangeInput,
-      newVersion
+      newVersion,
     );
     expect(res).toEqual(expected);
-  }
+  },
 );
diff --git a/lib/modules/versioning/pep440/range.ts b/lib/modules/versioning/pep440/range.ts
index 54eca71de4b53654c39d36a0cf715d9da36978d9..0c94a3c4c894a881579ea669413c254845212190 100644
--- a/lib/modules/versioning/pep440/range.ts
+++ b/lib/modules/versioning/pep440/range.ts
@@ -30,7 +30,7 @@ type UserPolicy =
  */
 function getRangePrecision(ranges: Range[]): UserPolicy {
   const bound = coerceArray(
-    parseVersion((ranges[1] || ranges[0]).version)?.release
+    parseVersion((ranges[1] || ranges[0]).version)?.release,
   );
   let rangePrecision = -1;
   // range is defined by a single bound.
@@ -74,11 +74,11 @@ function getRangePrecision(ranges: Range[]): UserPolicy {
 function getFutureVersion(
   policy: UserPolicy,
   newVersion: string,
-  baseVersion?: string
+  baseVersion?: string,
 ): number[] {
   const toRelease = coerceArray(parseVersion(newVersion)?.release);
   const baseRelease = coerceArray(
-    parseVersion(baseVersion ?? newVersion)?.release
+    parseVersion(baseVersion ?? newVersion)?.release,
   );
   return baseRelease.map((_, index) => {
     const toPart = toRelease[index] ?? 0;
@@ -139,7 +139,7 @@ export function getNewValue({
           currentVersion,
           newVersion,
         },
-        ranges
+        ranges,
       );
       break;
     case 'widen':
@@ -150,7 +150,7 @@ export function getNewValue({
           currentVersion,
           newVersion,
         },
-        ranges
+        ranges,
       );
       break;
     case 'bump':
@@ -161,7 +161,7 @@ export function getNewValue({
           currentVersion,
           newVersion,
         },
-        ranges
+        ranges,
       );
       break;
     default:
@@ -171,7 +171,7 @@ export function getNewValue({
       logger.debug(
         'Unsupported rangeStrategy: ' +
           rangeStrategy +
-          '. Using "replace" instead.'
+          '. Using "replace" instead.',
       );
       return getNewValue({
         currentValue,
@@ -188,14 +188,14 @@ export function getNewValue({
   }
   const checkedResult = checkRangeAndRemoveUnnecessaryRangeLimit(
     result,
-    newVersion
+    newVersion,
   );
 
   if (!satisfies(newVersion, checkedResult)) {
     // we failed at creating the range
     logger.warn(
       { result, newVersion, currentValue },
-      'pep440: failed to calculate newValue'
+      'pep440: failed to calculate newValue',
     );
     return null;
   }
@@ -212,7 +212,7 @@ export function isLessThanRange(input: string, range: string): boolean {
         x
           .replace(regEx(/\s*/g), '')
           .split(regEx(/(~=|==|!=|<=|>=|<|>|===)/))
-          .slice(1)
+          .slice(1),
       )
       .map(([op, version]) => {
         if (['!=', '<=', '<'].includes(op)) {
@@ -275,7 +275,7 @@ function handleUpperBound(range: Range, newVersion: string): string | null {
       const futureVersion = getFutureVersion(
         precision,
         newVersion,
-        range.version
+        range.version,
       );
       return range.operator + futureVersion.join('.');
     }
@@ -288,7 +288,7 @@ function handleUpperBound(range: Range, newVersion: string): string | null {
 
 function updateRangeValue(
   { currentValue, rangeStrategy, currentVersion, newVersion }: NewValueConfig,
-  range: Range
+  range: Range,
 ): string | null {
   // used to exclude versions,
   // we assume that's for a good reason
@@ -301,7 +301,7 @@ function updateRangeValue(
     const futureVersion = getFutureVersion(
       UserPolicyPrecisionMap.None,
       newVersion,
-      range.version
+      range.version,
     ).join('.');
     return range.operator + futureVersion + '.*';
   }
@@ -347,7 +347,7 @@ function updateRangeValue(
   // istanbul ignore next
   logger.error(
     { newVersion, currentValue, range },
-    'pep440: failed to process range'
+    'pep440: failed to process range',
   );
   // istanbul ignore next
   return null;
@@ -394,7 +394,7 @@ function divideCompatibleReleaseRange(currentRange: Range): Range[] {
 
 function handleWidenStrategy(
   { currentValue, rangeStrategy, currentVersion, newVersion }: NewValueConfig,
-  ranges: Range[]
+  ranges: Range[],
 ): (string | null)[] {
   // newVersion is within range
   if (satisfies(newVersion, currentValue)) {
@@ -427,7 +427,7 @@ function handleWidenStrategy(
       let futureVersion = getFutureVersion(
         rangePrecision,
         newVersion,
-        range.version
+        range.version,
       );
       if (trimZeros) {
         futureVersion = trimTrailingZeros(futureVersion);
@@ -442,14 +442,14 @@ function handleWidenStrategy(
         currentVersion,
         newVersion,
       },
-      range
+      range,
     );
   });
 }
 
 function handleReplaceStrategy(
   { currentValue, rangeStrategy, currentVersion, newVersion }: NewValueConfig,
-  ranges: Range[]
+  ranges: Range[],
 ): (string | null)[] {
   // newVersion is within range
   if (satisfies(newVersion, currentValue)) {
@@ -463,7 +463,7 @@ function handleReplaceStrategy(
       let futureVersion = getFutureVersion(
         rangePrecision,
         newVersion,
-        range.version
+        range.version,
       );
       if (trimZeros) {
         futureVersion = trimTrailingZeros(futureVersion);
@@ -501,14 +501,14 @@ function handleReplaceStrategy(
         currentVersion,
         newVersion,
       },
-      range
+      range,
     );
   });
 }
 
 function handleBumpStrategy(
   { currentValue, rangeStrategy, currentVersion, newVersion }: NewValueConfig,
-  ranges: Range[]
+  ranges: Range[],
 ): (string | null)[] {
   return ranges.map((range) => {
     // bump lower bound to current new version
@@ -522,14 +522,14 @@ function handleBumpStrategy(
         currentVersion,
         newVersion,
       },
-      range
+      range,
     );
   });
 }
 
 export function checkRangeAndRemoveUnnecessaryRangeLimit(
   rangeInput: string,
-  newVersion: string
+  newVersion: string,
 ): string {
   let newRange: string = rangeInput;
   if (rangeInput.includes(',')) {
diff --git a/lib/modules/versioning/poetry/index.spec.ts b/lib/modules/versioning/poetry/index.spec.ts
index 3d3618e3153d3dfe3271e79c9bd403f4a2440f64..5a7285cac93bbfd45a579ff963d86bdd14556fd9 100644
--- a/lib/modules/versioning/poetry/index.spec.ts
+++ b/lib/modules/versioning/poetry/index.spec.ts
@@ -37,7 +37,7 @@ describe('modules/versioning/poetry/index', () => {
       expect(versioning.getMajor(version)).toBe(major);
       expect(versioning.getMinor(version)).toBe(minor);
       expect(versioning.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -135,7 +135,7 @@ describe('modules/versioning/poetry/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(versioning.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -146,7 +146,7 @@ describe('modules/versioning/poetry/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(versioning.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -162,7 +162,7 @@ describe('modules/versioning/poetry/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(versioning.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -175,7 +175,7 @@ describe('modules/versioning/poetry/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(versioning.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -238,7 +238,7 @@ describe('modules/versioning/poetry/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/poetry/index.ts b/lib/modules/versioning/poetry/index.ts
index b7e6d84175c06c73d0c33b1066ed197d7714141a..39066b703cacb37820783609f4ea74e585ac67fd 100644
--- a/lib/modules/versioning/poetry/index.ts
+++ b/lib/modules/versioning/poetry/index.ts
@@ -83,7 +83,7 @@ function matches(version: string, range: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const semverVersions: string[] = [];
   versions.forEach((version) => {
@@ -99,7 +99,7 @@ function getSatisfyingVersion(
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const semverVersions: string[] = [];
   versions.forEach((version) => {
@@ -124,7 +124,7 @@ function isSingleVersion(constraint: string): boolean {
 function handleShort(
   operator: string,
   currentValue: string,
-  newVersion: string
+  newVersion: string,
 ): string | null {
   const toVersionMajor = getMajor(newVersion);
   const toVersionMinor = getMinor(newVersion);
@@ -167,7 +167,7 @@ function getNewValue({
     } catch (err) /* istanbul ignore next */ {
       logger.info(
         { err },
-        'Poetry versioning: Error caught checking if newVersion satisfies currentValue'
+        'Poetry versioning: Error caught checking if newVersion satisfies currentValue',
       );
     }
     const parsedRange = parseRange(npmCurrentValue);
@@ -194,7 +194,7 @@ function getNewValue({
       .length !== 3
   ) {
     logger.debug(
-      'Cannot massage python version to npm - returning currentValue'
+      'Cannot massage python version to npm - returning currentValue',
     );
     return currentValue;
   }
@@ -219,7 +219,7 @@ function getNewValue({
   } catch (err) /* istanbul ignore next */ {
     logger.debug(
       { currentValue, rangeStrategy, currentVersion, newVersion, err },
-      'Could not generate new value using npm.getNewValue()'
+      'Could not generate new value using npm.getNewValue()',
     );
   }
 
diff --git a/lib/modules/versioning/poetry/patterns.ts b/lib/modules/versioning/poetry/patterns.ts
index 04633ee4e57ae74fcd1b8c4fe3a76683cee8dcd1..20b79b2cefb4b216a71db08a9e77d7f3d41dae3e 100644
--- a/lib/modules/versioning/poetry/patterns.ts
+++ b/lib/modules/versioning/poetry/patterns.ts
@@ -43,5 +43,5 @@ export const VERSION_PATTERN = regEx(
   );
 
 export const RANGE_COMPARATOR_PATTERN = regEx(
-  /(\s*(?:\^|~|[><!]?=|[><]|\|\|)\s*)/
+  /(\s*(?:\^|~|[><!]?=|[><]|\|\|)\s*)/,
 );
diff --git a/lib/modules/versioning/poetry/transform.ts b/lib/modules/versioning/poetry/transform.ts
index 0eb5098bda949c3d37e43599bc7b557376d3acd2..44f5c4da678285ac4b97449a01135ed80780577d 100644
--- a/lib/modules/versioning/poetry/transform.ts
+++ b/lib/modules/versioning/poetry/transform.ts
@@ -41,7 +41,7 @@ function notEmpty(s: string): boolean {
  */
 export function poetry2semver(
   poetry_version: string,
-  padRelease = true
+  padRelease = true,
 ): string | null {
   const matchGroups = VERSION_PATTERN.exec(poetry_version)?.groups;
   if (!matchGroups) {
diff --git a/lib/modules/versioning/python/index.spec.ts b/lib/modules/versioning/python/index.spec.ts
index f362c9056ef9f81bb1f13ae28db7cc4d50954c7f..15043550a57a287222ca4253b64962e7c33c9fbb 100644
--- a/lib/modules/versioning/python/index.spec.ts
+++ b/lib/modules/versioning/python/index.spec.ts
@@ -50,7 +50,7 @@ describe('modules/versioning/python/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(versioning.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -62,7 +62,7 @@ describe('modules/versioning/python/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(versioning.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -79,7 +79,7 @@ describe('modules/versioning/python/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(versioning.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -93,7 +93,7 @@ describe('modules/versioning/python/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(versioning.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   test('getNewValue()', () => {
diff --git a/lib/modules/versioning/python/index.ts b/lib/modules/versioning/python/index.ts
index 7e287abc052d86938ee29e4fe4cfb06e5b43c2bb..18e31c78daca21d9aba0a8c17cdfe209ad73983b 100644
--- a/lib/modules/versioning/python/index.ts
+++ b/lib/modules/versioning/python/index.ts
@@ -25,7 +25,7 @@ function matches(version: string, range: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return poetry.isValid(range)
     ? poetry.getSatisfyingVersion(versions, range)
@@ -34,7 +34,7 @@ function getSatisfyingVersion(
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return poetry.isValid(range)
     ? poetry.minSatisfyingVersion(versions, range)
diff --git a/lib/modules/versioning/redhat/index.ts b/lib/modules/versioning/redhat/index.ts
index e9503582388877992e138acbdcde94f86c2bc6a8..5057410492f1b0b493b57b272aea2c03aad5d9c5 100644
--- a/lib/modules/versioning/redhat/index.ts
+++ b/lib/modules/versioning/redhat/index.ts
@@ -8,7 +8,7 @@ export const urls = [];
 export const supportsRanges = false;
 
 const pattern = regEx(
-  /^(?<major>\d+)(?:\.(?<minor>\d+))?(?:\.(?<patch>\d+))?(?:-(?<releaseMajor>\d+)(?:\.(?<releaseMinor>\d+))?)?$/
+  /^(?<major>\d+)(?:\.(?<minor>\d+))?(?:\.(?<patch>\d+))?(?:-(?<releaseMajor>\d+)(?:\.(?<releaseMinor>\d+))?)?$/,
 );
 
 class RedhatVersioningApi extends GenericVersioningApi {
diff --git a/lib/modules/versioning/regex/index.spec.ts b/lib/modules/versioning/regex/index.spec.ts
index 3c96498aa811952dd6d0ea29589ad6a931229999..7893513e1b0a2dd417d1cf2fada4340fab54ee3c 100644
--- a/lib/modules/versioning/regex/index.spec.ts
+++ b/lib/modules/versioning/regex/index.spec.ts
@@ -4,7 +4,7 @@ import { CONFIG_VALIDATION } from '../../../constants/error-messages';
 describe('modules/versioning/regex/index', () => {
   describe('regex versioning', () => {
     const regex = get(
-      'regex:^(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)(?<prerelease>[^.-]+)?(?:-(?<compatibility>.*))?$'
+      'regex:^(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)(?<prerelease>[^.-]+)?(?:-(?<compatibility>.*))?$',
     );
 
     it('requires a valid configuration to be initialized', () => {
@@ -77,7 +77,7 @@ describe('modules/versioning/regex/index', () => {
       ({ version, range, expected }) => {
         const res = regex.isCompatible(version, range);
         expect(!!res).toBe(expected);
-      }
+      },
     );
 
     it.each`
@@ -143,7 +143,7 @@ describe('modules/versioning/regex/index', () => {
         expect(regex.getMajor(version)).toBe(major);
         expect(regex.getMinor(version)).toBe(minor);
         expect(regex.getPatch(version)).toBe(patch);
-      }
+      },
     );
 
     it.each`
@@ -247,7 +247,7 @@ describe('modules/versioning/regex/index', () => {
       'isLessThanRange($version, $range) === $expected',
       ({ version, range, expected }) => {
         expect(regex.isLessThanRange?.(version, range)).toBe(expected);
-      }
+      },
     );
 
     it.each`
@@ -261,7 +261,7 @@ describe('modules/versioning/regex/index', () => {
       'getSatisfyingVersion($versions, "$range") === $expected',
       ({ versions, range, expected }) => {
         expect(regex.getSatisfyingVersion(versions, range)).toBe(expected);
-      }
+      },
     );
 
     it.each`
@@ -275,7 +275,7 @@ describe('modules/versioning/regex/index', () => {
       'minSatisfyingVersion($versions, "$range") === "$expected"',
       ({ versions, range, expected }) => {
         expect(regex.minSatisfyingVersion(versions, range)).toBe(expected);
-      }
+      },
     );
 
     describe('.getNewValue', () => {
@@ -286,7 +286,7 @@ describe('modules/versioning/regex/index', () => {
             rangeStrategy: null as never,
             currentVersion: null as never,
             newVersion: '1.2.3',
-          })
+          }),
         ).toBe('1.2.3');
       });
     });
@@ -295,8 +295,8 @@ describe('modules/versioning/regex/index', () => {
       it('sorts versions in an ascending order', () => {
         expect(
           ['1.2.3a1', '2.0.1', '1.3.4', '1.2.3'].sort(
-            regex.sortVersions.bind(regex)
-          )
+            regex.sortVersions.bind(regex),
+          ),
         ).toEqual(['1.2.3a1', '1.2.3', '1.3.4', '2.0.1']);
       });
     });
@@ -347,13 +347,13 @@ describe('modules/versioning/regex/index', () => {
       'matches("$version", "$range") === $expected',
       ({ version, range, expected }) => {
         expect(regex.matches(version, range)).toBe(expected);
-      }
+      },
     );
   });
 
   describe('Supported 4th number as build and 5th as revision', () => {
     const re = get(
-      'regex:^(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)(:?-(?<compatibility>.+)(?<build>\\d+)-r(?<revision>\\d+))?$'
+      'regex:^(?<major>\\d+)\\.(?<minor>\\d+)\\.(?<patch>\\d+)(:?-(?<compatibility>.+)(?<build>\\d+)-r(?<revision>\\d+))?$',
     );
 
     it.each`
@@ -372,7 +372,7 @@ describe('modules/versioning/regex/index', () => {
       ({ version, range, expected }) => {
         const res = re.isCompatible(version, range);
         expect(!!res).toBe(expected);
-      }
+      },
     );
 
     it.each`
@@ -391,7 +391,7 @@ describe('modules/versioning/regex/index', () => {
       'matches("$version", "$range") === $expected',
       ({ version, range, expected }) => {
         expect(re.matches(version, range)).toBe(expected);
-      }
+      },
     );
   });
 });
diff --git a/lib/modules/versioning/regex/index.ts b/lib/modules/versioning/regex/index.ts
index a11369635d481710f22a1db5ff05304ad332049c..31e0565be7102354a8d3575141bbb357edecdc7d 100644
--- a/lib/modules/versioning/regex/index.ts
+++ b/lib/modules/versioning/regex/index.ts
@@ -115,7 +115,7 @@ export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> {
 
   override getSatisfyingVersion(
     versions: string[],
-    range: string
+    range: string,
   ): string | null {
     const parsedRange = this._parse(range);
     return parsedRange
@@ -124,14 +124,14 @@ export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> {
             .map((v) => this._parse(v))
             .filter(is.truthy)
             .map(asSemver),
-          asSemver(parsedRange)
+          asSemver(parsedRange),
         )
       : null;
   }
 
   override minSatisfyingVersion(
     versions: string[],
-    range: string
+    range: string,
   ): string | null {
     const parsedRange = this._parse(range);
     return parsedRange
@@ -140,7 +140,7 @@ export class RegExpVersioningApi extends GenericVersioningApi<RegExpVersion> {
             .map((v) => this._parse(v))
             .filter(is.truthy)
             .map(asSemver),
-          asSemver(parsedRange)
+          asSemver(parsedRange),
         )
       : null;
   }
diff --git a/lib/modules/versioning/rez/index.spec.ts b/lib/modules/versioning/rez/index.spec.ts
index c076cf3fcce9fdaedaba74d7e14dceeb79726c01..05c2f7f3aba818e229a0c4b2dd75276aa9d76871 100644
--- a/lib/modules/versioning/rez/index.spec.ts
+++ b/lib/modules/versioning/rez/index.spec.ts
@@ -15,7 +15,7 @@ describe('modules/versioning/rez/index', () => {
     'equals("$version", "$equal") === $expected',
     ({ version, equal, expected }) => {
       expect(versioning.equals(version, equal)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -61,7 +61,7 @@ describe('modules/versioning/rez/index', () => {
     'isGreaterThan("$version", "$other") === $expected',
     ({ version, other, expected }) => {
       expect(versioning.isGreaterThan(version, other)).toEqual(expected);
-    }
+    },
   );
 
   it.each`
@@ -129,7 +129,7 @@ describe('modules/versioning/rez/index', () => {
     'minSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(versioning.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -139,7 +139,7 @@ describe('modules/versioning/rez/index', () => {
     'getSatisfyingVersion($versions, "$range") === $expected',
     ({ versions, range, expected }) => {
       expect(versioning.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -152,7 +152,7 @@ describe('modules/versioning/rez/index', () => {
     'isLessThanRange($version, "$range") === $expected',
     ({ version, range, expected }) => {
       expect(versioning.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -172,7 +172,7 @@ describe('modules/versioning/rez/index', () => {
     'matches($version, "$range") === $expected',
     ({ version, range, expected }) => {
       expect(versioning.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -187,7 +187,7 @@ describe('modules/versioning/rez/index', () => {
       const dockerSorted = versioning.sortVersions(a, b);
       const semverSorted = semver.sortVersions(a, b);
       expect(dockerSorted).toBe(semverSorted);
-    }
+    },
   );
 
   it.each`
@@ -437,7 +437,7 @@ describe('modules/versioning/rez/index', () => {
         newVersion,
       });
       expect(res).toBe(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/rez/index.ts b/lib/modules/versioning/rez/index.ts
index 06ab51161d2a63eb190f97302bcd96cfe9ec8e56..9ef244da419646db000a6158694246896593265c 100644
--- a/lib/modules/versioning/rez/index.ts
+++ b/lib/modules/versioning/rez/index.ts
@@ -101,14 +101,14 @@ function matches(version: string, range: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.getSatisfyingVersion(versions, rez2npm(range));
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return npm.minSatisfyingVersion(versions, rez2npm(range));
 }
@@ -162,18 +162,18 @@ function getNewValue({
     const upperAscVersionCurrent = matchAscRange.groups.range_upper_asc_version;
     const [lowerBoundAscPep440, upperBoundAscPep440] = pep440Value.split(', ');
     const lowerAscVersionNew = coerceString(
-      regEx(versionGroup).exec(lowerBoundAscPep440)?.[0]
+      regEx(versionGroup).exec(lowerBoundAscPep440)?.[0],
     );
     const upperAscVersionNew = coerceString(
-      regEx(versionGroup).exec(upperBoundAscPep440)?.[0]
+      regEx(versionGroup).exec(upperBoundAscPep440)?.[0],
     );
     const lowerBoundAscNew = lowerBoundAscCurrent.replace(
       lowerAscVersionCurrent,
-      lowerAscVersionNew
+      lowerAscVersionNew,
     );
     const upperBoundAscNew = upperBoundAscCurrent.replace(
       upperAscVersionCurrent,
-      upperAscVersionNew
+      upperAscVersionNew,
     );
     const separator = currentValue.includes(',') ? ',' : '';
 
@@ -193,18 +193,18 @@ function getNewValue({
       pep440Value.split(', ');
 
     const upperDescVersionNew = coerceString(
-      regEx(versionGroup).exec(upperBoundDescPep440)?.[0]
+      regEx(versionGroup).exec(upperBoundDescPep440)?.[0],
     );
     const lowerDescVersionNew = coerceString(
-      regEx(versionGroup).exec(lowerBoundDescPep440)?.[0]
+      regEx(versionGroup).exec(lowerBoundDescPep440)?.[0],
     );
     const upperBoundDescNew = upperBoundDescCurrent.replace(
       upperDescVersionCurrent,
-      upperDescVersionNew
+      upperDescVersionNew,
     );
     const lowerBoundDescNew = lowerBoundDescCurrent.replace(
       lowerDescVersionCurrent,
-      lowerDescVersionNew
+      lowerDescVersionNew,
     );
     // Descending ranges are only supported with a comma.
     const separator = ',';
diff --git a/lib/modules/versioning/rez/pattern.ts b/lib/modules/versioning/rez/pattern.ts
index dc344c115410c8e99e6f85c74eec6decc69be5dc..8eac443ec6a5c02a07b9df14a16c610eedae802d 100644
--- a/lib/modules/versioning/rez/pattern.ts
+++ b/lib/modules/versioning/rez/pattern.ts
@@ -65,28 +65,28 @@ import { regEx } from '../../../util/regex';
 // - Replace single \ -> double \
 export const versionGroup = '([0-9a-zA-Z_]+(?:[.-][0-9a-zA-Z_]+)*)';
 export const matchVersion = regEx(
-  `^(?<version>${versionGroup})$`
+  `^(?<version>${versionGroup})$`,
 ); /* Match a version number (e.g. 1.0.0) */
 export const exactVersion = regEx(
-  `^(?<exact_version>==(?<exact_version_group>${versionGroup})?)$`
+  `^(?<exact_version>==(?<exact_version_group>${versionGroup})?)$`,
 ); /* Match an exact version number (e.g. ==1.0.0) */
 // inclusiveBound is called inclusive but behaviour in rez is this:
 // package-1..3 will match versions 1.2.3, 2.3.4, but not 3.0.0 or above
 export const inclusiveBound = regEx(
-  `^(?<inclusive_bound>(?<inclusive_lower_version>${versionGroup})?\\.\\.(?<inclusive_upper_version>${versionGroup})?)$`
+  `^(?<inclusive_bound>(?<inclusive_lower_version>${versionGroup})?\\.\\.(?<inclusive_upper_version>${versionGroup})?)$`,
 ); /* Match an inclusive bound (e.g. 1.0.0..2.0.0) */
 // Add ? after |\\+) in order to match >=1.15
 export const lowerBound = new RegExp( // TODO #12872 named backreference
-  `^(?<lower_bound>(?<lower_bound_prefix>>|>=)?(?<lower_version>${versionGroup})?(\\k<lower_bound_prefix>|\\+)?)$`
+  `^(?<lower_bound>(?<lower_bound_prefix>>|>=)?(?<lower_version>${versionGroup})?(\\k<lower_bound_prefix>|\\+)?)$`,
 ); /* Match a lower bound (e.g. 1.0.0+) */
 export const upperBound = new RegExp( // TODO #12872  lookahead
-  `^(?<upper_bound>(?<upper_bound_prefix><(?=${versionGroup})|<=)?(?<upper_version>${versionGroup})?)$`
+  `^(?<upper_bound>(?<upper_bound_prefix><(?=${versionGroup})|<=)?(?<upper_version>${versionGroup})?)$`,
 ); /* Match an upper bound (e.g. <=1.0.0) */
 // Add ,? to match >=7,<9 (otherwise it just matches >=7<9)
 export const ascendingRange = new RegExp( // TODO #12872  named backreference
-  `^(?<range_asc>(?<range_lower_asc>(?<range_lower_asc_prefix>>|>=)?(?<range_lower_asc_version>${versionGroup})?(\\k<range_lower_asc_prefix>|\\+)?),?(?<range_upper_asc>(\\k<range_lower_asc_version>,?|)(?<range_upper_asc_prefix><(?=${versionGroup})|<=)(?<range_upper_asc_version>${versionGroup})?))$`
+  `^(?<range_asc>(?<range_lower_asc>(?<range_lower_asc_prefix>>|>=)?(?<range_lower_asc_version>${versionGroup})?(\\k<range_lower_asc_prefix>|\\+)?),?(?<range_upper_asc>(\\k<range_lower_asc_version>,?|)(?<range_upper_asc_prefix><(?=${versionGroup})|<=)(?<range_upper_asc_version>${versionGroup})?))$`,
 ); /* Match a range in ascending order (e.g. 1.0.0+<2.0.0) */
 // Add , to match <9,>=7 (otherwise it just matches <9>=7)
 export const descendingRange = new RegExp( // TODO #12872  named backreference
-  `^(?<range_desc>(?<range_upper_desc>(?<range_upper_desc_prefix><|<=)?(?<range_upper_desc_version>${versionGroup})?(\\k<range_upper_desc_prefix>|\\+)?),(?<range_lower_desc>(\\k<range_upper_desc_version>,|)(?<range_lower_desc_prefix><(?=${versionGroup})|>=?)(?<range_lower_desc_version>${versionGroup})?))$`
+  `^(?<range_desc>(?<range_upper_desc>(?<range_upper_desc_prefix><|<=)?(?<range_upper_desc_version>${versionGroup})?(\\k<range_upper_desc_prefix>|\\+)?),(?<range_lower_desc>(\\k<range_upper_desc_version>,|)(?<range_lower_desc_prefix><(?=${versionGroup})|>=?)(?<range_lower_desc_version>${versionGroup})?))$`,
 ); /* Match a range in descending order (e.g. <=2.0.0,1.0.0+) */
diff --git a/lib/modules/versioning/ruby/index.spec.ts b/lib/modules/versioning/ruby/index.spec.ts
index 6f9f9024521b1b7ac9c30441204f5b04b32bdfd0..ea1d69782489dec87730ce4966e8e594648073c7 100644
--- a/lib/modules/versioning/ruby/index.spec.ts
+++ b/lib/modules/versioning/ruby/index.spec.ts
@@ -27,7 +27,7 @@ describe('modules/versioning/ruby/index', () => {
       expect(semverRuby.getMajor(version)).toBe(major);
       expect(semverRuby.getMinor(version)).toBe(minor);
       expect(semverRuby.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -129,7 +129,7 @@ describe('modules/versioning/ruby/index', () => {
     'minSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(semverRuby.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -144,7 +144,7 @@ describe('modules/versioning/ruby/index', () => {
     'getSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(semverRuby.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -161,7 +161,7 @@ describe('modules/versioning/ruby/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(semverRuby.matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -180,7 +180,7 @@ describe('modules/versioning/ruby/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(semverRuby.isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -397,8 +397,8 @@ describe('modules/versioning/ruby/index', () => {
           rangeStrategy,
           currentVersion,
           newVersion,
-        })
+        }),
       ).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/ruby/index.ts b/lib/modules/versioning/ruby/index.ts
index b2cee80b2e2e5f5f32f5274a17902af54307a37e..c6e4e38558fbd2efd1c25e14fcff5b72e0558b72 100644
--- a/lib/modules/versioning/ruby/index.ts
+++ b/lib/modules/versioning/ruby/index.ts
@@ -83,14 +83,14 @@ export const matches = (version: string, range: string): boolean =>
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return maxSatisfying(versions.map(vtrim), vtrim(range));
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return minSatisfying(versions.map(vtrim), vtrim(range));
 }
@@ -151,12 +151,12 @@ const getNewValue = ({
       .map((element) =>
         element.replace(
           regEx(`^(?<whitespace>\\s*)`),
-          `$<whitespace>${delimiter}`
-        )
+          `$<whitespace>${delimiter}`,
+        ),
       )
       .map(
         (element) =>
-          element.replace(/(?<whitespace>\s*)$/, `${delimiter}$<whitespace>`) // TODO #12875 adds ' at front when re2 is used
+          element.replace(/(?<whitespace>\s*)$/, `${delimiter}$<whitespace>`), // TODO #12875 adds ' at front when re2 is used
       )
       .join(',');
   }
diff --git a/lib/modules/versioning/ruby/range.ts b/lib/modules/versioning/ruby/range.ts
index cb8deaf75717c6c101474b61ab274bdf726541d1..1fef56d3814cc3b685e338739117fa7f7fefdaa8 100644
--- a/lib/modules/versioning/ruby/range.ts
+++ b/lib/modules/versioning/ruby/range.ts
@@ -21,7 +21,7 @@ export interface Range {
 
 const parse = (range: string): Range => {
   const regExp = regEx(
-    /^(?<operator>[^\d\s]+)?(?<delimiter>\s*)(?<version>[0-9a-zA-Z-.]+)$/
+    /^(?<operator>[^\d\s]+)?(?<delimiter>\s*)(?<version>[0-9a-zA-Z-.]+)$/,
   );
 
   const value = (range || '').trim();
diff --git a/lib/modules/versioning/ruby/version.ts b/lib/modules/versioning/ruby/version.ts
index cd421a6fea0a67132bc89742fa1dbe8e2f765ba9..f82fdb152d829529cce2e1c02cbf7c243f11a708 100644
--- a/lib/modules/versioning/ruby/version.ts
+++ b/lib/modules/versioning/ruby/version.ts
@@ -70,7 +70,7 @@ const incrementMajor = (
   maj: number,
   min: number,
   ptch: number,
-  pre: string[]
+  pre: string[],
 ): number => (min === 0 || ptch === 0 || pre.length === 0 ? maj + 1 : maj);
 
 // istanbul ignore next
@@ -120,7 +120,7 @@ const decrement = (version: string): string => {
       (
         accumulator: number[],
         segment: SegmentElement,
-        index: number
+        index: number,
       ): number[] => {
         if (index === 0) {
           return [(segment as number) - 1];
@@ -136,7 +136,7 @@ const decrement = (version: string): string => {
 
         return [...accumulator, segment as number];
       },
-      []
+      [],
     );
 
   return nextSegments.reverse().join('.');
diff --git a/lib/modules/versioning/schema.ts b/lib/modules/versioning/schema.ts
index 3471ecbed5172d8513153325bb01727ee5944751..3bf4db93da042a609c89a2f5a162fea01652ecc8 100644
--- a/lib/modules/versioning/schema.ts
+++ b/lib/modules/versioning/schema.ts
@@ -14,7 +14,7 @@ export const Versioning = z
     if (!versioning) {
       logger.info(
         { versioning: versioningSpec },
-        `Versioning: '${versioningSpec}' not found, falling back to ${defaultVersioning.id}`
+        `Versioning: '${versioningSpec}' not found, falling back to ${defaultVersioning.id}`,
       );
       return defaultVersioning.api;
     }
diff --git a/lib/modules/versioning/semver-coerced/index.spec.ts b/lib/modules/versioning/semver-coerced/index.spec.ts
index ff01ad95fdacffa806e15ea1ea711c28f4d207ca..90bca0d540228e9d101196214d11b79b9164f429 100644
--- a/lib/modules/versioning/semver-coerced/index.spec.ts
+++ b/lib/modules/versioning/semver-coerced/index.spec.ts
@@ -184,7 +184,7 @@ describe('modules/versioning/semver-coerced/index', () => {
       expect(semverCoerced.isValid('renovatebot/renovate')).toBeFalse();
       expect(semverCoerced.isValid('renovatebot/renovate#master')).toBeFalse();
       expect(
-        semverCoerced.isValid('https://github.com/renovatebot/renovate.git')
+        semverCoerced.isValid('https://github.com/renovatebot/renovate.git'),
       ).toBeFalse();
     });
   });
@@ -224,13 +224,13 @@ describe('modules/versioning/semver-coerced/index', () => {
   describe('.getSatisfyingVersion(versions, range)', () => {
     it('should return max satisfying version in range', () => {
       expect(
-        semverCoerced.getSatisfyingVersion(['1.0.0', '1.0.4'], '^1.0')
+        semverCoerced.getSatisfyingVersion(['1.0.0', '1.0.4'], '^1.0'),
       ).toBe('1.0.4');
     });
 
     it('should support coercion', () => {
       expect(
-        semverCoerced.getSatisfyingVersion(['v1.0', '1.0.4-foo'], '^1.0')
+        semverCoerced.getSatisfyingVersion(['v1.0', '1.0.4-foo'], '^1.0'),
       ).toBe('1.0.4');
     });
   });
@@ -238,13 +238,13 @@ describe('modules/versioning/semver-coerced/index', () => {
   describe('.minSatisfyingVersion(versions, range)', () => {
     it('should return min satisfying version in range', () => {
       expect(
-        semverCoerced.minSatisfyingVersion(['1.0.0', '1.0.4'], '^1.0')
+        semverCoerced.minSatisfyingVersion(['1.0.0', '1.0.4'], '^1.0'),
       ).toBe('1.0.0');
     });
 
     it('should support coercion', () => {
       expect(
-        semverCoerced.minSatisfyingVersion(['v1.0', '1.0.4-foo'], '^1.0')
+        semverCoerced.minSatisfyingVersion(['v1.0', '1.0.4-foo'], '^1.0'),
       ).toBe('1.0.0');
     });
   });
@@ -257,7 +257,7 @@ describe('modules/versioning/semver-coerced/index', () => {
           rangeStrategy: 'bump',
           currentVersion: '1.0.0',
           newVersion: '1.1.0',
-        })
+        }),
       ).toBe('1.1.0');
     });
   });
diff --git a/lib/modules/versioning/semver-coerced/index.ts b/lib/modules/versioning/semver-coerced/index.ts
index c17e9d248dce1391b84482cca85080251406aa48..da56efca6cf6718f9173be4dc58a695a98dd0d07 100644
--- a/lib/modules/versioning/semver-coerced/index.ts
+++ b/lib/modules/versioning/semver-coerced/index.ts
@@ -13,7 +13,7 @@ function isStable(version: string): boolean {
   // matching a version with the semver prefix
   // v1.2.3, 1.2.3, v1.2, 1.2, v1, 1
   const regx = regEx(
-    /^v?(?<major>\d+)(?<minor>\.\d+)?(?<patch>\.\d+)?(?<others>.+)?/
+    /^v?(?<major>\d+)(?<minor>\.\d+)?(?<patch>\.\d+)?(?<others>.+)?/,
   );
   const m = regx.exec(version);
 
@@ -68,7 +68,7 @@ function isValid(version: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const coercedVersions = versions
     .map((version) => semver.coerce(version)?.version)
@@ -79,7 +79,7 @@ function getSatisfyingVersion(
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const coercedVersions = versions
     .map((version) => semver.coerce(version)?.version)
diff --git a/lib/modules/versioning/semver/index.spec.ts b/lib/modules/versioning/semver/index.spec.ts
index f99a043ac734eaff36c2e2b53f0635d216053658..05f7573efc0e675676f9bdcc0304c59e30b2b727 100644
--- a/lib/modules/versioning/semver/index.spec.ts
+++ b/lib/modules/versioning/semver/index.spec.ts
@@ -41,7 +41,7 @@ describe('modules/versioning/semver/index', () => {
         newVersion,
       });
       expect(res).toEqual(expected);
-    }
+    },
   );
 
   it.each`
diff --git a/lib/modules/versioning/swift/index.spec.ts b/lib/modules/versioning/swift/index.spec.ts
index b79c6addd3b61a7f6b7cec6ce37b651809144f93..dd733bd6b489a9c7fa34a9148698780366121905 100644
--- a/lib/modules/versioning/swift/index.spec.ts
+++ b/lib/modules/versioning/swift/index.spec.ts
@@ -66,7 +66,7 @@ describe('modules/versioning/swift/index', () => {
     'minSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -79,7 +79,7 @@ describe('modules/versioning/swift/index', () => {
     'getSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -93,7 +93,7 @@ describe('modules/versioning/swift/index', () => {
     'isLessThanRange("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(isLessThanRange?.(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -107,7 +107,7 @@ describe('modules/versioning/swift/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(matches(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -137,8 +137,8 @@ describe('modules/versioning/swift/index', () => {
           rangeStrategy,
           currentVersion,
           newVersion,
-        })
+        }),
       ).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/swift/index.ts b/lib/modules/versioning/swift/index.ts
index a273a15d47885c5925a43f0a1d6033b4f9af8920..a6135eaa73221dbdc2527d3e7c661c0bdfde4ed4 100644
--- a/lib/modules/versioning/swift/index.ts
+++ b/lib/modules/versioning/swift/index.ts
@@ -40,7 +40,7 @@ export const isVersion = (input: string): boolean => !!valid(input);
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const normalizedVersions = versions.map((v) => v.replace(regEx(/^v/), ''));
   const semverRange = toSemverRange(range);
@@ -49,7 +49,7 @@ function getSatisfyingVersion(
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   const normalizedVersions = versions.map((v) => v.replace(regEx(/^v/), ''));
   const semverRange = toSemverRange(range);
diff --git a/lib/modules/versioning/ubuntu/index.spec.ts b/lib/modules/versioning/ubuntu/index.spec.ts
index 96e4d1f89083bf8c7a0ec86dc9fda2242a8faf47..eaa5de4e2842bf1ca99ca0552805c7ef429c80be 100644
--- a/lib/modules/versioning/ubuntu/index.spec.ts
+++ b/lib/modules/versioning/ubuntu/index.spec.ts
@@ -100,7 +100,7 @@ describe('modules/versioning/ubuntu/index', () => {
     'isCompatible("$version") === $expected',
     ({ version, range, expected }) => {
       expect(ubuntu.isCompatible(version, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -268,7 +268,7 @@ describe('modules/versioning/ubuntu/index', () => {
       expect(ubuntu.getMajor(version)).toBe(major);
       expect(ubuntu.getMinor(version)).toBe(minor);
       expect(ubuntu.getPatch(version)).toBe(patch);
-    }
+    },
   );
 
   it.each`
@@ -337,7 +337,7 @@ describe('modules/versioning/ubuntu/index', () => {
     'getSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(ubuntu.getSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -356,7 +356,7 @@ describe('modules/versioning/ubuntu/index', () => {
     'minSatisfyingVersion($versions, "$range") === "$expected"',
     ({ versions, range, expected }) => {
       expect(ubuntu.minSatisfyingVersion(versions, range)).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -375,9 +375,9 @@ describe('modules/versioning/ubuntu/index', () => {
           rangeStrategy,
           currentVersion,
           newVersion,
-        })
+        }),
       ).toBe(expected);
-    }
+    },
   );
 
   it.each`
@@ -397,6 +397,6 @@ describe('modules/versioning/ubuntu/index', () => {
     'matches("$version", "$range") === "$expected"',
     ({ version, range, expected }) => {
       expect(ubuntu.matches(version, range)).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/modules/versioning/ubuntu/index.ts b/lib/modules/versioning/ubuntu/index.ts
index 243b7e2cfd17ca448fbe9e60b853bd8ce5732c3a..338b419ecb18ca3b4848d7423e1f755944c348e4 100644
--- a/lib/modules/versioning/ubuntu/index.ts
+++ b/lib/modules/versioning/ubuntu/index.ts
@@ -145,14 +145,14 @@ function isGreaterThan(version: string, other: string): boolean {
 
 function getSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return versions.find((version) => equals(version, range)) ? range : null;
 }
 
 function minSatisfyingVersion(
   versions: string[],
-  range: string
+  range: string,
 ): string | null {
   return getSatisfyingVersion(versions, range);
 }
diff --git a/lib/modules/versioning/versioning-metadata.spec.ts b/lib/modules/versioning/versioning-metadata.spec.ts
index 28cfda606ea522a775e1e9beed6161e8b3da12bd..5dc04736794141c2bbbc48b24e3a9c01fe253ea6 100644
--- a/lib/modules/versioning/versioning-metadata.spec.ts
+++ b/lib/modules/versioning/versioning-metadata.spec.ts
@@ -3,14 +3,14 @@ import { readFile, readdir } from 'fs-extra';
 describe('modules/versioning/versioning-metadata', () => {
   it('readme no markdown headers', async () => {
     const allVersioning = (await readdir('lib/modules/versioning')).filter(
-      (item) => !item.includes('.')
+      (item) => !item.includes('.'),
     );
     for (const versioning of allVersioning) {
       let readme: string | undefined;
       try {
         readme = await readFile(
           'lib/modules/versioning/' + versioning + '/readme.md',
-          'utf8'
+          'utf8',
         );
       } catch (err) {
         // ignore missing file
@@ -24,7 +24,7 @@ describe('modules/versioning/versioning-metadata', () => {
 
   it('contains mandatory fields', async () => {
     const allVersioning = (await readdir('lib/modules/versioning')).filter(
-      (item) => !item.includes('.') && !item.startsWith('_')
+      (item) => !item.includes('.') && !item.startsWith('_'),
     );
 
     for (const versioning of allVersioning) {
diff --git a/lib/util/array.ts b/lib/util/array.ts
index f4ad9412fee1dbe5716389aef367262621365eb7..87fc4c59ac93b518d9e7c44fff7f0f0364c1540f 100644
--- a/lib/util/array.ts
+++ b/lib/util/array.ts
@@ -15,7 +15,7 @@ export function sortNumeric(a: number, b: number): number {
 // undefined. This predicate acts as a type guard so that the resulting type for
 // `values.filter(isNotNullOrUndefined)` is `T[]`.
 export function isNotNullOrUndefined<T>(
-  value: T | undefined | null
+  value: T | undefined | null,
 ): value is T {
   return !is.nullOrUndefined(value);
 }
diff --git a/lib/util/assign-keys.ts b/lib/util/assign-keys.ts
index 088d619617b43c4ddbe66a5a54bf0652a975a166..7083515e56dca2da915bb2f593ad9d7381fd72a6 100644
--- a/lib/util/assign-keys.ts
+++ b/lib/util/assign-keys.ts
@@ -6,7 +6,7 @@ import is from '@sindresorhus/is';
 export function assignKeys<
   Left extends { [key in K]?: unknown },
   Right extends { [key in K]?: Left[key] },
-  K extends keyof Right
+  K extends keyof Right,
 >(left: Left, right: Right, keys: K[]): Left {
   for (const key of keys) {
     const val = right[key];
diff --git a/lib/util/cache/package/decorator.spec.ts b/lib/util/cache/package/decorator.spec.ts
index eb5d33312160acfa82de4a370cd0a23f67287e88..7dea4e7e6c52713d6a4dc1303f0d8620bc1669f7 100644
--- a/lib/util/cache/package/decorator.spec.ts
+++ b/lib/util/cache/package/decorator.spec.ts
@@ -42,7 +42,7 @@ describe('util/cache/package/decorator', () => {
       'some-namespace',
       'cache-decorator:some-key',
       { cachedAt: expect.any(String), value: '111' },
-      30
+      30,
     );
   });
 
@@ -82,7 +82,7 @@ describe('util/cache/package/decorator', () => {
       'namespace',
       'cache-decorator:key',
       { cachedAt: expect.any(String), value: null },
-      30
+      30,
     );
   });
 
@@ -130,7 +130,7 @@ describe('util/cache/package/decorator', () => {
       'some-namespace',
       'cache-decorator:some-key',
       { cachedAt: expect.any(String), value: '111' },
-      30
+      30,
     );
   });
 
@@ -152,7 +152,7 @@ describe('util/cache/package/decorator', () => {
       'namespace',
       'cache-decorator:key',
       { cachedAt: expect.any(String), value: '111' },
-      30
+      30,
     );
   });
 
@@ -188,7 +188,7 @@ describe('util/cache/package/decorator', () => {
         'namespace',
         'cache-decorator:key',
         { cachedAt: expect.any(String), value: '111' },
-        2
+        2,
       );
 
       jest.advanceTimersByTime(1);
@@ -198,7 +198,7 @@ describe('util/cache/package/decorator', () => {
         'namespace',
         'cache-decorator:key',
         { cachedAt: expect.any(String), value: '222' },
-        2
+        2,
       );
     });
 
@@ -215,7 +215,7 @@ describe('util/cache/package/decorator', () => {
         'namespace',
         'cache-decorator:key',
         { cachedAt: expect.any(String), value: '111' },
-        3
+        3,
       );
 
       jest.advanceTimersByTime(120 * 1000 - 1); // namespace default ttl is 1min
@@ -230,7 +230,7 @@ describe('util/cache/package/decorator', () => {
         'namespace',
         'cache-decorator:key',
         { cachedAt: expect.any(String), value: '222' },
-        3
+        3,
       );
     });
 
@@ -243,7 +243,7 @@ describe('util/cache/package/decorator', () => {
         'namespace',
         'cache-decorator:key',
         { cachedAt: expect.any(String), value: '111' },
-        2
+        2,
       );
 
       jest.advanceTimersByTime(60 * 1000);
@@ -262,7 +262,7 @@ describe('util/cache/package/decorator', () => {
         'namespace',
         'cache-decorator:key',
         { cachedAt: expect.any(String), value: '111' },
-        2
+        2,
       );
 
       jest.advanceTimersByTime(2 * 60 * 1000 - 1);
diff --git a/lib/util/cache/package/decorator.ts b/lib/util/cache/package/decorator.ts
index 3508780a890b579fff759509ac0451977d5cc3df..36b23ffb1aa0f6581a71971fe21a399f1efda0e5 100644
--- a/lib/util/cache/package/decorator.ts
+++ b/lib/util/cache/package/decorator.ts
@@ -73,7 +73,7 @@ export function cache<T>({
     finalKey = `cache-decorator:${finalKey}`;
     const oldRecord = await packageCache.get<DecoratorCachedRecord>(
       finalNamespace,
-      finalKey
+      finalKey,
     );
 
     const ttlOverride = getTtlOverride(finalNamespace);
@@ -81,7 +81,7 @@ export function cache<T>({
 
     const cacheHardTtlMinutes = GlobalConfig.get(
       'cacheHardTtlMinutes',
-      7 * 24 * 60
+      7 * 24 * 60,
     );
     let hardTtl = softTtl;
     if (methodName === 'getReleases' || methodName === 'getDigest') {
@@ -111,7 +111,7 @@ export function cache<T>({
       } catch (err) {
         logger.debug(
           { err },
-          'Package cache decorator: callback error, returning old data'
+          'Package cache decorator: callback error, returning old data',
         );
         return oldData;
       }
diff --git a/lib/util/cache/package/file.ts b/lib/util/cache/package/file.ts
index 1415f8f2d83bed24c8531f54a9e23853b967494c..4a6a298437d11707461e6c0ba88a29c6a05506d3 100644
--- a/lib/util/cache/package/file.ts
+++ b/lib/util/cache/package/file.ts
@@ -17,7 +17,7 @@ async function rm(namespace: string, key: string): Promise<void> {
 
 export async function get<T = never>(
   namespace: string,
-  key: string
+  key: string,
 ): Promise<T | undefined> {
   if (!cacheFileName) {
     return undefined;
@@ -47,7 +47,7 @@ export async function set(
   namespace: string,
   key: string,
   value: unknown,
-  ttlMinutes = 5
+  ttlMinutes = 5,
 ): Promise<void> {
   if (!cacheFileName) {
     return;
@@ -60,7 +60,7 @@ export async function set(
       compress: true,
       value: await compress(JSON.stringify(value)),
       expiry: DateTime.local().plus({ minutes: ttlMinutes }),
-    })
+    }),
   );
 }
 
@@ -97,7 +97,7 @@ export async function cleanup(): Promise<void> {
     }
     const durationMs = Math.round(Date.now() - startTime);
     logger.debug(
-      `Deleted ${deletedCount} of ${totalCount} file cached entries in ${durationMs}ms`
+      `Deleted ${deletedCount} of ${totalCount} file cached entries in ${durationMs}ms`,
     );
   } catch (err) /* istanbul ignore next */ {
     logger.warn({ err }, 'Error cleaning up expired file cache');
diff --git a/lib/util/cache/package/index.spec.ts b/lib/util/cache/package/index.spec.ts
index 65bbab5c49b58f1887671692afb1f8f50937d8b6..2ae1929c790706b7bfde4c021a7f6368d5e11822 100644
--- a/lib/util/cache/package/index.spec.ts
+++ b/lib/util/cache/package/index.spec.ts
@@ -15,7 +15,7 @@ describe('util/cache/package/index', () => {
   it('sets and gets file', async () => {
     await init({ cacheDir: 'some-dir' });
     expect(
-      await set('some-namespace', 'some-key', 'some-value', 1)
+      await set('some-namespace', 'some-key', 'some-value', 1),
     ).toBeUndefined();
     expect(await get('some-namespace', 'unknown-key')).toBeUndefined();
   });
@@ -23,7 +23,7 @@ describe('util/cache/package/index', () => {
   it('sets and gets redis', async () => {
     await init({ redisUrl: 'some-url' });
     expect(
-      await set('some-namespace', 'some-key', 'some-value', 1)
+      await set('some-namespace', 'some-key', 'some-value', 1),
     ).toBeUndefined();
     expect(await get('some-namespace', 'unknown-key')).toBeUndefined();
     expect(await cleanup({ redisUrl: 'some-url' })).toBeUndefined();
diff --git a/lib/util/cache/package/index.ts b/lib/util/cache/package/index.ts
index 438095147c78c6766a906cc0e532d9e389102b9e..80834c30636574f49d080b9888d1379aff4fdaa9 100644
--- a/lib/util/cache/package/index.ts
+++ b/lib/util/cache/package/index.ts
@@ -12,7 +12,7 @@ function getGlobalKey(namespace: string, key: string): string {
 
 export async function get<T = any>(
   namespace: string,
-  key: string
+  key: string,
 ): Promise<T | undefined> {
   if (!cacheProxy) {
     return undefined;
@@ -38,7 +38,7 @@ export async function set(
   namespace: string,
   key: string,
   value: unknown,
-  minutes: number
+  minutes: number,
 ): Promise<void> {
   if (!cacheProxy) {
     return;
diff --git a/lib/util/cache/package/redis.ts b/lib/util/cache/package/redis.ts
index ef7dcb556ddfe6934d2aa7735c72f74c7cb62fcd..4b0fd7c9db58db0d837166e7b5d70b295e787875 100644
--- a/lib/util/cache/package/redis.ts
+++ b/lib/util/cache/package/redis.ts
@@ -26,7 +26,7 @@ async function rm(namespace: string, key: string): Promise<void> {
 
 export async function get<T = never>(
   namespace: string,
-  key: string
+  key: string,
 ): Promise<T | undefined> {
   if (!client) {
     return undefined;
@@ -58,7 +58,7 @@ export async function set(
   namespace: string,
   key: string,
   value: unknown,
-  ttlMinutes = 5
+  ttlMinutes = 5,
 ): Promise<void> {
   logger.trace({ namespace, key, ttlMinutes }, 'Saving cached value');
 
@@ -73,7 +73,7 @@ export async function set(
         value: await compress(JSON.stringify(value)),
         expiry: DateTime.local().plus({ minutes: ttlMinutes }),
       }),
-      { EX: redisTTL }
+      { EX: redisTTL },
     );
   } catch (err) {
     logger.once.debug({ err }, 'Error while setting cache value');
diff --git a/lib/util/cache/package/types.ts b/lib/util/cache/package/types.ts
index 65383ab9d6d58b3caeeef8b9f9830d56f4166f5a..700ef95c88a1b1541dc79934109428ba108591b3 100644
--- a/lib/util/cache/package/types.ts
+++ b/lib/util/cache/package/types.ts
@@ -5,7 +5,7 @@ export interface PackageCache {
     namespace: string,
     key: string,
     value: T,
-    ttlMinutes?: number
+    ttlMinutes?: number,
   ): Promise<void>;
 
   cleanup?(): Promise<void>;
diff --git a/lib/util/cache/repository/impl/base.ts b/lib/util/cache/repository/impl/base.ts
index 3025e3ee9bce09afe4f2620dc704284a2903c8a3..e034f46bc756619ee5317f07ed5d10cb0958afb0 100644
--- a/lib/util/cache/repository/impl/base.ts
+++ b/lib/util/cache/repository/impl/base.ts
@@ -15,7 +15,7 @@ export abstract class RepoCacheBase implements RepoCache {
 
   protected constructor(
     protected readonly repository: string,
-    protected readonly fingerprint: string
+    protected readonly fingerprint: string,
   ) {}
 
   protected abstract read(): Promise<string | null>;
@@ -51,7 +51,7 @@ export abstract class RepoCacheBase implements RepoCache {
       const oldCache = await this.read();
       if (!is.string(oldCache)) {
         logger.debug(
-          `RepoCacheBase.load() - expecting data of type 'string' received '${typeof oldCache}' instead - skipping`
+          `RepoCacheBase.load() - expecting data of type 'string' received '${typeof oldCache}' instead - skipping`,
         );
         return;
       }
diff --git a/lib/util/cache/repository/impl/cache-factory.ts b/lib/util/cache/repository/impl/cache-factory.ts
index db8c86ab9a84838bcb0ae8013e7171413705738e..d24c674cf21720a5061706a7b024dc6b86e14f63 100644
--- a/lib/util/cache/repository/impl/cache-factory.ts
+++ b/lib/util/cache/repository/impl/cache-factory.ts
@@ -8,7 +8,7 @@ export class CacheFactory {
   static get(
     repository: string,
     repoFingerprint: string,
-    cacheType: RepositoryCacheType
+    cacheType: RepositoryCacheType,
   ): RepoCache {
     const type = cacheType.split('://')[0].trim().toLowerCase();
     switch (type) {
@@ -19,7 +19,7 @@ export class CacheFactory {
       default:
         logger.warn(
           { cacheType },
-          `Repository cache type not supported using type "local" instead`
+          `Repository cache type not supported using type "local" instead`,
         );
         return new RepoCacheLocal(repository, repoFingerprint);
     }
diff --git a/lib/util/cache/repository/impl/local.spec.ts b/lib/util/cache/repository/impl/local.spec.ts
index e02178f2857ea65194372084688df277bec57ae5..a30907ec273281f35660aafa7941cf72af2ee657 100644
--- a/lib/util/cache/repository/impl/local.spec.ts
+++ b/lib/util/cache/repository/impl/local.spec.ts
@@ -13,7 +13,7 @@ jest.mock('../../../fs');
 
 async function createCacheRecord(
   data: RepoCacheData,
-  repository = 'some/repo'
+  repository = 'some/repo',
 ): Promise<RepoCacheRecord> {
   const revision = CACHE_REVISION;
 
@@ -42,7 +42,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
     expect(localRepoCache.getData()).toBeEmpty();
     expect(localRepoCache.isModified()).toBeUndefined();
@@ -52,11 +52,11 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
     await localRepoCache.load(); // readCacheFile is mocked but has no return value set - therefore returns undefined
     expect(logger.debug).toHaveBeenCalledWith(
-      "RepoCacheBase.load() - expecting data of type 'string' received 'undefined' instead - skipping"
+      "RepoCacheBase.load() - expecting data of type 'string' received 'undefined' instead - skipping",
     );
     expect(localRepoCache.isModified()).toBeUndefined();
   });
@@ -65,7 +65,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
     await localRepoCache.load(); // readCacheFile is mocked but has no return value set - therefore returns undefined
     expect(logger.debug).not.toHaveBeenCalledWith();
@@ -79,7 +79,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
 
     await localRepoCache.load();
@@ -110,7 +110,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
 
     await localRepoCache.load();
@@ -123,7 +123,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
 
     await localRepoCache.load();
@@ -138,7 +138,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
 
     await localRepoCache.load();
@@ -154,7 +154,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      'local'
+      'local',
     );
     await localRepoCache.load();
 
@@ -171,7 +171,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      cacheType
+      cacheType,
     );
     await localRepoCache.load();
     const data = localRepoCache.getData();
@@ -185,11 +185,11 @@ describe('util/cache/repository/impl/local', () => {
     expect(localRepoCache.isModified()).toBeTrue();
     expect(logger.warn).toHaveBeenCalledWith(
       { cacheType },
-      `Repository cache type not supported using type "local" instead`
+      `Repository cache type not supported using type "local" instead`,
     );
     expect(fs.outputCacheFile).toHaveBeenCalledWith(
       '/tmp/cache/renovate/repository/github/some/repo.json',
-      JSON.stringify(newCacheRecord)
+      JSON.stringify(newCacheRecord),
     );
   });
 
@@ -202,7 +202,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      cacheType
+      cacheType,
     );
 
     await localRepoCache.load();
@@ -224,7 +224,7 @@ describe('util/cache/repository/impl/local', () => {
     const localRepoCache = CacheFactory.get(
       'some/repo',
       '0123456789abcdef',
-      cacheType
+      cacheType,
     );
 
     await localRepoCache.load();
diff --git a/lib/util/cache/repository/impl/s3.spec.ts b/lib/util/cache/repository/impl/s3.spec.ts
index a981dbef710c87dfc6de91bc5ff018840f0cacc4..69838e093017655d707487df860e6e214f18db16 100644
--- a/lib/util/cache/repository/impl/s3.spec.ts
+++ b/lib/util/cache/repository/impl/s3.spec.ts
@@ -19,7 +19,7 @@ import { RepoCacheS3 } from './s3';
 function createGetObjectCommandInput(
   repository: string,
   url: string,
-  folder = ''
+  folder = '',
 ): GetObjectCommandInput {
   const platform = GlobalConfig.get('platform')!;
   return {
@@ -32,7 +32,7 @@ function createPutObjectCommandInput(
   repository: string,
   url: string,
   data: RepoCacheRecord,
-  folder = ''
+  folder = '',
 ): PutObjectCommandInput {
   return {
     ...createGetObjectCommandInput(repository, url, folder),
@@ -64,7 +64,7 @@ describe('util/cache/repository/impl/s3', () => {
     putObjectCommandInput = createPutObjectCommandInput(
       repository,
       url,
-      repoCache
+      repoCache,
     );
   });
 
@@ -84,12 +84,12 @@ describe('util/cache/repository/impl/s3', () => {
     s3Cache = new RepoCacheS3(
       repository,
       '0123456789abcdef',
-      `${url}/${folder}`
+      `${url}/${folder}`,
     );
     s3Mock
       .on(
         GetObjectCommand,
-        createGetObjectCommandInput(repository, url, folder)
+        createGetObjectCommandInput(repository, url, folder),
       )
       .resolvesOnce({ Body: Readable.from([json]) as never });
     await expect(s3Cache.read()).resolves.toBe(json);
@@ -104,12 +104,12 @@ describe('util/cache/repository/impl/s3', () => {
     s3Cache = new RepoCacheS3(
       repository,
       '0123456789abcdef',
-      `${url}/${pathname}`
+      `${url}/${pathname}`,
     );
     s3Mock
       .on(
         GetObjectCommand,
-        createGetObjectCommandInput(repository, url, pathname + '/')
+        createGetObjectCommandInput(repository, url, pathname + '/'),
       )
       .resolvesOnce({ Body: Readable.from([json]) as never });
     await expect(s3Cache.read()).resolves.toBe(json);
@@ -117,7 +117,7 @@ describe('util/cache/repository/impl/s3', () => {
     expect(logger.warn).toHaveBeenCalledTimes(1);
     expect(logger.warn).toHaveBeenCalledWith(
       { pathname },
-      'RepoCacheS3.getCacheFolder() - appending missing trailing slash to pathname'
+      'RepoCacheS3.getCacheFolder() - appending missing trailing slash to pathname',
     );
   });
 
@@ -125,7 +125,7 @@ describe('util/cache/repository/impl/s3', () => {
     s3Mock.on(GetObjectCommand, getObjectCommandInput).resolvesOnce({});
     await expect(s3Cache.read()).resolves.toBeNull();
     expect(logger.warn).toHaveBeenCalledWith(
-      "RepoCacheS3.read() - failure - expecting Readable return type got 'undefined' type instead"
+      "RepoCacheS3.read() - failure - expecting Readable return type got 'undefined' type instead",
     );
   });
 
@@ -138,7 +138,7 @@ describe('util/cache/repository/impl/s3', () => {
     await expect(s3Cache.read()).resolves.toBeNull();
     expect(logger.warn).toHaveBeenCalledTimes(0);
     expect(logger.debug).toHaveBeenCalledWith(
-      `RepoCacheS3.read() - No cached file found`
+      `RepoCacheS3.read() - No cached file found`,
     );
   });
 
@@ -147,7 +147,7 @@ describe('util/cache/repository/impl/s3', () => {
     await expect(s3Cache.read()).resolves.toBeNull();
     expect(logger.warn).toHaveBeenCalledWith(
       { err },
-      'RepoCacheS3.read() - failure'
+      'RepoCacheS3.read() - failure',
     );
   });
 
@@ -170,12 +170,12 @@ describe('util/cache/repository/impl/s3', () => {
     s3Cache = new RepoCacheS3(
       repository,
       '0123456789abcdef',
-      `${url}/${folder}`
+      `${url}/${folder}`,
     );
     s3Mock
       .on(
         PutObjectCommand,
-        createPutObjectCommandInput(repository, url, repoCache, folder)
+        createPutObjectCommandInput(repository, url, repoCache, folder),
       )
       .resolvesOnce(putObjectCommandOutput);
     await expect(s3Cache.write(repoCache)).toResolve();
@@ -188,7 +188,7 @@ describe('util/cache/repository/impl/s3', () => {
     await expect(s3Cache.write(repoCache)).toResolve();
     expect(logger.warn).toHaveBeenCalledWith(
       { err },
-      'RepoCacheS3.write() - failure'
+      'RepoCacheS3.write() - failure',
     );
   });
 
diff --git a/lib/util/cache/repository/impl/s3.ts b/lib/util/cache/repository/impl/s3.ts
index f402409acab70740b7fa64e9d38e49a18889b260..407ea3664f59b4aa3066f1ae54b91efaf23b365e 100644
--- a/lib/util/cache/repository/impl/s3.ts
+++ b/lib/util/cache/repository/impl/s3.ts
@@ -32,14 +32,14 @@ export class RepoCacheS3 extends RepoCacheBase {
     };
     try {
       const { Body: res } = await this.s3Client.send(
-        new GetObjectCommand(s3Params)
+        new GetObjectCommand(s3Params),
       );
       if (res instanceof Readable) {
         logger.debug('RepoCacheS3.read() - success');
         return await streamToString(res);
       }
       logger.warn(
-        `RepoCacheS3.read() - failure - expecting Readable return type got '${typeof res}' type instead`
+        `RepoCacheS3.read() - failure - expecting Readable return type got '${typeof res}' type instead`,
       );
     } catch (err) {
       // https://docs.aws.amazon.com/AmazonS3/latest/API/ErrorResponses.html
@@ -78,7 +78,7 @@ export class RepoCacheS3 extends RepoCacheBase {
 
     logger.warn(
       { pathname },
-      'RepoCacheS3.getCacheFolder() - appending missing trailing slash to pathname'
+      'RepoCacheS3.getCacheFolder() - appending missing trailing slash to pathname',
     );
     return pathname + '/';
   }
diff --git a/lib/util/cache/repository/schema.ts b/lib/util/cache/repository/schema.ts
index 5a337b0c6b9a5935ef7eb76f27df11c8a0832fc1..54025508658b0b6d33e0140a895cb404809752ca 100644
--- a/lib/util/cache/repository/schema.ts
+++ b/lib/util/cache/repository/schema.ts
@@ -10,7 +10,7 @@ export const RepoCacheV13 = Json.pipe(
       hash: z.string().min(1),
       fingerprint: z.string().min(1),
     })
-    .strict()
+    .strict(),
 );
 
 export type RepoCacheRecord = z.infer<typeof RepoCacheV13>;
diff --git a/lib/util/check-token.spec.ts b/lib/util/check-token.spec.ts
index 67b95950c2e9cb27d7970e80ed60251f504071dd..34b596aa99c25a77c4545fd50e24e00128cb2fd4 100644
--- a/lib/util/check-token.spec.ts
+++ b/lib/util/check-token.spec.ts
@@ -50,7 +50,7 @@ describe('util/check-token', () => {
         url: 'https://api.github.com',
       });
       expect(logger.logger.trace).toHaveBeenCalledWith(
-        'GitHub token warning is disabled'
+        'GitHub token warning is disabled',
       );
       expect(logger.logger.warn).not.toHaveBeenCalled();
     });
@@ -159,7 +159,7 @@ describe('util/check-token', () => {
   describe('isGithubFineGrainedPersonalAccessToken', () => {
     it('returns true when string is a github fine grained personal access token', () => {
       expect(
-        isGithubFineGrainedPersonalAccessToken('github_pat_XXXXXX')
+        isGithubFineGrainedPersonalAccessToken('github_pat_XXXXXX'),
       ).toBeTrue();
     });
 
@@ -192,7 +192,7 @@ describe('util/check-token', () => {
       const TOKEN_STRING = 'ghp_TOKEN';
 
       expect(findGithubToken({ token: TOKEN_STRING_WITH_PREFIX })).toBe(
-        TOKEN_STRING
+        TOKEN_STRING,
       );
     });
   });
@@ -202,7 +202,7 @@ describe('util/check-token', () => {
       const githubToken = undefined;
       const gitTagsGithubToken = undefined;
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBeUndefined();
     });
 
@@ -210,7 +210,7 @@ describe('util/check-token', () => {
       const githubToken = 'ghp_github';
       const gitTagsGithubToken = 'ghp_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(gitTagsGithubToken);
     });
 
@@ -218,7 +218,7 @@ describe('util/check-token', () => {
       const githubToken = 'ghp_github';
       const gitTagsGithubToken = 'ghs_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(githubToken);
     });
 
@@ -226,7 +226,7 @@ describe('util/check-token', () => {
       const githubToken = 'ghs_github';
       const gitTagsGithubToken = 'ghs_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(gitTagsGithubToken);
     });
 
@@ -234,7 +234,7 @@ describe('util/check-token', () => {
       const githubToken = undefined;
       const gitTagsGithubToken = 'ghs_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(gitTagsGithubToken);
     });
 
@@ -242,7 +242,7 @@ describe('util/check-token', () => {
       const githubToken = 'ghs_gitTags';
       const gitTagsGithubToken = undefined;
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(githubToken);
     });
 
@@ -250,7 +250,7 @@ describe('util/check-token', () => {
       const githubToken = 'ghp_github';
       const gitTagsGithubToken = 'github_pat_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(githubToken);
     });
 
@@ -258,7 +258,7 @@ describe('util/check-token', () => {
       const githubToken = 'github_pat_github';
       const gitTagsGithubToken = 'ghs_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(githubToken);
     });
 
@@ -266,7 +266,7 @@ describe('util/check-token', () => {
       const githubToken = undefined;
       const gitTagsGithubToken = 'github_pat_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(gitTagsGithubToken);
     });
 
@@ -274,7 +274,7 @@ describe('util/check-token', () => {
       const githubToken = undefined;
       const gitTagsGithubToken = 'unknownTokenType_gitTags';
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(gitTagsGithubToken);
     });
 
@@ -282,7 +282,7 @@ describe('util/check-token', () => {
       const githubToken = 'unknownTokenType';
       const gitTagsGithubToken = undefined;
       expect(
-        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken)
+        takePersonalAccessTokenIfPossible(githubToken, gitTagsGithubToken),
       ).toBe(githubToken);
     });
   });
diff --git a/lib/util/check-token.ts b/lib/util/check-token.ts
index 0d6bd44bacc43e456eacafeecca6d19577719b2e..48cf641bd9602c4b176a82f7fd6ccf57685beca3 100644
--- a/lib/util/check-token.ts
+++ b/lib/util/check-token.ts
@@ -9,7 +9,7 @@ import * as memCache from '../util/cache/memory';
 import * as hostRules from './host-rules';
 
 export function checkGithubToken(
-  packageFiles: Record<string, PackageFileContent[]> = {}
+  packageFiles: Record<string, PackageFileContent[]> = {},
 ): void {
   const { token } = hostRules.find({
     hostType: 'github',
@@ -47,13 +47,13 @@ export function checkGithubToken(
 
   if (githubDeps.length > 0) {
     const warningLogged = memCache.get<boolean | undefined>(
-      'github-token-required-warning-logged'
+      'github-token-required-warning-logged',
     );
     if (!warningLogged) {
       const withoutDuplicates = [...new Set(githubDeps)];
       logger.warn(
         { githubDeps: withoutDuplicates },
-        `GitHub token is required for some dependencies`
+        `GitHub token is required for some dependencies`,
       );
       memCache.set('github-token-required-warning-logged', true);
     }
@@ -73,14 +73,14 @@ export function isGithubFineGrainedPersonalAccessToken(token: string): boolean {
 }
 
 export function findGithubToken(
-  searchResult: HostRuleSearchResult
+  searchResult: HostRuleSearchResult,
 ): string | undefined {
   return searchResult?.token?.replace('x-access-token:', '');
 }
 
 export function takePersonalAccessTokenIfPossible(
   githubToken: string | undefined,
-  gitTagsGithubToken: string | undefined
+  gitTagsGithubToken: string | undefined,
 ): string | undefined {
   if (gitTagsGithubToken && isGithubPersonalAccessToken(gitTagsGithubToken)) {
     logger.debug('Using GitHub Personal Access Token (git-tags)');
diff --git a/lib/util/coerce.ts b/lib/util/coerce.ts
index bad74fefe0caf8166fc7c1c4e5b61f16ac3b0f06..4a72aa796b229f947906795d8cf22977497a84ce 100644
--- a/lib/util/coerce.ts
+++ b/lib/util/coerce.ts
@@ -3,7 +3,7 @@ export function coerceToNull<T>(input: T | null | undefined): T | null {
 }
 
 export function coerceToUndefined<T>(
-  input: T | null | undefined
+  input: T | null | undefined,
 ): T | undefined {
   return input ?? undefined;
 }
diff --git a/lib/util/common.spec.ts b/lib/util/common.spec.ts
index 0b053aff055444ccbeb3738043531bd5d162e632..675948a9aaa341cdb9d806149296be050baccfb7 100644
--- a/lib/util/common.spec.ts
+++ b/lib/util/common.spec.ts
@@ -73,16 +73,16 @@ describe('util/common', () => {
       });
 
       expect(detectPlatform('https://bb.example.com/chalk/chalk')).toBe(
-        'bitbucket'
+        'bitbucket',
       );
       expect(detectPlatform('https://gt.example.com/chalk/chalk')).toBe(
-        'gitea'
+        'gitea',
       );
       expect(detectPlatform('https://gh.example.com/chalk/chalk')).toBe(
-        'github'
+        'github',
       );
       expect(detectPlatform('https://gl.example.com/chalk/chalk')).toBe(
-        'gitlab'
+        'gitlab',
       );
       expect(detectPlatform('https://f.example.com/chalk/chalk')).toBeNull();
     });
@@ -114,7 +114,7 @@ describe('util/common', () => {
       });
       expect(logger.logger.warn).toHaveBeenCalledWith(
         { context: 'renovate.json' },
-        'File contents are invalid JSON but parse using JSON5. Support for this will be removed in a future release so please change to a support .json5 file name or ensure correct JSON syntax.'
+        'File contents are invalid JSON but parse using JSON5. Support for this will be removed in a future release so please change to a support .json5 file name or ensure correct JSON syntax.',
       );
     });
   });
diff --git a/lib/util/common.ts b/lib/util/common.ts
index cb1af76f3df3fa1c9b6ff10aabdb4bc7ca252382..0834c63f91e0c8ac4eb9a6c050dc16cfd2e14c18 100644
--- a/lib/util/common.ts
+++ b/lib/util/common.ts
@@ -16,7 +16,7 @@ import { parseUrl } from './url';
  * @returns matched `platform` if found, otherwise `null`
  */
 export function detectPlatform(
-  url: string
+  url: string,
 ): 'azure' | 'bitbucket' | 'gitea' | 'github' | 'gitlab' | null {
   const { hostname } = parseUrl(url) ?? {};
   if (hostname === 'dev.azure.com' || hostname?.endsWith('.visualstudio.com')) {
@@ -78,7 +78,7 @@ export function parseJson(content: string | null, filename: string): unknown {
 
 export function parseJsonWithFallback(
   content: string,
-  context: string
+  context: string,
 ): unknown {
   let parsedJson: unknown;
 
@@ -88,7 +88,7 @@ export function parseJsonWithFallback(
     parsedJson = JSON5.parse(content);
     logger.warn(
       { context },
-      'File contents are invalid JSON but parse using JSON5. Support for this will be removed in a future release so please change to a support .json5 file name or ensure correct JSON syntax.'
+      'File contents are invalid JSON but parse using JSON5. Support for this will be removed in a future release so please change to a support .json5 file name or ensure correct JSON syntax.',
     );
   }
 
diff --git a/lib/util/date.ts b/lib/util/date.ts
index 61fbe64b5c8f4069cd95560485eb36a1bfb99ebd..d3078e2024fbaa0c954e45e8feb43dc50962a118 100644
--- a/lib/util/date.ts
+++ b/lib/util/date.ts
@@ -5,7 +5,7 @@ const ONE_DAY_MS = 24 * 60 * ONE_MINUTE_MS;
 
 export function getElapsedDays(timestamp: string): number {
   return Math.floor(
-    (new Date().getTime() - new Date(timestamp).getTime()) / ONE_DAY_MS
+    (new Date().getTime() - new Date(timestamp).getTime()) / ONE_DAY_MS,
   );
 }
 
diff --git a/lib/util/decorator/index.ts b/lib/util/decorator/index.ts
index 2a9ef484da9a064382fc39b93c654c3b0cbe9c9b..0bdb79b82349382978737a2c627e30435e13bb7a 100644
--- a/lib/util/decorator/index.ts
+++ b/lib/util/decorator/index.ts
@@ -1,11 +1,11 @@
 export type Handler<T> = (
-  parameters: DecoratorParameters<T>
+  parameters: DecoratorParameters<T>,
 ) => Promise<unknown>;
 export type Method<T> = (this: T, ...args: any[]) => Promise<any>;
 export type Decorator<T> = <U extends T>(
   target: U,
   key: keyof U,
-  descriptor: TypedPropertyDescriptor<Method<T>>
+  descriptor: TypedPropertyDescriptor<Method<T>>,
 ) => TypedPropertyDescriptor<Method<T>>;
 
 export interface DecoratorParameters<T, U extends any[] = any[]> {
@@ -43,7 +43,7 @@ export function decorate<T>(fn: Handler<T>): Decorator<T> {
       configurable: true,
       writable: true,
       ...Object.getOwnPropertyDescriptor(target, key),
-    }
+    },
   ) => {
     const { value } = descriptor;
 
diff --git a/lib/util/emoji.ts b/lib/util/emoji.ts
index 0e8dff8788c2643114644e6da4b37e6369846d06..219392ab98b77d4668506d9e2cb30456e08908a9 100644
--- a/lib/util/emoji.ts
+++ b/lib/util/emoji.ts
@@ -21,13 +21,13 @@ const shortCodesByHex = new Map<string, string>();
 const hexCodesByShort = new Map<string, string>();
 
 const EmojiShortcodesSchema = Json.pipe(
-  z.record(z.string(), z.union([z.string(), z.array(z.string())]))
+  z.record(z.string(), z.union([z.string(), z.array(z.string())])),
 );
 
 function lazyInitMappings(): void {
   if (!mappingsInitialized) {
     const result = EmojiShortcodesSchema.safeParse(
-      dataFiles.get('node_modules/emojibase-data/en/shortcodes/github.json')!
+      dataFiles.get('node_modules/emojibase-data/en/shortcodes/github.json')!,
     );
     // istanbul ignore if: not easily testable
     if (!result.success) {
@@ -65,7 +65,7 @@ export function emojify(text: string): string {
 }
 
 const emojiRegexSrc = [emojibaseEmojiRegex, mathiasBynensEmojiRegex()].map(
-  ({ source }) => source
+  ({ source }) => source,
 );
 const emojiRegex = new RegExp(`(?:${emojiRegexSrc.join('|')})`, 'g'); // TODO #12875 cannot figure it out
 const excludedModifiers = new Set([
diff --git a/lib/util/exec/common.spec.ts b/lib/util/exec/common.spec.ts
index 4100750c52c2ae31137cfcf221416bfb7a19c4ab..0e3f2a75a50aa16c7b02b571e6e253de5dcfd73a 100644
--- a/lib/util/exec/common.spec.ts
+++ b/lib/util/exec/common.spec.ts
@@ -38,7 +38,7 @@ interface StubArgs {
 
 function getReadable(
   data: string | undefined,
-  encoding: BufferEncoding
+  encoding: BufferEncoding,
 ): Readable {
   const readable = new Readable();
   readable._read = (size: number): void => {
@@ -166,8 +166,8 @@ describe('util/exec/common', () => {
       await expect(
         exec(
           cmd,
-          partial<RawExecOptions>({ encoding: 'utf8', shell: 'bin/bash' })
-        )
+          partial<RawExecOptions>({ encoding: 'utf8', shell: 'bin/bash' }),
+        ),
       ).resolves.toEqual({
         stderr,
         stdout,
@@ -181,7 +181,7 @@ describe('util/exec/common', () => {
       const stub = getSpawnStub({ cmd, exitCode, exitSignal: null, stderr });
       spawn.mockImplementationOnce((cmd, opts) => stub);
       await expect(
-        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' }))
+        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' })),
       ).rejects.toMatchObject({
         cmd,
         message: `Command failed: ${cmd}\n${stderr}`,
@@ -196,7 +196,7 @@ describe('util/exec/common', () => {
       const stub = getSpawnStub({ cmd, exitCode: null, exitSignal });
       spawn.mockImplementationOnce((cmd, opts) => stub);
       await expect(
-        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' }))
+        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' })),
       ).rejects.toMatchObject({
         cmd,
         signal: exitSignal,
@@ -214,7 +214,7 @@ describe('util/exec/common', () => {
       });
       spawn.mockImplementationOnce((cmd, opts) => stub);
       await expect(
-        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' }))
+        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' })),
       ).toReject();
     });
 
@@ -229,7 +229,7 @@ describe('util/exec/common', () => {
       });
       spawn.mockImplementationOnce((cmd, opts) => stub);
       await expect(
-        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' }))
+        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' })),
       ).rejects.toMatchObject({ cmd: 'ls -l', message: 'error message' });
     });
 
@@ -248,8 +248,8 @@ describe('util/exec/common', () => {
           partial<RawExecOptions>({
             encoding: 'utf8',
             maxBuffer: 5,
-          })
-        )
+          }),
+        ),
       ).rejects.toMatchObject({
         cmd: 'ls -l',
         message: 'stdout maxBuffer exceeded',
@@ -272,8 +272,8 @@ describe('util/exec/common', () => {
           partial<RawExecOptions>({
             encoding: 'utf8',
             maxBuffer: 5,
-          })
-        )
+          }),
+        ),
       ).rejects.toMatchObject({
         cmd: 'ls -l',
         message: 'stderr maxBuffer exceeded',
@@ -299,7 +299,7 @@ describe('util/exec/common', () => {
       spawn.mockImplementationOnce((cmd, opts) => stub);
       killSpy.mockImplementationOnce((pid, signal) => true);
       await expect(
-        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' }))
+        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' })),
       ).rejects.toMatchObject({
         cmd,
         signal: exitSignal,
@@ -318,7 +318,7 @@ describe('util/exec/common', () => {
         throw new Error();
       });
       await expect(
-        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' }))
+        exec(cmd, partial<RawExecOptions>({ encoding: 'utf8' })),
       ).rejects.toMatchObject({
         cmd,
         signal: exitSignal,
diff --git a/lib/util/exec/common.ts b/lib/util/exec/common.ts
index bcb164af6374ae0673cbe67264c2c57ebcb1840c..96dfa3c9294f2630461b10f46e92547dd2280870 100644
--- a/lib/util/exec/common.ts
+++ b/lib/util/exec/common.ts
@@ -25,7 +25,7 @@ function stringify(list: Buffer[]): string {
 
 function initStreamListeners(
   cp: ChildProcess,
-  opts: RawExecOptions & { maxBuffer: number }
+  opts: RawExecOptions & { maxBuffer: number },
 ): [Buffer[], Buffer[]] {
   const stdout: Buffer[] = [];
   const stderr: Buffer[] = [];
@@ -90,7 +90,7 @@ export function exec(cmd: string, opts: RawExecOptions): Promise<ExecResult> {
           new ExecError(`Command failed: ${cmd}\nInterrupted by ${signal}`, {
             ...rejectInfo(),
             signal,
-          })
+          }),
         );
         return;
       }
@@ -99,7 +99,7 @@ export function exec(cmd: string, opts: RawExecOptions): Promise<ExecResult> {
           new ExecError(`Command failed: ${cmd}\n${stringify(stderr)}`, {
             ...rejectInfo(),
             exitCode: code,
-          })
+          }),
         );
         return;
       }
@@ -147,5 +147,5 @@ function kill(cp: ChildProcess, signal: NodeJS.Signals): boolean {
 
 export const rawExec: (
   cmd: string,
-  opts: RawExecOptions
+  opts: RawExecOptions,
 ) => Promise<ExecResult> = exec;
diff --git a/lib/util/exec/containerbase.spec.ts b/lib/util/exec/containerbase.spec.ts
index 5d5c5c2f8d8a3d27ec8dd16d3de115aa4803850e..40e1bea053e39dd52b16eede40176ccdf38ff33a 100644
--- a/lib/util/exec/containerbase.spec.ts
+++ b/lib/util/exec/containerbase.spec.ts
@@ -49,7 +49,7 @@ describe('util/exec/containerbase', () => {
   describe('resolveConstraint()', () => {
     it('returns from config', async () => {
       expect(
-        await resolveConstraint({ toolName: 'composer', constraint: '1.1.0' })
+        await resolveConstraint({ toolName: 'composer', constraint: '1.1.0' }),
       ).toBe('1.1.0');
     });
 
@@ -72,7 +72,7 @@ describe('util/exec/containerbase', () => {
         releases: [{ version: '2.0.14-b.1' }, { version: '2.1.0-a.1' }],
       });
       expect(await resolveConstraint({ toolName: 'composer' })).toBe(
-        '2.1.0-a.1'
+        '2.1.0-a.1',
       );
     });
 
@@ -109,7 +109,7 @@ describe('util/exec/containerbase', () => {
 
     it('throws for unknown tools', async () => {
       await expect(resolveConstraint({ toolName: 'whoops' })).rejects.toThrow(
-        'Invalid tool to install: whoops'
+        'Invalid tool to install: whoops',
       );
     });
 
@@ -118,7 +118,7 @@ describe('util/exec/containerbase', () => {
         releases: [],
       });
       await expect(resolveConstraint({ toolName: 'composer' })).rejects.toThrow(
-        'No tool releases found.'
+        'No tool releases found.',
       );
     });
 
@@ -127,7 +127,7 @@ describe('util/exec/containerbase', () => {
         releases: [{ version: '1.2.3' }],
       });
       expect(
-        await resolveConstraint({ toolName: 'composer', constraint: '^3.1.0' })
+        await resolveConstraint({ toolName: 'composer', constraint: '^3.1.0' }),
       ).toBe('1.2.3');
     });
 
@@ -136,7 +136,7 @@ describe('util/exec/containerbase', () => {
         releases: [{ version: '1.2.3' }],
       });
       expect(
-        await resolveConstraint({ toolName: 'composer', constraint: 'whoops' })
+        await resolveConstraint({ toolName: 'composer', constraint: 'whoops' }),
       ).toBe('1.2.3');
     });
 
@@ -159,9 +159,9 @@ describe('util/exec/containerbase', () => {
           ],
         });
         expect(
-          await resolveConstraint({ toolName: 'python', constraint })
+          await resolveConstraint({ toolName: 'python', constraint }),
         ).toBe(expected);
-      }
+      },
     );
   });
 
diff --git a/lib/util/exec/containerbase.ts b/lib/util/exec/containerbase.ts
index e8528aaadca01fd6d9b8783878eefaa8e7d4ad63..5a27d1dd0d2082d8a8ffd6f9cc0fcbee0db7a62f 100644
--- a/lib/util/exec/containerbase.ts
+++ b/lib/util/exec/containerbase.ts
@@ -207,7 +207,7 @@ export function isContainerbase(): boolean {
 }
 
 export function isDynamicInstall(
-  toolConstraints?: Opt<ToolConstraint[]>
+  toolConstraints?: Opt<ToolConstraint[]>,
 ): boolean {
   if (GlobalConfig.get('binarySource') !== 'install') {
     return false;
@@ -219,7 +219,7 @@ export function isDynamicInstall(
   return (
     !toolConstraints ||
     toolConstraints.every((toolConstraint) =>
-      supportsDynamicInstall(toolConstraint.toolName)
+      supportsDynamicInstall(toolConstraint.toolName),
     )
   );
 }
@@ -227,7 +227,7 @@ export function isDynamicInstall(
 function isStable(
   version: string,
   versioning: allVersioning.VersioningApi,
-  latest?: string
+  latest?: string,
 ): boolean {
   if (!versioning.isStable(version)) {
     return false;
@@ -241,7 +241,7 @@ function isStable(
 }
 
 export async function resolveConstraint(
-  toolConstraint: ToolConstraint
+  toolConstraint: ToolConstraint,
 ): Promise<string> {
   const { toolName } = toolConstraint;
   const toolConfig = allToolConfig[toolName];
@@ -259,7 +259,7 @@ export async function resolveConstraint(
     } else {
       logger.warn(
         { toolName, constraint, versioning: toolConfig.versioning },
-        'Invalid tool constraint'
+        'Invalid tool constraint',
       );
       constraint = undefined;
     }
@@ -274,7 +274,7 @@ export async function resolveConstraint(
   }
 
   const matchingReleases = releases.filter(
-    (r) => !constraint || versioning.matches(r.version, constraint)
+    (r) => !constraint || versioning.matches(r.version, constraint),
   );
 
   const stableMatchingVersion = matchingReleases
@@ -283,7 +283,7 @@ export async function resolveConstraint(
   if (stableMatchingVersion) {
     logger.debug(
       { toolName, constraint, resolvedVersion: stableMatchingVersion },
-      'Resolved stable matching version'
+      'Resolved stable matching version',
     );
     return stableMatchingVersion;
   }
@@ -292,7 +292,7 @@ export async function resolveConstraint(
   if (unstableMatchingVersion) {
     logger.debug(
       { toolName, constraint, resolvedVersion: unstableMatchingVersion },
-      'Resolved unstable matching version'
+      'Resolved unstable matching version',
     );
     return unstableMatchingVersion;
   }
@@ -303,20 +303,20 @@ export async function resolveConstraint(
   if (stableVersion) {
     logger.warn(
       { toolName, constraint, stableVersion },
-      'No matching tool versions found for constraint - using latest stable version'
+      'No matching tool versions found for constraint - using latest stable version',
     );
   }
 
   const highestVersion = releases.pop()!.version;
   logger.warn(
     { toolName, constraint, highestVersion },
-    'No matching or stable tool versions found - using an unstable version'
+    'No matching or stable tool versions found - using an unstable version',
   );
   return highestVersion;
 }
 
 export async function generateInstallCommands(
-  toolConstraints: Opt<ToolConstraint[]>
+  toolConstraints: Opt<ToolConstraint[]>,
 ): Promise<string[]> {
   const installCommands: string[] = [];
   if (toolConstraints?.length) {
diff --git a/lib/util/exec/docker/index.spec.ts b/lib/util/exec/docker/index.spec.ts
index ab3eb066a1efa85d21ea914cd9c8134c67370040..92f67a1a5d62578610fcc9b4414c620ad0142f62 100644
--- a/lib/util/exec/docker/index.spec.ts
+++ b/lib/util/exec/docker/index.spec.ts
@@ -57,7 +57,7 @@ describe('util/exec/docker/index', () => {
         .mockResolvedValueOnce(undefined as never)
         .mockResolvedValueOnce(partial<modulesDatasource.ReleaseResult>())
         .mockResolvedValueOnce(
-          partial<modulesDatasource.ReleaseResult>({ releases: [] })
+          partial<modulesDatasource.ReleaseResult>({ releases: [] }),
         );
       expect(await getDockerTag('foo', '1.2.3', 'semver')).toBe('latest');
       expect(await getDockerTag('foo', '1.2.3', 'semver')).toBe('latest');
@@ -88,7 +88,7 @@ describe('util/exec/docker/index', () => {
       jest
         .spyOn(modulesDatasource, 'getPkgReleases')
         .mockResolvedValueOnce(
-          partial<modulesDatasource.ReleaseResult>({ releases })
+          partial<modulesDatasource.ReleaseResult>({ releases }),
         );
       expect(await getDockerTag('foo', '^1.2.3', 'npm')).toBe('1.9.9');
     });
@@ -103,7 +103,7 @@ describe('util/exec/docker/index', () => {
       jest
         .spyOn(modulesDatasource, 'getPkgReleases')
         .mockResolvedValueOnce(
-          partial<modulesDatasource.ReleaseResult>({ releases })
+          partial<modulesDatasource.ReleaseResult>({ releases }),
         );
       expect(await getDockerTag('foo', '>=12', 'node')).toBe('14.0.2');
     });
@@ -155,7 +155,7 @@ describe('util/exec/docker/index', () => {
       err.errno = 'ENOMEM';
       mockExecAll(err);
       await expect(removeDanglingContainers).rejects.toThrow(
-        SYSTEM_INSUFFICIENT_MEMORY
+        SYSTEM_INSUFFICIENT_MEMORY,
       );
     });
 
@@ -238,7 +238,7 @@ describe('util/exec/docker/index', () => {
       const res = await generateDockerCommand(
         commands,
         preCommands,
-        dockerOptions
+        dockerOptions,
       );
       expect(res).toBe(command(image));
     });
@@ -257,8 +257,8 @@ describe('util/exec/docker/index', () => {
       expect(res).toBe(
         command(
           image,
-          `-v "/tmp/foo":"/tmp/foo" -v "/tmp/bar":"/tmp/bar" -v "/tmp/baz":"/home/baz"`
-        )
+          `-v "/tmp/foo":"/tmp/foo" -v "/tmp/bar":"/tmp/bar" -v "/tmp/baz":"/home/baz"`,
+        ),
       );
     });
 
@@ -278,8 +278,8 @@ describe('util/exec/docker/index', () => {
       expect(res).toBe(
         command(
           image,
-          `-v "/tmp/cache":"/tmp/cache" -v "/tmp/containerbase":"/tmp/containerbase" -v "/tmp/foo":"/tmp/foo"`
-        )
+          `-v "/tmp/cache":"/tmp/cache" -v "/tmp/containerbase":"/tmp/containerbase" -v "/tmp/foo":"/tmp/foo"`,
+        ),
       );
     });
 
@@ -297,7 +297,7 @@ describe('util/exec/docker/index', () => {
         volumes: [...volumes, ...volumes],
       });
       expect(res).toBe(
-        command(image, `-v "/tmp/cache":"/tmp/cache" -v "/tmp/foo":"/tmp/foo"`)
+        command(image, `-v "/tmp/cache":"/tmp/cache" -v "/tmp/foo":"/tmp/foo"`),
       );
     });
 
diff --git a/lib/util/exec/docker/index.ts b/lib/util/exec/docker/index.ts
index d853a1a1bc2f91eeb8ee496ff098db6994f0d31e..899cfe671440151d9ab5b861db38d2c665b9ad8f 100644
--- a/lib/util/exec/docker/index.ts
+++ b/lib/util/exec/docker/index.ts
@@ -19,8 +19,8 @@ export async function prefetchDockerImage(taggedImage: string): Promise<void> {
   if (prefetchedImages.has(taggedImage)) {
     logger.debug(
       `Docker image is already prefetched: ${taggedImage}@${prefetchedImages.get(
-        taggedImage
-      )!}`
+        taggedImage,
+      )!}`,
     );
   } else {
     logger.debug(`Fetching Docker image: ${taggedImage}`);
@@ -29,7 +29,7 @@ export async function prefetchDockerImage(taggedImage: string): Promise<void> {
     });
     const imageDigest = digestRegex.exec(res?.stdout)?.[1] ?? 'unknown';
     logger.debug(
-      `Finished fetching Docker image ${taggedImage}@${imageDigest}`
+      `Finished fetching Docker image ${taggedImage}@${imageDigest}`,
     );
     prefetchedImages.set(taggedImage, imageDigest);
   }
@@ -61,7 +61,7 @@ function volumesEql(x: VolumesPair, y: VolumesPair): boolean {
 function prepareVolumes(volumes: VolumeOption[]): string[] {
   const expanded: (VolumesPair | null)[] = volumes.map(expandVolumeOption);
   const filtered: VolumesPair[] = expanded.filter(
-    (vol): vol is VolumesPair => vol !== null
+    (vol): vol is VolumesPair => vol !== null,
   );
   const unique: VolumesPair[] = uniq<VolumesPair>(filtered, volumesEql);
   return unique.map(([from, to]) => `-v "${from}":"${to}"`);
@@ -69,28 +69,28 @@ function prepareVolumes(volumes: VolumeOption[]): string[] {
 
 function prepareCommands(commands: Opt<string>[]): string[] {
   return commands.filter<string>((command): command is string =>
-    is.string(command)
+    is.string(command),
   );
 }
 
 export async function getDockerTag(
   packageName: string,
   constraint: string,
-  scheme: string
+  scheme: string,
 ): Promise<string> {
   const ver = versioning.get(scheme);
 
   if (!ver.isValid(constraint)) {
     logger.warn(
       { scheme, constraint },
-      `Invalid Docker image version constraint`
+      `Invalid Docker image version constraint`,
     );
     return 'latest';
   }
 
   logger.debug(
     { packageName, scheme, constraint },
-    `Found version constraint - checking for a compatible image to use`
+    `Found version constraint - checking for a compatible image to use`,
   );
   const imageReleases = await getPkgReleases({
     datasource: 'docker',
@@ -100,7 +100,7 @@ export async function getDockerTag(
   if (imageReleases?.releases) {
     let versions = imageReleases.releases.map((release) => release.version);
     versions = versions.filter(
-      (version) => ver.isVersion(version) && ver.matches(version, constraint)
+      (version) => ver.isVersion(version) && ver.matches(version, constraint),
     );
     // Prefer stable versions over unstable, even if the range satisfies both types
     if (!versions.every((version) => ver.isStable(version))) {
@@ -111,7 +111,7 @@ export async function getDockerTag(
     if (version) {
       logger.debug(
         { packageName, scheme, constraint, version },
-        `Found compatible image version`
+        `Found compatible image version`,
       );
       return version;
     }
@@ -121,7 +121,7 @@ export async function getDockerTag(
   }
   logger.warn(
     { packageName, constraint, scheme },
-    'Failed to find a tag satisfying constraint, using "latest" tag instead'
+    'Failed to find a tag satisfying constraint, using "latest" tag instead',
   );
   return 'latest';
 }
@@ -136,7 +136,7 @@ function getContainerLabel(prefix: string | undefined): string {
 
 export async function removeDockerContainer(
   image: string,
-  prefix: string
+  prefix: string,
 ): Promise<void> {
   const containerName = getContainerName(image, prefix);
   let cmd = `docker ps --filter name=${containerName} -aq`;
@@ -157,7 +157,7 @@ export async function removeDockerContainer(
   } catch (err) {
     logger.warn(
       { image, containerName, cmd, err },
-      'Could not remove Docker container'
+      'Could not remove Docker container',
     );
   }
 }
@@ -169,13 +169,13 @@ export async function removeDanglingContainers(): Promise<void> {
 
   try {
     const containerLabel = getContainerLabel(
-      GlobalConfig.get('dockerChildPrefix')
+      GlobalConfig.get('dockerChildPrefix'),
     );
     const res = await rawExec(
       `docker ps --filter label=${containerLabel} -aq`,
       {
         encoding: 'utf-8',
-      }
+      },
     );
     if (res?.stdout?.trim().length) {
       const containerIds = res.stdout
@@ -205,7 +205,7 @@ export async function removeDanglingContainers(): Promise<void> {
 export async function generateDockerCommand(
   commands: string[],
   preCommands: string[],
-  options: DockerOptions
+  options: DockerOptions,
 ): Promise<string> {
   const { envVars, cwd } = options;
   let image = sideCarImage;
@@ -249,7 +249,7 @@ export async function generateDockerCommand(
     result.push(
       ...uniq(envVars)
         .filter(is.string)
-        .map((e) => `-e ${e}`)
+        .map((e) => `-e ${e}`),
     );
   }
 
@@ -263,7 +263,7 @@ export async function generateDockerCommand(
   // TODO: add constraint: const tag = getDockerTag(image, sideCarImageVersion, 'semver');
   logger.debug(
     { image /*, tagConstraint: sideCarImageVersion, tag */ },
-    'Resolved tag constraint'
+    'Resolved tag constraint',
   );
 
   const taggedImage = image; // TODO: tag ? `${image}:${tag}` : `${image}`;
@@ -271,7 +271,7 @@ export async function generateDockerCommand(
   result.push(taggedImage);
 
   const bashCommand = [...prepareCommands(preCommands), ...commands].join(
-    ' && '
+    ' && ',
   );
   result.push(`bash -l -c "${bashCommand.replace(regEx(/"/g), '\\"')}"`); // lgtm [js/incomplete-sanitization]
 
diff --git a/lib/util/exec/env.ts b/lib/util/exec/env.ts
index 1226fa60ba7e8601c3a29faa9192e6e9e8f668ef..83a9539b8d235190903526e2aa72eebac3d2e5fe 100644
--- a/lib/util/exec/env.ts
+++ b/lib/util/exec/env.ts
@@ -22,7 +22,7 @@ const basicEnvVars = [
 ];
 
 export function getChildProcessEnv(
-  customEnvVars: string[] = []
+  customEnvVars: string[] = [],
 ): NodeJS.ProcessEnv {
   const env: NodeJS.ProcessEnv = {};
   if (GlobalConfig.get('exposeAllEnv')) {
diff --git a/lib/util/exec/hermit.spec.ts b/lib/util/exec/hermit.spec.ts
index 8152713811903e990d80cb1c949ad3e59e369f8a..0d524108be6d780a8c86455f10b2f805a04b7b96 100644
--- a/lib/util/exec/hermit.spec.ts
+++ b/lib/util/exec/hermit.spec.ts
@@ -43,7 +43,7 @@ describe('util/exec/hermit', () => {
         expect(await findHermitCwd(cwd)).toBe(upath.join(localDir, expected));
 
         expect(findUp.mock.calls[0][1]?.cwd).toBe(cwd);
-      }
+      },
     );
 
     it('should throw error when hermit cwd is not found', async () => {
@@ -75,7 +75,7 @@ describe('util/exec/hermit', () => {
       const resp = await getHermitEnvs(
         partial<RawExecOptions>({
           cwd: fullCwd,
-        })
+        }),
       );
 
       expect(findUp.mock.calls[0][1]?.cwd).toEqual(fullCwd);
diff --git a/lib/util/exec/hermit.ts b/lib/util/exec/hermit.ts
index d47f31bffce2e1ef0198e1737523ba00598c1eaf..76fa6e888e19e0b7247d9d51bd8c60853e3060c1 100644
--- a/lib/util/exec/hermit.ts
+++ b/lib/util/exec/hermit.ts
@@ -22,7 +22,7 @@ export async function findHermitCwd(cwd: string): Promise<string> {
 }
 
 export async function getHermitEnvs(
-  rawOptions: RawExecOptions
+  rawOptions: RawExecOptions,
 ): Promise<Record<string, string>> {
   const cwd = rawOptions.cwd ?? /* istanbul ignore next */ '';
   const hermitCwd = await findHermitCwd(cwd);
diff --git a/lib/util/exec/index.spec.ts b/lib/util/exec/index.spec.ts
index 8aa42878eaca3d2eef632562fa52e3d85e5f7b95..68616cbf372216d6077141fd781dbc02d6dd1d5a 100644
--- a/lib/util/exec/index.spec.ts
+++ b/lib/util/exec/index.spec.ts
@@ -881,7 +881,7 @@ describe('util/exec/index', () => {
 
     const removeDockerContainerSpy = jest.spyOn(
       dockerModule,
-      'removeDockerContainer'
+      'removeDockerContainer',
     );
 
     const promise = exec('foobar', {});
@@ -903,7 +903,7 @@ describe('util/exec/index', () => {
     let calledOnce = false;
     const removeDockerContainerSpy = jest.spyOn(
       dockerModule,
-      'removeDockerContainer'
+      'removeDockerContainer',
     );
     removeDockerContainerSpy.mockImplementation((): Promise<void> => {
       if (!calledOnce) {
@@ -917,8 +917,8 @@ describe('util/exec/index', () => {
     const promise = exec('foobar', { docker });
     await expect(promise).rejects.toThrow(
       new Error(
-        'Error: "removeDockerContainer failed" - Original Error: "some error occurred"'
-      )
+        'Error: "removeDockerContainer failed" - Original Error: "some error occurred"',
+      ),
     );
     expect(removeDockerContainerSpy).toHaveBeenCalledTimes(2);
   });
@@ -934,7 +934,7 @@ describe('util/exec/index', () => {
     });
     const removeDockerContainerSpy = jest.spyOn(
       dockerModule,
-      'removeDockerContainer'
+      'removeDockerContainer',
     );
     const promise = exec('foobar', {});
     await expect(promise).rejects.toThrow(TEMPORARY_ERROR);
diff --git a/lib/util/exec/index.ts b/lib/util/exec/index.ts
index 7a65802e1a73ae77fa4d2f74e21bd275fe294610..d7403c3e894bb240100c47943dcd922593ab505f 100644
--- a/lib/util/exec/index.ts
+++ b/lib/util/exec/index.ts
@@ -75,7 +75,7 @@ interface RawExecArguments {
 
 async function prepareRawExec(
   cmd: string | string[],
-  opts: ExecOptions
+  opts: ExecOptions,
 ): Promise<RawExecArguments> {
   const { docker } = opts;
   const preCommands = opts.preCommands ?? [];
@@ -111,7 +111,7 @@ async function prepareRawExec(
         ...(await generateInstallCommands(opts.toolConstraints)),
         ...preCommands,
       ],
-      dockerOptions
+      dockerOptions,
     );
     rawCommands = [dockerCommand];
   } else if (isDynamicInstall(opts.toolConstraints)) {
@@ -125,7 +125,7 @@ async function prepareRawExec(
     const hermitEnvVars = await getHermitEnvs(rawOptions);
     logger.debug(
       { hermitEnvVars },
-      'merging hermit environment variables into the execution options'
+      'merging hermit environment variables into the execution options',
     );
     rawOptions.env = {
       ...rawOptions.env,
@@ -138,7 +138,7 @@ async function prepareRawExec(
 
 export async function exec(
   cmd: string | string[],
-  opts: ExecOptions = {}
+  opts: ExecOptions = {},
 ): Promise<ExecResult> {
   const { docker } = opts;
   const dockerChildPrefix = GlobalConfig.get('dockerChildPrefix', 'renovate_');
@@ -164,15 +164,15 @@ export async function exec(
           (removeErr: Error) => {
             const message: string = err.message;
             throw new Error(
-              `Error: "${removeErr.message}" - Original Error: "${message}"`
+              `Error: "${removeErr.message}" - Original Error: "${message}"`,
             );
-          }
+          },
         );
       }
       if (err.signal === `SIGTERM`) {
         logger.debug(
           { err },
-          'exec interrupted by SIGTERM - run needs to be aborted'
+          'exec interrupted by SIGTERM - run needs to be aborted',
         );
         throw new Error(TEMPORARY_ERROR);
       }
@@ -185,7 +185,7 @@ export async function exec(
         stdout: res.stdout,
         stderr: res.stderr,
       },
-      'exec completed'
+      'exec completed',
     );
   }
 
diff --git a/lib/util/fs/index.spec.ts b/lib/util/fs/index.spec.ts
index 361f4eff62dda9d96fa1d5af5acec890ad49ad59..13c175716895f7207b8e5da5eea3b26260f21fe6 100644
--- a/lib/util/fs/index.spec.ts
+++ b/lib/util/fs/index.spec.ts
@@ -230,7 +230,7 @@ describe('util/fs/index', () => {
       await writeLocalFile('test/test.txt', '');
       await fs.symlink(
         join(localDir, 'test/test.txt'),
-        join(localDir, 'test/test')
+        join(localDir, 'test/test'),
       );
 
       const result = await readLocalSymlink('test/test');
@@ -242,7 +242,7 @@ describe('util/fs/index', () => {
       await writeLocalFile('test/test.txt', '');
       await fs.symlink(
         join(localDir, 'test/test.txt'),
-        join(localDir, 'test/test')
+        join(localDir, 'test/test'),
       );
 
       const notExistsResult = await readLocalSymlink('test/not-exists');
@@ -257,22 +257,22 @@ describe('util/fs/index', () => {
       await writeLocalFile('Cargo.lock', 'bar');
 
       expect(
-        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.lock')
+        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.lock'),
       ).toBe('Cargo.lock');
       expect(
-        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.mock')
+        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.mock'),
       ).toBeNull();
 
       await writeLocalFile('crates/one/Cargo.lock', '');
 
       expect(
-        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.lock')
+        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.lock'),
       ).toBe('crates/one/Cargo.lock');
       expect(await findLocalSiblingOrParent('crates/one', 'Cargo.lock')).toBe(
-        'Cargo.lock'
+        'Cargo.lock',
       );
       expect(
-        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.mock')
+        await findLocalSiblingOrParent('crates/one/Cargo.toml', 'Cargo.mock'),
       ).toBeNull();
     });
 
@@ -458,7 +458,7 @@ describe('util/fs/index', () => {
       await fs.outputFile(`${cacheDir}/foo/bar/file.txt`, 'foobar');
       expect(await readCacheFile(`foo/bar/file.txt`, 'utf8')).toBe('foobar');
       expect(await readCacheFile(`foo/bar/file.txt`)).toEqual(
-        Buffer.from('foobar')
+        Buffer.from('foobar'),
       );
     });
   });
diff --git a/lib/util/fs/index.ts b/lib/util/fs/index.ts
index aa67dee18eff4e4a6e76c92beb330f704f2b22f0..0d34ea3a0931a9e44aaac58767e7d9ff51cc6876 100644
--- a/lib/util/fs/index.ts
+++ b/lib/util/fs/index.ts
@@ -16,7 +16,7 @@ export function getParentDir(fileName: string): string {
 
 export function getSiblingFileName(
   fileName: string,
-  siblingName: string
+  siblingName: string,
 ): string {
   const subDirectory = getParentDir(fileName);
   return upath.join(subDirectory, siblingName);
@@ -25,11 +25,11 @@ export function getSiblingFileName(
 export async function readLocalFile(fileName: string): Promise<Buffer | null>;
 export async function readLocalFile(
   fileName: string,
-  encoding: 'utf8'
+  encoding: 'utf8',
 ): Promise<string | null>;
 export async function readLocalFile(
   fileName: string,
-  encoding?: BufferEncoding
+  encoding?: BufferEncoding,
 ): Promise<string | Buffer | null> {
   const localFileName = ensureLocalPath(fileName);
   try {
@@ -44,7 +44,7 @@ export async function readLocalFile(
 }
 
 export async function readLocalSymlink(
-  fileName: string
+  fileName: string,
 ): Promise<string | null> {
   const localFileName = ensureLocalPath(fileName);
   try {
@@ -58,7 +58,7 @@ export async function readLocalSymlink(
 
 export async function writeLocalFile(
   fileName: string,
-  fileContent: string | Buffer
+  fileContent: string | Buffer,
 ): Promise<void> {
   const localFileName = ensureLocalPath(fileName);
   await fs.outputFile(localFileName, fileContent);
@@ -78,7 +78,7 @@ export async function deleteLocalFile(fileName: string): Promise<void> {
 
 export async function renameLocalFile(
   fromFile: string,
-  toFile: string
+  toFile: string,
 ): Promise<void> {
   const fromPath = ensureLocalPath(fromFile);
   const toPath = ensureLocalPath(toFile);
@@ -141,7 +141,7 @@ export function isValidLocalPath(path: string): boolean {
  */
 export async function findLocalSiblingOrParent(
   existingFileNameWithPath: string,
-  otherFileName: string
+  otherFileName: string,
 ): Promise<string | null> {
   if (upath.isAbsolute(existingFileNameWithPath)) {
     return null;
@@ -187,7 +187,7 @@ export async function localPathIsFile(pathName: string): Promise<boolean> {
 }
 
 export async function localPathIsSymbolicLink(
-  pathName: string
+  pathName: string,
 ): Promise<boolean> {
   const path = ensureLocalPath(pathName);
   try {
@@ -204,7 +204,7 @@ export async function localPathIsSymbolicLink(
 
 export async function findUpLocal(
   fileName: string | string[],
-  cwd: string
+  cwd: string,
 ): Promise<string | null> {
   const localDir = GlobalConfig.get('localDir');
   const absoluteCwd = upath.join(localDir, cwd);
@@ -232,14 +232,14 @@ export async function findUpLocal(
 
 export function chmodLocalFile(
   fileName: string,
-  mode: string | number
+  mode: string | number,
 ): Promise<void> {
   const fullFileName = ensureLocalPath(fileName);
   return fs.chmod(fullFileName, mode);
 }
 
 export async function statLocalFile(
-  fileName: string
+  fileName: string,
 ): Promise<fs.Stats | null> {
   const fullFileName = ensureLocalPath(fileName);
   try {
@@ -272,11 +272,11 @@ export async function cachePathExists(pathName: string): Promise<boolean> {
 export async function readCacheFile(fileName: string): Promise<Buffer>;
 export async function readCacheFile(
   fileName: string,
-  encoding: 'utf8'
+  encoding: 'utf8',
 ): Promise<string>;
 export function readCacheFile(
   fileName: string,
-  encoding?: BufferEncoding
+  encoding?: BufferEncoding,
 ): Promise<string | Buffer> {
   const fullPath = ensureCachePath(fileName);
   return encoding ? fs.readFile(fullPath, encoding) : fs.readFile(fullPath);
@@ -284,7 +284,7 @@ export function readCacheFile(
 
 export function outputCacheFile(
   file: string,
-  data: string | NodeJS.ArrayBufferView
+  data: string | NodeJS.ArrayBufferView,
 ): Promise<void> {
   const filePath = ensureCachePath(file);
   return fs.outputFile(filePath, data);
@@ -293,17 +293,17 @@ export function outputCacheFile(
 export async function readSystemFile(fileName: string): Promise<Buffer>;
 export async function readSystemFile(
   fileName: string,
-  encoding: 'utf8'
+  encoding: 'utf8',
 ): Promise<string>;
 export function readSystemFile(
   fileName: string,
-  encoding?: BufferEncoding
+  encoding?: BufferEncoding,
 ): Promise<string | Buffer> {
   return encoding ? fs.readFile(fileName, encoding) : fs.readFile(fileName);
 }
 
 export async function getLocalFiles(
-  fileNames: string[]
+  fileNames: string[],
 ): Promise<Record<string, string | null>> {
   const fileContentMap: Record<string, string | null> = {};
 
diff --git a/lib/util/fs/util.ts b/lib/util/fs/util.ts
index 9b9f67daafb4e6c086d08da0bcd9b69d73b6e6cb..cbe8afae07824b62f08efde2c210067effab0217 100644
--- a/lib/util/fs/util.ts
+++ b/lib/util/fs/util.ts
@@ -7,7 +7,7 @@ function assertBaseDir(path: string, baseDir: string): void {
   if (!path.startsWith(baseDir)) {
     logger.debug(
       { path, baseDir },
-      'Preventing access to file outside the base directory'
+      'Preventing access to file outside the base directory',
     );
     throw new Error(FILE_ACCESS_VIOLATION_ERROR);
   }
@@ -16,7 +16,7 @@ function assertBaseDir(path: string, baseDir: string): void {
 function ensurePath(path: string, key: 'localDir' | 'cacheDir'): string {
   const baseDir = upath.resolve(GlobalConfig.get(key)!);
   const fullPath = upath.resolve(
-    upath.isAbsolute(path) ? path : upath.join(baseDir, path)
+    upath.isAbsolute(path) ? path : upath.join(baseDir, path),
   );
   assertBaseDir(fullPath, baseDir);
   return fullPath;
@@ -32,11 +32,11 @@ export function ensureCachePath(path: string): string {
 
 export function isValidPath(
   path: string,
-  key: 'localDir' | 'cacheDir'
+  key: 'localDir' | 'cacheDir',
 ): boolean {
   const baseDir = upath.resolve(GlobalConfig.get(key)!);
   const fullPath = upath.resolve(
-    upath.isAbsolute(path) ? path : upath.join(baseDir, path)
+    upath.isAbsolute(path) ? path : upath.join(baseDir, path),
   );
 
   return fullPath.startsWith(baseDir);
diff --git a/lib/util/git/auth.spec.ts b/lib/util/git/auth.spec.ts
index 2b74fd1fdf3d4b804d3e64907f7f4d8893513762..d7e0d8b2d4dc8e48a30fc702ba328507300bad98 100644
--- a/lib/util/git/auth.spec.ts
+++ b/lib/util/git/auth.spec.ts
@@ -16,7 +16,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -35,7 +35,7 @@ describe('util/git/auth', () => {
           password: 'password',
           hostType: 'github',
           matchHost: 'example.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0:
@@ -58,7 +58,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -76,7 +76,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -94,7 +94,7 @@ describe('util/git/auth', () => {
           token: 'x-access-token:token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0:
@@ -118,8 +118,8 @@ describe('util/git/auth', () => {
             hostType: 'github',
             matchHost: 'github.com',
           },
-          { GIT_CONFIG_COUNT: '1' }
-        )
+          { GIT_CONFIG_COUNT: '1' },
+        ),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '4',
         GIT_CONFIG_KEY_1: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -141,8 +141,8 @@ describe('util/git/auth', () => {
             hostType: 'github',
             matchHost: 'github.com',
           },
-          { GIT_CONFIG_COUNT: '1' }
-        )
+          { GIT_CONFIG_COUNT: '1' },
+        ),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '4',
         GIT_CONFIG_KEY_1: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -161,7 +161,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '4',
         GIT_CONFIG_KEY_1: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -182,8 +182,8 @@ describe('util/git/auth', () => {
             hostType: 'github',
             matchHost: 'github.com',
           },
-          { RANDOM_VARIABLE: 'random' }
-        )
+          { RANDOM_VARIABLE: 'random' },
+        ),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -203,7 +203,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.https://ssh:token1234@github.com/.insteadOf',
@@ -221,7 +221,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'gitlab',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0:
@@ -241,7 +241,7 @@ describe('util/git/auth', () => {
         getGitAuthenticatedEnvironmentVariables('https://gitlab.com/', {
           token: 'token1234',
           matchHost: 'gitlab.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0:
@@ -264,8 +264,8 @@ describe('util/git/auth', () => {
             hostType: 'gitlab',
             matchHost: 'gitlab.com',
           },
-          { env: 'value' }
-        )
+          { env: 'value' },
+        ),
       ).toStrictEqual({
         env: 'value',
       });
@@ -277,7 +277,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.http://ssh:token1234@github.com/.insteadOf',
@@ -295,7 +295,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0: 'url.https://ssh:token1234@github.com/org.insteadOf',
@@ -313,7 +313,7 @@ describe('util/git/auth', () => {
           token: 'token1234',
           hostType: 'github',
           matchHost: 'github.com',
-        })
+        }),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0:
@@ -335,8 +335,8 @@ describe('util/git/auth', () => {
             token: 'token1234',
             hostType: 'github',
             matchHost: 'github.com',
-          }
-        )
+          },
+        ),
       ).toStrictEqual({
         GIT_CONFIG_COUNT: '3',
         GIT_CONFIG_KEY_0:
diff --git a/lib/util/git/auth.ts b/lib/util/git/auth.ts
index a01613cc4d6d36b883c665bec839f885e4216e17..c28906dcaafd6da2263937f193363c9c2758e46c 100644
--- a/lib/util/git/auth.ts
+++ b/lib/util/git/auth.ts
@@ -32,12 +32,12 @@ const standardGitAllowedHostTypes = [
 export function getGitAuthenticatedEnvironmentVariables(
   originalGitUrl: string,
   { token, username, password, hostType, matchHost }: HostRule,
-  environmentVariables?: NodeJS.ProcessEnv
+  environmentVariables?: NodeJS.ProcessEnv,
 ): NodeJS.ProcessEnv {
   if (!token && !(username && password)) {
     logger.warn(
       // TODO: types (#22198)
-      `Could not create environment variable for ${matchHost!} as neither token or username and password was set`
+      `Could not create environment variable for ${matchHost!} as neither token or username and password was set`,
     );
     return { ...environmentVariables };
   }
@@ -52,8 +52,8 @@ export function getGitAuthenticatedEnvironmentVariables(
     if (Number.isNaN(gitConfigCount)) {
       logger.warn(
         `Found GIT_CONFIG_COUNT env variable, but couldn't parse the value to an integer: ${String(
-          process.env.GIT_CONFIG_COUNT
-        )}. Ignoring it.`
+          process.env.GIT_CONFIG_COUNT,
+        )}. Ignoring it.`,
       );
       gitConfigCount = 0;
     }
@@ -63,7 +63,7 @@ export function getGitAuthenticatedEnvironmentVariables(
     authenticationRules = getAuthenticationRulesWithToken(
       originalGitUrl,
       hostType,
-      token
+      token,
     );
   } else {
     const encodedUsername = encodeURIComponent(username!);
@@ -71,7 +71,7 @@ export function getGitAuthenticatedEnvironmentVariables(
 
     authenticationRules = getAuthenticationRules(
       originalGitUrl,
-      `${encodedUsername}:${encodedPassword}`
+      `${encodedUsername}:${encodedPassword}`,
     );
   }
 
@@ -97,7 +97,7 @@ export function getGitAuthenticatedEnvironmentVariables(
 function getAuthenticationRulesWithToken(
   url: string,
   hostType: string | undefined | null,
-  authToken: string
+  authToken: string,
 ): AuthenticationRule[] {
   let token = authToken;
   let type = hostType;
@@ -116,7 +116,7 @@ function getAuthenticationRulesWithToken(
  */
 export function getAuthenticationRules(
   gitUrl: string,
-  token: string
+  token: string,
 ): AuthenticationRule[] {
   const authenticationRules = [];
   const hasUser = token.split(':').length > 1;
@@ -156,7 +156,7 @@ export function getAuthenticationRules(
 }
 
 export function getGitEnvironmentVariables(
-  additionalHostTypes: string[] = []
+  additionalHostTypes: string[] = [],
 ): NodeJS.ProcessEnv {
   let environmentVariables: NodeJS.ProcessEnv = {};
 
@@ -169,7 +169,7 @@ export function getGitEnvironmentVariables(
   if (gitHubHostRule?.token) {
     environmentVariables = getGitAuthenticatedEnvironmentVariables(
       'https://github.com/',
-      gitHubHostRule
+      gitHubHostRule,
     );
   }
 
@@ -191,7 +191,7 @@ export function getGitEnvironmentVariables(
     if (!hostRule.hostType || gitAllowedHostTypes.has(hostRule.hostType)) {
       environmentVariables = addAuthFromHostRule(
         hostRule,
-        environmentVariables
+        environmentVariables,
       );
     }
   }
@@ -200,7 +200,7 @@ export function getGitEnvironmentVariables(
 
 function addAuthFromHostRule(
   hostRule: HostRule,
-  env: NodeJS.ProcessEnv
+  env: NodeJS.ProcessEnv,
 ): NodeJS.ProcessEnv {
   let environmentVariables = env;
   const httpUrl = createURLFromHostOrURL(hostRule.matchHost!)?.toString();
@@ -209,11 +209,11 @@ function addAuthFromHostRule(
     environmentVariables = getGitAuthenticatedEnvironmentVariables(
       httpUrl!,
       hostRule,
-      environmentVariables
+      environmentVariables,
     );
   } else {
     logger.debug(
-      `Could not parse registryUrl ${hostRule.matchHost!} or not using http(s). Ignoring`
+      `Could not parse registryUrl ${hostRule.matchHost!} or not using http(s). Ignoring`,
     );
   }
   return environmentVariables;
diff --git a/lib/util/git/author.spec.ts b/lib/util/git/author.spec.ts
index 3d7773c2b8c65638a398bb71b8b883593f68064e..1f9539bfe41cf34f8374d61869790c7326f23991 100644
--- a/lib/util/git/author.spec.ts
+++ b/lib/util/git/author.spec.ts
@@ -19,7 +19,9 @@ describe('util/git/author', () => {
 
     it('parses bot name and email', () => {
       expect(
-        parseGitAuthor('renovate[bot] <renovate[bot]@users.noreply.github.com>')
+        parseGitAuthor(
+          'renovate[bot] <renovate[bot]@users.noreply.github.com>',
+        ),
       ).toEqual({
         address: 'renovate[bot]@users.noreply.github.com',
         name: 'renovate[bot]',
@@ -28,7 +30,7 @@ describe('util/git/author', () => {
 
     it('escapes names', () => {
       expect(parseGitAuthor('name [what] <name@what.com>')?.name).toBe(
-        `name [what]`
+        `name [what]`,
       );
     });
 
diff --git a/lib/util/git/behind-base-branch-cache.spec.ts b/lib/util/git/behind-base-branch-cache.spec.ts
index 4aaabc6aaaaa2436f64da9f510f5fa661ca554e3..c8b8cd1b94d0abe2b9629d3833b795f1ca97c21b 100644
--- a/lib/util/git/behind-base-branch-cache.spec.ts
+++ b/lib/util/git/behind-base-branch-cache.spec.ts
@@ -24,8 +24,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -47,8 +47,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -70,8 +70,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -93,8 +93,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -115,8 +115,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -138,8 +138,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -161,8 +161,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeNull();
     });
 
@@ -184,8 +184,8 @@ describe('util/git/behind-base-branch-cache', () => {
           'branch',
           'branch_sha',
           'base_branch',
-          'base_branch_sha'
-        )
+          'base_branch_sha',
+        ),
       ).toBeTrue();
     });
   });
@@ -195,7 +195,7 @@ describe('util/git/behind-base-branch-cache', () => {
       setCachedBehindBaseResult('foo', false);
       expect(repoCache).toEqual({});
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'setCachedBehindBaseResult(): Branch cache not present'
+        'setCachedBehindBaseResult(): Branch cache not present',
       );
     });
 
@@ -203,7 +203,7 @@ describe('util/git/behind-base-branch-cache', () => {
       setCachedBehindBaseResult('foo', false);
       expect(repoCache).toEqual({});
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'setCachedBehindBaseResult(): Branch cache not present'
+        'setCachedBehindBaseResult(): Branch cache not present',
       );
     });
 
diff --git a/lib/util/git/behind-base-branch-cache.ts b/lib/util/git/behind-base-branch-cache.ts
index 929457a8a5ce3d897ce29943d98eb8740c0edcf5..f4f158bc6c46f907b6edccf82a2bed719659c8c2 100644
--- a/lib/util/git/behind-base-branch-cache.ts
+++ b/lib/util/git/behind-base-branch-cache.ts
@@ -5,11 +5,11 @@ export function getCachedBehindBaseResult(
   branchName: string,
   branchSha: string | null,
   baseBranch: string,
-  baseBranchSha: string | null
+  baseBranchSha: string | null,
 ): boolean | null {
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branch.branchName === branchName
+    (branch) => branch.branchName === branchName,
   );
 
   if (
@@ -27,11 +27,11 @@ export function getCachedBehindBaseResult(
 
 export function setCachedBehindBaseResult(
   branchName: string,
-  isBehindBase: boolean
+  isBehindBase: boolean,
 ): void {
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branch.branchName === branchName
+    (branch) => branch.branchName === branchName,
   );
 
   if (!branch) {
diff --git a/lib/util/git/conflicts-cache.spec.ts b/lib/util/git/conflicts-cache.spec.ts
index 19d79037ac181673acf76252fd3be6440ef0d7dc..038a34fe325113c4b73c1a7b83b40bd36164dc89 100644
--- a/lib/util/git/conflicts-cache.spec.ts
+++ b/lib/util/git/conflicts-cache.spec.ts
@@ -20,7 +20,7 @@ describe('util/git/conflicts-cache', () => {
   describe('getCachedConflictResult', () => {
     it('returns null if cache is not populated', () => {
       expect(
-        getCachedConflictResult('foo', 'sha', 'bar', 'base_sha')
+        getCachedConflictResult('foo', 'sha', 'bar', 'base_sha'),
       ).toBeNull();
     });
 
@@ -35,7 +35,7 @@ describe('util/git/conflicts-cache', () => {
         }),
       ];
       expect(
-        getCachedConflictResult('not_foo', 'sha', 'bar', 'base_sha')
+        getCachedConflictResult('not_foo', 'sha', 'bar', 'base_sha'),
       ).toBeNull();
     });
 
@@ -50,7 +50,7 @@ describe('util/git/conflicts-cache', () => {
         }),
       ];
       expect(
-        getCachedConflictResult('foo', 'sha', 'bar', 'not_base_sha')
+        getCachedConflictResult('foo', 'sha', 'bar', 'not_base_sha'),
       ).toBeNull();
     });
 
@@ -65,7 +65,7 @@ describe('util/git/conflicts-cache', () => {
         }),
       ];
       expect(
-        getCachedConflictResult('foo', 'not_sha', 'bar', 'base_sha')
+        getCachedConflictResult('foo', 'not_sha', 'bar', 'base_sha'),
       ).toBeNull();
     });
 
@@ -79,7 +79,7 @@ describe('util/git/conflicts-cache', () => {
         }),
       ];
       expect(
-        getCachedConflictResult('foo', 'sha', 'bar', 'base_sha')
+        getCachedConflictResult('foo', 'sha', 'bar', 'base_sha'),
       ).toBeNull();
     });
 
@@ -94,7 +94,7 @@ describe('util/git/conflicts-cache', () => {
         }),
       ];
       expect(
-        getCachedConflictResult('foo', 'sha', 'bar', 'base_sha')
+        getCachedConflictResult('foo', 'sha', 'bar', 'base_sha'),
       ).toBeTrue();
     });
   });
diff --git a/lib/util/git/conflicts-cache.ts b/lib/util/git/conflicts-cache.ts
index 2e891c52849eeac59dca1369f86cf8980dd0bb71..f736c9842c8ac4428e8747c0512b091aa44be48d 100644
--- a/lib/util/git/conflicts-cache.ts
+++ b/lib/util/git/conflicts-cache.ts
@@ -5,7 +5,7 @@ export function getCachedConflictResult(
   branchName: string,
   branchSha: string,
   baseBranch: string,
-  baseBranchSha: string
+  baseBranchSha: string,
 ): boolean | null {
   const cache = getCache();
   const branch = cache?.branches?.find((br) => br.branchName === branchName);
@@ -24,7 +24,7 @@ export function getCachedConflictResult(
 
 export function setCachedConflictResult(
   branchName: string,
-  isConflicted: boolean
+  isConflicted: boolean,
 ): void {
   const cache = getCache();
   const branch = cache?.branches?.find((br) => br.branchName === branchName);
diff --git a/lib/util/git/error.ts b/lib/util/git/error.ts
index 34708669f3012a0e0d58ab8f9dea6906699d937e..758ccec6879a2d1d69213191602fb75443cda763 100644
--- a/lib/util/git/error.ts
+++ b/lib/util/git/error.ts
@@ -75,7 +75,7 @@ export function checkForPlatformFailure(err: Error): Error | null {
 export function handleCommitError(
   err: Error,
   branchName: string,
-  files?: FileChange[]
+  files?: FileChange[],
 ): null {
   checkForPlatformFailure(err);
   if (err.message.includes(`'refs/heads/renovate' exists`)) {
@@ -87,11 +87,11 @@ export function handleCommitError(
   }
   if (
     err.message.includes(
-      'refusing to allow a GitHub App to create or update workflow'
+      'refusing to allow a GitHub App to create or update workflow',
     )
   ) {
     logger.warn(
-      'App has not been granted permissions to update Workflows - aborting branch.'
+      'App has not been granted permissions to update Workflows - aborting branch.',
     );
     return null;
   }
@@ -116,7 +116,7 @@ export function handleCommitError(
     error.validationError = 'Bitbucket committer error';
     error.validationMessage = `Renovate has experienced the following error when attempting to push its branch to the server: \`${err.message.replaceAll(
       '`',
-      "'"
+      "'",
     )}\``;
     throw error;
   }
diff --git a/lib/util/git/index.spec.ts b/lib/util/git/index.spec.ts
index 3902d862a47c263bde6dfb215af01ffc9fab8f7d..3e36ab49fff75fee2c02d47d652c7e409e39c711 100644
--- a/lib/util/git/index.spec.ts
+++ b/lib/util/git/index.spec.ts
@@ -162,7 +162,7 @@ describe('util/git/index', () => {
         throw new Error('The remote end hung up unexpectedly');
       });
       await expect(git.gitRetry(() => gitFunc())).rejects.toThrow(
-        'The remote end hung up unexpectedly'
+        'The remote end hung up unexpectedly',
       );
       expect(gitFunc).toHaveBeenCalledTimes(6);
     });
@@ -245,13 +245,13 @@ describe('util/git/index', () => {
   describe('isBranchBehindBase()', () => {
     it('should return false if same SHA as master', async () => {
       expect(
-        await git.isBranchBehindBase('renovate/future_branch', defaultBranch)
+        await git.isBranchBehindBase('renovate/future_branch', defaultBranch),
       ).toBeFalse();
     });
 
     it('should return true if SHA different from master', async () => {
       expect(
-        await git.isBranchBehindBase('renovate/past_branch', defaultBranch)
+        await git.isBranchBehindBase('renovate/past_branch', defaultBranch),
       ).toBeTrue();
     });
 
@@ -263,7 +263,7 @@ describe('util/git/index', () => {
       behindBaseCache.getCachedBehindBaseResult.mockReturnValue(true);
       expect(await git.isBranchBehindBase('develop', defaultBranch)).toBeTrue();
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'branch.isBehindBase(): using cached result "true"'
+        'branch.isBehindBase(): using cached result "true"',
       );
     });
   });
@@ -327,7 +327,7 @@ describe('util/git/index', () => {
         message: 'Create something',
       });
       const branchFiles = await git.getBranchFiles(
-        'renovate/branch_with_changes'
+        'renovate/branch_with_changes',
       );
       expect(branchFiles).toEqual(['some-new-file']);
     });
@@ -418,7 +418,7 @@ describe('util/git/index', () => {
 
     it('compare with changes', () => {
       return expect(
-        git.hasDiff('origin/master', 'origin/renovate/future_branch')
+        git.hasDiff('origin/master', 'origin/renovate/future_branch'),
       ).resolves.toBeTrue();
     });
   });
@@ -573,12 +573,12 @@ describe('util/git/index', () => {
       expect(commitSpy).toHaveBeenCalledWith(
         expect.anything(),
         expect.anything(),
-        expect.not.objectContaining({ '--no-verify': null })
+        expect.not.objectContaining({ '--no-verify': null }),
       );
       expect(pushSpy).toHaveBeenCalledWith(
         expect.anything(),
         expect.anything(),
-        expect.not.objectContaining({ '--no-verify': null })
+        expect.not.objectContaining({ '--no-verify': null }),
       );
     });
 
@@ -604,12 +604,12 @@ describe('util/git/index', () => {
       expect(commitSpy).toHaveBeenCalledWith(
         expect.anything(),
         expect.anything(),
-        expect.objectContaining({ '--no-verify': null })
+        expect.objectContaining({ '--no-verify': null }),
       );
       expect(pushSpy).toHaveBeenCalledWith(
         expect.anything(),
         expect.anything(),
-        expect.not.objectContaining({ '--no-verify': null })
+        expect.not.objectContaining({ '--no-verify': null }),
       );
     });
 
@@ -635,12 +635,12 @@ describe('util/git/index', () => {
       expect(commitSpy).toHaveBeenCalledWith(
         expect.anything(),
         expect.anything(),
-        expect.not.objectContaining({ '--no-verify': null })
+        expect.not.objectContaining({ '--no-verify': null }),
       );
       expect(pushSpy).toHaveBeenCalledWith(
         expect.anything(),
         expect.anything(),
-        expect.objectContaining({ '--no-verify': null })
+        expect.objectContaining({ '--no-verify': null }),
       );
     });
 
@@ -683,14 +683,14 @@ describe('util/git/index', () => {
           auth: 'user:pass',
           hostname: 'host',
           repository: 'some/repo',
-        })
+        }),
       ).toBe('https://user:pass@host/some/repo.git');
       expect(
         getUrl({
           auth: 'user:pass',
           hostname: 'host',
           repository: 'some/repo',
-        })
+        }),
       ).toBe('https://user:pass@host/some/repo.git');
     });
 
@@ -701,7 +701,7 @@ describe('util/git/index', () => {
           auth: 'user:pass',
           hostname: 'host',
           repository: 'some/repo',
-        })
+        }),
       ).toBe('git@host:some/repo.git');
     });
   });
@@ -767,7 +767,7 @@ describe('util/git/index', () => {
       const repo = Git(base.path);
       await fs.writeFile(
         base.path + '/.gitmodules',
-        '[submodule "test"]\npath=test\nurl=ssh://0.0.0.0'
+        '[submodule "test"]\npath=test\nurl=ssh://0.0.0.0',
       );
       await repo.add('.gitmodules');
       await repo.raw([
@@ -846,7 +846,7 @@ describe('util/git/index', () => {
     it('returns true for non-existing source branch', async () => {
       const res = await git.isBranchConflicted(
         defaultBranch,
-        'renovate/non_existing_branch'
+        'renovate/non_existing_branch',
       );
       expect(res).toBeTrue();
     });
@@ -854,7 +854,7 @@ describe('util/git/index', () => {
     it('returns true for non-existing target branch', async () => {
       const res = await git.isBranchConflicted(
         'renovate/non_existing_branch',
-        'renovate/non_conflicted_branch'
+        'renovate/non_conflicted_branch',
       );
       expect(res).toBeTrue();
     });
@@ -865,7 +865,7 @@ describe('util/git/index', () => {
 
       const res = await git.isBranchConflicted(
         defaultBranch,
-        'renovate/conflicted_branch'
+        'renovate/conflicted_branch',
       );
 
       expect(res).toBeTrue();
@@ -881,7 +881,7 @@ describe('util/git/index', () => {
 
       const res = await git.isBranchConflicted(
         defaultBranch,
-        'renovate/non_conflicted_branch'
+        'renovate/non_conflicted_branch',
       );
 
       expect(res).toBeFalse();
@@ -897,7 +897,7 @@ describe('util/git/index', () => {
 
         const res = await git.isBranchConflicted(
           defaultBranch,
-          'renovate/conflicted_branch'
+          'renovate/conflicted_branch',
         );
 
         expect(res).toBeTrue();
@@ -917,7 +917,7 @@ describe('util/git/index', () => {
 
         const res = await git.isBranchConflicted(
           defaultBranch,
-          'renovate/conflicted_branch'
+          'renovate/conflicted_branch',
         );
 
         expect(res).toBeTrue();
@@ -931,7 +931,7 @@ describe('util/git/index', () => {
 
         const res = await git.isBranchConflicted(
           defaultBranch,
-          'renovate/non_conflicted_branch'
+          'renovate/non_conflicted_branch',
         );
 
         expect(res).toBeFalse();
@@ -1013,7 +1013,7 @@ describe('util/git/index', () => {
       const pushSpy = jest.spyOn(SimpleGit.prototype, 'push');
       pushSpy.mockImplementationOnce(() => {
         throw new Error(
-          'remote: Repository policies do not allow pushes that update more than 2 branches or tags.'
+          'remote: Repository policies do not allow pushes that update more than 2 branches or tags.',
         );
       });
 
@@ -1058,7 +1058,7 @@ describe('util/git/index', () => {
       await fs.writeFile(tmpDir.path + '/root', 'new root');
 
       await expect(git.getRepoStatus('../../bin')).rejects.toThrow(
-        INVALID_PATH
+        INVALID_PATH,
       );
     });
   });
@@ -1072,7 +1072,7 @@ describe('util/git/index', () => {
   describe('fetchRevSpec()', () => {
     it('fetchRevSpec()', async () => {
       await git.fetchRevSpec(
-        `refs/heads/${defaultBranch}:refs/heads/other/${defaultBranch}`
+        `refs/heads/${defaultBranch}:refs/heads/other/${defaultBranch}`,
       );
       //checkout this duplicate
       const sha = await git.checkoutBranch(`other/${defaultBranch}`);
diff --git a/lib/util/git/index.ts b/lib/util/git/index.ts
index ce68362dc9539013509222c557d22d59dce6d964..8f45f1d233a9e0c9044335e11f64e9927290d670 100644
--- a/lib/util/git/index.ts
+++ b/lib/util/git/index.ts
@@ -94,7 +94,7 @@ export async function gitRetry<T>(gitFunc: () => Promise<T>): Promise<T> {
           { err: errChecked },
           `ExternalHostError thrown in round ${
             round + 1
-          } of ${retryCount} - retrying in the next round`
+          } of ${retryCount} - retrying in the next round`,
         );
       } else {
         throw err;
@@ -141,7 +141,7 @@ async function getDefaultBranch(git: SimpleGit): Promise<string> {
     }
     if (
       err.message.startsWith(
-        'fatal: ref refs/remotes/origin/HEAD is not a symbolic ref'
+        'fatal: ref refs/remotes/origin/HEAD is not a symbolic ref',
       )
     ) {
       throw new Error(REPOSITORY_EMPTY);
@@ -191,7 +191,7 @@ export async function validateGitVersion(): Promise<boolean> {
   ) {
     logger.error(
       { detectedVersion: version, minimumVersion: GIT_MINIMUM_VERSION },
-      'Git version needs upgrading'
+      'Git version needs upgrading',
     );
     return false;
   }
@@ -205,7 +205,7 @@ async function fetchBranchCommits(): Promise<void> {
   if (config.extraCloneOpts) {
     Object.entries(config.extraCloneOpts).forEach((e) =>
       // TODO: types (#22198)
-      opts.unshift(e[0], `${e[1]!}`)
+      opts.unshift(e[0], `${e[1]!}`),
     );
   }
   try {
@@ -280,7 +280,7 @@ async function cleanLocalBranches(): Promise<void> {
 
 export function setGitAuthor(gitAuthor: string | undefined): void {
   const gitAuthorParsed = parseGitAuthor(
-    gitAuthor ?? 'Renovate Bot <renovate@whitesourcesoftware.com>'
+    gitAuthor ?? 'Renovate Bot <renovate@whitesourcesoftware.com>',
   );
   if (!gitAuthorParsed) {
     const error = new Error(CONFIG_VALIDATION);
@@ -316,7 +316,7 @@ export async function writeGitAuthor(): Promise<void> {
     }
     logger.debug(
       { err, gitAuthorName, gitAuthorEmail },
-      'Error setting git author config'
+      'Error setting git author config',
     );
     throw new Error(TEMPORARY_ERROR);
   }
@@ -364,7 +364,7 @@ export async function cloneSubmodules(shouldClone: boolean): Promise<void> {
     } catch (err) {
       logger.warn(
         { err },
-        `Unable to initialise git submodule at ${submodule}`
+        `Unable to initialise git submodule at ${submodule}`,
       );
     }
   }
@@ -427,7 +427,7 @@ export async function syncGit(): Promise<void> {
       if (config.extraCloneOpts) {
         Object.entries(config.extraCloneOpts).forEach((e) =>
           // TODO: types (#22198)
-          opts.push(e[0], `${e[1]!}`)
+          opts.push(e[0], `${e[1]!}`),
         );
       }
       const emptyDirAndClone = async (): Promise<void> => {
@@ -483,7 +483,7 @@ export async function getRepoStatus(path?: string): Promise<StatusResult> {
     if (!localPath.startsWith(upath.resolve(localDir))) {
       logger.warn(
         { localPath, localDir },
-        'Preventing access to file outside the local directory'
+        'Preventing access to file outside the local directory',
       );
       throw new Error(INVALID_PATH);
     }
@@ -554,7 +554,7 @@ export async function getFileList(): Promise<string[]> {
     if (err.message?.includes('fatal: Not a valid object name')) {
       logger.debug(
         { err },
-        'Branch not found when checking branch list - aborting'
+        'Branch not found when checking branch list - aborting',
       );
       throw new Error(REPOSITORY_CHANGED);
     }
@@ -578,7 +578,7 @@ export function getBranchList(): string[] {
 
 export async function isBranchBehindBase(
   branchName: string,
-  baseBranch: string
+  baseBranch: string,
 ): Promise<boolean> {
   const baseBranchSha = getBranchCommit(baseBranch);
   const branchSha = getBranchCommit(branchName);
@@ -586,7 +586,7 @@ export async function isBranchBehindBase(
     branchName,
     branchSha,
     baseBranch,
-    baseBranchSha
+    baseBranchSha,
   );
   if (isBehind !== null) {
     logger.debug(`branch.isBehindBase(): using cached result "${isBehind}"`);
@@ -603,7 +603,7 @@ export async function isBranchBehindBase(
     isBehind = behindCount !== '0';
     logger.debug(
       { baseBranch, branchName },
-      `branch.isBehindBase(): ${isBehind}`
+      `branch.isBehindBase(): ${isBehind}`,
     );
     setCachedBehindBaseResult(branchName, isBehind);
     return isBehind;
@@ -628,7 +628,7 @@ export async function isBranchModified(branchName: string): Promise<boolean> {
   // Second check repository cache
   const isModified = getCachedModifiedResult(
     branchName,
-    getBranchCommit(branchName) // branch sha
+    getBranchCommit(branchName), // branch sha
   );
   if (isModified !== null) {
     logger.debug(`branch.isModified(): using cached result "${isModified}"`);
@@ -655,7 +655,7 @@ export async function isBranchModified(branchName: string): Promise<boolean> {
     if (err.message?.includes('fatal: bad revision')) {
       logger.debug(
         { err },
-        'Remote branch not found when checking last commit author - aborting run'
+        'Remote branch not found when checking last commit author - aborting run',
       );
       throw new Error(REPOSITORY_CHANGED);
     }
@@ -674,7 +674,7 @@ export async function isBranchModified(branchName: string): Promise<boolean> {
   }
   logger.debug(
     { branchName, lastAuthor, gitAuthorEmail },
-    'branch.isModified() = true'
+    'branch.isModified() = true',
   );
   config.branchIsModified[branchName] = true;
   setCachedModifiedResult(branchName, true);
@@ -683,7 +683,7 @@ export async function isBranchModified(branchName: string): Promise<boolean> {
 
 export async function isBranchConflicted(
   baseBranch: string,
-  branch: string
+  branch: string,
 ): Promise<boolean> {
   logger.debug(`isBranchConflicted(${baseBranch}, ${branch})`);
 
@@ -692,7 +692,7 @@ export async function isBranchConflicted(
   if (!baseBranchSha || !branchSha) {
     logger.warn(
       { baseBranch, branch },
-      'isBranchConflicted: branch does not exist'
+      'isBranchConflicted: branch does not exist',
     );
     return true;
   }
@@ -701,11 +701,11 @@ export async function isBranchConflicted(
     branch,
     branchSha,
     baseBranch,
-    baseBranchSha
+    baseBranchSha,
   );
   if (is.boolean(isConflicted)) {
     logger.debug(
-      `branch.isConflicted(): using cached result "${isConflicted}"`
+      `branch.isConflicted(): using cached result "${isConflicted}"`,
     );
     return isConflicted;
   }
@@ -730,7 +730,7 @@ export async function isBranchConflicted(
     if (!err?.git?.conflicts?.length) {
       logger.debug(
         { baseBranch, branch, err },
-        'isBranchConflicted: unknown error'
+        'isBranchConflicted: unknown error',
       );
     }
   } finally {
@@ -742,7 +742,7 @@ export async function isBranchConflicted(
     } catch (err) /* istanbul ignore next */ {
       logger.debug(
         { baseBranch, branch, err },
-        'isBranchConflicted: cleanup error'
+        'isBranchConflicted: cleanup error',
       );
     }
   }
@@ -790,7 +790,7 @@ export async function mergeToLocal(refSpecToMerge: string): Promise<void> {
         '-B',
         config.currentBranch,
         'origin/' + config.currentBranch,
-      ])
+      ]),
     );
     status = await git.status();
     await fetchRevSpec(refSpecToMerge);
@@ -804,7 +804,7 @@ export async function mergeToLocal(refSpecToMerge: string): Promise<void> {
         status,
         err,
       },
-      'mergeLocally error'
+      'mergeLocally error',
     );
     throw err;
   }
@@ -817,14 +817,14 @@ export async function mergeBranch(branchName: string): Promise<void> {
     await writeGitAuthor();
     await git.reset(ResetMode.HARD);
     await gitRetry(() =>
-      git.checkout(['-B', branchName, 'origin/' + branchName])
+      git.checkout(['-B', branchName, 'origin/' + branchName]),
     );
     await gitRetry(() =>
       git.checkout([
         '-B',
         config.currentBranch,
         'origin/' + config.currentBranch,
-      ])
+      ]),
     );
     status = await git.status();
     await gitRetry(() => git.merge(['--ff-only', branchName]));
@@ -840,14 +840,14 @@ export async function mergeBranch(branchName: string): Promise<void> {
         status,
         err,
       },
-      'mergeBranch error'
+      'mergeBranch error',
     );
     throw err;
   }
 }
 
 export async function getBranchLastCommitTime(
-  branchName: string
+  branchName: string,
 ): Promise<Date> {
   await syncGit();
   try {
@@ -864,12 +864,12 @@ export async function getBranchLastCommitTime(
 }
 
 export async function getBranchFiles(
-  branchName: string
+  branchName: string,
 ): Promise<string[] | null> {
   await syncGit();
   try {
     const diff = await gitRetry(() =>
-      git.diffSummary([`origin/${branchName}`, `origin/${branchName}^`])
+      git.diffSummary([`origin/${branchName}`, `origin/${branchName}^`]),
     );
     return diff.files.map((file) => file.file);
   } catch (err) /* istanbul ignore next */ {
@@ -884,7 +884,7 @@ export async function getBranchFiles(
 
 export async function getFile(
   filePath: string,
-  branchName?: string
+  branchName?: string,
 ): Promise<string | null> {
   await syncGit();
   try {
@@ -903,7 +903,7 @@ export async function getFile(
 }
 
 export async function getFiles(
-  fileNames: string[]
+  fileNames: string[],
 ): Promise<Record<string, string | null>> {
   const fileContentMap: Record<string, string | null> = {};
 
@@ -916,7 +916,7 @@ export async function getFiles(
 
 export async function hasDiff(
   sourceRef: string,
-  targetRef: string
+  targetRef: string,
 ): Promise<boolean> {
   await syncGit();
   try {
@@ -964,7 +964,7 @@ export async function prepareCommit({
     await git.raw(['clean', '-fd']);
     const parentCommitSha = config.currentBranchSha;
     await gitRetry(() =>
-      git.checkout(['-B', branchName, 'origin/' + config.currentBranch])
+      git.checkout(['-B', branchName, 'origin/' + config.currentBranch]),
     );
     const deletedFiles: string[] = [];
     const addedModifiedFiles: string[] = [];
@@ -1019,7 +1019,7 @@ export async function prepareCommit({
         } catch (err) /* istanbul ignore next */ {
           if (
             !err.message.includes(
-              'The following paths are ignored by one of your .gitignore files'
+              'The following paths are ignored by one of your .gitignore files',
             )
           ) {
             throw err;
@@ -1047,12 +1047,12 @@ export async function prepareCommit({
     }
     logger.debug(
       { deletedFiles, ignoredFiles, result: commitRes },
-      `git commit`
+      `git commit`,
     );
     if (!force && !(await hasDiff('HEAD', `origin/${branchName}`))) {
       logger.debug(
         { branchName, deletedFiles, addedModifiedFiles, ignoredFiles },
-        'No file changes detected. Skipping commit'
+        'No file changes detected. Skipping commit',
       );
       return null;
     }
@@ -1093,7 +1093,7 @@ export async function pushCommit({
     }
 
     const pushRes = await gitRetry(() =>
-      git.push('origin', `${sourceRef}:${targetRef ?? sourceRef}`, pushOptions)
+      git.push('origin', `${sourceRef}:${targetRef ?? sourceRef}`, pushOptions),
     );
     delete pushRes.repo;
     logger.debug({ result: pushRes }, 'git push');
@@ -1106,7 +1106,7 @@ export async function pushCommit({
 }
 
 export async function fetchBranch(
-  branchName: string
+  branchName: string,
 ): Promise<CommitSha | null> {
   await syncGit();
   logger.debug(`Fetching branch ${branchName}`);
@@ -1123,7 +1123,7 @@ export async function fetchBranch(
 }
 
 export async function commitFiles(
-  commitConfig: CommitFilesConfig
+  commitConfig: CommitFilesConfig,
 ): Promise<CommitSha | null> {
   try {
     const commitResult = await prepareCommit(commitConfig);
@@ -1188,7 +1188,7 @@ let remoteRefsExist = false;
 export async function pushCommitToRenovateRef(
   commitSha: string,
   refName: string,
-  section = 'branches'
+  section = 'branches',
 ): Promise<void> {
   const fullRefName = `refs/renovate/${section}/${refName}`;
   await git.raw(['update-ref', fullRefName, commitSha]);
@@ -1245,7 +1245,7 @@ export async function clearRenovateRefs(): Promise<void> {
   obsoleteRefs.push(...nonSectionedRefs);
 
   const renovateBranchRefs = renovateRefs.filter((ref) =>
-    ref.startsWith('refs/renovate/branches/')
+    ref.startsWith('refs/renovate/branches/'),
   );
   obsoleteRefs.push(...renovateBranchRefs);
 
@@ -1275,7 +1275,7 @@ export async function clearRenovateRefs(): Promise<void> {
 }
 
 const treeItemRegex = regEx(
-  /^(?<mode>\d{6})\s+(?<type>blob|tree|commit)\s+(?<sha>[0-9a-f]{40})\s+(?<path>.*)$/
+  /^(?<mode>\d{6})\s+(?<type>blob|tree|commit)\s+(?<sha>[0-9a-f]{40})\s+(?<path>.*)$/,
 );
 
 const treeShaRegex = regEx(/tree\s+(?<treeSha>[0-9a-f]{40})\s*/);
diff --git a/lib/util/git/modified-cache.spec.ts b/lib/util/git/modified-cache.spec.ts
index 75ab80d203e4d551d4a23065b5bd093767dfd7c3..1c679021fc87379de68abd5121919671033e1ecd 100644
--- a/lib/util/git/modified-cache.spec.ts
+++ b/lib/util/git/modified-cache.spec.ts
@@ -67,7 +67,7 @@ describe('util/git/modified-cache', () => {
       setCachedModifiedResult('foo', false);
       expect(repoCache).toEqual({});
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'setCachedModifiedResult(): Branch cache not present'
+        'setCachedModifiedResult(): Branch cache not present',
       );
     });
 
@@ -75,7 +75,7 @@ describe('util/git/modified-cache', () => {
       setCachedModifiedResult('foo', false);
       expect(repoCache).toEqual({});
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'setCachedModifiedResult(): Branch cache not present'
+        'setCachedModifiedResult(): Branch cache not present',
       );
     });
 
diff --git a/lib/util/git/modified-cache.ts b/lib/util/git/modified-cache.ts
index e25998cdd46e30b7d5317484fd5347ff2d9b5f8d..4bb5e1d663f0727ead4dd7c4dcfc2fcecafadcc2 100644
--- a/lib/util/git/modified-cache.ts
+++ b/lib/util/git/modified-cache.ts
@@ -3,11 +3,11 @@ import { getCache } from '../cache/repository';
 
 export function getCachedModifiedResult(
   branchName: string,
-  branchSha: string | null
+  branchSha: string | null,
 ): boolean | null {
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branch.branchName === branchName
+    (branch) => branch.branchName === branchName,
   );
 
   if (branch?.sha === branchSha && branch.isModified !== undefined) {
@@ -19,11 +19,11 @@ export function getCachedModifiedResult(
 
 export function setCachedModifiedResult(
   branchName: string,
-  isModified: boolean
+  isModified: boolean,
 ): void {
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branch.branchName === branchName
+    (branch) => branch.branchName === branchName,
   );
 
   if (!branch) {
diff --git a/lib/util/git/pristine.ts b/lib/util/git/pristine.ts
index 7729a835644184de2bb3a11fd07f45e45939dcad..490a0ada6c5054b7d014c0a857a4c16ff559f72f 100644
--- a/lib/util/git/pristine.ts
+++ b/lib/util/git/pristine.ts
@@ -3,7 +3,7 @@ import { getCache } from '../cache/repository';
 export function getCachedPristineResult(branchName: string): boolean {
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branch.branchName === branchName
+    (branch) => branch.branchName === branchName,
   );
 
   return branch?.pristine ?? false;
diff --git a/lib/util/git/private-key.spec.ts b/lib/util/git/private-key.spec.ts
index 667b8c585318b7095b147450dad8b8dcfc80ff65..fb8bea0667bf1c27efd5c4c1b53e528263c9fa25 100644
--- a/lib/util/git/private-key.spec.ts
+++ b/lib/util/git/private-key.spec.ts
@@ -6,9 +6,9 @@ import { setPrivateKey } from '.';
 jest.mock('fs-extra', () =>
   jest
     .requireActual<typeof import('../../../test/fixtures')>(
-      '../../../test/fixtures'
+      '../../../test/fixtures',
     )
-    .fsExtra()
+    .fsExtra(),
 );
 jest.mock('../exec');
 
diff --git a/lib/util/git/private-key.ts b/lib/util/git/private-key.ts
index 32f3db0a47424d50d6eea3adc9b4bb24fa4f33e7..681331d06846e63bee4dfc2f0b62f152be16b88d 100644
--- a/lib/util/git/private-key.ts
+++ b/lib/util/git/private-key.ts
@@ -17,7 +17,7 @@ export function setPrivateKey(key: string | undefined): void {
   }
   addSecretForSanitizing(key.trim(), 'global');
   logger.debug(
-    'gitPrivateKey: successfully set (but not yet written/configured)'
+    'gitPrivateKey: successfully set (but not yet written/configured)',
   );
   gitPrivateKey = key.trim();
 }
diff --git a/lib/util/git/semantic.ts b/lib/util/git/semantic.ts
index c2e27b4a004f1eddf857dcf99baa0777ba7980f8..09fbd8b8b771cd0279d1210916a209ce6b36942b 100644
--- a/lib/util/git/semantic.ts
+++ b/lib/util/git/semantic.ts
@@ -10,7 +10,7 @@ export async function detectSemanticCommits(): Promise<DetectedSemanticCommit> {
   const cache = getCache();
   if (cache.semanticCommits) {
     logger.debug(
-      `semanticCommits: returning "${cache.semanticCommits}" from cache`
+      `semanticCommits: returning "${cache.semanticCommits}" from cache`,
     );
     return cache.semanticCommits;
   }
diff --git a/lib/util/git/set-branch-commit.spec.ts b/lib/util/git/set-branch-commit.spec.ts
index 322f658dbd1f35d9c5431af18279324adb40173f..6c1be8266ab88f214e6318a7e0070eada04526ab 100644
--- a/lib/util/git/set-branch-commit.spec.ts
+++ b/lib/util/git/set-branch-commit.spec.ts
@@ -20,7 +20,7 @@ describe('util/git/set-branch-commit', () => {
       git.getBranchCommit.mockReturnValueOnce('base_SHA');
       setBranchNewCommit('branch_name', 'base_branch', 'SHA');
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'setBranchCommit(): Branch cache not present'
+        'setBranchCommit(): Branch cache not present',
       );
       expect(repoCache.branches).toEqual([
         {
diff --git a/lib/util/git/set-branch-commit.ts b/lib/util/git/set-branch-commit.ts
index ec25ce99d1fb0508e03ccc212e3ef473942d0387..a0015a4d33e5d395ba13a2947114076f1b429bbd 100644
--- a/lib/util/git/set-branch-commit.ts
+++ b/lib/util/git/set-branch-commit.ts
@@ -11,7 +11,7 @@ import { getBranchCommit } from '.';
 export function setBranchNewCommit(
   branchName: string,
   baseBranch: string,
-  commitSha: string
+  commitSha: string,
 ): void {
   logger.debug('setBranchCommit()');
   const cache = getCache();
diff --git a/lib/util/git/url.spec.ts b/lib/util/git/url.spec.ts
index eda3e6bf423c54a1374c62750deaa55b92641621..2656d536c6e94cd4d3c6f13751c3932dc2a1b936 100644
--- a/lib/util/git/url.spec.ts
+++ b/lib/util/git/url.spec.ts
@@ -51,37 +51,37 @@ describe('util/git/url', () => {
 
     it('returns gitlab url with token', () => {
       expect(getHttpUrl('http://gitlab.com/', 'token')).toBe(
-        'http://gitlab-ci-token:token@gitlab.com/'
+        'http://gitlab-ci-token:token@gitlab.com/',
       );
       expect(getHttpUrl('http://gitlab.com/', 'gitlab-ci-token:token')).toBe(
-        'http://gitlab-ci-token:token@gitlab.com/'
+        'http://gitlab-ci-token:token@gitlab.com/',
       );
       expect(
-        getHttpUrl('http://gitlab.com:8443/', 'gitlab-ci-token:token')
+        getHttpUrl('http://gitlab.com:8443/', 'gitlab-ci-token:token'),
       ).toBe('http://gitlab-ci-token:token@gitlab.com:8443/');
       expect(getHttpUrl('git@gitlab.com:some/repo', 'token')).toBe(
-        'https://gitlab-ci-token:token@gitlab.com/some/repo'
+        'https://gitlab-ci-token:token@gitlab.com/some/repo',
       );
     });
 
     it('returns github url with token', () => {
       expect(getHttpUrl('http://github.com/', 'token')).toBe(
-        'http://x-access-token:token@github.com/'
+        'http://x-access-token:token@github.com/',
       );
       expect(getHttpUrl('http://github.com/', 'x-access-token:token')).toBe(
-        'http://x-access-token:token@github.com/'
+        'http://x-access-token:token@github.com/',
       );
       expect(
-        getHttpUrl('http://github.com:8443/', 'x-access-token:token')
+        getHttpUrl('http://github.com:8443/', 'x-access-token:token'),
       ).toBe('http://x-access-token:token@github.com:8443/');
       expect(getHttpUrl('git@github.com:some/repo', 'token')).toBe(
-        'https://x-access-token:token@github.com/some/repo'
+        'https://x-access-token:token@github.com/some/repo',
       );
     });
 
     it('removes username/password from URL', () => {
       expect(getHttpUrl('https://user:password@foo.bar/someOrg/someRepo')).toBe(
-        'https://foo.bar/someOrg/someRepo'
+        'https://foo.bar/someOrg/someRepo',
       );
     });
 
@@ -89,8 +89,8 @@ describe('util/git/url', () => {
       expect(
         getHttpUrl(
           'https://user:password@foo.bar/someOrg/someRepo',
-          'another-user:a-secret-pwd'
-        )
+          'another-user:a-secret-pwd',
+        ),
       ).toBe('https://another-user:a-secret-pwd@foo.bar/someOrg/someRepo');
     });
   });
@@ -98,7 +98,7 @@ describe('util/git/url', () => {
   describe('getRemoteUrlWithToken()', () => {
     it('returns original url if no host rule is found', () => {
       expect(getRemoteUrlWithToken('https://foo.bar/')).toBe(
-        'https://foo.bar/'
+        'https://foo.bar/',
       );
     });
 
@@ -107,7 +107,7 @@ describe('util/git/url', () => {
       expect(hostRules.find).toHaveBeenLastCalledWith(
         expect.objectContaining({
           url: 'https://foo.bar/some/repo',
-        })
+        }),
       );
     });
 
@@ -116,35 +116,35 @@ describe('util/git/url', () => {
       expect(hostRules.find).toHaveBeenLastCalledWith(
         expect.objectContaining({
           url: 'abcdefg',
-        })
+        }),
       );
     });
 
     it('returns http url with token', () => {
       hostRules.find.mockReturnValueOnce({ token: 'token' });
       expect(getRemoteUrlWithToken('http://foo.bar/')).toBe(
-        'http://token@foo.bar/'
+        'http://token@foo.bar/',
       );
     });
 
     it('returns https url with token', () => {
       hostRules.find.mockReturnValueOnce({ token: 'token' });
       expect(getRemoteUrlWithToken('https://foo.bar/')).toBe(
-        'https://token@foo.bar/'
+        'https://token@foo.bar/',
       );
     });
 
     it('returns https url with token for non-http protocols', () => {
       hostRules.find.mockReturnValueOnce({ token: 'token' });
       expect(getRemoteUrlWithToken('ssh://foo.bar/')).toBe(
-        'https://token@foo.bar/'
+        'https://token@foo.bar/',
       );
     });
 
     it('returns https url with encoded token', () => {
       hostRules.find.mockReturnValueOnce({ token: 't#ken' });
       expect(getRemoteUrlWithToken('https://foo.bar/')).toBe(
-        'https://t%23ken@foo.bar/'
+        'https://t%23ken@foo.bar/',
       );
     });
 
@@ -154,7 +154,7 @@ describe('util/git/url', () => {
         password: 'pass',
       });
       expect(getRemoteUrlWithToken('http://foo.bar/')).toBe(
-        'http://user:pass@foo.bar/'
+        'http://user:pass@foo.bar/',
       );
     });
 
@@ -164,7 +164,7 @@ describe('util/git/url', () => {
         password: 'pass',
       });
       expect(getRemoteUrlWithToken('https://foo.bar/')).toBe(
-        'https://user:pass@foo.bar/'
+        'https://user:pass@foo.bar/',
       );
     });
 
@@ -174,7 +174,7 @@ describe('util/git/url', () => {
         password: 'pass',
       });
       expect(getRemoteUrlWithToken('ssh://foo.bar/')).toBe(
-        'https://user:pass@foo.bar/'
+        'https://user:pass@foo.bar/',
       );
     });
 
@@ -184,7 +184,7 @@ describe('util/git/url', () => {
         password: 'p@ss',
       });
       expect(getRemoteUrlWithToken('https://foo.bar/')).toBe(
-        'https://u%24er:p%40ss@foo.bar/'
+        'https://u%24er:p%40ss@foo.bar/',
       );
     });
 
@@ -193,7 +193,7 @@ describe('util/git/url', () => {
         token: 'token',
       });
       expect(getRemoteUrlWithToken('ssh://gitlab.com/some/repo.git')).toBe(
-        'https://gitlab-ci-token:token@gitlab.com/some/repo.git'
+        'https://gitlab-ci-token:token@gitlab.com/some/repo.git',
       );
     });
 
@@ -202,7 +202,7 @@ describe('util/git/url', () => {
         token: 'token',
       });
       expect(getRemoteUrlWithToken('ssh://github.com/some/repo.git')).toBe(
-        'https://x-access-token:token@github.com/some/repo.git'
+        'https://x-access-token:token@github.com/some/repo.git',
       );
     });
   });
diff --git a/lib/util/github/graphql/cache-strategies/abstract-cache-strategy.ts b/lib/util/github/graphql/cache-strategies/abstract-cache-strategy.ts
index 1b8a1288026b01d0c78d6b7aae27797c7bf6040c..0be638592061129f94742823354e50f9fc4f514e 100644
--- a/lib/util/github/graphql/cache-strategies/abstract-cache-strategy.ts
+++ b/lib/util/github/graphql/cache-strategies/abstract-cache-strategy.ts
@@ -12,7 +12,7 @@ import { isDateExpired } from '../util';
  * and reconciling them with newly obtained ones from paginated queries.
  */
 export abstract class AbstractGithubGraphqlCacheStrategy<
-  GithubItem extends GithubDatasourceItem
+  GithubItem extends GithubDatasourceItem,
 > implements GithubGraphqlCacheStrategy<GithubItem>
 {
   /**
@@ -47,12 +47,12 @@ export abstract class AbstractGithubGraphqlCacheStrategy<
    */
   abstract load(): Promise<GithubGraphqlCacheRecord<GithubItem> | undefined>;
   abstract persist(
-    cacheRecord: GithubGraphqlCacheRecord<GithubItem>
+    cacheRecord: GithubGraphqlCacheRecord<GithubItem>,
   ): Promise<void>;
 
   constructor(
     protected readonly cacheNs: string,
-    protected readonly cacheKey: string
+    protected readonly cacheKey: string,
   ) {}
 
   /**
diff --git a/lib/util/github/graphql/cache-strategies/memory-cache-strategy.ts b/lib/util/github/graphql/cache-strategies/memory-cache-strategy.ts
index 75ae807911cffb93f82e398872d0afeb2db5f3d7..8938b0b045762781e8f400bd241e7b171feb588f 100644
--- a/lib/util/github/graphql/cache-strategies/memory-cache-strategy.ts
+++ b/lib/util/github/graphql/cache-strategies/memory-cache-strategy.ts
@@ -7,7 +7,7 @@ import { AbstractGithubGraphqlCacheStrategy } from './abstract-cache-strategy';
  * and for testing purposes.
  */
 export class GithubGraphqlMemoryCacheStrategy<
-  GithubItem extends GithubDatasourceItem
+  GithubItem extends GithubDatasourceItem,
 > extends AbstractGithubGraphqlCacheStrategy<GithubItem> {
   private fullKey(): string {
     return `github-graphql-cache:${this.cacheNs}:${this.cacheKey}`;
diff --git a/lib/util/github/graphql/cache-strategies/package-cache-strategy.ts b/lib/util/github/graphql/cache-strategies/package-cache-strategy.ts
index f503207ca0118701fcffc833a01d1c8bba3a997b..fff6adb9d0e3c4becf0fcd015954bd6976618b76 100644
--- a/lib/util/github/graphql/cache-strategies/package-cache-strategy.ts
+++ b/lib/util/github/graphql/cache-strategies/package-cache-strategy.ts
@@ -6,14 +6,14 @@ import { AbstractGithubGraphqlCacheStrategy } from './abstract-cache-strategy';
  * Package cache strategy meant to be used for public packages.
  */
 export class GithubGraphqlPackageCacheStrategy<
-  GithubItem extends GithubDatasourceItem
+  GithubItem extends GithubDatasourceItem,
 > extends AbstractGithubGraphqlCacheStrategy<GithubItem> {
   load(): Promise<GithubGraphqlCacheRecord<GithubItem> | undefined> {
     return packageCache.get(this.cacheNs, this.cacheKey);
   }
 
   async persist(
-    cacheRecord: GithubGraphqlCacheRecord<GithubItem>
+    cacheRecord: GithubGraphqlCacheRecord<GithubItem>,
   ): Promise<void> {
     const expiry = this.createdAt
       .plus({
@@ -28,7 +28,7 @@ export class GithubGraphqlPackageCacheStrategy<
         this.cacheNs,
         this.cacheKey,
         cacheRecord,
-        ttlMinutes
+        ttlMinutes,
       );
     }
   }
diff --git a/lib/util/github/graphql/datasource-fetcher.spec.ts b/lib/util/github/graphql/datasource-fetcher.spec.ts
index f2c76fdb8f8707c97a10c27d5fc023cf8900d30f..b95722867ddd1b47807f9dcf972405fa1ac8f468 100644
--- a/lib/util/github/graphql/datasource-fetcher.spec.ts
+++ b/lib/util/github/graphql/datasource-fetcher.spec.ts
@@ -58,7 +58,7 @@ const adapter: GithubGraphqlDatasourceAdapter<
 function resp(
   isRepoPrivate: boolean | undefined,
   nodes: TestAdapterInput[],
-  cursor: string | undefined = undefined
+  cursor: string | undefined = undefined,
 ): GithubGraphqlResponse<GithubGraphqlRepoResponse<TestAdapterInput>> {
   const data: GithubGraphqlRepoResponse<TestAdapterInput> = {
     repository: {
@@ -120,7 +120,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
       const res = await Datasource.query(
         { packageName: 'foo/bar' },
         http,
-        adapter
+        adapter,
       );
 
       expect(res).toBeEmptyArray();
@@ -133,7 +133,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
         .replyWithError('unknown error');
 
       await expect(() =>
-        Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+        Datasource.query({ packageName: 'foo/bar' }, http, adapter),
       ).rejects.toThrow('unknown error');
     });
 
@@ -144,7 +144,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
         .reply(200, err('single error'));
 
       const res = await catchError(() =>
-        Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+        Datasource.query({ packageName: 'foo/bar' }, http, adapter),
       );
 
       expect(res.message).toBe('single error');
@@ -158,7 +158,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
         .reply(200, err('first error', 'second error'));
 
       const res = (await catchError(() =>
-        Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+        Datasource.query({ packageName: 'foo/bar' }, http, adapter),
       )) as AggregateError;
 
       expect(res).toBeInstanceOf(AggregateError);
@@ -172,7 +172,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
       httpMock.scope('https://api.github.com/').post('/graphql').reply(200, {});
 
       await expect(() =>
-        Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+        Datasource.query({ packageName: 'foo/bar' }, http, adapter),
       ).rejects.toThrow('GitHub GraphQL datasource: failed to obtain data');
     });
 
@@ -183,9 +183,9 @@ describe('util/github/graphql/datasource-fetcher', () => {
         .reply(200, { data: {} });
 
       await expect(() =>
-        Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+        Datasource.query({ packageName: 'foo/bar' }, http, adapter),
       ).rejects.toThrow(
-        'GitHub GraphQL datasource: failed to obtain repository data'
+        'GitHub GraphQL datasource: failed to obtain repository data',
       );
     });
 
@@ -196,9 +196,9 @@ describe('util/github/graphql/datasource-fetcher', () => {
         .reply(200, { data: { repository: {} } });
 
       await expect(() =>
-        Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+        Datasource.query({ packageName: 'foo/bar' }, http, adapter),
       ).rejects.toThrow(
-        'GitHub GraphQL datasource: failed to obtain repository payload data'
+        'GitHub GraphQL datasource: failed to obtain repository payload data',
       );
     });
 
@@ -213,13 +213,13 @@ describe('util/github/graphql/datasource-fetcher', () => {
             { version: v2, releaseTimestamp: t2, foo: '2' },
             partial<TestAdapterInput>(),
             { version: v1, releaseTimestamp: t1, foo: '1' },
-          ])
+          ]),
         );
 
       const res = await Datasource.query(
         { packageName: 'foo/bar' },
         http,
-        adapter
+        adapter,
       );
 
       expect(res).toEqual([
@@ -233,12 +233,12 @@ describe('util/github/graphql/datasource-fetcher', () => {
       const page1 = resp(
         false,
         [{ version: v3, releaseTimestamp: t3, foo: '3' }],
-        'aaa'
+        'aaa',
       );
       const page2 = resp(
         false,
         [{ version: v2, releaseTimestamp: t2, foo: '2' }],
-        'bbb'
+        'bbb',
       );
       const page3 = resp(false, [
         { version: v1, releaseTimestamp: t1, foo: '1' },
@@ -255,7 +255,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
       const res = await Datasource.query(
         { packageName: 'foo/bar' },
         http,
-        adapter
+        adapter,
       );
 
       expect(res).toEqual([
@@ -289,11 +289,11 @@ describe('util/github/graphql/datasource-fetcher', () => {
 
       function generatePages(
         items: TestAdapterInput[],
-        perPage: number
+        perPage: number,
       ): GithubGraphqlResponse<GithubGraphqlRepoResponse<TestAdapterInput>>[] {
         const partitions = partitionBy(items, perPage);
         const pages = partitions.map((nodes, idx) =>
-          resp(false, nodes, `page-${idx + 2}`)
+          resp(false, nodes, `page-${idx + 2}`),
         );
         delete pages[pages.length - 1].data?.repository.payload.pageInfo;
         return pages;
@@ -313,7 +313,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
         const res = await Datasource.query(
           { packageName: 'foo/bar' },
           http,
-          adapter
+          adapter,
         );
 
         expect(res).toHaveLength(150);
@@ -341,7 +341,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
         const res = await Datasource.query(
           { packageName: 'foo/bar' },
           http,
-          adapter
+          adapter,
         );
 
         expect(res).toHaveLength(100);
@@ -364,7 +364,7 @@ describe('util/github/graphql/datasource-fetcher', () => {
           .reply(200, err('Something went wrong while executing your query.'));
 
         await expect(
-          Datasource.query({ packageName: 'foo/bar' }, http, adapter)
+          Datasource.query({ packageName: 'foo/bar' }, http, adapter),
         ).rejects.toThrow('Something went wrong while executing your query.');
 
         expect(httpMock.getTrace()).toMatchObject([
@@ -398,12 +398,12 @@ describe('util/github/graphql/datasource-fetcher', () => {
           const instance = new GithubGraphqlDatasourceFetcher(
             { packageName: 'foo/bar' },
             http,
-            adapter
+            adapter,
           );
           await instance.getItems();
 
           expect(instance).toHaveProperty('isCacheable', isCacheable);
-        }
+        },
       );
     });
   });
diff --git a/lib/util/github/graphql/datasource-fetcher.ts b/lib/util/github/graphql/datasource-fetcher.ts
index ca53fb6eff753b21e46999e6f7ad3b60f4f34270..a8bb090fbc8b099a35b71df2ed1876ffc373bb47 100644
--- a/lib/util/github/graphql/datasource-fetcher.ts
+++ b/lib/util/github/graphql/datasource-fetcher.ts
@@ -36,23 +36,23 @@ function isUnknownGraphqlError(err: Error): boolean {
 function canBeSolvedByShrinking(err: Error): boolean {
   const errors: Error[] = err instanceof AggregateError ? [...err] : [err];
   return errors.some(
-    (e) => err instanceof ExternalHostError || isUnknownGraphqlError(e)
+    (e) => err instanceof ExternalHostError || isUnknownGraphqlError(e),
   );
 }
 
 export class GithubGraphqlDatasourceFetcher<
   GraphqlItem,
-  ResultItem extends GithubDatasourceItem
+  ResultItem extends GithubDatasourceItem,
 > {
   static async query<T, U extends GithubDatasourceItem>(
     config: GithubPackageConfig,
     http: GithubHttp,
-    adapter: GithubGraphqlDatasourceAdapter<T, U>
+    adapter: GithubGraphqlDatasourceAdapter<T, U>,
   ): Promise<U[]> {
     const instance = new GithubGraphqlDatasourceFetcher<T, U>(
       config,
       http,
-      adapter
+      adapter,
     );
     const items = await instance.getItems();
     return items;
@@ -76,7 +76,7 @@ export class GithubGraphqlDatasourceFetcher<
     private datasourceAdapter: GithubGraphqlDatasourceAdapter<
       GraphqlItem,
       ResultItem
-    >
+    >,
   ) {
     const { packageName, registryUrl } = packageConfig;
     [this.repoOwner, this.repoName] = packageName.split('/');
@@ -207,7 +207,7 @@ export class GithubGraphqlDatasourceFetcher<
         const { body, ...options } = this.getRawQueryOptions();
         logger.debug(
           { options, newSize: this.itemsPerQuery },
-          'Shrinking GitHub GraphQL page size after error'
+          'Shrinking GitHub GraphQL page size after error',
         );
       }
     }
@@ -245,7 +245,7 @@ export class GithubGraphqlDatasourceFetcher<
               packageName: `${this.repoOwner}/${this.repoName}`,
               baseUrl: this.baseUrl,
             },
-            `GitHub GraphQL datasource: skipping empty item`
+            `GitHub GraphQL datasource: skipping empty item`,
           );
           continue;
         }
diff --git a/lib/util/github/graphql/index.ts b/lib/util/github/graphql/index.ts
index 08f428c92c33b8b6c8d787d22b7651d3d00e42fc..9c6183b2030732146220e7c3d06c6dfcee22d0c3 100644
--- a/lib/util/github/graphql/index.ts
+++ b/lib/util/github/graphql/index.ts
@@ -10,24 +10,24 @@ import type {
 
 export async function queryTags(
   config: GithubPackageConfig,
-  http: GithubHttp
+  http: GithubHttp,
 ): Promise<GithubTagItem[]> {
   const res = await GithubGraphqlDatasourceFetcher.query(
     config,
     http,
-    tagsAdapter
+    tagsAdapter,
   );
   return res;
 }
 
 export async function queryReleases(
   config: GithubPackageConfig,
-  http: GithubHttp
+  http: GithubHttp,
 ): Promise<GithubReleaseItem[]> {
   const res = await GithubGraphqlDatasourceFetcher.query(
     config,
     http,
-    releasesAdapter
+    releasesAdapter,
   );
   return res;
 }
diff --git a/lib/util/github/graphql/query-adapters/tags-query-adapter.spec.ts b/lib/util/github/graphql/query-adapters/tags-query-adapter.spec.ts
index 4f014a98de2262435d95da28dae736a22954eddd..07a83d1536feb3b5603d2aab1c121f276290f584 100644
--- a/lib/util/github/graphql/query-adapters/tags-query-adapter.spec.ts
+++ b/lib/util/github/graphql/query-adapters/tags-query-adapter.spec.ts
@@ -10,7 +10,7 @@ describe('util/github/graphql/query-adapters/tags-query-adapter', () => {
           oid: 'abc123',
           releaseTimestamp: '2022-09-24',
         },
-      })
+      }),
     ).toEqual({
       version: '1.2.3',
       gitRef: '1.2.3',
@@ -28,7 +28,7 @@ describe('util/github/graphql/query-adapters/tags-query-adapter', () => {
           target: { oid: 'abc123' },
           tagger: { releaseTimestamp: '2022-09-24' },
         },
-      })
+      }),
     ).toEqual({
       version: '1.2.3',
       gitRef: '1.2.3',
@@ -41,7 +41,7 @@ describe('util/github/graphql/query-adapters/tags-query-adapter', () => {
     expect(
       adapter.transform({
         target: { type: 'Blob' },
-      } as never)
+      } as never),
     ).toBeNull();
   });
 });
diff --git a/lib/util/github/graphql/types.ts b/lib/util/github/graphql/types.ts
index 41344df196f76921925b252ceb5fcf0cb7547328..fb39074109aeaff38df7924a4c8263d75df53722 100644
--- a/lib/util/github/graphql/types.ts
+++ b/lib/util/github/graphql/types.ts
@@ -8,7 +8,7 @@ export interface GithubDatasourceItem {
  */
 export interface GithubGraphqlDatasourceAdapter<
   Input,
-  Output extends GithubDatasourceItem
+  Output extends GithubDatasourceItem,
 > {
   /**
    * Used for creating datasource-unique cache key
@@ -88,14 +88,14 @@ export interface GithubGraphqlRepoParams {
 }
 
 export interface GithubGraphqlCacheRecord<
-  GithubItem extends GithubDatasourceItem
+  GithubItem extends GithubDatasourceItem,
 > {
   items: Record<string, GithubItem>;
   createdAt: string;
 }
 
 export interface GithubGraphqlCacheStrategy<
-  GithubItem extends GithubDatasourceItem
+  GithubItem extends GithubDatasourceItem,
 > {
   reconcile(items: GithubItem[]): Promise<boolean>;
   finalize(): Promise<GithubItem[]>;
diff --git a/lib/util/github/graphql/util.spec.ts b/lib/util/github/graphql/util.spec.ts
index 00a61702cd9b7e1b70b83a3b650918f92d70cb21..e49b3724b67285d8500afae61dddd6329e58e625 100644
--- a/lib/util/github/graphql/util.spec.ts
+++ b/lib/util/github/graphql/util.spec.ts
@@ -49,9 +49,9 @@ describe('util/github/graphql/util', () => {
         isDateExpired(
           DateTime.fromISO(isoTs(currentTime)),
           isoTs(initialTimestamp),
-          duration
-        )
+          duration,
+        ),
       ).toBe(expected);
-    }
+    },
   );
 });
diff --git a/lib/util/github/graphql/util.ts b/lib/util/github/graphql/util.ts
index 1ae3214b4189169361403375e00353455519e887..6ff867cafe8216235f84f8e31f1709475a9c28ba 100644
--- a/lib/util/github/graphql/util.ts
+++ b/lib/util/github/graphql/util.ts
@@ -18,7 +18,7 @@ export function prepareQuery(payloadQuery: string): string {
 export function isDateExpired(
   currentTime: DateTime,
   initialTimestamp: string,
-  duration: DurationLikeObject
+  duration: DurationLikeObject,
 ): boolean {
   const expiryTime = DateTime.fromISO(initialTimestamp).plus(duration).toUTC();
   return currentTime >= expiryTime;
diff --git a/lib/util/github/tags.spec.ts b/lib/util/github/tags.spec.ts
index 9747b8acf128463cf4f41a72e7a450baa0e2a481..5eb4a304404ed10d73054ccf9fb1e66f07922e77 100644
--- a/lib/util/github/tags.spec.ts
+++ b/lib/util/github/tags.spec.ts
@@ -27,7 +27,7 @@ describe('util/github/tags', () => {
         undefined,
         'some-org/repo',
         'v2.0.0',
-        http
+        http,
       );
       expect(commit).toBe('abc');
     });
@@ -39,7 +39,7 @@ describe('util/github/tags', () => {
         'https://my-enterprise-github.dev',
         'some-org/repo',
         'v2.0.0',
-        http
+        http,
       );
       expect(commit).toBeNull();
       expect(githubGraphql.queryTags).toHaveBeenCalledWith(
@@ -47,7 +47,7 @@ describe('util/github/tags', () => {
           packageName: 'some-org/repo',
           registryUrl: 'https://my-enterprise-github.dev',
         },
-        http
+        http,
       );
     });
 
@@ -58,7 +58,7 @@ describe('util/github/tags', () => {
         undefined,
         'some-org/repo',
         'v2.0.0',
-        http
+        http,
       );
       expect(commit).toBeNull();
     });
@@ -70,7 +70,7 @@ describe('util/github/tags', () => {
         undefined,
         'some-org/repo',
         'v2.0.0',
-        http
+        http,
       );
       expect(commit).toBeNull();
     });
diff --git a/lib/util/github/tags.ts b/lib/util/github/tags.ts
index 51101958af8413cb6a09b3992e9f3e8fe73ef6eb..2454f76071bbf8b860a37536d6edfc5eb5c70e95 100644
--- a/lib/util/github/tags.ts
+++ b/lib/util/github/tags.ts
@@ -6,14 +6,14 @@ export async function findCommitOfTag(
   registryUrl: string | undefined,
   packageName: string,
   tag: string,
-  http: GithubHttp
+  http: GithubHttp,
 ): Promise<string | null> {
   logger.trace(`github/tags.findCommitOfTag(${packageName}, ${tag})`);
   try {
     const tags = await queryTags({ packageName, registryUrl }, http);
     if (!tags.length) {
       logger.debug(
-        `github/tags.findCommitOfTag(): No tags found for ${packageName}`
+        `github/tags.findCommitOfTag(): No tags found for ${packageName}`,
       );
     }
     const tagItem = tags.find(({ version }) => version === tag);
@@ -22,17 +22,17 @@ export async function findCommitOfTag(
         return tagItem.hash;
       }
       logger.debug(
-        `github/tags.findCommitOfTag: Tag ${tag} has no hash for ${packageName}`
+        `github/tags.findCommitOfTag: Tag ${tag} has no hash for ${packageName}`,
       );
     } else {
       logger.debug(
-        `github/tags.findCommitOfTag: Tag ${tag} not found for ${packageName}`
+        `github/tags.findCommitOfTag: Tag ${tag} not found for ${packageName}`,
       );
     }
   } catch (err) {
     logger.debug(
       { githubRepo: packageName, err },
-      'Error getting tag commit from GitHub repo'
+      'Error getting tag commit from GitHub repo',
     );
   }
   return null;
diff --git a/lib/util/github/url.spec.ts b/lib/util/github/url.spec.ts
index 01059d664bcf0943beb7beed3359ed0ff7cb7a40..21a853c8482bcde8c855a8ca4c1fcbb83ba77160 100644
--- a/lib/util/github/url.spec.ts
+++ b/lib/util/github/url.spec.ts
@@ -21,10 +21,10 @@ describe('util/github/url', () => {
 
     it('supports local github installations', () => {
       expect(getApiBaseUrl('https://gh.my-company.com/')).toBe(
-        'https://gh.my-company.com/api/v3/'
+        'https://gh.my-company.com/api/v3/',
       );
       expect(getApiBaseUrl('https://gh.my-company.com/api/v3/')).toBe(
-        'https://gh.my-company.com/api/v3/'
+        'https://gh.my-company.com/api/v3/',
       );
     });
   });
diff --git a/lib/util/github/url.ts b/lib/util/github/url.ts
index 9b0062a251c385de2cd1107c97fecba8b1b181ff..ad4e28e107168d1c40d9fa3fa49ab5a9a207541c 100644
--- a/lib/util/github/url.ts
+++ b/lib/util/github/url.ts
@@ -27,7 +27,7 @@ export function getApiBaseUrl(registryUrl: string | undefined): string {
 
 export function getSourceUrl(
   packageName: string,
-  registryUrl?: string
+  registryUrl?: string,
 ): string {
   const sourceUrlBase = getSourceUrlBase(registryUrl);
   return `${sourceUrlBase}${packageName}`;
diff --git a/lib/util/hash.spec.ts b/lib/util/hash.spec.ts
index 1a703dc4009f7ddf02611c29a95324480902af6b..e66637f12cb091de7b8f89cb520027bd45e236a1 100644
--- a/lib/util/hash.spec.ts
+++ b/lib/util/hash.spec.ts
@@ -5,16 +5,16 @@ import { hash, hashStream, toSha256 } from './hash';
 describe('util/hash', () => {
   it('hashes data with sha256', () => {
     expect(hash('https://example.com/test.txt', 'sha256')).toBe(
-      'd1dc63218c42abba594fff6450457dc8c4bfdd7c22acf835a50ca0e5d2693020'
+      'd1dc63218c42abba594fff6450457dc8c4bfdd7c22acf835a50ca0e5d2693020',
     );
     expect(toSha256('https://example.com/test.txt')).toBe(
-      'd1dc63218c42abba594fff6450457dc8c4bfdd7c22acf835a50ca0e5d2693020'
+      'd1dc63218c42abba594fff6450457dc8c4bfdd7c22acf835a50ca0e5d2693020',
     );
   });
 
   it('hashes data with sha512', () => {
     expect(hash('https://example.com/test.txt')).toBe(
-      '368b1e723aecb5d17e0a69d046f8a7b9eb4e2aa2ee78e307d563c57cde45b8c3755990411aa2626c13214a8d571e0478fa9a19d03e295bb28bc453a88206b484'
+      '368b1e723aecb5d17e0a69d046f8a7b9eb4e2aa2ee78e307d563c57cde45b8c3755990411aa2626c13214a8d571e0478fa9a19d03e295bb28bc453a88206b484',
     );
   });
 
diff --git a/lib/util/hash.ts b/lib/util/hash.ts
index 704654b744b02c086c2aeb68085f6e7f1d666851..3fef71fe1e785e0342bed9522499ce1c990c6086 100644
--- a/lib/util/hash.ts
+++ b/lib/util/hash.ts
@@ -9,7 +9,7 @@ export type AlgorithmName = LiteralUnion<
 
 export function hash(
   data: string | Buffer,
-  algorithm: AlgorithmName = 'sha512'
+  algorithm: AlgorithmName = 'sha512',
 ): string {
   const hash = crypto.createHash(algorithm);
   hash.update(data);
@@ -22,7 +22,7 @@ export function toSha256(input: string): string {
 
 export async function hashStream(
   inputStream: NodeJS.ReadableStream,
-  algorithm: AlgorithmName = 'sha512'
+  algorithm: AlgorithmName = 'sha512',
 ): Promise<string> {
   const hash = crypto.createHash(algorithm);
   await pipeline(inputStream, hash);
diff --git a/lib/util/host-rules.spec.ts b/lib/util/host-rules.spec.ts
index 5b0c47c5337099c950233c12efd741eae30b2556..a16a9332e3d29cc22fb8a782411b256d6cae4ba9 100644
--- a/lib/util/host-rules.spec.ts
+++ b/lib/util/host-rules.spec.ts
@@ -21,7 +21,7 @@ describe('util/host-rules', () => {
           hostType: 'azure',
           domainName: 'github.com',
           hostName: 'api.github.com',
-        } as never)
+        } as never),
       ).toThrow();
     });
 
@@ -31,7 +31,7 @@ describe('util/host-rules', () => {
           hostType: 'azure',
           domainName: 'github.com',
           matchHost: 'https://api.github.com',
-        } as never)
+        } as never),
       ).toThrow();
     });
 
@@ -41,7 +41,7 @@ describe('util/host-rules', () => {
           hostType: 'azure',
           hostName: 'api.github.com',
           matchHost: 'https://api.github.com',
-        } as never)
+        } as never),
       ).toThrow();
     });
 
@@ -77,13 +77,13 @@ describe('util/host-rules', () => {
       } as never);
       expect(find({ hostType: NugetDatasource.id })).toEqual({});
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://nuget.org' })
+        find({ hostType: NugetDatasource.id, url: 'https://nuget.org' }),
       ).not.toEqual({});
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://not.nuget.org' })
+        find({ hostType: NugetDatasource.id, url: 'https://not.nuget.org' }),
       ).not.toEqual({});
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://not-nuget.org' })
+        find({ hostType: NugetDatasource.id, url: 'https://not-nuget.org' }),
       ).toEqual({});
     });
 
@@ -92,7 +92,7 @@ describe('util/host-rules', () => {
         enabled: true,
       });
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://api.github.com' })
+        find({ hostType: NugetDatasource.id, url: 'https://api.github.com' }),
       ).toEqual({ enabled: true });
     });
 
@@ -102,7 +102,7 @@ describe('util/host-rules', () => {
         token: 'abc',
       });
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://nuget.local/api' })
+        find({ hostType: NugetDatasource.id, url: 'https://nuget.local/api' }),
       ).toEqual({ token: 'abc' });
     });
 
@@ -113,14 +113,14 @@ describe('util/host-rules', () => {
       } as never);
       expect(
         find({ hostType: NugetDatasource.id, url: 'https://api.github.com' })
-          .token
+          .token,
       ).toBe('def');
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://github.com' }).token
+        find({ hostType: NugetDatasource.id, url: 'https://github.com' }).token,
       ).toBe('def');
       expect(
         find({ hostType: NugetDatasource.id, url: 'https://apigithub.com' })
-          .token
+          .token,
       ).toBeUndefined();
     });
 
@@ -147,7 +147,7 @@ describe('util/host-rules', () => {
         find({
           hostType: 'github',
           url: 'https://api.github.com/repos/org-b/someRepo/tags?per_page=100',
-        }).token
+        }).token,
       ).toBe('def');
     });
 
@@ -160,13 +160,13 @@ describe('util/host-rules', () => {
         find({
           hostType: 'github',
           url: 'https://api.github.com/repos/org-b/someRepo/tags?per_page=100',
-        }).token
+        }).token,
       ).toBe('abc');
       expect(
         find({
           hostType: 'github-releases',
           url: 'https://api.github.com/repos/org-b/someRepo/tags?per_page=100',
-        }).token
+        }).token,
       ).toBe('abc');
     });
 
@@ -185,7 +185,7 @@ describe('util/host-rules', () => {
         find({
           hostType: 'github-tags',
           url: 'https://api.github.com/repos/org-b/someRepo/tags?per_page=100',
-        }).token
+        }).token,
       ).toBe('def');
     });
 
@@ -195,7 +195,7 @@ describe('util/host-rules', () => {
         token: 'abc',
       } as never);
       expect(
-        find({ hostType: NugetDatasource.id, url: 'https://nuget.local/api' })
+        find({ hostType: NugetDatasource.id, url: 'https://nuget.local/api' }),
       ).toEqual({ token: 'abc' });
     });
 
@@ -210,7 +210,7 @@ describe('util/host-rules', () => {
         find({
           hostType: NugetDatasource.id,
           url: 'https://domain.com/renovatebot',
-        }).token
+        }).token,
       ).toBe('def');
     });
 
@@ -262,7 +262,7 @@ describe('util/host-rules', () => {
       });
       expect(
         find({ hostType: NugetDatasource.id, url: 'https://nuget.local/api' })
-          .token
+          .token,
       ).toBe('abc');
     });
 
@@ -276,7 +276,7 @@ describe('util/host-rules', () => {
         find({
           hostType: NugetDatasource.id,
           url: 'https://nuget.local/api/sub-resource',
-        })
+        }),
       ).toEqual({ token: 'abc' });
     });
 
@@ -292,7 +292,7 @@ describe('util/host-rules', () => {
       expect(
         find({
           url: 'https://nuget.local/api/sub-resource',
-        })
+        }),
       ).toEqual({ token: 'longest' });
     });
   });
@@ -398,7 +398,7 @@ describe('util/host-rules', () => {
       expect(
         hostType({
           url: 'https://github.example.com/chalk/chalk',
-        })
+        }),
       ).toBe('github-changelog');
     });
 
@@ -420,12 +420,12 @@ describe('util/host-rules', () => {
       expect(
         hostType({
           url: 'https://github.example.com/chalk/chalk',
-        })
+        }),
       ).toBe('github-changelog');
       expect(
         hostType({
           url: 'https://gitlab.example.com/chalk/chalk',
-        })
+        }),
       ).toBeNull();
     });
   });
diff --git a/lib/util/host-rules.ts b/lib/util/host-rules.ts
index 9de4fcfc3e1750d62a6b48e4f5342115e4a94bae..19e3addbb0444b06a63866783802bb8187da12a0 100644
--- a/lib/util/host-rules.ts
+++ b/lib/util/host-rules.ts
@@ -32,7 +32,7 @@ export function migrateRule(rule: LegacyHostRule & HostRule): HostRule {
     result.matchHost = matchHost;
   } else if (hostValues.length > 1) {
     throw new Error(
-      `hostRules cannot contain more than one host-matching field - use "matchHost" only.`
+      `hostRules cannot contain more than one host-matching field - use "matchHost" only.`,
     );
   }
 
@@ -52,7 +52,7 @@ export function add(params: HostRule): void {
           // TODO: types (#22198)
           `Adding ${field} authentication for ${rule.matchHost!} (hostType=${
             rule.hostType
-          }) to hostRules`
+          }) to hostRules`,
         );
       }
     });
@@ -65,7 +65,7 @@ export function add(params: HostRule): void {
   });
   if (rule.username && rule.password) {
     sanitize.addSecretForSanitizing(
-      toBase64(`${rule.username}:${rule.password}`)
+      toBase64(`${rule.username}:${rule.password}`),
     );
   }
   hostRules.push(rule);
@@ -153,7 +153,7 @@ export function find(search: HostRuleSearch): HostRuleSearchResult {
       (rule) =>
         isMultiRule(rule) &&
         matchesHostType(rule, search) &&
-        matchesHost(rule, search)
+        matchesHost(rule, search),
     )
     .sort(prioritizeLongestMatchHost)
     .forEach((rule) => {
diff --git a/lib/util/http/auth.ts b/lib/util/http/auth.ts
index ccdc74d5dfb4fe4a946d39f83315513cecb1a62f..d0fb77de4ad6c7e83c77ace3c0feb537e037581f 100644
--- a/lib/util/http/auth.ts
+++ b/lib/util/http/auth.ts
@@ -19,7 +19,7 @@ export type AuthGotOptions = Pick<
 >;
 
 export function applyAuthorization<GotOptions extends AuthGotOptions>(
-  inOptions: GotOptions
+  inOptions: GotOptions,
 ): GotOptions {
   const options: GotOptions = { ...inOptions };
 
@@ -45,7 +45,7 @@ export function applyAuthorization<GotOptions extends AuthGotOptions>(
         if (is.string(options.headers.accept)) {
           options.headers.accept = options.headers.accept.replace(
             'application/vnd.github.v3+json',
-            'application/vnd.github.machine-man-preview+json'
+            'application/vnd.github.machine-man-preview+json',
           );
         }
       }
@@ -74,7 +74,7 @@ export function applyAuthorization<GotOptions extends AuthGotOptions>(
   } else if (options.password !== undefined) {
     // Otherwise got will add username and password to url and header
     const auth = Buffer.from(
-      `${options.username ?? ''}:${options.password}`
+      `${options.username ?? ''}:${options.password}`,
     ).toString('base64');
     options.headers.authorization = `Basic ${auth}`;
     delete options.username;
diff --git a/lib/util/http/bitbucket-server.ts b/lib/util/http/bitbucket-server.ts
index f9a7fa3ac74d0da6a4306722ed5f7d00ce36d905..463e37d3c007edb71243ec50391bcc8e1292a01d 100644
--- a/lib/util/http/bitbucket-server.ts
+++ b/lib/util/http/bitbucket-server.ts
@@ -19,7 +19,7 @@ export class BitbucketServerHttp extends Http {
 
   protected override request<T>(
     path: string,
-    options?: InternalHttpOptions & HttpRequestOptions<T>
+    options?: InternalHttpOptions & HttpRequestOptions<T>,
   ): Promise<HttpResponse<T>> {
     const url = resolveBaseUrl(baseUrl, path);
     const opts = {
diff --git a/lib/util/http/bitbucket.ts b/lib/util/http/bitbucket.ts
index e566223eb7d8a5906f1013ab64296f441bdfac0e..de4c406dffd675ca3b449997a89a00ff8251138a 100644
--- a/lib/util/http/bitbucket.ts
+++ b/lib/util/http/bitbucket.ts
@@ -26,7 +26,7 @@ export class BitbucketHttp extends Http<BitbucketHttpOptions> {
 
   protected override async request<T>(
     path: string,
-    options?: BitbucketHttpOptions & HttpRequestOptions<T>
+    options?: BitbucketHttpOptions & HttpRequestOptions<T>,
   ): Promise<HttpResponse<T>> {
     const opts = { baseUrl, ...options };
 
@@ -53,7 +53,7 @@ export class BitbucketHttp extends Http<BitbucketHttpOptions> {
       while (is.nonEmptyString(nextURL) && page <= MAX_PAGES) {
         const nextResult = await super.request<PagedResult<T>>(
           nextURL,
-          options as BitbucketHttpOptions
+          options as BitbucketHttpOptions,
         );
 
         resultBody.values.push(...nextResult.body.values);
diff --git a/lib/util/http/dns.spec.ts b/lib/util/http/dns.spec.ts
index b1e1477b572778c4f426ab6b3c40866f68dec62e..db574bc10e2f2a83bf79756b2096a3c9052703d3 100644
--- a/lib/util/http/dns.spec.ts
+++ b/lib/util/http/dns.spec.ts
@@ -8,7 +8,7 @@ describe('util/http/dns', () => {
       const ip = await new Promise((resolve) =>
         dnsLookup('api.github.com', 4, (_e, r, _f) => {
           resolve(r);
-        })
+        }),
       );
       expect(ip).toBeString();
       // uses cache
@@ -16,15 +16,15 @@ describe('util/http/dns', () => {
         await new Promise((resolve) =>
           dnsLookup('api.github.com', (_e, r, _f) => {
             resolve(r);
-          })
-        )
+          }),
+        ),
       ).toBe(ip);
       expect(
         await new Promise((resolve) =>
           dnsLookup('api.github.com', {}, (_e, r, _f) => {
             resolve(r);
-          })
-        )
+          }),
+        ),
       ).toBe(ip);
     });
 
@@ -37,7 +37,7 @@ describe('util/http/dns', () => {
           } else {
             resolve(r);
           }
-        })
+        }),
       );
       await expect(ip).rejects.toThrow();
     });
diff --git a/lib/util/http/dns.ts b/lib/util/http/dns.ts
index fd14183aa9be50c06b082a6597dacee43b34c180..808060f0d0de33a52798915986e63d2d6da3ce12 100644
--- a/lib/util/http/dns.ts
+++ b/lib/util/http/dns.ts
@@ -17,24 +17,24 @@ function lookup(
         callback: (
           error: NodeJS.ErrnoException,
           address: string,
-          family: IPFamily
-        ) => void
+          family: IPFamily,
+        ) => void,
       ]
     | [
         hostname: string,
         callback: (
           error: NodeJS.ErrnoException,
           address: string,
-          family: IPFamily
-        ) => void
+          family: IPFamily,
+        ) => void,
       ]
     | [
         hostname: string,
         options: LookupOptions & { all: true },
         callback: (
           error: NodeJS.ErrnoException,
-          result: ReadonlyArray<EntryObject>
-        ) => void
+          result: ReadonlyArray<EntryObject>,
+        ) => void,
       ]
     | [
         hostname: string,
@@ -42,8 +42,8 @@ function lookup(
         callback: (
           error: NodeJS.ErrnoException,
           address: string,
-          family: IPFamily
-        ) => void
+          family: IPFamily,
+        ) => void,
       ]
 ): void {
   let opts: LookupOneOptions | LookupAllOptions;
diff --git a/lib/util/http/gitea.spec.ts b/lib/util/http/gitea.spec.ts
index 3e9b7bd0e789189b2c41f67cb462b6a267318e3e..666d6f948161553f88aeabfc8f2c794c1cb81a27 100644
--- a/lib/util/http/gitea.spec.ts
+++ b/lib/util/http/gitea.spec.ts
@@ -56,7 +56,7 @@ describe('util/http/gitea', () => {
       'pagination-example-2',
       {
         paginate: true,
-      }
+      },
     );
     expect(res.body.data).toHaveLength(6);
     expect(res.body.data).toEqual(['abc', 'def', 'ghi', 'jkl', 'mno', 'pqr']);
@@ -74,7 +74,7 @@ describe('util/http/gitea', () => {
       'pagination-example-3',
       {
         paginate: true,
-      }
+      },
     );
     expect(res.body.data).toHaveLength(3);
     expect(res.body.data).toEqual(['abc', 'def', 'ghi']);
diff --git a/lib/util/http/gitea.ts b/lib/util/http/gitea.ts
index eb292e105663b7953b02a2eb41d15dc5b6fbb39c..c8296939e45f806f02356c1b5054f30c63fe88a1 100644
--- a/lib/util/http/gitea.ts
+++ b/lib/util/http/gitea.ts
@@ -41,7 +41,7 @@ export class GiteaHttp extends Http<GiteaHttpOptions> {
 
   protected override async request<T>(
     path: string,
-    options?: InternalHttpOptions & GiteaHttpOptions & HttpRequestOptions<T>
+    options?: InternalHttpOptions & GiteaHttpOptions & HttpRequestOptions<T>,
   ): Promise<HttpResponse<T>> {
     const resolvedUrl = resolveUrl(path, options?.baseUrl ?? baseUrl);
     const opts = {
diff --git a/lib/util/http/github.spec.ts b/lib/util/http/github.spec.ts
index 5d66f42fba1f520073ac3a0c28f2cbc2318b0df0..c2acafddbb7462d2fb6ba567275bdddb3892a58d 100644
--- a/lib/util/http/github.spec.ts
+++ b/lib/util/http/github.spec.ts
@@ -73,7 +73,7 @@ describe('util/http/github', () => {
       const [req] = httpMock.getTrace();
       expect(req).toBeDefined();
       expect(req.headers.accept).toBe(
-        'some-accept, application/vnd.github.machine-man-preview+json'
+        'some-accept, application/vnd.github.machine-man-preview+json',
       );
       expect(req.headers.authorization).toBe('token 123test');
     });
@@ -120,7 +120,7 @@ describe('util/http/github', () => {
           { the_field: ['a'], total: 4 },
           {
             link: `<${url}?page=2>; rel="next", <${url}?page=3>; rel="last"`,
-          }
+          },
         )
         .get(`${url}?page=2`)
         .reply(
@@ -128,7 +128,7 @@ describe('util/http/github', () => {
           { the_field: ['b', 'c'], total: 4 },
           {
             link: `<${url}?page=3>; rel="next", <${url}?page=3>; rel="last"`,
-          }
+          },
         )
         .get(`${url}?page=3`)
         .reply(200, { the_field: ['d'], total: 4 });
@@ -302,7 +302,7 @@ describe('util/http/github', () => {
       async function fail(
         code: number,
         body: any = undefined,
-        headers: httpMock.ReplyHeaders = {}
+        headers: httpMock.ReplyHeaders = {},
       ) {
         const url = '/some-url';
         httpMock
@@ -317,7 +317,7 @@ describe('util/http/github', () => {
               }
               return body;
             },
-            headers
+            headers,
           );
         await githubApi.getJson(url);
       }
@@ -330,15 +330,15 @@ describe('util/http/github', () => {
 
       it('should throw Not found', async () => {
         await expect(fail(404)).rejects.toThrow(
-          'Response code 404 (Not Found)'
+          'Response code 404 (Not Found)',
         );
       });
 
       it('should throw 410', async () => {
         await expect(
-          fail(410, { message: 'Issues are disabled for this repo' })
+          fail(410, { message: 'Issues are disabled for this repo' }),
         ).rejects.toThrow(
-          'Response code 410 (Issues are disabled for this repo)'
+          'Response code 410 (Issues are disabled for this repo)',
         );
       });
 
@@ -347,7 +347,7 @@ describe('util/http/github', () => {
           fail(403, {
             message:
               'Error updating branch: API rate limit exceeded for installation ID 48411. (403)',
-          })
+          }),
         ).rejects.toThrow(PLATFORM_RATE_LIMIT_EXCEEDED);
       });
 
@@ -356,13 +356,13 @@ describe('util/http/github', () => {
           fail(403, {
             message:
               'You have exceeded a secondary rate limit and have been temporarily blocked from content creation. Please retry your request again later.',
-          })
+          }),
         ).rejects.toThrow(PLATFORM_RATE_LIMIT_EXCEEDED);
       });
 
       it('should throw Bad credentials', async () => {
         await expect(
-          fail(401, { message: 'Bad credentials. (401)' })
+          fail(401, { message: 'Bad credentials. (401)' }),
         ).rejects.toThrow(PLATFORM_BAD_CREDENTIALS);
       });
 
@@ -373,8 +373,8 @@ describe('util/http/github', () => {
             { message: 'Bad credentials. (401)' },
             {
               'x-ratelimit-limit': '60',
-            }
-          )
+            },
+          ),
         ).rejects.toThrow(EXTERNAL_HOST_ERROR);
       });
 
@@ -383,7 +383,7 @@ describe('util/http/github', () => {
         for (let idx = 0; idx < codes.length; idx += 1) {
           const code = codes[idx];
           await expect(failWithError({ code })).rejects.toThrow(
-            EXTERNAL_HOST_ERROR
+            EXTERNAL_HOST_ERROR,
           );
         }
       });
@@ -398,13 +398,15 @@ describe('util/http/github', () => {
 
       it('should throw for unauthorized integration', async () => {
         await expect(
-          fail(403, { message: 'Resource not accessible by integration (403)' })
+          fail(403, {
+            message: 'Resource not accessible by integration (403)',
+          }),
         ).rejects.toThrow(PLATFORM_INTEGRATION_UNAUTHORIZED);
       });
 
       it('should throw for unauthorized integration2', async () => {
         await expect(
-          fail(403, { message: 'Upgrade to GitHub Pro' })
+          fail(403, { message: 'Upgrade to GitHub Pro' }),
         ).rejects.toThrow('Upgrade to GitHub Pro');
       });
 
@@ -412,7 +414,7 @@ describe('util/http/github', () => {
         await expect(
           fail(403, {
             message: 'You have triggered an abuse detection mechanism',
-          })
+          }),
         ).rejects.toThrow(PLATFORM_RATE_LIMIT_EXCEEDED);
       });
 
@@ -421,7 +423,7 @@ describe('util/http/github', () => {
           fail(422, {
             message: 'foobar',
             errors: [{ code: 'invalid' }],
-          })
+          }),
         ).rejects.toThrow(REPOSITORY_CHANGED);
       });
 
@@ -429,7 +431,7 @@ describe('util/http/github', () => {
         await expect(
           fail(422, {
             message: 'foobar',
-          })
+          }),
         ).rejects.toThrow(EXTERNAL_HOST_ERROR);
       });
 
@@ -437,9 +439,9 @@ describe('util/http/github', () => {
         await expect(
           fail(422, {
             message: 'Review cannot be requested from pull request author.',
-          })
+          }),
         ).rejects.toThrow(
-          'Review cannot be requested from pull request author.'
+          'Review cannot be requested from pull request author.',
         );
       });
 
@@ -448,7 +450,7 @@ describe('util/http/github', () => {
           fail(422, {
             message: 'Validation error',
             errors: [{ message: 'A pull request already exists' }],
-          })
+          }),
         ).rejects.toThrow('Validation error');
       });
 
@@ -456,7 +458,7 @@ describe('util/http/github', () => {
         await expect(
           fail(418, {
             message: 'Sorry, this is a teapot',
-          })
+          }),
         ).rejects.toThrow('Sorry, this is a teapot');
       });
     });
@@ -551,7 +553,7 @@ describe('util/http/github', () => {
       const [req] = httpMock.getTrace();
       expect(req).toBeDefined();
       expect(req.headers.accept).toBe(
-        'application/vnd.github.machine-man-preview+json'
+        'application/vnd.github.machine-man-preview+json',
       );
     });
 
@@ -567,7 +569,7 @@ describe('util/http/github', () => {
       expect(
         await githubApi.queryRepoField(graphqlQuery, 'testItem', {
           paginate: false,
-        })
+        }),
       ).toEqual([]);
     });
 
@@ -581,7 +583,7 @@ describe('util/http/github', () => {
       expect(
         await githubApi.queryRepoField(graphqlQuery, 'testItem', {
           paginate: false,
-        })
+        }),
       ).toEqual([]);
     });
 
@@ -590,7 +592,7 @@ describe('util/http/github', () => {
       await expect(
         githubApi.queryRepoField(graphqlQuery, 'someItem', {
           paginate: false,
-        })
+        }),
       ).rejects.toThrow("Response code 418 (I'm a Teapot)");
     });
 
@@ -605,7 +607,7 @@ describe('util/http/github', () => {
           },
         });
       expect(
-        await githubApi.queryRepoField(graphqlQuery, 'testItem')
+        await githubApi.queryRepoField(graphqlQuery, 'testItem'),
       ).toMatchInlineSnapshot(`[]`);
     });
 
@@ -760,7 +762,7 @@ describe('util/http/github', () => {
       await expect(
         githubApi.queryRepoField(graphqlQuery, 'testItem', {
           count: 9,
-        })
+        }),
       ).rejects.toThrow(EXTERNAL_HOST_ERROR);
     });
   });
diff --git a/lib/util/http/github.ts b/lib/util/http/github.ts
index 2090e9403cc6877a3d8fa9fb76ece719db8f8f0c..a7a7a6ce8ce4d6db0691cc961143674fa5f0280d 100644
--- a/lib/util/http/github.ts
+++ b/lib/util/http/github.ts
@@ -58,7 +58,7 @@ export type GithubGraphqlResponse<T = unknown> =
 function handleGotError(
   err: GotLegacyError,
   url: string | URL,
-  opts: GithubHttpOptions
+  opts: GithubHttpOptions,
 ): Error {
   const path = url.toString();
   let message = err.message || '';
@@ -112,7 +112,7 @@ function handleGotError(
   ) {
     logger.debug(
       { err },
-      'GitHub failure: Resource not accessible by integration'
+      'GitHub failure: Resource not accessible by integration',
     );
     return new Error(PLATFORM_INTEGRATION_UNAUTHORIZED);
   }
@@ -123,7 +123,7 @@ function handleGotError(
         token: maskToken(opts.token),
         err,
       },
-      'GitHub failure: Bad credentials'
+      'GitHub failure: Bad credentials',
     );
     if (rateLimit === '60') {
       return new ExternalHostError(err, 'github');
@@ -140,7 +140,7 @@ function handleGotError(
       return new Error(REPOSITORY_CHANGED);
     } else if (
       err.body?.errors?.find((e: any) =>
-        e.message?.startsWith('A pull request already exists')
+        e.message?.startsWith('A pull request already exists'),
       )
     ) {
       return err;
@@ -190,7 +190,7 @@ export type GraphqlPageCache = Record<string, GraphqlPageCacheItem>;
 
 function getGraphqlPageSize(
   fieldName: string,
-  defaultPageSize = MAX_GRAPHQL_PAGE_SIZE
+  defaultPageSize = MAX_GRAPHQL_PAGE_SIZE,
 ): number {
   const cache = getCache();
   const graphqlPageCache = cache?.platform?.github
@@ -200,7 +200,7 @@ function getGraphqlPageSize(
   if (graphqlPageCache && cachedRecord) {
     logger.debug(
       { fieldName, ...cachedRecord },
-      'GraphQL page size: found cached value'
+      'GraphQL page size: found cached value',
     );
 
     const oldPageSize = cachedRecord.pageSize;
@@ -215,7 +215,7 @@ function getGraphqlPageSize(
 
         logger.debug(
           { fieldName, oldPageSize, newPageSize, timestamp },
-          'GraphQL page size: expanding'
+          'GraphQL page size: expanding',
         );
 
         cachedRecord.pageLastResizedAt = timestamp;
@@ -223,7 +223,7 @@ function getGraphqlPageSize(
       } else {
         logger.debug(
           { fieldName, oldPageSize, newPageSize },
-          'GraphQL page size: expanded to default page size'
+          'GraphQL page size: expanded to default page size',
         );
 
         delete graphqlPageCache[fieldName];
@@ -245,7 +245,7 @@ function setGraphqlPageSize(fieldName: string, newPageSize: number): void {
     const pageLastResizedAt = now.toISO()!;
     logger.debug(
       { fieldName, oldPageSize, newPageSize, timestamp: pageLastResizedAt },
-      'GraphQL page size: shrinking'
+      'GraphQL page size: shrinking',
     );
     const cache = getCache();
     cache.platform ??= {};
@@ -273,7 +273,7 @@ export class GithubHttp extends Http<GithubHttpOptions> {
   protected override async request<T>(
     url: string | URL,
     options?: InternalHttpOptions & GithubHttpOptions & HttpRequestOptions<T>,
-    okToRetry = true
+    okToRetry = true,
   ): Promise<HttpResponse<T>> {
     const opts: GithubHttpOptions = {
       baseUrl,
@@ -291,13 +291,13 @@ export class GithubHttp extends Http<GithubHttpOptions> {
         authUrl.pathname = joinUrlParts(
           authUrl.pathname.startsWith('/api/v3') ? '/api/v3' : '',
           'repos',
-          `${opts.repository}`
+          `${opts.repository}`,
         );
       }
 
       const { token } = findMatchingRules(
         { hostType: this.hostType },
-        authUrl.toString()
+        authUrl.toString(),
       );
       opts.token = token;
     }
@@ -340,9 +340,9 @@ export class GithubHttp extends Http<GithubHttpOptions> {
               return this.request<T>(
                 nextUrl,
                 { ...opts, paginate: false },
-                okToRetry
+                okToRetry,
               );
-            }
+            },
           );
           const pages = await p.all(queue);
           if (opts.paginationField && is.plainObject(result.body)) {
@@ -374,7 +374,7 @@ export class GithubHttp extends Http<GithubHttpOptions> {
 
   public async requestGraphql<T = unknown>(
     query: string,
-    options: GraphqlOptions = {}
+    options: GraphqlOptions = {},
   ): Promise<GithubGraphqlResponse<T> | null> {
     const path = 'graphql';
 
@@ -415,7 +415,7 @@ export class GithubHttp extends Http<GithubHttpOptions> {
   async queryRepoField<T = Record<string, unknown>>(
     query: string,
     fieldName: string,
-    options: GraphqlOptions = {}
+    options: GraphqlOptions = {},
   ): Promise<T[]> {
     const result: T[] = [];
 
@@ -424,7 +424,7 @@ export class GithubHttp extends Http<GithubHttpOptions> {
     let optimalCount: null | number = null;
     let count = getGraphqlPageSize(
       fieldName,
-      options.count ?? MAX_GRAPHQL_PAGE_SIZE
+      options.count ?? MAX_GRAPHQL_PAGE_SIZE,
     );
     let limit = options.limit ?? 1000;
     let cursor: string | null = null;
diff --git a/lib/util/http/gitlab.spec.ts b/lib/util/http/gitlab.spec.ts
index 7a63dd6a114795516925a12859012322c0a2f0fe..ff3bcb5077c79011520e79ea8044bf2765d68e26 100644
--- a/lib/util/http/gitlab.spec.ts
+++ b/lib/util/http/gitlab.spec.ts
@@ -105,25 +105,25 @@ describe('util/http/gitlab', () => {
     it('403', async () => {
       httpMock.scope(gitlabApiHost).get('/api/v4/some-url').reply(403);
       await expect(
-        gitlabApi.get('some-url')
+        gitlabApi.get('some-url'),
       ).rejects.toThrowErrorMatchingInlineSnapshot(
-        `"Response code 403 (Forbidden)"`
+        `"Response code 403 (Forbidden)"`,
       );
     });
 
     it('404', async () => {
       httpMock.scope(gitlabApiHost).get('/api/v4/some-url').reply(404);
       await expect(
-        gitlabApi.get('some-url')
+        gitlabApi.get('some-url'),
       ).rejects.toThrowErrorMatchingInlineSnapshot(
-        `"Response code 404 (Not Found)"`
+        `"Response code 404 (Not Found)"`,
       );
     });
 
     it('500', async () => {
       httpMock.scope(gitlabApiHost).get('/api/v4/some-url').reply(500);
       await expect(gitlabApi.get('some-url')).rejects.toThrow(
-        EXTERNAL_HOST_ERROR
+        EXTERNAL_HOST_ERROR,
       );
     });
 
@@ -133,14 +133,14 @@ describe('util/http/gitlab', () => {
         .get('/api/v4/some-url')
         .replyWithError({ code: 'EAI_AGAIN' });
       await expect(gitlabApi.get('some-url')).rejects.toThrow(
-        EXTERNAL_HOST_ERROR
+        EXTERNAL_HOST_ERROR,
       );
     });
 
     it('ParseError', async () => {
       httpMock.scope(gitlabApiHost).get('/api/v4/some-url').reply(200, '{{');
       await expect(gitlabApi.getJson('some-url')).rejects.toThrow(
-        EXTERNAL_HOST_ERROR
+        EXTERNAL_HOST_ERROR,
       );
     });
   });
diff --git a/lib/util/http/gitlab.ts b/lib/util/http/gitlab.ts
index 9078bbeee0c08e2a38b9c2642010b77d41001934..770e9d1cf4a49d21af133273db5ce369f3154ac4 100644
--- a/lib/util/http/gitlab.ts
+++ b/lib/util/http/gitlab.ts
@@ -26,7 +26,7 @@ export class GitlabHttp extends Http<GitlabHttpOptions> {
 
   protected override async request<T>(
     url: string | URL,
-    options?: InternalHttpOptions & GitlabHttpOptions & HttpRequestOptions<T>
+    options?: InternalHttpOptions & GitlabHttpOptions & HttpRequestOptions<T>,
   ): Promise<HttpResponse<T>> {
     const opts = {
       baseUrl,
diff --git a/lib/util/http/hooks.spec.ts b/lib/util/http/hooks.spec.ts
index 002a79eba4a1dfb3a70f5c9bec0a88f08b822e0e..d9b0584cf7305f0ead534991c5ffc1298123c720 100644
--- a/lib/util/http/hooks.spec.ts
+++ b/lib/util/http/hooks.spec.ts
@@ -29,6 +29,6 @@ describe('util/http/hooks', () => {
       hook(response);
       const calledTimes = expected ? 1 : 0;
       expect(destroy).toHaveBeenCalledTimes(calledTimes);
-    }
+    },
   );
 });
diff --git a/lib/util/http/host-rules.spec.ts b/lib/util/http/host-rules.spec.ts
index 6b8667d70c78571b4756458c5bd9e02fb9fcb41c..adb46f9e4bc99f838e8e47a512d1f104488a1008 100644
--- a/lib/util/http/host-rules.spec.ts
+++ b/lib/util/http/host-rules.spec.ts
@@ -118,7 +118,7 @@ describe('util/http/host-rules', () => {
   it('uses http keepalives', () => {
     hostRules.add({ keepalive: true });
     expect(
-      applyHostRules(url, { ...options, token: 'xxx' }).agent
+      applyHostRules(url, { ...options, token: 'xxx' }).agent,
     ).toBeDefined();
   });
 
@@ -156,7 +156,7 @@ describe('util/http/host-rules', () => {
       applyHostRules('https://custom.datasource.ca/data/path', {
         ...options,
         hostType: 'maven',
-      })
+      }),
     ).toMatchInlineSnapshot(`
       {
         "hostType": "maven",
@@ -177,7 +177,7 @@ describe('util/http/host-rules', () => {
       applyHostRules('https://custom.datasource.key/data/path', {
         ...options,
         hostType: 'maven',
-      })
+      }),
     ).toMatchInlineSnapshot(`
       {
         "hostType": "maven",
@@ -199,7 +199,7 @@ describe('util/http/host-rules', () => {
       applyHostRules('https://custom.datasource.cert/data/path', {
         ...options,
         hostType: 'maven',
-      })
+      }),
     ).toMatchInlineSnapshot(`
       {
         "hostType": "maven",
@@ -230,14 +230,14 @@ describe('util/http/host-rules', () => {
       password: 'xxx',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'github-releases' })
+      applyHostRules(url, { ...options, hostType: 'github-releases' }),
     ).toEqual({
       hostType: 'github-releases',
       username: 'some',
       password: 'xxx',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'github-tags' })
+      applyHostRules(url, { ...options, hostType: 'github-tags' }),
     ).toEqual({
       hostType: 'github-tags',
       username: 'some2',
@@ -251,7 +251,7 @@ describe('util/http/host-rules', () => {
       token: 'pod-token',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'github-changelog' })
+      applyHostRules(url, { ...options, hostType: 'github-changelog' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -263,7 +263,7 @@ describe('util/http/host-rules', () => {
 
   it('fallback to github', () => {
     expect(
-      applyHostRules(url, { ...options, hostType: 'github-tags' })
+      applyHostRules(url, { ...options, hostType: 'github-tags' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -272,7 +272,7 @@ describe('util/http/host-rules', () => {
       token: 'token',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'github-changelog' })
+      applyHostRules(url, { ...options, hostType: 'github-changelog' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -303,7 +303,7 @@ describe('util/http/host-rules', () => {
       token: 'tags-token',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-tags' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-tags' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -312,7 +312,7 @@ describe('util/http/host-rules', () => {
       token: 'tags-token',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-releases' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-releases' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -321,7 +321,7 @@ describe('util/http/host-rules', () => {
       token: 'release-token',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-packages' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-packages' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -333,7 +333,7 @@ describe('util/http/host-rules', () => {
 
   it('fallback to gitlab', () => {
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-tags' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-tags' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -342,7 +342,7 @@ describe('util/http/host-rules', () => {
       token: 'abc',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-releases' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-releases' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -351,7 +351,7 @@ describe('util/http/host-rules', () => {
       token: 'abc',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-packages' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-packages' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -360,7 +360,7 @@ describe('util/http/host-rules', () => {
       token: 'abc',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'gitlab-changelog' })
+      applyHostRules(url, { ...options, hostType: 'gitlab-changelog' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -377,7 +377,7 @@ describe('util/http/host-rules', () => {
       password: 'xxx',
     });
     expect(
-      applyHostRules(url, { ...options, hostType: 'bitbucket-tags' })
+      applyHostRules(url, { ...options, hostType: 'bitbucket-tags' }),
     ).toEqual({
       hostType: 'bitbucket-tags',
       username: 'some',
@@ -387,7 +387,7 @@ describe('util/http/host-rules', () => {
 
   it('fallback to bitbucket', () => {
     expect(
-      applyHostRules(url, { ...options, hostType: 'bitbucket-tags' })
+      applyHostRules(url, { ...options, hostType: 'bitbucket-tags' }),
     ).toEqual({
       context: {
         authType: undefined,
@@ -409,7 +409,7 @@ describe('util/http/host-rules', () => {
         },
         hostType: 'gitea-tags',
         token: 'abc',
-      }
+      },
     );
   });
 
@@ -419,7 +419,7 @@ describe('util/http/host-rules', () => {
         hostType: 'gitea-tags',
         password: 'password',
         username: undefined,
-      }
+      },
     );
   });
 });
diff --git a/lib/util/http/host-rules.ts b/lib/util/http/host-rules.ts
index 577afb5976e667614247d6d86e2ea52f191f9f1f..45855dd4bf5db79294882565f0d31b8425ae1ac3 100644
--- a/lib/util/http/host-rules.ts
+++ b/lib/util/http/host-rules.ts
@@ -35,7 +35,7 @@ export type HostRulesGotOptions = Pick<
 
 export function findMatchingRules<GotOptions extends HostRulesGotOptions>(
   options: GotOptions,
-  url: string
+  url: string,
 ): HostRule {
   const { hostType } = options;
   let res = hostRules.find({ hostType, url });
@@ -115,7 +115,7 @@ export function findMatchingRules<GotOptions extends HostRulesGotOptions>(
 // Apply host rules to requests
 export function applyHostRules<GotOptions extends HostRulesGotOptions>(
   url: string,
-  inOptions: GotOptions
+  inOptions: GotOptions,
 ): GotOptions {
   const options: GotOptions = { ...inOptions };
   const foundRules = findMatchingRules(options, url);
diff --git a/lib/util/http/index.spec.ts b/lib/util/http/index.spec.ts
index 304a368c6f641c72a83d8c519465e91587502f6b..a2e0f121ba34833b5ce119fbaabb0a7f08609152 100644
--- a/lib/util/http/index.spec.ts
+++ b/lib/util/http/index.spec.ts
@@ -38,7 +38,7 @@ describe('util/http/index', () => {
   it('returns 429 error', async () => {
     httpMock.scope(baseUrl).get('/test').reply(429);
     await expect(http.get('http://renovate.com/test')).rejects.toThrow(
-      'Response code 429 (Too Many Requests)'
+      'Response code 429 (Too Many Requests)',
     );
     expect(httpMock.allUsed()).toBeTrue();
   });
@@ -47,7 +47,7 @@ describe('util/http/index', () => {
     httpMock.scope(baseUrl).get('/test').reply(404);
     hostRules.add({ abortOnError: true });
     await expect(http.get('http://renovate.com/test')).rejects.toThrow(
-      EXTERNAL_HOST_ERROR
+      EXTERNAL_HOST_ERROR,
     );
     expect(httpMock.allUsed()).toBeTrue();
   });
@@ -55,7 +55,7 @@ describe('util/http/index', () => {
   it('disables hosts', async () => {
     hostRules.add({ matchHost: 'renovate.com', enabled: false });
     await expect(http.get('http://renovate.com/test')).rejects.toThrow(
-      HOST_DISABLED
+      HOST_DISABLED,
     );
   });
 
@@ -63,7 +63,7 @@ describe('util/http/index', () => {
     httpMock.scope(baseUrl).get('/test').reply(404);
     hostRules.add({ abortOnError: true, abortIgnoreStatusCodes: [404] });
     await expect(http.get('http://renovate.com/test')).rejects.toThrow(
-      'Response code 404 (Not Found)'
+      'Response code 404 (Not Found)',
     );
     expect(httpMock.allUsed()).toBeTrue();
   });
@@ -90,7 +90,7 @@ describe('util/http/index', () => {
   it('postJson', async () => {
     httpMock.scope(baseUrl).post('/').reply(200, {});
     expect(
-      await http.postJson('http://renovate.com', { body: {}, baseUrl })
+      await http.postJson('http://renovate.com', { body: {}, baseUrl }),
     ).toEqual({
       authorization: false,
       body: {},
@@ -105,7 +105,7 @@ describe('util/http/index', () => {
   it('putJson', async () => {
     httpMock.scope(baseUrl).put('/').reply(200, {});
     expect(
-      await http.putJson('http://renovate.com', { body: {}, baseUrl })
+      await http.putJson('http://renovate.com', { body: {}, baseUrl }),
     ).toEqual({
       authorization: false,
       body: {},
@@ -120,7 +120,7 @@ describe('util/http/index', () => {
   it('patchJson', async () => {
     httpMock.scope(baseUrl).patch('/').reply(200, {});
     expect(
-      await http.patchJson('http://renovate.com', { body: {}, baseUrl })
+      await http.patchJson('http://renovate.com', { body: {}, baseUrl }),
     ).toEqual({
       authorization: false,
       body: {},
@@ -135,7 +135,7 @@ describe('util/http/index', () => {
   it('deleteJson', async () => {
     httpMock.scope(baseUrl).delete('/').reply(200, {});
     expect(
-      await http.deleteJson('http://renovate.com', { body: {}, baseUrl })
+      await http.deleteJson('http://renovate.com', { body: {}, baseUrl }),
     ).toEqual({
       authorization: false,
       body: {},
@@ -189,7 +189,7 @@ describe('util/http/index', () => {
     hostRules.add({ matchHost: 'renovate.com', enabled: false });
 
     expect(() => http.stream('http://renovate.com/test')).toThrow(
-      HOST_DISABLED
+      HOST_DISABLED,
     );
   });
 
@@ -341,7 +341,7 @@ describe('util/http/index', () => {
         const { body }: HttpResponse<string> = await http.getJson(
           'http://renovate.com',
           { headers: { accept: 'application/json' } },
-          SomeSchema
+          SomeSchema,
         );
 
         expect(body).toBe('2 + 2 = 4');
@@ -359,7 +359,7 @@ describe('util/http/index', () => {
           .reply(200, JSON.stringify({ foo: 'bar' }));
 
         await expect(
-          http.getJson('http://renovate.com', SomeSchema)
+          http.getJson('http://renovate.com', SomeSchema),
         ).rejects.toThrow(z.ZodError);
       });
     });
@@ -414,7 +414,7 @@ describe('util/http/index', () => {
 
         const { body }: HttpResponse<string> = await http.postJson(
           'http://renovate.com',
-          SomeSchema
+          SomeSchema,
         );
 
         expect(body).toBe('2 + 2 = 4');
@@ -428,7 +428,7 @@ describe('util/http/index', () => {
           .reply(200, JSON.stringify({ foo: 'bar' }));
 
         await expect(
-          http.postJson('http://renovate.com', SomeSchema)
+          http.postJson('http://renovate.com', SomeSchema),
         ).rejects.toThrow(z.ZodError);
       });
     });
@@ -509,7 +509,7 @@ describe('util/http/index', () => {
             data,
           },
         },
-        FooBar
+        FooBar,
       );
 
       expect(res.statusCode).toBe(304);
diff --git a/lib/util/http/index.ts b/lib/util/http/index.ts
index da1d7983f849342c3208b7e0ce493e38c6c529c2..1cfe45b61b2244d522dd2e4b7372e3c4d08ff920 100644
--- a/lib/util/http/index.ts
+++ b/lib/util/http/index.ts
@@ -36,7 +36,7 @@ export type SafeJsonError = RequestError | ZodError | EmptyResultError;
 type JsonArgs<
   Opts extends HttpOptions & HttpRequestOptions<ResT>,
   ResT = unknown,
-  Schema extends ZodType<ResT> = ZodType<ResT>
+  Schema extends ZodType<ResT> = ZodType<ResT>,
 > = {
   url: string;
   httpOptions?: Opts;
@@ -49,7 +49,7 @@ type Task<T> = () => Promise<HttpResponse<T>>;
 // and mutation of the cached response.
 function copyResponse<T>(
   response: HttpResponse<T>,
-  deep: boolean
+  deep: boolean,
 ): HttpResponse<T> {
   const { body, statusCode, headers } = response;
   return deep
@@ -83,7 +83,7 @@ function applyDefaultHeaders(options: Options): void {
 async function gotTask<T>(
   url: string,
   options: SetRequired<GotOptions, 'method'>,
-  requestStats: Omit<RequestStats, 'duration' | 'statusCode'>
+  requestStats: Omit<RequestStats, 'duration' | 'statusCode'>,
 ): Promise<HttpResponse<T>> {
   logger.trace({ url, options }, 'got request');
 
@@ -112,7 +112,7 @@ async function gotTask<T>(
       const retryCount =
         error.request?.retryCount ?? /* istanbul ignore next */ -1;
       logger.debug(
-        `${method} ${url} = (code=${code}, statusCode=${statusCode} retryCount=${retryCount}, duration=${duration})`
+        `${method} ${url} = (code=${code}, statusCode=${statusCode} retryCount=${retryCount}, duration=${duration})`,
       );
     }
 
@@ -137,7 +137,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
 
   protected async request<T>(
     requestUrl: string | URL,
-    httpOptions: InternalHttpOptions & HttpRequestOptions<T>
+    httpOptions: InternalHttpOptions & HttpRequestOptions<T>,
   ): Promise<HttpResponse<T>> {
     let url = requestUrl.toString();
     if (httpOptions?.baseUrl) {
@@ -150,7 +150,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
         ...this.options,
         hostType: this.hostType,
       },
-      httpOptions
+      httpOptions,
     );
 
     const etagCache =
@@ -189,7 +189,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
               url,
               headers: options.headers,
               method: options.method,
-            })}`
+            })}`,
           )
         : null;
 
@@ -246,7 +246,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
 
   get(
     url: string,
-    options: HttpOptions & HttpRequestOptions<string> = {}
+    options: HttpOptions & HttpRequestOptions<string> = {},
   ): Promise<HttpResponse> {
     return this.request<string>(url, options);
   }
@@ -257,7 +257,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
 
   getBuffer(
     url: string,
-    options: HttpOptions = {}
+    options: HttpOptions = {},
   ): Promise<HttpResponse<Buffer>> {
     return this.request<Buffer>(url, {
       ...options,
@@ -271,7 +271,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
       url,
       httpOptions: requestOptions,
       schema,
-    }: JsonArgs<Opts & HttpRequestOptions<ResT>, ResT>
+    }: JsonArgs<Opts & HttpRequestOptions<ResT>, ResT>,
   ): Promise<HttpResponse<ResT>> {
     const { body, ...httpOptions } = { ...requestOptions };
     const opts: InternalHttpOptions = {
@@ -312,7 +312,7 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
   private resolveArgs<ResT = unknown>(
     arg1: string,
     arg2: Opts | ZodType<ResT> | undefined,
-    arg3: ZodType<ResT> | undefined
+    arg3: ZodType<ResT> | undefined,
   ): JsonArgs<Opts, ResT> {
     const res: JsonArgs<Opts, ResT> = { url: arg1 };
 
@@ -331,21 +331,21 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
 
   getJson<ResT>(
     url: string,
-    options?: Opts & HttpRequestOptions<ResT>
+    options?: Opts & HttpRequestOptions<ResT>,
   ): Promise<HttpResponse<ResT>>;
   getJson<ResT, Schema extends ZodType<ResT> = ZodType<ResT>>(
     url: string,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   getJson<ResT, Schema extends ZodType<ResT> = ZodType<ResT>>(
     url: string,
     options: Opts & HttpRequestOptions<Infer<Schema>>,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   getJson<ResT = unknown, Schema extends ZodType<ResT> = ZodType<ResT>>(
     arg1: string,
     arg2?: (Opts & HttpRequestOptions<ResT>) | Schema,
-    arg3?: Schema
+    arg3?: Schema,
   ): Promise<HttpResponse<ResT>> {
     const args = this.resolveArgs<ResT>(arg1, arg2, arg3);
     return this.requestJson<ResT>('get', args);
@@ -353,27 +353,27 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
 
   getJsonSafe<
     ResT extends NonNullable<unknown>,
-    Schema extends ZodType<ResT> = ZodType<ResT>
+    Schema extends ZodType<ResT> = ZodType<ResT>,
   >(url: string, schema: Schema): AsyncResult<Infer<Schema>, SafeJsonError>;
   getJsonSafe<
     ResT extends NonNullable<unknown>,
-    Schema extends ZodType<ResT> = ZodType<ResT>
+    Schema extends ZodType<ResT> = ZodType<ResT>,
   >(
     url: string,
     options: Opts & HttpRequestOptions<Infer<Schema>>,
-    schema: Schema
+    schema: Schema,
   ): AsyncResult<Infer<Schema>, SafeJsonError>;
   getJsonSafe<
     ResT extends NonNullable<unknown>,
-    Schema extends ZodType<ResT> = ZodType<ResT>
+    Schema extends ZodType<ResT> = ZodType<ResT>,
   >(
     arg1: string,
     arg2?: (Opts & HttpRequestOptions<ResT>) | Schema,
-    arg3?: Schema
+    arg3?: Schema,
   ): AsyncResult<ResT, SafeJsonError> {
     const args = this.resolveArgs<ResT>(arg1, arg2, arg3);
     return Result.wrap(this.requestJson<ResT>('get', args)).transform(
-      (response) => Result.ok(response.body)
+      (response) => Result.ok(response.body),
     );
   }
 
@@ -384,17 +384,17 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
   postJson<T>(url: string, options?: Opts): Promise<HttpResponse<T>>;
   postJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   postJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
     options: Opts,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   postJson<T = unknown, Schema extends ZodType<T> = ZodType<T>>(
     arg1: string,
     arg2?: Opts | Schema,
-    arg3?: Schema
+    arg3?: Schema,
   ): Promise<HttpResponse<T>> {
     const args = this.resolveArgs(arg1, arg2, arg3);
     return this.requestJson<T>('post', args);
@@ -403,17 +403,17 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
   putJson<T>(url: string, options?: Opts): Promise<HttpResponse<T>>;
   putJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   putJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
     options: Opts,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   putJson<T = unknown, Schema extends ZodType<T> = ZodType<T>>(
     arg1: string,
     arg2?: Opts | Schema,
-    arg3?: ZodType
+    arg3?: ZodType,
   ): Promise<HttpResponse<T>> {
     const args = this.resolveArgs(arg1, arg2, arg3);
     return this.requestJson<T>('put', args);
@@ -422,17 +422,17 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
   patchJson<T>(url: string, options?: Opts): Promise<HttpResponse<T>>;
   patchJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   patchJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
     options: Opts,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   patchJson<T = unknown, Schema extends ZodType<T> = ZodType<T>>(
     arg1: string,
     arg2?: Opts | Schema,
-    arg3?: Schema
+    arg3?: Schema,
   ): Promise<HttpResponse<T>> {
     const args = this.resolveArgs(arg1, arg2, arg3);
     return this.requestJson<T>('patch', args);
@@ -441,17 +441,17 @@ export class Http<Opts extends HttpOptions = HttpOptions> {
   deleteJson<T>(url: string, options?: Opts): Promise<HttpResponse<T>>;
   deleteJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   deleteJson<T, Schema extends ZodType<T> = ZodType<T>>(
     url: string,
     options: Opts,
-    schema: Schema
+    schema: Schema,
   ): Promise<HttpResponse<Infer<Schema>>>;
   deleteJson<T = unknown, Schema extends ZodType<T> = ZodType<T>>(
     arg1: string,
     arg2?: Opts | Schema,
-    arg3?: Schema
+    arg3?: Schema,
   ): Promise<HttpResponse<T>> {
     const args = this.resolveArgs(arg1, arg2, arg3);
     return this.requestJson<T>('delete', args);
diff --git a/lib/util/http/jira.spec.ts b/lib/util/http/jira.spec.ts
index 4163f3b1660af656b0e504c357aa3259422d346f..e84b2e5f2dc6910a154c7c3a42df1f236c74d45d 100644
--- a/lib/util/http/jira.spec.ts
+++ b/lib/util/http/jira.spec.ts
@@ -6,7 +6,7 @@ describe('util/http/jira', () => {
 
   it('throws error if setBaseUrl not called', async () => {
     await expect(api.postJson('some-path')).rejects.toThrow(
-      new TypeError('Invalid URL')
+      new TypeError('Invalid URL'),
     );
   });
 
diff --git a/lib/util/http/jira.ts b/lib/util/http/jira.ts
index 1137cc33cce8eb6f299c48c172f587abf944d56d..7463d7b6ebb1a7dd2f5936365a3c34a9125ee475 100644
--- a/lib/util/http/jira.ts
+++ b/lib/util/http/jira.ts
@@ -19,7 +19,7 @@ export class JiraHttp extends Http {
 
   protected override request<T>(
     url: string | URL,
-    options?: InternalHttpOptions & HttpRequestOptions<T>
+    options?: InternalHttpOptions & HttpRequestOptions<T>,
   ): Promise<HttpResponse<T>> {
     const opts = { baseUrl, ...options };
     return super.request<T>(url, opts);
diff --git a/lib/util/json-writer/editor-config.ts b/lib/util/json-writer/editor-config.ts
index 57aa6aa671f88235b4d3155bf76b7464658c5200..5216039a1f550a9afa41db3d268f9ae4333a566a 100644
--- a/lib/util/json-writer/editor-config.ts
+++ b/lib/util/json-writer/editor-config.ts
@@ -21,7 +21,7 @@ export class EditorConfig {
   }
 
   private static getIndentationType(
-    knownProps: KnownProps
+    knownProps: KnownProps,
   ): IndentationType | undefined {
     const { indent_style: indentStyle } = knownProps;
 
@@ -37,7 +37,7 @@ export class EditorConfig {
   }
 
   private static getIndentationSize(
-    knownProps: KnownProps
+    knownProps: KnownProps,
   ): number | undefined {
     const indentSize = Number(knownProps.indent_size);
 
diff --git a/lib/util/markdown.ts b/lib/util/markdown.ts
index b63b7deca7d8ec4e336b5fa4fc5c9579f573edf9..35e963af819f1dca47b53e5ceb79835e1648dafe 100644
--- a/lib/util/markdown.ts
+++ b/lib/util/markdown.ts
@@ -20,7 +20,7 @@ export function sanitizeMarkdown(markdown: string): string {
   res = res.replace(regEx(/`#&#8203;(\d+)`/g), '`#$1`');
   res = res.replace(
     regEx(/(?<before>[^\n]\n)(?<title>#.*)/g),
-    '$<before>\n$<title>'
+    '$<before>\n$<title>',
   );
   return res;
 }
@@ -33,7 +33,7 @@ export function sanitizeMarkdown(markdown: string): string {
  */
 export async function linkify(
   content: string,
-  options: github.RemarkGithubOptions
+  options: github.RemarkGithubOptions,
 ): Promise<string> {
   // https://github.com/syntax-tree/mdast-util-to-markdown#optionsbullet
   const output = await remark()
diff --git a/lib/util/merge-confidence/index.spec.ts b/lib/util/merge-confidence/index.spec.ts
index 2fc566b37c424bac32eb385b7bcd676ffa73837b..a347fd0170f3066d52750811308a259e0a703fb9 100644
--- a/lib/util/merge-confidence/index.spec.ts
+++ b/lib/util/merge-confidence/index.spec.ts
@@ -75,8 +75,8 @@ describe('util/merge-confidence/index', () => {
             'renovate',
             '25.0.0',
             '25.0.0',
-            undefined as never
-          )
+            undefined as never,
+          ),
         ).toBe('neutral');
       });
 
@@ -87,8 +87,8 @@ describe('util/merge-confidence/index', () => {
             'renovate',
             '24.1.0',
             '25.0.0',
-            'bump'
-          )
+            'bump',
+          ),
         ).toBe('neutral');
       });
 
@@ -99,8 +99,8 @@ describe('util/merge-confidence/index', () => {
             'renovate',
             '25.0.1',
             '25.0.1',
-            'pin'
-          )
+            'pin',
+          ),
         ).toBe('high');
       });
 
@@ -114,8 +114,8 @@ describe('util/merge-confidence/index', () => {
             'renovate',
             '24.2.0',
             '25.0.0',
-            'major'
-          )
+            'major',
+          ),
         ).toBeUndefined();
       });
 
@@ -126,8 +126,8 @@ describe('util/merge-confidence/index', () => {
             'renovate',
             '24.2.0',
             '25.0.0',
-            'major'
-          )
+            'major',
+          ),
         ).toBeUndefined();
       });
 
@@ -139,7 +139,7 @@ describe('util/merge-confidence/index', () => {
         httpMock
           .scope(apiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${depName}/${currentVersion}/${newVersion}`
+            `/api/mc/json/${datasource}/${depName}/${currentVersion}/${newVersion}`,
           )
           .reply(200, { confidence: 'high' });
 
@@ -149,8 +149,8 @@ describe('util/merge-confidence/index', () => {
             depName,
             currentVersion,
             newVersion,
-            'major'
-          )
+            'major',
+          ),
         ).toBe('high');
       });
 
@@ -163,7 +163,7 @@ describe('util/merge-confidence/index', () => {
         httpMock
           .scope(apiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${escapedPackageName}/${currentVersion}/${newVersion}`
+            `/api/mc/json/${datasource}/${escapedPackageName}/${currentVersion}/${newVersion}`,
           )
           .reply(200, { confidence: 'high' });
 
@@ -173,8 +173,8 @@ describe('util/merge-confidence/index', () => {
             packageName,
             currentVersion,
             newVersion,
-            'major'
-          )
+            'major',
+          ),
         ).toBe('high');
       });
 
@@ -186,7 +186,7 @@ describe('util/merge-confidence/index', () => {
         httpMock
           .scope(apiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${depName}/${currentVersion}/${newVersion}`
+            `/api/mc/json/${datasource}/${depName}/${currentVersion}/${newVersion}`,
           )
           .reply(200, { invalid: 'invalid' });
 
@@ -196,8 +196,8 @@ describe('util/merge-confidence/index', () => {
             depName,
             currentVersion,
             newVersion,
-            'minor'
-          )
+            'minor',
+          ),
         ).toBe('neutral');
       });
 
@@ -209,7 +209,7 @@ describe('util/merge-confidence/index', () => {
         httpMock
           .scope(apiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${depName}/${currentVersion}/${newVersion}`
+            `/api/mc/json/${datasource}/${depName}/${currentVersion}/${newVersion}`,
           )
           .reply(400);
 
@@ -219,12 +219,12 @@ describe('util/merge-confidence/index', () => {
             depName,
             currentVersion,
             newVersion,
-            'minor'
-          )
+            'minor',
+          ),
         ).toBe('neutral');
         expect(logger.warn).toHaveBeenCalledWith(
           expect.anything(),
-          'error fetching merge confidence data'
+          'error fetching merge confidence data',
         );
       });
 
@@ -236,7 +236,7 @@ describe('util/merge-confidence/index', () => {
         httpMock
           .scope(apiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${packageName}/${currentVersion}/${newVersion}`
+            `/api/mc/json/${datasource}/${packageName}/${currentVersion}/${newVersion}`,
           )
           .reply(403);
 
@@ -246,12 +246,12 @@ describe('util/merge-confidence/index', () => {
             packageName,
             currentVersion,
             newVersion,
-            'minor'
-          )
+            'minor',
+          ),
         ).rejects.toThrow(EXTERNAL_HOST_ERROR);
         expect(logger.error).toHaveBeenCalledWith(
           expect.anything(),
-          'merge confidence API token rejected - aborting run'
+          'merge confidence API token rejected - aborting run',
         );
       });
 
@@ -263,7 +263,7 @@ describe('util/merge-confidence/index', () => {
         httpMock
           .scope(apiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${packageName}/${currentVersion}/${newVersion}`
+            `/api/mc/json/${datasource}/${packageName}/${currentVersion}/${newVersion}`,
           )
           .reply(503);
 
@@ -273,12 +273,12 @@ describe('util/merge-confidence/index', () => {
             packageName,
             currentVersion,
             newVersion,
-            'minor'
-          )
+            'minor',
+          ),
         ).rejects.toThrow(EXTERNAL_HOST_ERROR);
         expect(logger.error).toHaveBeenCalledWith(
           expect.anything(),
-          'merge confidence API failure: 5xx - aborting run'
+          'merge confidence API failure: 5xx - aborting run',
         );
       });
 
@@ -289,8 +289,8 @@ describe('util/merge-confidence/index', () => {
             'renovate',
             '25.0.1',
             '25.0.1',
-            'pinDigest'
-          )
+            'pinDigest',
+          ),
         ).toBe('high');
       });
     });
@@ -306,10 +306,10 @@ describe('util/merge-confidence/index', () => {
 
         await expect(initMergeConfidence()).toResolve();
         expect(logger.trace).toHaveBeenCalledWith(
-          'using default merge confidence API base URL'
+          'using default merge confidence API base URL',
         );
         expect(logger.debug).toHaveBeenCalledWith(
-          'merge confidence API - successfully authenticated'
+          'merge confidence API - successfully authenticated',
         );
       });
 
@@ -325,10 +325,10 @@ describe('util/merge-confidence/index', () => {
         await expect(initMergeConfidence()).toResolve();
         expect(logger.warn).toHaveBeenCalledWith(
           expect.anything(),
-          'invalid merge confidence API base URL found in environment variables - using default value instead'
+          'invalid merge confidence API base URL found in environment variables - using default value instead',
         );
         expect(logger.debug).toHaveBeenCalledWith(
-          'merge confidence API - successfully authenticated'
+          'merge confidence API - successfully authenticated',
         );
       });
 
@@ -338,7 +338,7 @@ describe('util/merge-confidence/index', () => {
 
         await expect(initMergeConfidence()).toResolve();
         expect(logger.trace).toHaveBeenCalledWith(
-          'merge confidence API usage is disabled'
+          'merge confidence API usage is disabled',
         );
       });
 
@@ -347,7 +347,7 @@ describe('util/merge-confidence/index', () => {
 
         await expect(initMergeConfidence()).toResolve();
         expect(logger.debug).toHaveBeenCalledWith(
-          'merge confidence API - successfully authenticated'
+          'merge confidence API - successfully authenticated',
         );
       });
 
@@ -355,11 +355,11 @@ describe('util/merge-confidence/index', () => {
         httpMock.scope(apiBaseUrl).get(`/api/mc/availability`).reply(403);
 
         await expect(initMergeConfidence()).rejects.toThrow(
-          EXTERNAL_HOST_ERROR
+          EXTERNAL_HOST_ERROR,
         );
         expect(logger.error).toHaveBeenCalledWith(
           expect.anything(),
-          'merge confidence API token rejected - aborting run'
+          'merge confidence API token rejected - aborting run',
         );
       });
 
@@ -367,11 +367,11 @@ describe('util/merge-confidence/index', () => {
         httpMock.scope(apiBaseUrl).get(`/api/mc/availability`).reply(503);
 
         await expect(initMergeConfidence()).rejects.toThrow(
-          EXTERNAL_HOST_ERROR
+          EXTERNAL_HOST_ERROR,
         );
         expect(logger.error).toHaveBeenCalledWith(
           expect.anything(),
-          'merge confidence API failure: 5xx - aborting run'
+          'merge confidence API failure: 5xx - aborting run',
         );
       });
 
@@ -382,11 +382,11 @@ describe('util/merge-confidence/index', () => {
           .replyWithError({ code: 'ECONNRESET' });
 
         await expect(initMergeConfidence()).rejects.toThrow(
-          EXTERNAL_HOST_ERROR
+          EXTERNAL_HOST_ERROR,
         );
         expect(logger.error).toHaveBeenCalledWith(
           expect.anything(),
-          'merge confidence API request failed - aborting run'
+          'merge confidence API request failed - aborting run',
         );
       });
     });
diff --git a/lib/util/merge-confidence/index.ts b/lib/util/merge-confidence/index.ts
index 70612faa564b366b06b6cb7452b86ec2f121b02e..e903d32cab0f4b85b9672ffbe1e818a8cce5807a 100644
--- a/lib/util/merge-confidence/index.ts
+++ b/lib/util/merge-confidence/index.ts
@@ -45,7 +45,7 @@ export function isActiveConfidenceLevel(confidence: string): boolean {
 
 export function satisfiesConfidenceLevel(
   confidence: MergeConfidence,
-  minimumConfidence: MergeConfidence
+  minimumConfidence: MergeConfidence,
 ): boolean {
   return confidenceLevels[confidence] >= confidenceLevels[minimumConfidence];
 }
@@ -82,7 +82,7 @@ export async function getMergeConfidenceLevel(
   packageName: string,
   currentVersion: string,
   newVersion: string,
-  updateType: UpdateType
+  updateType: UpdateType,
 ): Promise<MergeConfidence | undefined> {
   if (is.nullOrUndefined(apiBaseUrl) || is.nullOrUndefined(token)) {
     return undefined;
@@ -122,7 +122,7 @@ async function queryApi(
   datasource: string,
   packageName: string,
   currentVersion: string,
-  newVersion: string
+  newVersion: string,
 ): Promise<MergeConfidence> {
   // istanbul ignore if: defensive, already been validated before calling this function
   if (is.nullOrUndefined(apiBaseUrl) || is.nullOrUndefined(token)) {
@@ -144,7 +144,7 @@ async function queryApi(
         newVersion,
         cachedResult,
       },
-      'using merge confidence cached result'
+      'using merge confidence cached result',
     );
     return cachedResult;
   }
@@ -207,13 +207,13 @@ function getApiBaseUrl(): string {
     const parsedBaseUrl = new URL(baseFromEnv).toString();
     logger.trace(
       { baseUrl: parsedBaseUrl },
-      'using merge confidence API base found in environment variables'
+      'using merge confidence API base found in environment variables',
     );
     return parsedBaseUrl;
   } catch (err) {
     logger.warn(
       { err, baseFromEnv },
-      'invalid merge confidence API base URL found in environment variables - using default value instead'
+      'invalid merge confidence API base URL found in environment variables - using default value instead',
     );
     return defaultBaseUrl;
   }
diff --git a/lib/util/minimatch.spec.ts b/lib/util/minimatch.spec.ts
index 5414aacdc765603896f6f4073b61a9949aec5f09..bce5b88810f4d11102e653b3895ac6a5e083e99d 100644
--- a/lib/util/minimatch.spec.ts
+++ b/lib/util/minimatch.spec.ts
@@ -4,13 +4,13 @@ describe('util/minimatch', () => {
   it('caches minimatch', () => {
     expect(minimatch('foo')).toBe(minimatch('foo'));
     expect(minimatch('foo', { dot: true })).toBe(
-      minimatch('foo', { dot: true })
+      minimatch('foo', { dot: true }),
     );
   });
 
   it('does not cache minimatch', () => {
     expect(minimatch('foo', undefined, false)).not.toBe(
-      minimatch('foo', undefined, false)
+      minimatch('foo', undefined, false),
     );
     expect(minimatch('foo')).not.toBe(minimatch('foo', undefined, false));
     expect(minimatch('foo', { dot: true })).not.toBe(minimatch('foo'));
diff --git a/lib/util/minimatch.ts b/lib/util/minimatch.ts
index 0d915d8be909bdd2cee4341b24b1f002c2941051..1121186b4cd9d07dbdc81935892493b99dae688f 100644
--- a/lib/util/minimatch.ts
+++ b/lib/util/minimatch.ts
@@ -5,7 +5,7 @@ const cache = new Map<string, Minimatch>();
 export function minimatch(
   pattern: string,
   options?: MinimatchOptions,
-  useCache = true
+  useCache = true,
 ): Minimatch {
   const key = options ? `${pattern}:${JSON.stringify(options)}` : pattern;
 
diff --git a/lib/util/modules.ts b/lib/util/modules.ts
index 088d2e853d3bbf225d4759d6ae3401450c14118b..f64975ed5b2bb8265ac5f74e706d186af30483b0 100644
--- a/lib/util/modules.ts
+++ b/lib/util/modules.ts
@@ -27,7 +27,7 @@ function relatePath(here: string, there: string): string {
 export function loadModules<T>(
   dirname: string,
   validate?: (module: T, moduleName: string) => boolean,
-  filter: (moduleName: string) => boolean = () => true
+  filter: (moduleName: string) => boolean = () => true,
 ): Record<string, T> {
   const result: Record<string, T> = {};
 
diff --git a/lib/util/number.ts b/lib/util/number.ts
index 2fb488c9d3eac5ac802d32b3b7037d8f6a36994f..3c8bac99f5d7cdab4b436dabe5c08af9a2c6abc8 100644
--- a/lib/util/number.ts
+++ b/lib/util/number.ts
@@ -6,7 +6,7 @@
  */
 export function coerceNumber(
   val: number | null | undefined,
-  def?: number
+  def?: number,
 ): number {
   return val ?? def ?? 0;
 }
diff --git a/lib/util/object.ts b/lib/util/object.ts
index a9c22dc9c093e81441996278f2cd45926bb33969..28498dead067f556990329ccecd0677ec5a6a375 100644
--- a/lib/util/object.ts
+++ b/lib/util/object.ts
@@ -7,7 +7,7 @@
  */
 export function hasKey<K extends string, T>(
   k: K,
-  o: T
+  o: T,
 ): o is T & Record<K, unknown> {
   return o && typeof o === 'object' && k in o;
 }
diff --git a/lib/util/package-rules/base-branches.ts b/lib/util/package-rules/base-branches.ts
index e712b6f3e72ccf14b7f5cfd802fce54fd2d5af79..dd6241ad2f7ca7b0d58da4a9ce91e5deca58fb45 100644
--- a/lib/util/package-rules/base-branches.ts
+++ b/lib/util/package-rules/base-branches.ts
@@ -6,7 +6,7 @@ import { Matcher } from './base';
 export class BaseBranchesMatcher extends Matcher {
   override matches(
     { baseBranch }: PackageRuleInputConfig,
-    { matchBaseBranches }: PackageRule
+    { matchBaseBranches }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchBaseBranches)) {
       return null;
diff --git a/lib/util/package-rules/base.ts b/lib/util/package-rules/base.ts
index fa176b504f6d4eb35724e2eb447a1a3c4abf5979..981a11575335db40d90c0ba480b835c9ad66e69f 100644
--- a/lib/util/package-rules/base.ts
+++ b/lib/util/package-rules/base.ts
@@ -10,7 +10,7 @@ export abstract class Matcher implements MatcherApi {
    */
   excludes(
     inputConfig: PackageRuleInputConfig,
-    packageRule: PackageRule
+    packageRule: PackageRule,
   ): boolean | null {
     return null;
   }
@@ -23,6 +23,6 @@ export abstract class Matcher implements MatcherApi {
    */
   abstract matches(
     inputConfig: PackageRuleInputConfig,
-    packageRule: PackageRule
+    packageRule: PackageRule,
   ): boolean | null;
 }
diff --git a/lib/util/package-rules/categories.ts b/lib/util/package-rules/categories.ts
index 5e53f60665582bda6cbbb55c29597fa0d77fdebd..0a5e47ae3045804330795f74b7618644d41d2d13 100644
--- a/lib/util/package-rules/categories.ts
+++ b/lib/util/package-rules/categories.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class CategoriesMatcher extends Matcher {
   override matches(
     { categories }: PackageRuleInputConfig,
-    { matchCategories }: PackageRule
+    { matchCategories }: PackageRule,
   ): boolean | null {
     if (is.nullOrUndefined(matchCategories)) {
       return null;
diff --git a/lib/util/package-rules/current-value.spec.ts b/lib/util/package-rules/current-value.spec.ts
index 5fcb3585956f426f700f723feccca96dd44b4f8c..8c6b096f02c16ada5e23d6e1a87bc9aecd571445 100644
--- a/lib/util/package-rules/current-value.spec.ts
+++ b/lib/util/package-rules/current-value.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/current-value', () => {
         },
         {
           matchCurrentValue: '^v',
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -23,7 +23,7 @@ describe('util/package-rules/current-value', () => {
         },
         {
           matchCurrentValue: '/^v/',
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -35,7 +35,7 @@ describe('util/package-rules/current-value', () => {
         },
         {
           matchCurrentValue: '/^"/',
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -45,7 +45,7 @@ describe('util/package-rules/current-value', () => {
         {},
         {
           matchCurrentValue: '/^v?[~ -]?0/',
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/current-value.ts b/lib/util/package-rules/current-value.ts
index c161a5149f03eb8dfd9104e8a50494103a17ad57..5f3f24a0d35e58d9191e837f5d0b4aa48e04f207 100644
--- a/lib/util/package-rules/current-value.ts
+++ b/lib/util/package-rules/current-value.ts
@@ -7,7 +7,7 @@ import { Matcher } from './base';
 export class CurrentValueMatcher extends Matcher {
   override matches(
     { currentValue }: PackageRuleInputConfig,
-    { matchCurrentValue }: PackageRule
+    { matchCurrentValue }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchCurrentValue)) {
       return null;
@@ -17,7 +17,7 @@ export class CurrentValueMatcher extends Matcher {
     if (!matchCurrentValuePred) {
       logger.debug(
         { matchCurrentValue },
-        'matchCurrentValue should be a regex, starting and ending with `/`'
+        'matchCurrentValue should be a regex, starting and ending with `/`',
       );
       return false;
     }
diff --git a/lib/util/package-rules/current-version.spec.ts b/lib/util/package-rules/current-version.spec.ts
index a7795b9860029eac63ab224c8b3b322887407ab8..de7db7b9c4ae745d7613be7b5b7c056a8a9fa966 100644
--- a/lib/util/package-rules/current-version.spec.ts
+++ b/lib/util/package-rules/current-version.spec.ts
@@ -14,7 +14,7 @@ describe('util/package-rules/current-version', () => {
         },
         {
           matchCurrentVersion: '1.2.3',
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -30,7 +30,7 @@ describe('util/package-rules/current-version', () => {
         },
         {
           matchCurrentVersion: '1.2.3',
-        }
+        },
       );
       expect(result).toBeFalse();
       expect(spy.mock.calls).toHaveLength(1);
@@ -45,7 +45,7 @@ describe('util/package-rules/current-version', () => {
         },
         {
           matchCurrentVersion: 'bbbbbb',
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -59,7 +59,7 @@ describe('util/package-rules/current-version', () => {
         },
         {
           matchCurrentVersion: '/^v?[~ -]?0/',
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -73,7 +73,7 @@ describe('util/package-rules/current-version', () => {
         },
         {
           matchCurrentVersion: '/^v?[~ -]?0/',
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -86,7 +86,7 @@ describe('util/package-rules/current-version', () => {
         },
         {
           matchCurrentVersion: '/^v?[~ -]?0/',
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/current-version.ts b/lib/util/package-rules/current-version.ts
index e01cb1a37e016faeb984df6246b1888f3f6d3f48..b4f89db904c12b9dfb3d439b38254952f3940759 100644
--- a/lib/util/package-rules/current-version.ts
+++ b/lib/util/package-rules/current-version.ts
@@ -13,7 +13,7 @@ export class CurrentVersionMatcher extends Matcher {
       currentValue,
       currentVersion,
     }: PackageRuleInputConfig,
-    { matchCurrentVersion }: PackageRule
+    { matchCurrentVersion }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchCurrentVersion)) {
       return null;
@@ -23,7 +23,7 @@ export class CurrentVersionMatcher extends Matcher {
     const version = allVersioning.get(versioning);
     const matchCurrentVersionStr = matchCurrentVersion.toString();
     const matchCurrentVersionPred = configRegexPredicate(
-      matchCurrentVersionStr
+      matchCurrentVersionStr,
     );
 
     if (matchCurrentVersionPred) {
@@ -59,7 +59,7 @@ export class CurrentVersionMatcher extends Matcher {
     }
     logger.debug(
       { matchCurrentVersionStr, currentValue },
-      'Could not find a version to compare'
+      'Could not find a version to compare',
     );
     return false;
   }
diff --git a/lib/util/package-rules/datasources.ts b/lib/util/package-rules/datasources.ts
index d3237d2eb691c6dadf875b262e3b05297e617401..3e6b5bef446e510a908e1ba3ec2af32d7ef5689a 100644
--- a/lib/util/package-rules/datasources.ts
+++ b/lib/util/package-rules/datasources.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class DatasourcesMatcher extends Matcher {
   override matches(
     { datasource }: PackageRuleInputConfig,
-    { matchDatasources }: PackageRule
+    { matchDatasources }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchDatasources)) {
       return null;
diff --git a/lib/util/package-rules/dep-names.spec.ts b/lib/util/package-rules/dep-names.spec.ts
index 5efd18d0ab54b9fbd08f49c25d6348b32809bed3..eb0bea92f2584ec32a1b8a597e26ff6e1079924d 100644
--- a/lib/util/package-rules/dep-names.spec.ts
+++ b/lib/util/package-rules/dep-names.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/dep-names', () => {
         },
         {
           matchDepNames: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -25,7 +25,7 @@ describe('util/package-rules/dep-names', () => {
         },
         {
           excludeDepNames: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/dep-names.ts b/lib/util/package-rules/dep-names.ts
index cc9a470083bc2995e26b823a286ef0cd05d33a92..d745863ea4d2f583569cc9dc7d7d78a450e921c4 100644
--- a/lib/util/package-rules/dep-names.ts
+++ b/lib/util/package-rules/dep-names.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class DepNameMatcher extends Matcher {
   override matches(
     { depName }: PackageRuleInputConfig,
-    { matchDepNames }: PackageRule
+    { matchDepNames }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchDepNames)) {
       return null;
@@ -18,7 +18,7 @@ export class DepNameMatcher extends Matcher {
 
   override excludes(
     { depName }: PackageRuleInputConfig,
-    { excludeDepNames }: PackageRule
+    { excludeDepNames }: PackageRule,
   ): boolean | null {
     if (is.undefined(excludeDepNames)) {
       return null;
diff --git a/lib/util/package-rules/dep-patterns.spec.ts b/lib/util/package-rules/dep-patterns.spec.ts
index 2303abba6c20c05aab9b5997627db828a22cf5f1..0f976a179bf9bff4d7276840abca998a9cabe665 100644
--- a/lib/util/package-rules/dep-patterns.spec.ts
+++ b/lib/util/package-rules/dep-patterns.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/dep-patterns', () => {
         },
         {
           matchDepPatterns: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -25,7 +25,7 @@ describe('util/package-rules/dep-patterns', () => {
         },
         {
           excludeDepPatterns: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/dep-patterns.ts b/lib/util/package-rules/dep-patterns.ts
index fd4c5e786cf986586afd351a75742e8c1a54b978..7a566a726f991f797c251bb23310e22b5e117f25 100644
--- a/lib/util/package-rules/dep-patterns.ts
+++ b/lib/util/package-rules/dep-patterns.ts
@@ -8,7 +8,7 @@ import { massagePattern } from './utils';
 export class DepPatternsMatcher extends Matcher {
   override matches(
     { depName, updateType }: PackageRuleInputConfig,
-    { matchDepPatterns }: PackageRule
+    { matchDepPatterns }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchDepPatterns)) {
       return null;
@@ -31,7 +31,7 @@ export class DepPatternsMatcher extends Matcher {
 
   override excludes(
     { depName, updateType }: PackageRuleInputConfig,
-    { excludeDepPatterns }: PackageRule
+    { excludeDepPatterns }: PackageRule,
   ): boolean | null {
     // ignore lockFileMaintenance for backwards compatibility
     if (is.undefined(excludeDepPatterns)) {
diff --git a/lib/util/package-rules/dep-types.ts b/lib/util/package-rules/dep-types.ts
index 046f0410e32062e40b250212fd7fe0bd13c5cfdb..73a6087c96f8c2c18a51e7fedd74327e22fd476d 100644
--- a/lib/util/package-rules/dep-types.ts
+++ b/lib/util/package-rules/dep-types.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class DepTypesMatcher extends Matcher {
   override matches(
     { depTypes, depType }: PackageRuleInputConfig,
-    { matchDepTypes }: PackageRule
+    { matchDepTypes }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchDepTypes)) {
       return null;
diff --git a/lib/util/package-rules/files.spec.ts b/lib/util/package-rules/files.spec.ts
index ac30c31247f00d70dda791bdce5576eeeb7002a2..854a3563c5800b19c6e4fdd9669a32252aed37ce 100644
--- a/lib/util/package-rules/files.spec.ts
+++ b/lib/util/package-rules/files.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/files', () => {
         },
         {
           matchFileNames: ['frontend/package.json'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/files.ts b/lib/util/package-rules/files.ts
index e820c8cc514367503730006d94c0da3c9186f3a6..48b69c999181deb0c086fcf57baab74e2c5a7a77 100644
--- a/lib/util/package-rules/files.ts
+++ b/lib/util/package-rules/files.ts
@@ -6,7 +6,7 @@ import { Matcher } from './base';
 export class FileNamesMatcher extends Matcher {
   override matches(
     { packageFile, lockFiles }: PackageRuleInputConfig,
-    { matchFileNames }: PackageRule
+    { matchFileNames }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchFileNames)) {
       return null;
@@ -20,8 +20,8 @@ export class FileNamesMatcher extends Matcher {
         minimatch(matchFileName, { dot: true }).match(packageFile) ||
         (is.array(lockFiles) &&
           lockFiles.some((lockFile) =>
-            minimatch(matchFileName, { dot: true }).match(lockFile)
-          ))
+            minimatch(matchFileName, { dot: true }).match(lockFile),
+          )),
     );
   }
 }
diff --git a/lib/util/package-rules/index.spec.ts b/lib/util/package-rules/index.spec.ts
index 34a70e8fd8460421d3d7b485befddecb487a093d..e9631076fdbc4b57f9c907b890b5a2252ae26c29 100644
--- a/lib/util/package-rules/index.spec.ts
+++ b/lib/util/package-rules/index.spec.ts
@@ -734,7 +734,7 @@ describe('util/package-rules/index', () => {
       expect(error).toStrictEqual(new Error(MISSING_API_CREDENTIALS));
       expect(error.validationError).toBe('Missing credentials');
       expect(error.validationMessage).toBe(
-        'The `matchConfidence` matcher in `packageRules` requires authentication. Please refer to the [documentation](https://docs.renovatebot.com/configuration-options/#matchconfidence) and add the required host rule.'
+        'The `matchConfidence` matcher in `packageRules` requires authentication. Please refer to the [documentation](https://docs.renovatebot.com/configuration-options/#matchconfidence) and add the required host rule.',
       );
     });
   });
@@ -1013,7 +1013,7 @@ describe('util/package-rules/index', () => {
 
   it('empty rules', () => {
     expect(
-      applyPackageRules({ ...config1, packageRules: null as never })
+      applyPackageRules({ ...config1, packageRules: null as never }),
     ).toEqual({
       foo: 'bar',
       packageRules: null,
diff --git a/lib/util/package-rules/index.ts b/lib/util/package-rules/index.ts
index 9484d09dd5f7df72ef9d57c432de14e386815f81..9df24e62f0db1d7b0aaf05f6268c5356c700f850 100644
--- a/lib/util/package-rules/index.ts
+++ b/lib/util/package-rules/index.ts
@@ -8,7 +8,7 @@ import { matcherOR } from './utils';
 
 function matchesRule(
   inputConfig: PackageRuleInputConfig,
-  packageRule: PackageRule
+  packageRule: PackageRule,
 ): boolean {
   let positiveMatch = true;
   let matchApplied = false;
@@ -18,7 +18,7 @@ function matchesRule(
       'matches',
       groupMatchers,
       inputConfig,
-      packageRule
+      packageRule,
     );
 
     // no rules are defined
@@ -44,7 +44,7 @@ function matchesRule(
       'excludes',
       groupExcludes,
       inputConfig,
-      packageRule
+      packageRule,
     );
 
     // no rules are defined
@@ -61,13 +61,13 @@ function matchesRule(
 }
 
 export function applyPackageRules<T extends PackageRuleInputConfig>(
-  inputConfig: T
+  inputConfig: T,
 ): T {
   let config = { ...inputConfig };
   const packageRules = config.packageRules ?? [];
   logger.trace(
     { dependency: config.depName, packageRules },
-    `Checking against ${packageRules.length} packageRules`
+    `Checking against ${packageRules.length} packageRules`,
   );
   for (const packageRule of packageRules) {
     // This rule is considered matched if there was at least one positive match and no negative matches
diff --git a/lib/util/package-rules/managers.spec.ts b/lib/util/package-rules/managers.spec.ts
index c8e855c87e7fd2ee47d486393af9bb4df028623d..54e9c8fd18670c73608993d3f9eb8d8a28828aae 100644
--- a/lib/util/package-rules/managers.spec.ts
+++ b/lib/util/package-rules/managers.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/managers', () => {
         },
         {
           matchManagers: ['npm', 'regex'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -23,7 +23,7 @@ describe('util/package-rules/managers', () => {
         },
         {
           matchManagers: ['docker'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -33,7 +33,7 @@ describe('util/package-rules/managers', () => {
         {
           manager: 'npm',
         },
-        {}
+        {},
       );
       expect(result).toBeNull();
     });
@@ -43,7 +43,7 @@ describe('util/package-rules/managers', () => {
         {},
         {
           matchManagers: ['npm'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/managers.ts b/lib/util/package-rules/managers.ts
index cd5de99a2ad26e28c8d66bc1bd41010d48d32f52..e4fae10c0d963e5c54da3a1d05cc2371496225ed 100644
--- a/lib/util/package-rules/managers.ts
+++ b/lib/util/package-rules/managers.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class ManagersMatcher extends Matcher {
   override matches(
     { manager }: PackageRuleInputConfig,
-    { matchManagers }: PackageRule
+    { matchManagers }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchManagers)) {
       return null;
diff --git a/lib/util/package-rules/match.ts b/lib/util/package-rules/match.ts
index b96249e294f71f8a44c9df7e30c95cd55378ef36..c66f19284d187857bdc4f3eea5b4dfe65d69d100 100644
--- a/lib/util/package-rules/match.ts
+++ b/lib/util/package-rules/match.ts
@@ -19,7 +19,7 @@ export function matchRegexOrMinimatch(pattern: string, input: string): boolean {
 
 export function anyMatchRegexOrMinimatch(
   patterns: string[] | undefined,
-  input: string | undefined
+  input: string | undefined,
 ): boolean | null {
   if (is.undefined(patterns)) {
     return null;
diff --git a/lib/util/package-rules/merge-confidence.ts b/lib/util/package-rules/merge-confidence.ts
index edd87429b8d12bd760e37a68ec10f8aec3542b33..08f89153ccd01d3a653bae4b4aff1b9b96c0160d 100644
--- a/lib/util/package-rules/merge-confidence.ts
+++ b/lib/util/package-rules/merge-confidence.ts
@@ -7,7 +7,7 @@ import { Matcher } from './base';
 export class MergeConfidenceMatcher extends Matcher {
   override matches(
     { mergeConfidenceLevel }: PackageRuleInputConfig,
-    { matchConfidence }: PackageRule
+    { matchConfidence }: PackageRule,
   ): boolean | null {
     if (is.nullOrUndefined(matchConfidence)) {
       return null;
diff --git a/lib/util/package-rules/package-names.spec.ts b/lib/util/package-rules/package-names.spec.ts
index a2d4fbf75787b66acd0f40e676677936108e55d8..83552a798f915f1b800e5da24307690e33cb03ef 100644
--- a/lib/util/package-rules/package-names.spec.ts
+++ b/lib/util/package-rules/package-names.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/package-names', () => {
         },
         {
           matchPackageNames: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -24,7 +24,7 @@ describe('util/package-rules/package-names', () => {
         },
         {
           matchPackageNames: ['def'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -37,7 +37,7 @@ describe('util/package-rules/package-names', () => {
         },
         {
           matchPackageNames: ['abc'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -51,7 +51,7 @@ describe('util/package-rules/package-names', () => {
         },
         {
           excludePackageNames: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/package-names.ts b/lib/util/package-rules/package-names.ts
index 5e9f8e5af50036d203fda13463bc585443823c5b..a6476d2877cd558bdd4123e5868b81e52cc7446e 100644
--- a/lib/util/package-rules/package-names.ts
+++ b/lib/util/package-rules/package-names.ts
@@ -6,7 +6,7 @@ import { Matcher } from './base';
 export class PackageNameMatcher extends Matcher {
   override matches(
     { depName, packageName }: PackageRuleInputConfig,
-    packageRule: PackageRule
+    packageRule: PackageRule,
   ): boolean | null {
     const { matchPackageNames } = packageRule;
     if (is.undefined(matchPackageNames)) {
@@ -23,7 +23,7 @@ export class PackageNameMatcher extends Matcher {
     if (matchPackageNames.includes(depName)) {
       logger.once.info(
         { packageRule, packageName, depName },
-        'Use matchDepNames instead of matchPackageNames'
+        'Use matchDepNames instead of matchPackageNames',
       );
       return true;
     }
@@ -33,7 +33,7 @@ export class PackageNameMatcher extends Matcher {
 
   override excludes(
     { depName }: PackageRuleInputConfig,
-    { excludePackageNames }: PackageRule
+    { excludePackageNames }: PackageRule,
   ): boolean | null {
     if (is.undefined(excludePackageNames)) {
       return null;
diff --git a/lib/util/package-rules/package-patterns.spec.ts b/lib/util/package-rules/package-patterns.spec.ts
index 133867f7ee3d868388513130659ebc0137b26a00..467191ae45f35f68d63c02b96e413233e5ec817b 100644
--- a/lib/util/package-rules/package-patterns.spec.ts
+++ b/lib/util/package-rules/package-patterns.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/package-patterns', () => {
         },
         {
           matchPackagePatterns: ['@opentelemetry/http'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -24,7 +24,7 @@ describe('util/package-rules/package-patterns', () => {
         },
         {
           matchPackagePatterns: ['def'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -37,7 +37,7 @@ describe('util/package-rules/package-patterns', () => {
         },
         {
           matchPackagePatterns: ['abc'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
diff --git a/lib/util/package-rules/package-patterns.ts b/lib/util/package-rules/package-patterns.ts
index 74ed5458e8eef6645994e7cd1cd6283a836d4644..1823d9c888b5897a1eb2cbd589701445d4005ace 100644
--- a/lib/util/package-rules/package-patterns.ts
+++ b/lib/util/package-rules/package-patterns.ts
@@ -7,7 +7,7 @@ import { massagePattern } from './utils';
 
 function matchPatternsAgainstName(
   matchPackagePatterns: string[],
-  name: string
+  name: string,
 ): boolean {
   let isMatch = false;
   for (const packagePattern of matchPackagePatterns) {
@@ -21,7 +21,7 @@ function matchPatternsAgainstName(
 export class PackagePatternsMatcher extends Matcher {
   override matches(
     { depName, packageName }: PackageRuleInputConfig,
-    packageRule: PackageRule
+    packageRule: PackageRule,
   ): boolean | null {
     const { matchPackagePatterns } = packageRule;
     if (is.undefined(matchPackagePatterns)) {
@@ -41,7 +41,7 @@ export class PackagePatternsMatcher extends Matcher {
     if (matchPatternsAgainstName(matchPackagePatterns, depName)) {
       logger.once.info(
         { packageRule, packageName, depName },
-        'Use matchDepPatterns instead of matchPackagePatterns'
+        'Use matchDepPatterns instead of matchPackagePatterns',
       );
       return true;
     }
@@ -51,7 +51,7 @@ export class PackagePatternsMatcher extends Matcher {
 
   override excludes(
     { depName }: PackageRuleInputConfig,
-    { excludePackagePatterns }: PackageRule
+    { excludePackagePatterns }: PackageRule,
   ): boolean | null {
     // ignore lockFileMaintenance for backwards compatibility
     if (is.undefined(excludePackagePatterns)) {
diff --git a/lib/util/package-rules/package-prefixes.spec.ts b/lib/util/package-rules/package-prefixes.spec.ts
index cd0cd5f2944f68cc856728bb0faefee414734889..424a5f84421feac7032f6477a3a7dfb3bd1757a7 100644
--- a/lib/util/package-rules/package-prefixes.spec.ts
+++ b/lib/util/package-rules/package-prefixes.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/package-prefixes', () => {
         },
         {
           matchPackagePrefixes: ['@opentelemetry'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -24,7 +24,7 @@ describe('util/package-rules/package-prefixes', () => {
         },
         {
           matchPackagePrefixes: ['def'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -37,7 +37,7 @@ describe('util/package-rules/package-prefixes', () => {
         },
         {
           matchPackagePrefixes: ['abc'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -51,7 +51,7 @@ describe('util/package-rules/package-prefixes', () => {
         },
         {
           excludePackagePrefixes: ['@opentelemetry'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
diff --git a/lib/util/package-rules/package-prefixes.ts b/lib/util/package-rules/package-prefixes.ts
index 416f8af815b58d6005a2033409a44f1c979a6b5b..74eb59c7f4d7e76ca31068bf7445e0f60f2d6cca 100644
--- a/lib/util/package-rules/package-prefixes.ts
+++ b/lib/util/package-rules/package-prefixes.ts
@@ -6,7 +6,7 @@ import { Matcher } from './base';
 export class PackagePrefixesMatcher extends Matcher {
   override matches(
     { depName, packageName }: PackageRuleInputConfig,
-    { matchPackagePrefixes }: PackageRule
+    { matchPackagePrefixes }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchPackagePrefixes)) {
       return null;
@@ -25,7 +25,7 @@ export class PackagePrefixesMatcher extends Matcher {
     if (matchPackagePrefixes.some((prefix) => depName.startsWith(prefix))) {
       logger.once.info(
         { packageName, depName },
-        'Use matchDepPatterns instead of matchPackagePrefixes'
+        'Use matchDepPatterns instead of matchPackagePrefixes',
       );
       return true;
     }
@@ -35,7 +35,7 @@ export class PackagePrefixesMatcher extends Matcher {
 
   override excludes(
     { depName }: PackageRuleInputConfig,
-    { excludePackagePrefixes }: PackageRule
+    { excludePackagePrefixes }: PackageRule,
   ): boolean | null {
     if (is.undefined(excludePackagePrefixes)) {
       return null;
diff --git a/lib/util/package-rules/repositories.spec.ts b/lib/util/package-rules/repositories.spec.ts
index 76a2e41dca8faf7f3c0bfa31a30ddc559bfaec5c..86de8b656a0fa594ffd92a9c42cc0f5f004b5ef4 100644
--- a/lib/util/package-rules/repositories.spec.ts
+++ b/lib/util/package-rules/repositories.spec.ts
@@ -11,7 +11,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: undefined,
-        }
+        },
       );
       expect(result).toBeNull();
     });
@@ -23,7 +23,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['org/repo'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -35,7 +35,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['/^org/repo$/'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -47,7 +47,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['/[/'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -59,7 +59,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['/^org/other-repo$/'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -71,7 +71,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['org/**'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -83,7 +83,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['other-org/**'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -95,7 +95,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           matchRepositories: ['/^org/repo$/', '**/*-archived'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -109,7 +109,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: undefined,
-        }
+        },
       );
       expect(result).toBeNull();
     });
@@ -121,7 +121,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['org/repo'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -133,7 +133,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['/^org/repo$/'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -145,7 +145,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['/[/'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -157,7 +157,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['/^org/other-repo$/'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -169,7 +169,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['org/**'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
@@ -181,7 +181,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['other-org/**'],
-        }
+        },
       );
       expect(result).toBeFalse();
     });
@@ -193,7 +193,7 @@ describe('util/package-rules/repositories', () => {
         },
         {
           excludeRepositories: ['/^org/repo$/', '**/*-archived'],
-        }
+        },
       );
       expect(result).toBeTrue();
     });
diff --git a/lib/util/package-rules/repositories.ts b/lib/util/package-rules/repositories.ts
index 8e405d87c3d2fc23bb18a83ed5f66db3e6b75fcf..01c958d4615335f5bc7221c3f78086df87c844b0 100644
--- a/lib/util/package-rules/repositories.ts
+++ b/lib/util/package-rules/repositories.ts
@@ -5,14 +5,14 @@ import { anyMatchRegexOrMinimatch } from './match';
 export class RepositoriesMatcher extends Matcher {
   override matches(
     { repository }: PackageRuleInputConfig,
-    { matchRepositories }: PackageRule
+    { matchRepositories }: PackageRule,
   ): boolean | null {
     return anyMatchRegexOrMinimatch(matchRepositories, repository);
   }
 
   override excludes(
     { repository }: PackageRuleInputConfig,
-    { excludeRepositories }: PackageRule
+    { excludeRepositories }: PackageRule,
   ): boolean | null {
     return anyMatchRegexOrMinimatch(excludeRepositories, repository);
   }
diff --git a/lib/util/package-rules/sourceurl-prefixes.ts b/lib/util/package-rules/sourceurl-prefixes.ts
index 1c308fef3fa98047b2584d6d2647f7d732a5d879..13d6d4801cb9d78532346647a2eb893c3ad23bf7 100644
--- a/lib/util/package-rules/sourceurl-prefixes.ts
+++ b/lib/util/package-rules/sourceurl-prefixes.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class SourceUrlPrefixesMatcher extends Matcher {
   override matches(
     { sourceUrl }: PackageRuleInputConfig,
-    { matchSourceUrlPrefixes }: PackageRule
+    { matchSourceUrlPrefixes }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchSourceUrlPrefixes)) {
       return null;
@@ -16,7 +16,7 @@ export class SourceUrlPrefixesMatcher extends Matcher {
     const upperCaseSourceUrl = sourceUrl?.toUpperCase();
 
     return matchSourceUrlPrefixes.some((prefix) =>
-      upperCaseSourceUrl?.startsWith(prefix.toUpperCase())
+      upperCaseSourceUrl?.startsWith(prefix.toUpperCase()),
     );
   }
 }
diff --git a/lib/util/package-rules/sourceurls.ts b/lib/util/package-rules/sourceurls.ts
index b025d27089ecbba39415ad4009baec46e3baab1a..0f1e311435490d302772c1dc6023edf0cba5447e 100644
--- a/lib/util/package-rules/sourceurls.ts
+++ b/lib/util/package-rules/sourceurls.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class SourceUrlsMatcher extends Matcher {
   override matches(
     { sourceUrl }: PackageRuleInputConfig,
-    { matchSourceUrls }: PackageRule
+    { matchSourceUrls }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchSourceUrls)) {
       return null;
@@ -16,7 +16,7 @@ export class SourceUrlsMatcher extends Matcher {
 
     const upperCaseSourceUrl = sourceUrl?.toUpperCase();
     return matchSourceUrls.some(
-      (url) => upperCaseSourceUrl === url.toUpperCase()
+      (url) => upperCaseSourceUrl === url.toUpperCase(),
     );
   }
 }
diff --git a/lib/util/package-rules/types.ts b/lib/util/package-rules/types.ts
index ce8c546522222938d23ed449635e2438d3b08110..0307f2e82d6b364887c2de9df513473ffb69e024 100644
--- a/lib/util/package-rules/types.ts
+++ b/lib/util/package-rules/types.ts
@@ -5,10 +5,10 @@ export type MatchType = 'matches' | 'excludes';
 export interface MatcherApi {
   matches(
     inputConfig: PackageRuleInputConfig,
-    packageRule: PackageRule
+    packageRule: PackageRule,
   ): boolean | null;
   excludes(
     inputConfig: PackageRuleInputConfig,
-    packageRule: PackageRule
+    packageRule: PackageRule,
   ): boolean | null;
 }
diff --git a/lib/util/package-rules/update-types.ts b/lib/util/package-rules/update-types.ts
index 294b6e34b3684c183f7cb3270e6b8c3d8d388069..85c5f3f750ea0725ae37e4d043dae5adcee5c00a 100644
--- a/lib/util/package-rules/update-types.ts
+++ b/lib/util/package-rules/update-types.ts
@@ -5,7 +5,7 @@ import { Matcher } from './base';
 export class UpdateTypesMatcher extends Matcher {
   override matches(
     { updateType, isBump }: PackageRuleInputConfig,
-    { matchUpdateTypes }: PackageRule
+    { matchUpdateTypes }: PackageRule,
   ): boolean | null {
     if (is.undefined(matchUpdateTypes)) {
       return null;
diff --git a/lib/util/package-rules/utils.ts b/lib/util/package-rules/utils.ts
index 7f48742c5119faa91597fd63e254406aca2aea2d..0a8cb830c61276d0c82dab04e7fdb49fe865c490 100644
--- a/lib/util/package-rules/utils.ts
+++ b/lib/util/package-rules/utils.ts
@@ -6,7 +6,7 @@ export function matcherOR(
   matchType: MatchType,
   groupMatchers: MatcherApi[],
   inputConfig: PackageRuleInputConfig,
-  packageRule: PackageRule
+  packageRule: PackageRule,
 ): boolean | null {
   let matchApplied = false;
   for (const matcher of groupMatchers) {
diff --git a/lib/util/promises.spec.ts b/lib/util/promises.spec.ts
index b0c575dab9715adb0ba1d7ff55f25e8ba6104bf2..0e567b0b50bac2d71a14a070b8095b5cb9ba9310 100644
--- a/lib/util/promises.spec.ts
+++ b/lib/util/promises.spec.ts
@@ -56,7 +56,7 @@ describe('util/promises', () => {
 
     it('throws aggregate error for different error messages', async () => {
       await expect(
-        p.map([1, 2, 3], (x) => Promise.reject(new Error(`error ${x}`)))
+        p.map([1, 2, 3], (x) => Promise.reject(new Error(`error ${x}`))),
       ).rejects.toHaveProperty('name', 'AggregateError');
     });
 
@@ -70,7 +70,7 @@ describe('util/promises', () => {
             () => Promise.resolve('ok'),
             () => Promise.reject(unknownErr),
           ],
-          { stopOnError: true }
+          { stopOnError: true },
         );
       } catch (err) {
         res = err;
diff --git a/lib/util/promises.ts b/lib/util/promises.ts
index 259369100a122a9ea138b54437653df961b18164..033844516199d6726eca9a05f9ebcd86eda1fa64 100644
--- a/lib/util/promises.ts
+++ b/lib/util/promises.ts
@@ -37,7 +37,7 @@ function handleError(err: any): never {
 
 export async function all<T>(
   tasks: PromiseFactory<T>[],
-  options?: pAll.Options
+  options?: pAll.Options,
 ): Promise<T[]> {
   try {
     const res = await pAll(tasks, {
@@ -54,7 +54,7 @@ export async function all<T>(
 export async function map<Element, NewElement>(
   input: Iterable<Element>,
   mapper: pMap.Mapper<Element, NewElement>,
-  options?: pMap.Options
+  options?: pMap.Options,
 ): Promise<NewElement[]> {
   try {
     const res = await pMap(input, mapper, {
diff --git a/lib/util/range.ts b/lib/util/range.ts
index 945d1b64143e6c30fb2620c1f10a705de2b0b8f4..46db61e594ad192ef03a9544bdbf00a293af04b4 100644
--- a/lib/util/range.ts
+++ b/lib/util/range.ts
@@ -1,6 +1,6 @@
 export function* range(
   start: number,
-  end: number
+  end: number,
 ): Generator<number, void, void> {
   for (let i = start; i <= end; i += 1) {
     yield i;
diff --git a/lib/util/regex.ts b/lib/util/regex.ts
index 43ee366959d25ebe20e56f4719cf4b96b8d4dad3..64630d0f3cf671e11bf844b8485772ff669cc678 100644
--- a/lib/util/regex.ts
+++ b/lib/util/regex.ts
@@ -24,7 +24,7 @@ RegEx ??= RegExp;
 export function regEx(
   pattern: string | RegExp,
   flags?: string | undefined,
-  useCache = true
+  useCache = true,
 ): RegExp {
   let canBeCached = useCache;
   if (canBeCached && flags?.includes('g')) {
@@ -86,7 +86,7 @@ function parseConfigRegex(input: string): RegExp | null {
 type ConfigRegexPredicate = (s: string) => boolean;
 
 export function configRegexPredicate(
-  input: string
+  input: string,
 ): ConfigRegexPredicate | null {
   if (isConfigRegex(input)) {
     const configRegex = parseConfigRegex(input);
@@ -102,7 +102,7 @@ export function configRegexPredicate(
 }
 
 const UUIDRegex = regEx(
-  /^\{[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}\}$/i
+  /^\{[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}\}$/i,
 );
 
 export function isUUID(input: string): boolean {
diff --git a/lib/util/result.spec.ts b/lib/util/result.spec.ts
index 54d7833b96ddb44e24e00773d9b06ee6db9c7598..df666f616d48f8be1453098f9b6765e844578652 100644
--- a/lib/util/result.spec.ts
+++ b/lib/util/result.spec.ts
@@ -42,7 +42,7 @@ describe('util/result', () => {
       it('wraps nullable callback', () => {
         const res: Result<number, 'oops'> = Result.wrapNullable(
           (): number | null => 42,
-          'oops'
+          'oops',
         );
         expect(res).toEqual(Result.ok(42));
       });
@@ -59,10 +59,10 @@ describe('util/result', () => {
 
       it('distincts between null and undefined callback results', () => {
         expect(Result.wrapNullable(() => null, 'null', 'undefined')).toEqual(
-          Result.err('null')
+          Result.err('null'),
         );
         expect(
-          Result.wrapNullable(() => undefined, 'null', 'undefined')
+          Result.wrapNullable(() => undefined, 'null', 'undefined'),
         ).toEqual(Result.err('undefined'));
       });
 
@@ -96,7 +96,7 @@ describe('util/result', () => {
             issues: [
               { code: 'invalid_type', expected: 'string', received: 'number' },
             ],
-          })
+          }),
         );
       });
     });
@@ -135,7 +135,7 @@ describe('util/result', () => {
             .transform(() => {
               throw 'oops';
             })
-            .unwrapOrElse(0)
+            .unwrapOrElse(0),
         ).toThrow('oops');
       });
 
@@ -146,7 +146,7 @@ describe('util/result', () => {
             .transform(() => {
               throw 'oops';
             })
-            .unwrap()
+            .unwrap(),
         ).toThrow('oops');
       });
 
@@ -177,7 +177,7 @@ describe('util/result', () => {
             .transform(() => {
               throw 'oops';
             })
-            .unwrapOrNull()
+            .unwrapOrNull(),
         ).toThrow('oops');
       });
     });
@@ -190,7 +190,7 @@ describe('util/result', () => {
 
       it('transforms value to Result', () => {
         const res = Result.ok('foo').transform((x) =>
-          Result.ok(x.toUpperCase())
+          Result.ok(x.toUpperCase()),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
@@ -209,7 +209,7 @@ describe('util/result', () => {
         expect(res).toEqual(Result._uncaught('oops'));
         expect(logger.logger.warn).toHaveBeenCalledWith(
           { err: 'oops' },
-          'Result: unhandled transform error'
+          'Result: unhandled transform error',
         );
       });
 
@@ -237,7 +237,7 @@ describe('util/result', () => {
 
       it('converts error to Result', () => {
         const result = Result.err<string>('oops').catch(() =>
-          Result.ok<number>(42)
+          Result.ok<number>(42),
         );
         expect(result).toEqual(Result.ok(42));
       });
@@ -333,21 +333,21 @@ describe('util/result', () => {
     describe('Wrapping', () => {
       it('wraps promise', async () => {
         const res: AsyncResult<number, string> = Result.wrap(
-          Promise.resolve(42)
+          Promise.resolve(42),
         );
         await expect(res).resolves.toEqual(Result.ok(42));
       });
 
       it('wraps Result promise', async () => {
         const res: AsyncResult<number, string> = Result.wrap(
-          Promise.resolve(Result.ok(42))
+          Promise.resolve(Result.ok(42)),
         );
         await expect(res).resolves.toEqual(Result.ok(42));
       });
 
       it('handles rejected promise', async () => {
         const res: AsyncResult<number, string> = Result.wrap(
-          Promise.reject('oops')
+          Promise.reject('oops'),
         );
         await expect(res).resolves.toEqual(Result.err('oops'));
       });
@@ -355,7 +355,7 @@ describe('util/result', () => {
       it('wraps nullable promise', async () => {
         const res: AsyncResult<number, 'oops'> = Result.wrapNullable(
           Promise.resolve<number | null>(42),
-          'oops'
+          'oops',
         );
         await expect(res).resolves.toEqual(Result.ok(42));
       });
@@ -372,11 +372,11 @@ describe('util/result', () => {
 
       it('distincts between null and undefined promise results', async () => {
         await expect(
-          Result.wrapNullable(Promise.resolve(null), 'null', 'undefined')
+          Result.wrapNullable(Promise.resolve(null), 'null', 'undefined'),
         ).resolves.toEqual(Result.err('null'));
 
         await expect(
-          Result.wrapNullable(Promise.resolve(undefined), 'null', 'undefined')
+          Result.wrapNullable(Promise.resolve(undefined), 'null', 'undefined'),
         ).resolves.toEqual(Result.err('undefined'));
       });
 
@@ -437,28 +437,28 @@ describe('util/result', () => {
     describe('Transforming', () => {
       it('transforms AsyncResult to pure value', async () => {
         const res = await AsyncResult.ok('foo').transform((x) =>
-          x.toUpperCase()
+          x.toUpperCase(),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
 
       it('transforms AsyncResult to Result', async () => {
         const res = await AsyncResult.ok('foo').transform((x) =>
-          Result.ok(x.toUpperCase())
+          Result.ok(x.toUpperCase()),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
 
       it('transforms Result to AsyncResult', async () => {
         const res = await Result.ok('foo').transform((x) =>
-          AsyncResult.ok(x.toUpperCase())
+          AsyncResult.ok(x.toUpperCase()),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
 
       it('transforms AsyncResult to AsyncResult', async () => {
         const res = await AsyncResult.ok('foo').transform((x) =>
-          AsyncResult.ok(x.toUpperCase())
+          AsyncResult.ok(x.toUpperCase()),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
@@ -472,28 +472,28 @@ describe('util/result', () => {
 
       it('asyncronously transforms successfull promise to value', async () => {
         const res = await AsyncResult.ok('foo').transform((x) =>
-          Promise.resolve(x.toUpperCase())
+          Promise.resolve(x.toUpperCase()),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
 
       it('asynchronously transforms successful AsyncResult to Result', async () => {
         const res = await AsyncResult.ok('foo').transform((x) =>
-          Promise.resolve(Result.ok(x.toUpperCase()))
+          Promise.resolve(Result.ok(x.toUpperCase())),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
 
       it('asynchronously transforms value to value', async () => {
         const res = await Result.ok('foo').transform((x) =>
-          Promise.resolve(x.toUpperCase())
+          Promise.resolve(x.toUpperCase()),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
 
       it('asynchronously transforms value to Result', async () => {
         const res = await Result.ok('foo').transform((x) =>
-          Promise.resolve(Result.ok(x.toUpperCase()))
+          Promise.resolve(Result.ok(x.toUpperCase())),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
@@ -526,11 +526,11 @@ describe('util/result', () => {
       it('handles error thrown on Result async transform', async () => {
         const res = Result.ok('foo');
         await expect(
-          res.transform((_) => Promise.reject('oops'))
+          res.transform((_) => Promise.reject('oops')),
         ).resolves.toEqual(Result._uncaught('oops'));
         expect(logger.logger.warn).toHaveBeenCalledWith(
           { err: 'oops' },
-          'Result: unhandled async transform error'
+          'Result: unhandled async transform error',
         );
       });
 
@@ -539,22 +539,22 @@ describe('util/result', () => {
         await expect(
           res.transform(() => {
             throw 'bar';
-          })
+          }),
         ).resolves.toEqual(Result._uncaught('bar'));
         expect(logger.logger.warn).toHaveBeenCalledWith(
           { err: 'bar' },
-          'AsyncResult: unhandled transform error'
+          'AsyncResult: unhandled transform error',
         );
       });
 
       it('handles error thrown on promise async transform', async () => {
         const res = AsyncResult.ok('foo');
         await expect(
-          res.transform(() => Promise.reject('bar'))
+          res.transform(() => Promise.reject('bar')),
         ).resolves.toEqual(Result._uncaught('bar'));
         expect(logger.logger.warn).toHaveBeenCalledWith(
           { err: 'bar' },
-          'AsyncResult: unhandled async transform error'
+          'AsyncResult: unhandled async transform error',
         );
       });
 
@@ -580,7 +580,7 @@ describe('util/result', () => {
       it('asynchronously transforms Result to zod values', async () => {
         const schema = z.string().transform((x) => x.toUpperCase());
         const res = await Result.ok('foo').transform((x) =>
-          Promise.resolve(schema.safeParse(x))
+          Promise.resolve(schema.safeParse(x)),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
@@ -588,7 +588,7 @@ describe('util/result', () => {
       it('transforms AsyncResult to zod values', async () => {
         const schema = z.string().transform((x) => x.toUpperCase());
         const res = await AsyncResult.ok('foo').transform((x) =>
-          schema.safeParse(x)
+          schema.safeParse(x),
         );
         expect(res).toEqual(Result.ok('FOO'));
       });
@@ -597,7 +597,7 @@ describe('util/result', () => {
     describe('Catch', () => {
       it('converts error to AsyncResult', async () => {
         const result = await Result.err<string>('oops').catch(() =>
-          AsyncResult.ok(42)
+          AsyncResult.ok(42),
         );
         expect(result).toEqual(Result.ok(42));
       });
@@ -616,7 +616,7 @@ describe('util/result', () => {
 
       it('converts AsyncResult error to Result', async () => {
         const result = await AsyncResult.err<string>('oops').catch(() =>
-          AsyncResult.ok<number>(42)
+          AsyncResult.ok<number>(42),
         );
         expect(result).toEqual(Result.ok(42));
       });
@@ -631,7 +631,7 @@ describe('util/result', () => {
         .nullish();
 
       expect(await AsyncResult.ok('foo').parse(schema)).toEqual(
-        Result.ok('FOO')
+        Result.ok('FOO'),
       );
 
       expect(await AsyncResult.ok(42).parse(schema).unwrap()).toMatchObject({
diff --git a/lib/util/result.ts b/lib/util/result.ts
index f8e03c64fe9e6619191ca9836f850311cefb2450..42a6a9e0b3beb8c78b67d0095e5ccd165d0f651c 100644
--- a/lib/util/result.ts
+++ b/lib/util/result.ts
@@ -25,7 +25,7 @@ interface Err<E extends Val> {
 type Res<T extends Val, E extends Val> = Ok<T> | Err<E>;
 
 function isZodResult<Input, Output extends Val>(
-  input: unknown
+  input: unknown,
 ): input is SafeParseReturnType<Input, Output> {
   if (
     typeof input !== 'object' ||
@@ -49,7 +49,7 @@ function isZodResult<Input, Output extends Val>(
 }
 
 function fromZodResult<ZodInput, ZodOutput extends Val>(
-  input: SafeParseReturnType<ZodInput, ZodOutput>
+  input: SafeParseReturnType<ZodInput, ZodOutput>,
 ): Result<ZodOutput, ZodError<ZodInput>> {
   return input.success ? Result.ok(input.data) : Result.err(input.error);
 }
@@ -66,11 +66,11 @@ type RawValue<T extends Val> = Exclude<
 function fromNullable<
   T extends Val,
   ErrForNull extends Val,
-  ErrForUndefined extends Val
+  ErrForUndefined extends Val,
 >(
   input: Nullable<T>,
   errForNull: ErrForNull,
-  errForUndefined: ErrForUndefined
+  errForUndefined: ErrForUndefined,
 ): Result<T, ErrForNull | ErrForUndefined> {
   if (input === null) {
     return Result.err(errForNull);
@@ -136,28 +136,28 @@ export class Result<T extends Val, E extends Val = Error> {
    *   ```
    */
   static wrap<T extends Val, Input = unknown>(
-    zodResult: SafeParseReturnType<Input, T>
+    zodResult: SafeParseReturnType<Input, T>,
   ): Result<T, ZodError<Input>>;
   static wrap<T extends Val, E extends Val = Error>(
-    callback: () => RawValue<T>
+    callback: () => RawValue<T>,
   ): Result<T, E>;
   static wrap<T extends Val, E extends Val = Error, EE extends Val = never>(
-    promise: Promise<Result<T, EE>>
+    promise: Promise<Result<T, EE>>,
   ): AsyncResult<T, E | EE>;
   static wrap<T extends Val, E extends Val = Error>(
-    promise: Promise<RawValue<T>>
+    promise: Promise<RawValue<T>>,
   ): AsyncResult<T, E>;
   static wrap<
     T extends Val,
     E extends Val = Error,
     EE extends Val = never,
-    Input = unknown
+    Input = unknown,
   >(
     input:
       | SafeParseReturnType<Input, T>
       | (() => RawValue<T>)
       | Promise<Result<T, EE>>
-      | Promise<RawValue<T>>
+      | Promise<RawValue<T>>,
   ): Result<T, ZodError<Input>> | Result<T, E | EE> | AsyncResult<T, E | EE> {
     if (isZodResult<Input, T>(input)) {
       return fromZodResult(input);
@@ -222,66 +222,66 @@ export class Result<T extends Val, E extends Val = Error> {
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
-    ErrForNullable extends Val = Error
+    ErrForNullable extends Val = Error,
   >(
     callback: () => Nullable<T>,
-    errForNullable: ErrForNullable
+    errForNullable: ErrForNullable,
   ): Result<T, E | ErrForNullable>;
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
     ErrForNull extends Val = Error,
-    ErrForUndefined extends Val = Error
+    ErrForUndefined extends Val = Error,
   >(
     callback: () => Nullable<T>,
     errForNull: ErrForNull,
-    errForUndefined: ErrForUndefined
+    errForUndefined: ErrForUndefined,
   ): Result<T, E | ErrForNull | ErrForUndefined>;
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
-    ErrForNullable extends Val = Error
+    ErrForNullable extends Val = Error,
   >(
     promise: Promise<Nullable<T>>,
-    errForNullable: ErrForNullable
+    errForNullable: ErrForNullable,
   ): AsyncResult<T, E | ErrForNullable>;
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
     ErrForNull extends Val = Error,
-    ErrForUndefined extends Val = Error
+    ErrForUndefined extends Val = Error,
   >(
     promise: Promise<Nullable<T>>,
     errForNull: ErrForNull,
-    errForUndefined: ErrForUndefined
+    errForUndefined: ErrForUndefined,
   ): AsyncResult<T, E | ErrForNull | ErrForUndefined>;
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
-    ErrForNullable extends Val = Error
+    ErrForNullable extends Val = Error,
   >(
     value: Nullable<T>,
-    errForNullable: ErrForNullable
+    errForNullable: ErrForNullable,
   ): Result<T, E | ErrForNullable>;
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
     ErrForNull extends Val = Error,
-    ErrForUndefined extends Val = Error
+    ErrForUndefined extends Val = Error,
   >(
     value: Nullable<T>,
     errForNull: ErrForNull,
-    errForUndefined: ErrForUndefined
+    errForUndefined: ErrForUndefined,
   ): Result<T, E | ErrForNull | ErrForUndefined>;
   static wrapNullable<
     T extends Val,
     E extends Val = Error,
     ErrForNull extends Val = Error,
-    ErrForUndefined extends Val = Error
+    ErrForUndefined extends Val = Error,
   >(
     input: (() => Nullable<T>) | Promise<Nullable<T>> | Nullable<T>,
     arg2: ErrForNull,
-    arg3?: ErrForUndefined
+    arg3?: ErrForUndefined,
   ):
     | Result<T, E | ErrForNull | ErrForUndefined>
     | AsyncResult<T, E | ErrForNull | ErrForUndefined> {
@@ -405,27 +405,27 @@ export class Result<T extends Val, E extends Val = Error> {
    *   ```
    */
   transform<U extends Val, EE extends Val>(
-    fn: (value: T) => Result<U, E | EE>
+    fn: (value: T) => Result<U, E | EE>,
   ): Result<U, E | EE>;
   transform<U extends Val, EE extends Val>(
-    fn: (value: T) => AsyncResult<U, E | EE>
+    fn: (value: T) => AsyncResult<U, E | EE>,
   ): AsyncResult<U, E | EE>;
   transform<U extends Val, Input = unknown>(
-    fn: (value: T) => SafeParseReturnType<Input, NonNullable<U>>
+    fn: (value: T) => SafeParseReturnType<Input, NonNullable<U>>,
   ): Result<U, E | ZodError<Input>>;
   transform<U extends Val, Input = unknown>(
-    fn: (value: T) => Promise<SafeParseReturnType<Input, NonNullable<U>>>
+    fn: (value: T) => Promise<SafeParseReturnType<Input, NonNullable<U>>>,
   ): AsyncResult<U, E | ZodError<Input>>;
   transform<U extends Val, EE extends Val>(
-    fn: (value: T) => Promise<Result<U, E | EE>>
+    fn: (value: T) => Promise<Result<U, E | EE>>,
   ): AsyncResult<U, E | EE>;
   transform<U extends Val>(
-    fn: (value: T) => Promise<RawValue<U>>
+    fn: (value: T) => Promise<RawValue<U>>,
   ): AsyncResult<U, E>;
   transform<U extends Val>(fn: (value: T) => RawValue<U>): Result<U, E>;
   transform<U extends Val, EE extends Val, Input = unknown>(
     fn: (
-      value: T
+      value: T,
     ) =>
       | Result<U, E | EE>
       | AsyncResult<U, E | EE>
@@ -433,7 +433,7 @@ export class Result<T extends Val, E extends Val = Error> {
       | Promise<SafeParseReturnType<Input, NonNullable<U>>>
       | Promise<Result<U, E | EE>>
       | Promise<RawValue<U>>
-      | RawValue<U>
+      | RawValue<U>,
   ):
     | Result<U, E | EE | ZodError<Input>>
     | AsyncResult<U, E | EE | ZodError<Input>> {
@@ -471,18 +471,21 @@ export class Result<T extends Val, E extends Val = Error> {
   }
 
   catch<U extends Val = T, EE extends Val = E>(
-    fn: (err: E) => Result<U, E | EE>
+    fn: (err: E) => Result<U, E | EE>,
   ): Result<T | U, E | EE>;
   catch<U extends Val = T, EE extends Val = E>(
-    fn: (err: E) => AsyncResult<U, E | EE>
+    fn: (err: E) => AsyncResult<U, E | EE>,
   ): AsyncResult<T | U, E | EE>;
   catch<U extends Val = T, EE extends Val = E>(
-    fn: (err: E) => Promise<Result<U, E | EE>>
+    fn: (err: E) => Promise<Result<U, E | EE>>,
   ): AsyncResult<T | U, E | EE>;
   catch<U extends Val = T, EE extends Val = E>(
     fn: (
-      err: E
-    ) => Result<U, E | EE> | AsyncResult<U, E | EE> | Promise<Result<U, E | EE>>
+      err: E,
+    ) =>
+      | Result<U, E | EE>
+      | AsyncResult<U, E | EE>
+      | Promise<Result<U, E | EE>>,
   ): Result<T | U, E | EE> | AsyncResult<T | U, E | EE> {
     if (this.res.ok) {
       return this;
@@ -499,7 +502,7 @@ export class Result<T extends Val, E extends Val = Error> {
         return AsyncResult.wrap(result, (err) => {
           logger.warn(
             { err },
-            'Result: unexpected error in async catch handler'
+            'Result: unexpected error in async catch handler',
           );
           return Result._uncaught(err);
         });
@@ -519,10 +522,10 @@ export class Result<T extends Val, E extends Val = Error> {
   static parse<
     T,
     Schema extends ZodType<T, ZodTypeDef, Input>,
-    Input = unknown
+    Input = unknown,
   >(
     input: unknown,
-    schema: Schema
+    schema: Schema,
   ): Result<NonNullable<z.infer<Schema>>, ZodError<Input>> {
     const parseResult = schema
       .transform((result, ctx): NonNullable<T> => {
@@ -554,7 +557,7 @@ export class Result<T extends Val, E extends Val = Error> {
    * Additionally, `null` and `undefined` values are converted into Zod error.
    */
   parse<T, Schema extends ZodType<T, ZodTypeDef, Input>, Input = unknown>(
-    schema: Schema
+    schema: Schema,
   ): Result<NonNullable<z.infer<Schema>>, E | ZodError<Input>> {
     if (this.res.ok) {
       return Result.parse(this.res.val, schema);
@@ -615,7 +618,7 @@ export class AsyncResult<T extends Val, E extends Val>
     onfulfilled?:
       | ((value: Result<T, E>) => TResult1 | PromiseLike<TResult1>)
       | undefined
-      | null
+      | null,
   ): PromiseLike<TResult1> {
     return this.asyncResult.then(onfulfilled);
   }
@@ -633,13 +636,13 @@ export class AsyncResult<T extends Val, E extends Val>
     T extends Val,
     E extends Val = Error,
     EE extends Val = never,
-    Input = unknown
+    Input = unknown,
   >(
     promise:
       | Promise<SafeParseReturnType<Input, T>>
       | Promise<Result<T, EE>>
       | Promise<RawValue<T>>,
-    onErr?: (err: NonNullable<E>) => Result<T, E>
+    onErr?: (err: NonNullable<E>) => Result<T, E>,
   ): AsyncResult<T, E | EE> {
     return new AsyncResult(
       promise
@@ -659,7 +662,7 @@ export class AsyncResult<T extends Val, E extends Val>
             return onErr(err);
           }
           return Result.err(err);
-        })
+        }),
     );
   }
 
@@ -667,16 +670,16 @@ export class AsyncResult<T extends Val, E extends Val>
     T extends Val,
     E extends Val,
     ErrForNull extends Val,
-    ErrForUndefined extends Val
+    ErrForUndefined extends Val,
   >(
     promise: Promise<Nullable<T>>,
     errForNull: NonNullable<ErrForNull>,
-    errForUndefined: NonNullable<ErrForUndefined>
+    errForUndefined: NonNullable<ErrForUndefined>,
   ): AsyncResult<T, E | ErrForNull | ErrForUndefined> {
     return new AsyncResult(
       promise
         .then((value) => fromNullable(value, errForNull, errForUndefined))
-        .catch((err) => Result.err(err))
+        .catch((err) => Result.err(err)),
     );
   }
 
@@ -745,27 +748,27 @@ export class AsyncResult<T extends Val, E extends Val>
    *   ```
    */
   transform<U extends Val, EE extends Val>(
-    fn: (value: T) => Result<U, E | EE>
+    fn: (value: T) => Result<U, E | EE>,
   ): AsyncResult<U, E | EE>;
   transform<U extends Val, EE extends Val>(
-    fn: (value: T) => AsyncResult<U, E | EE>
+    fn: (value: T) => AsyncResult<U, E | EE>,
   ): AsyncResult<U, E | EE>;
   transform<U extends Val, Input = unknown>(
-    fn: (value: T) => SafeParseReturnType<Input, NonNullable<U>>
+    fn: (value: T) => SafeParseReturnType<Input, NonNullable<U>>,
   ): AsyncResult<U, E | ZodError<Input>>;
   transform<U extends Val, Input = unknown>(
-    fn: (value: T) => Promise<SafeParseReturnType<Input, NonNullable<U>>>
+    fn: (value: T) => Promise<SafeParseReturnType<Input, NonNullable<U>>>,
   ): AsyncResult<U, E | ZodError<Input>>;
   transform<U extends Val, EE extends Val>(
-    fn: (value: T) => Promise<Result<U, E | EE>>
+    fn: (value: T) => Promise<Result<U, E | EE>>,
   ): AsyncResult<U, E | EE>;
   transform<U extends Val>(
-    fn: (value: T) => Promise<RawValue<U>>
+    fn: (value: T) => Promise<RawValue<U>>,
   ): AsyncResult<U, E>;
   transform<U extends Val>(fn: (value: T) => RawValue<U>): AsyncResult<U, E>;
   transform<U extends Val, EE extends Val, Input = unknown>(
     fn: (
-      value: T
+      value: T,
     ) =>
       | Result<U, E | EE>
       | AsyncResult<U, E | EE>
@@ -773,7 +776,7 @@ export class AsyncResult<T extends Val, E extends Val>
       | Promise<SafeParseReturnType<Input, NonNullable<U>>>
       | Promise<Result<U, E | EE>>
       | Promise<RawValue<U>>
-      | RawValue<U>
+      | RawValue<U>,
   ): AsyncResult<U, E | EE | ZodError<Input>> {
     return new AsyncResult(
       this.asyncResult
@@ -802,7 +805,7 @@ export class AsyncResult<T extends Val, E extends Val>
               return AsyncResult.wrap(result, (err) => {
                 logger.warn(
                   { err },
-                  'AsyncResult: unhandled async transform error'
+                  'AsyncResult: unhandled async transform error',
                 );
                 return Result._uncaught(err);
               });
@@ -817,27 +820,30 @@ export class AsyncResult<T extends Val, E extends Val>
         .catch((err) => {
           // Happens when `.unwrap()` of `oldResult` throws
           return Result._uncaught(err);
-        })
+        }),
     );
   }
 
   catch<U extends Val = T, EE extends Val = E>(
-    fn: (err: NonNullable<E>) => Result<U, E | EE>
+    fn: (err: NonNullable<E>) => Result<U, E | EE>,
   ): AsyncResult<T | U, E | EE>;
   catch<U extends Val = T, EE extends Val = E>(
-    fn: (err: NonNullable<E>) => AsyncResult<U, E | EE>
+    fn: (err: NonNullable<E>) => AsyncResult<U, E | EE>,
   ): AsyncResult<T | U, E | EE>;
   catch<U extends Val = T, EE extends Val = E>(
-    fn: (err: NonNullable<E>) => Promise<Result<U, E | EE>>
+    fn: (err: NonNullable<E>) => Promise<Result<U, E | EE>>,
   ): AsyncResult<T | U, E | EE>;
   catch<U extends Val = T, EE extends Val = E>(
     fn: (
-      err: NonNullable<E>
-    ) => Result<U, E | EE> | AsyncResult<U, E | EE> | Promise<Result<U, E | EE>>
+      err: NonNullable<E>,
+    ) =>
+      | Result<U, E | EE>
+      | AsyncResult<U, E | EE>
+      | Promise<Result<U, E | EE>>,
   ): AsyncResult<T | U, E | EE> {
     const caughtAsyncResult = this.asyncResult.then((result) =>
       // eslint-disable-next-line promise/no-nesting
-      result.catch(fn as never)
+      result.catch(fn as never),
     );
     return AsyncResult.wrap(caughtAsyncResult);
   }
@@ -847,15 +853,15 @@ export class AsyncResult<T extends Val, E extends Val>
    * Additionally, `null` and `undefined` values are converted into Zod error.
    */
   parse<T, Schema extends ZodType<T, ZodTypeDef, Input>, Input = unknown>(
-    schema: Schema
+    schema: Schema,
   ): AsyncResult<NonNullable<z.infer<Schema>>, E | ZodError<Input>> {
     return new AsyncResult(
       this.asyncResult
         .then((oldResult) => oldResult.parse(schema))
         .catch(
           /* istanbul ignore next: should never happen */
-          (err) => Result._uncaught(err)
-        )
+          (err) => Result._uncaught(err),
+        ),
     );
   }
 
@@ -865,8 +871,8 @@ export class AsyncResult<T extends Val, E extends Val>
         .then((result) => result.onValue(fn))
         .catch(
           /* istanbul ignore next: should never happen */
-          (err) => Result._uncaught(err)
-        )
+          (err) => Result._uncaught(err),
+        ),
     );
   }
 
@@ -876,8 +882,8 @@ export class AsyncResult<T extends Val, E extends Val>
         .then((result) => result.onError(fn))
         .catch(
           /* istanbul ignore next: should never happen */
-          (err) => Result._uncaught(err)
-        )
+          (err) => Result._uncaught(err),
+        ),
     );
   }
 }
diff --git a/lib/util/sanitize.spec.ts b/lib/util/sanitize.spec.ts
index aa64dc782b1b3500afd0efd04b6f5445268ffea7..d37bc7866d52db1675443d63379a7b0096fdf202 100644
--- a/lib/util/sanitize.spec.ts
+++ b/lib/util/sanitize.spec.ts
@@ -40,7 +40,7 @@ describe('util/sanitize', () => {
   it('sanitizes github app tokens', () => {
     addSecretForSanitizing('x-access-token:abc123');
     expect(sanitize(`hello ${toBase64('abc123')} world`)).toBe(
-      'hello **redacted** world'
+      'hello **redacted** world',
     );
   });
 });
diff --git a/lib/util/sanitize.ts b/lib/util/sanitize.ts
index 1ed7d3ce2b376582df69ad25a10f5d7a94f77a14..1fadb386194199863daff2e981238463520ec854 100644
--- a/lib/util/sanitize.ts
+++ b/lib/util/sanitize.ts
@@ -23,10 +23,10 @@ export const redactedFields = [
 // TODO: returns null or undefined only when input is null or undefined.
 export function sanitize(input: string): string;
 export function sanitize(
-  input: string | null | undefined
+  input: string | null | undefined,
 ): string | null | undefined;
 export function sanitize(
-  input: string | null | undefined
+  input: string | null | undefined,
 ): string | null | undefined {
   if (!input) {
     return input;
@@ -46,7 +46,7 @@ const GITHUB_APP_TOKEN_PREFIX = 'x-access-token:';
 
 export function addSecretForSanitizing(
   secret: string | undefined,
-  type = 'repo'
+  type = 'repo',
 ): void {
   if (!is.nonEmptyString(secret)) {
     return;
diff --git a/lib/util/schema-utils.spec.ts b/lib/util/schema-utils.spec.ts
index 23cbddbd46122ffc7b1336065f2221d0be595198..390c2ed2bb08327390121e20bbd7c09dfa54f0b2 100644
--- a/lib/util/schema-utils.spec.ts
+++ b/lib/util/schema-utils.spec.ts
@@ -65,7 +65,7 @@ describe('util/schema-utils', () => {
           .string()
           .refine((x) => x === 'bar')
           .transform((x) => x.toUpperCase()),
-        z.string().transform((x) => x.toUpperCase())
+        z.string().transform((x) => x.toUpperCase()),
       );
       expect(s.parse({ foo: 'foo', bar: 'bar' })).toEqual({ BAR: 'BAR' });
     });
@@ -79,7 +79,7 @@ describe('util/schema-utils', () => {
           onError: (x) => {
             errorData = x;
           },
-        }
+        },
       );
 
       s.parse({ foo: 'foo', bar: 'bar' });
@@ -106,7 +106,7 @@ describe('util/schema-utils', () => {
           onError: ({ error }) => {
             err = error;
           },
-        }
+        },
       );
 
       const res = Schema.parse({
@@ -272,7 +272,7 @@ describe('util/schema-utils', () => {
   describe('UtcDate', () => {
     it('parses date', () => {
       expect(UtcDate.parse('2020-04-04').toString()).toBe(
-        '2020-04-04T00:00:00.000Z'
+        '2020-04-04T00:00:00.000Z',
       );
     });
 
@@ -283,7 +283,7 @@ describe('util/schema-utils', () => {
 
   describe('Yaml', () => {
     const Schema = Yaml.pipe(
-      z.object({ foo: z.array(z.object({ bar: z.literal('baz') })) })
+      z.object({ foo: z.array(z.object({ bar: z.literal('baz') })) }),
     );
 
     it('parses valid yaml', () => {
@@ -330,8 +330,8 @@ describe('util/schema-utils', () => {
       z.array(
         z.object({
           foo: z.number(),
-        })
-      )
+        }),
+      ),
     );
 
     it('parses valid yaml', () => {
@@ -340,7 +340,7 @@ describe('util/schema-utils', () => {
           foo: 111
           ---
           foo: 222
-        `)
+        `),
       ).toEqual([{ foo: 111 }, { foo: 222 }]);
     });
 
@@ -379,7 +379,7 @@ describe('util/schema-utils', () => {
 
   describe('Toml', () => {
     const Schema = Toml.pipe(
-      z.object({ foo: z.object({ bar: z.literal('baz') }) })
+      z.object({ foo: z.object({ bar: z.literal('baz') }) }),
     );
 
     it('parses valid toml', () => {
diff --git a/lib/util/schema-utils.ts b/lib/util/schema-utils.ts
index 6cfdd3b228487a3ff3830707d3966596653e8ac9..f2b5bed1e490d7adf00b339cee9ea8d59085f8ec 100644
--- a/lib/util/schema-utils.ts
+++ b/lib/util/schema-utils.ts
@@ -26,7 +26,7 @@ interface LooseOpts<T> {
  */
 export function LooseArray<Schema extends z.ZodTypeAny>(
   Elem: Schema,
-  { onError }: LooseOpts<unknown[]> = {}
+  { onError }: LooseOpts<unknown[]> = {},
 ): z.ZodEffects<z.ZodArray<z.ZodAny, 'many'>, z.TypeOf<Schema>[], any[]> {
   if (!onError) {
     // Avoid error-related computations inside the loop
@@ -72,7 +72,7 @@ export function LooseArray<Schema extends z.ZodTypeAny>(
 
 type LooseRecordResult<
   KeySchema extends z.ZodTypeAny,
-  ValueSchema extends z.ZodTypeAny
+  ValueSchema extends z.ZodTypeAny,
 > = z.ZodEffects<
   z.ZodRecord<z.ZodString, z.ZodAny>,
   Record<z.TypeOf<KeySchema>, z.TypeOf<ValueSchema>>,
@@ -81,7 +81,7 @@ type LooseRecordResult<
 
 type LooseRecordOpts<
   KeySchema extends z.ZodTypeAny,
-  ValueSchema extends z.ZodTypeAny
+  ValueSchema extends z.ZodTypeAny,
 > = LooseOpts<Record<z.TypeOf<KeySchema> | z.TypeOf<ValueSchema>, unknown>>;
 
 /**
@@ -96,34 +96,34 @@ type LooseRecordOpts<
  * @returns Schema for record
  */
 export function LooseRecord<ValueSchema extends z.ZodTypeAny>(
-  Value: ValueSchema
+  Value: ValueSchema,
 ): LooseRecordResult<z.ZodString, ValueSchema>;
 export function LooseRecord<
   KeySchema extends z.ZodTypeAny,
-  ValueSchema extends z.ZodTypeAny
+  ValueSchema extends z.ZodTypeAny,
 >(
   Key: KeySchema,
-  Value: ValueSchema
+  Value: ValueSchema,
 ): LooseRecordResult<KeySchema, ValueSchema>;
 export function LooseRecord<ValueSchema extends z.ZodTypeAny>(
   Value: ValueSchema,
-  { onError }: LooseRecordOpts<z.ZodString, ValueSchema>
+  { onError }: LooseRecordOpts<z.ZodString, ValueSchema>,
 ): LooseRecordResult<z.ZodString, ValueSchema>;
 export function LooseRecord<
   KeySchema extends z.ZodTypeAny,
-  ValueSchema extends z.ZodTypeAny
+  ValueSchema extends z.ZodTypeAny,
 >(
   Key: KeySchema,
   Value: ValueSchema,
-  { onError }: LooseRecordOpts<KeySchema, ValueSchema>
+  { onError }: LooseRecordOpts<KeySchema, ValueSchema>,
 ): LooseRecordResult<KeySchema, ValueSchema>;
 export function LooseRecord<
   KeySchema extends z.ZodTypeAny,
-  ValueSchema extends z.ZodTypeAny
+  ValueSchema extends z.ZodTypeAny,
 >(
   arg1: ValueSchema | KeySchema,
   arg2?: ValueSchema | LooseOpts<Record<string, unknown>>,
-  arg3?: LooseRecordOpts<KeySchema, ValueSchema>
+  arg3?: LooseRecordOpts<KeySchema, ValueSchema>,
 ): LooseRecordResult<KeySchema, ValueSchema> {
   let Key: z.ZodSchema = z.any();
   let Value: ValueSchema;
diff --git a/lib/util/streams.ts b/lib/util/streams.ts
index fe3fec56513f4e0ddfdc4b0b4f21d43878ca87c6..04abc1f8140df07a55d651cd3cd9c788b591e821 100644
--- a/lib/util/streams.ts
+++ b/lib/util/streams.ts
@@ -1,7 +1,7 @@
 import { Readable } from 'node:stream';
 
 export async function streamToString(
-  stream: NodeJS.ReadableStream
+  stream: NodeJS.ReadableStream,
 ): Promise<string> {
   const readable = Readable.from(stream);
   const chunks: Uint8Array[] = [];
diff --git a/lib/util/string.ts b/lib/util/string.ts
index f82e8f260eee7e9af94bf256a5c018315122afaa..a5da463bcd3a38e9924ad0a6b4a4470e61d623c4 100644
--- a/lib/util/string.ts
+++ b/lib/util/string.ts
@@ -2,7 +2,7 @@
 export function matchAt(
   content: string,
   index: number,
-  match: string
+  match: string,
 ): boolean {
   return content.substring(index, index + match.length) === match;
 }
@@ -12,7 +12,7 @@ export function replaceAt(
   content: string,
   index: number,
   oldString: string,
-  newString: string
+  newString: string,
 ): string {
   return (
     content.substring(0, index) +
@@ -38,14 +38,14 @@ export function fromBase64(input: string): string {
 export function uniqueStrings(
   element: string,
   index: number,
-  elements: string[]
+  elements: string[],
 ): boolean {
   return elements.indexOf(element) === index;
 }
 
 export function looseEquals(
   a: string | null | undefined,
-  b: string | null | undefined
+  b: string | null | undefined,
 ): boolean {
   if (!(a && b)) {
     return a === b;
@@ -90,7 +90,7 @@ export function copystr(x: string): string {
  */
 export function coerceString(
   val: string | null | undefined,
-  def?: string
+  def?: string,
 ): string {
   return val ?? def ?? '';
 }
diff --git a/lib/util/template/index.spec.ts b/lib/util/template/index.spec.ts
index f60c5c276a26f34c38df5a236fb9d57fe2ea1555..e0a1c90d828286bb1ddd7419367a204885e57000 100644
--- a/lib/util/template/index.spec.ts
+++ b/lib/util/template/index.spec.ts
@@ -22,7 +22,7 @@ describe('util/template/index', () => {
   it('has valid exposed config options', () => {
     const allOptions = getOptions().map((option) => option.name);
     const missingOptions = template.exposedConfigOptions.filter(
-      (option) => !allOptions.includes(option)
+      (option) => !allOptions.includes(option),
     );
     expect(missingOptions).toEqual([]);
   });
@@ -170,20 +170,20 @@ describe('util/template/index', () => {
       expect(
         template.containsTemplates(
           '{{#if logJSON}}{{logJSON}}{{/if}}',
-          'logJSON'
-        )
+          'logJSON',
+        ),
       ).toBeTrue();
       expect(
         template.containsTemplates(
           '{{#with logJSON.hasReleaseNotes as | hasNotes |}}{{hasNotes}}{{/if}}',
-          'logJSON'
-        )
+          'logJSON',
+        ),
       ).toBeTrue();
       expect(
         template.containsTemplates(
           '{{#if logJSON.hasReleaseNotes}}has notes{{/if}}',
-          'logJSON'
-        )
+          'logJSON',
+        ),
       ).toBeTrue();
     });
 
@@ -196,7 +196,7 @@ describe('util/template/index', () => {
     it('encodes values', () => {
       const output = template.compile(
         '{{{encodeURIComponent "@fsouza/prettierd"}}}',
-        undefined as never
+        undefined as never,
       );
       expect(output).toBe('%40fsouza%2Fprettierd');
     });
@@ -204,7 +204,7 @@ describe('util/template/index', () => {
     it('decodes values', () => {
       const output = template.compile(
         '{{{decodeURIComponent "%40fsouza/prettierd"}}}',
-        undefined as never
+        undefined as never,
       );
       expect(output).toBe('@fsouza/prettierd');
     });
@@ -217,7 +217,7 @@ describe('util/template/index', () => {
         {
           datasource: 'git-refs',
           packageName: 'renovatebot/renovate',
-        }
+        },
       );
       expect(output).toBe('https://github.com/renovatebot/renovate');
     });
@@ -228,7 +228,7 @@ describe('util/template/index', () => {
         {
           datasource: 'github-releases',
           packageName: 'renovatebot/renovate',
-        }
+        },
       );
       expect(output).toBe('renovatebot/renovate');
     });
@@ -238,7 +238,7 @@ describe('util/template/index', () => {
         '{{#if (equals newMajor "3")}}equals{{else}}not equals{{/if}}',
         {
           newMajor: 3,
-        }
+        },
       );
       expect(output).toBe('not equals');
     });
diff --git a/lib/util/template/index.ts b/lib/util/template/index.ts
index 4d6b32c7c3dc303953470157b5000a497014c0af..10eed7681638b57ff395d353f01566966b31abca 100644
--- a/lib/util/template/index.ts
+++ b/lib/util/template/index.ts
@@ -8,20 +8,20 @@ handlebars.registerHelper('encodeURIComponent', encodeURIComponent);
 handlebars.registerHelper('decodeURIComponent', decodeURIComponent);
 
 handlebars.registerHelper('stringToPrettyJSON', (input: string): string =>
-  JSON.stringify(JSON.parse(input), null, 2)
+  JSON.stringify(JSON.parse(input), null, 2),
 );
 
 // istanbul ignore next
 handlebars.registerHelper(
   'replace',
   (find, replace, context) =>
-    (context || '').replace(new RegExp(find, 'g'), replace) // TODO #12873
+    (context || '').replace(new RegExp(find, 'g'), replace), // TODO #12873
 );
 
 handlebars.registerHelper('lowercase', (str: string) => str?.toLowerCase());
 
 handlebars.registerHelper('containsString', (str, subStr) =>
-  str?.includes(subStr)
+  str?.includes(subStr),
 );
 
 handlebars.registerHelper('equals', (arg1, arg2) => arg1 === arg2);
@@ -193,7 +193,7 @@ const compileInputProxyHandler: ProxyHandler<CompileInput> = {
       return value.map((element) =>
         is.primitive(element)
           ? element
-          : proxyCompileInput(element as CompileInput)
+          : proxyCompileInput(element as CompileInput),
       );
     }
 
@@ -215,7 +215,7 @@ const templateRegex =
 export function compile(
   template: string,
   input: CompileInput,
-  filterFields = true
+  filterFields = true,
 ): string {
   const env = getChildEnv({});
   const data = { ...GlobalConfig.get(), ...input, env };
@@ -232,7 +232,7 @@ export function compile(
         if (!allowedFieldsList.includes(varName)) {
           logger.info(
             { varName, template },
-            'Disallowed variable name in template'
+            'Disallowed variable name in template',
           );
         }
       }
@@ -244,7 +244,7 @@ export function compile(
 export function safeCompile(
   template: string,
   input: CompileInput,
-  filterFields = true
+  filterFields = true,
 ): string {
   try {
     return compile(template, input, filterFields);
@@ -256,7 +256,7 @@ export function safeCompile(
 
 export function containsTemplates(
   value: unknown,
-  templates: string | string[]
+  templates: string | string[],
 ): boolean {
   if (!is.string(value)) {
     return false;
diff --git a/lib/util/uniq.ts b/lib/util/uniq.ts
index 60079afa3c9bfd3a54e455a3355d98326acd9fb2..baf56d8ab8e015f205ee0d853dc92e320fca9f93 100644
--- a/lib/util/uniq.ts
+++ b/lib/util/uniq.ts
@@ -1,6 +1,6 @@
 export function uniq<T = unknown>(
   array: T[],
-  eql = (x: T, y: T): boolean => x === y
+  eql = (x: T, y: T): boolean => x === y,
 ): T[] {
   return array.filter((x, idx, arr) => arr.findIndex((y) => eql(x, y)) === idx);
 }
diff --git a/lib/util/url.spec.ts b/lib/util/url.spec.ts
index 8978f1fa610a1626f4d7a3c880741211767b49b9..cbc946437275548e7856400b9abb334450c8955b 100644
--- a/lib/util/url.spec.ts
+++ b/lib/util/url.spec.ts
@@ -143,19 +143,19 @@ describe('util/url', () => {
 
   it('ensures path prefix', () => {
     expect(ensurePathPrefix('https://index.docker.io', '/v2')).toBe(
-      'https://index.docker.io/v2/'
+      'https://index.docker.io/v2/',
     );
     expect(ensurePathPrefix('https://index.docker.io/v2', '/v2')).toBe(
-      'https://index.docker.io/v2'
+      'https://index.docker.io/v2',
     );
     expect(
-      ensurePathPrefix('https://index.docker.io/v2/something', '/v2')
+      ensurePathPrefix('https://index.docker.io/v2/something', '/v2'),
     ).toBe('https://index.docker.io/v2/something');
     expect(ensurePathPrefix('https://index.docker.io:443', '/v2')).toBe(
-      'https://index.docker.io/v2/'
+      'https://index.docker.io/v2/',
     );
     expect(
-      ensurePathPrefix('https://index.docker.io/something?with=query', '/v2')
+      ensurePathPrefix('https://index.docker.io/something?with=query', '/v2'),
     ).toBe('https://index.docker.io/v2/something?with=query');
   });
 
@@ -164,23 +164,23 @@ describe('util/url', () => {
     expect(joinUrlParts(registryUrl, 'foo')).toBe(`${registryUrl}/foo`);
     expect(joinUrlParts(registryUrl, '/?foo')).toBe(`${registryUrl}?foo`);
     expect(joinUrlParts(registryUrl, '/foo/bar/')).toBe(
-      `${registryUrl}/foo/bar/`
+      `${registryUrl}/foo/bar/`,
     );
     expect(joinUrlParts(`${registryUrl}/foo/`, '/foo/bar')).toBe(
-      `${registryUrl}/foo/foo/bar`
+      `${registryUrl}/foo/foo/bar`,
     );
     expect(joinUrlParts(`${registryUrl}/api/`, '/foo/bar')).toBe(
-      `${registryUrl}/api/foo/bar`
+      `${registryUrl}/api/foo/bar`,
     );
     expect(joinUrlParts('foo//////')).toBe('foo/');
   });
 
   it('createURLFromHostOrURL', () => {
     expect(createURLFromHostOrURL('https://some.test')).toEqual(
-      new URL('https://some.test/')
+      new URL('https://some.test/'),
     );
     expect(createURLFromHostOrURL('some.test')).toEqual(
-      new URL('https://some.test/')
+      new URL('https://some.test/'),
     );
   });
 
@@ -191,8 +191,8 @@ describe('util/url', () => {
       parseLinkHeader(
         '<https://api.github.com/user/9287/repos?page=3&per_page=100>; rel="next",' +
           '<https://api.github.com/user/9287/repos?page=1&per_page=100>; rel="prev"; pet="cat", ' +
-          '<https://api.github.com/user/9287/repos?page=5&per_page=100>; rel="last"'
-      )
+          '<https://api.github.com/user/9287/repos?page=5&per_page=100>; rel="last"',
+      ),
     ).toStrictEqual({
       next: {
         page: '3',
diff --git a/lib/util/url.ts b/lib/util/url.ts
index 90400e58eb9d0d27ea13fc20de3a99674b4b99b3..c28d421e41c389b3a876802e4f2c8682579db0ec 100644
--- a/lib/util/url.ts
+++ b/lib/util/url.ts
@@ -87,7 +87,7 @@ export function getQueryString(params: Record<string, any>): string {
 
 export function validateUrl(
   url: string | null | undefined,
-  httpOnly = true
+  httpOnly = true,
 ): boolean {
   if (!is.nonEmptyString(url)) {
     return false;
@@ -124,7 +124,7 @@ export function createURLFromHostOrURL(url: string): URL | null {
 export type LinkHeaderLinks = _parseLinkHeader.Links;
 
 export function parseLinkHeader(
-  linkHeader: string | null | undefined
+  linkHeader: string | null | undefined,
 ): LinkHeaderLinks | null {
   if (!is.nonEmptyString(linkHeader)) {
     return null;
diff --git a/lib/util/vulnerability/utils.ts b/lib/util/vulnerability/utils.ts
index bc451a7db5365892ebcf69966e14eb9d4d3df3dd..3306631cd9ab896fbc2a824695b5ba6c43556233 100644
--- a/lib/util/vulnerability/utils.ts
+++ b/lib/util/vulnerability/utils.ts
@@ -9,7 +9,7 @@ const severityOrder: Record<string, number> = {
 
 export function getHighestVulnerabilitySeverity<
   T extends Record<string, any>,
-  TChild extends Record<string, any> | undefined
+  TChild extends Record<string, any> | undefined,
 >(parent: T, child: TChild): string {
   const parentVulSeverity = parent.vulnerabilitySeverity?.toUpperCase();
   const childVulSeverity = child?.vulnerabilitySeverity?.toUpperCase();
diff --git a/lib/workers/global/autodiscover.spec.ts b/lib/workers/global/autodiscover.spec.ts
index 6f20520b1383055a0bf479e29bc0c41365802418..e1934e2b049cbaf579513b09612f78b9b9a97f2f 100644
--- a/lib/workers/global/autodiscover.spec.ts
+++ b/lib/workers/global/autodiscover.spec.ts
@@ -71,7 +71,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'project/another-repo'])
+      Promise.resolve(['project/repo', 'project/another-repo']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(['project/repo']);
@@ -85,7 +85,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'project/.github'])
+      Promise.resolve(['project/repo', 'project/.github']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(['project/repo', 'project/.github']);
@@ -99,7 +99,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['another-project/repo', 'another-project/another-repo'])
+      Promise.resolve(['another-project/repo', 'another-project/another-repo']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res).toEqual(config);
@@ -113,7 +113,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'project/another-repo'])
+      Promise.resolve(['project/repo', 'project/another-repo']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(['project/repo']);
@@ -127,7 +127,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'project/another-repo'])
+      Promise.resolve(['project/repo', 'project/another-repo']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(['project/another-repo']);
@@ -141,7 +141,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'project/another-repo'])
+      Promise.resolve(['project/repo', 'project/another-repo']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(['project/another-repo']);
@@ -155,7 +155,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'project/another-repo'])
+      Promise.resolve(['project/repo', 'project/another-repo']),
     );
     await expect(autodiscoverRepositories(config)).rejects.toThrow();
   });
@@ -172,7 +172,10 @@ describe('workers/global/autodiscover', () => {
       'department/dev/aProject',
     ];
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['another-project/another-repo', ...expectedRepositories])
+      Promise.resolve([
+        'another-project/another-repo',
+        ...expectedRepositories,
+      ]),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(expectedRepositories);
@@ -186,7 +189,7 @@ describe('workers/global/autodiscover', () => {
       token: 'abc',
     }));
     ghApi.getRepos = jest.fn(() =>
-      Promise.resolve(['project/repo', 'PROJECT/repo2'])
+      Promise.resolve(['project/repo', 'PROJECT/repo2']),
     );
     const res = await autodiscoverRepositories(config);
     expect(res.repositories).toEqual(['project/repo', 'PROJECT/repo2']);
diff --git a/lib/workers/global/autodiscover.ts b/lib/workers/global/autodiscover.ts
index 92f6f653a9fbf6dc895256975c1c4eca25a27a42..4eaab2e746d27b6dbd4869254ac6b37e6229f3c9 100644
--- a/lib/workers/global/autodiscover.ts
+++ b/lib/workers/global/autodiscover.ts
@@ -11,17 +11,17 @@ function repoName(value: string | { repository: string }): string {
 }
 
 export async function autodiscoverRepositories(
-  config: AllConfig
+  config: AllConfig,
 ): Promise<AllConfig> {
   const { autodiscoverFilter } = config;
   if (config.platform === 'local') {
     if (config.repositories?.length) {
       logger.debug(
         { repositories: config.repositories },
-        'Found repositories when in local mode'
+        'Found repositories when in local mode',
       );
       throw new Error(
-        'Invalid configuration: repositories list not supported when platform=local'
+        'Invalid configuration: repositories list not supported when platform=local',
       );
     }
     config.repositories = ['local'];
@@ -30,7 +30,7 @@ export async function autodiscoverRepositories(
   if (!config.autodiscover) {
     if (!config.repositories?.length) {
       logger.warn(
-        'No repositories found - did you want to run with flag --autodiscover?'
+        'No repositories found - did you want to run with flag --autodiscover?',
       );
     }
     return config;
@@ -53,7 +53,7 @@ export async function autodiscoverRepositories(
     logger.debug({ autodiscoverFilter }, 'Applying autodiscoverFilter');
     discovered = applyFilters(
       discovered,
-      is.string(autodiscoverFilter) ? [autodiscoverFilter] : autodiscoverFilter
+      is.string(autodiscoverFilter) ? [autodiscoverFilter] : autodiscoverFilter,
     );
 
     if (!discovered.length) {
@@ -62,7 +62,7 @@ export async function autodiscoverRepositories(
       return config;
     }
     logger.debug(
-      `Autodiscovered ${discovered.length} repositories after filter`
+      `Autodiscovered ${discovered.length} repositories after filter`,
     );
   }
 
@@ -71,7 +71,7 @@ export async function autodiscoverRepositories(
   // istanbul ignore if
   if (config.repositories?.length) {
     logger.debug(
-      'Checking autodiscovered repositories against configured repositories'
+      'Checking autodiscovered repositories against configured repositories',
     );
     for (const configuredRepo of config.repositories) {
       const repository = repoName(configuredRepo);
@@ -87,7 +87,7 @@ export async function autodiscoverRepositories(
       if (!found) {
         logger.warn(
           { repository },
-          'Configured repository is in not in autodiscover list'
+          'Configured repository is not in autodiscover list',
         );
       }
     }
diff --git a/lib/workers/global/config/parse/cli.spec.ts b/lib/workers/global/config/parse/cli.spec.ts
index dd9ac01d21a0e59868d335736465672820985aea..635757ba834aeef1635a5adea2f7ca801c2a63e8 100644
--- a/lib/workers/global/config/parse/cli.spec.ts
+++ b/lib/workers/global/config/parse/cli.spec.ts
@@ -49,8 +49,8 @@ describe('workers/global/config/parse/cli', () => {
       argv.push('badvalue');
       expect(() => cli.getConfig(argv)).toThrow(
         Error(
-          "Invalid boolean value: expected 'true' or 'false', but got 'badvalue'"
-        )
+          "Invalid boolean value: expected 'true' or 'false', but got 'badvalue'",
+        ),
       );
     });
 
@@ -93,7 +93,7 @@ describe('workers/global/config/parse/cli', () => {
 
     it('parses json lists correctly', () => {
       argv.push(
-        `--host-rules=[{"matchHost":"docker.io","hostType":"${DockerDatasource.id}","username":"user","password":"password"}]`
+        `--host-rules=[{"matchHost":"docker.io","hostType":"${DockerDatasource.id}","username":"user","password":"password"}]`,
       );
       expect(cli.getConfig(argv)).toEqual({
         hostRules: [
@@ -167,7 +167,7 @@ describe('workers/global/config/parse/cli', () => {
     it('throws exception for invalid json object', () => {
       argv.push('--onboarding-config=Hello_World');
       expect(() => cli.getConfig(argv)).toThrow(
-        Error("Invalid JSON value: 'Hello_World'")
+        Error("Invalid JSON value: 'Hello_World'"),
       );
     });
 
diff --git a/lib/workers/global/config/parse/cli.ts b/lib/workers/global/config/parse/cli.ts
index 040618ad05cf68c8fa7cf2232fd76edc5def389d..7830852e74e2aae2e49babe36e153a0cdde26147 100644
--- a/lib/workers/global/config/parse/cli.ts
+++ b/lib/workers/global/config/parse/cli.ts
@@ -36,7 +36,7 @@ export function getConfig(input: string[]): AllConfig {
         .replace('--include-forks', '--fork-processing=enabled')
         .replace('--recreate-closed=false', '--recreate-when=auto')
         .replace('--recreate-closed=true', '--recreate-when=always')
-        .replace('--recreate-closed', '--recreate-when=always')
+        .replace('--recreate-closed', '--recreate-when=always'),
     )
     .filter((a) => !a.startsWith('--git-fs'));
   const options = getOptions();
@@ -52,7 +52,7 @@ export function getConfig(input: string[]): AllConfig {
       program = program.option(
         optionString,
         option.description,
-        coersions[option.type]
+        coersions[option.type],
       );
     }
   });
@@ -64,11 +64,11 @@ export function getConfig(input: string[]): AllConfig {
     console.log('');
     console.log('    $ renovate --token 123test singapore/lint-condo');
     console.log(
-      '    $ LOG_LEVEL=debug renovate --labels=renovate,dependency --ignore-unstable=false singapore/lint-condo'
+      '    $ LOG_LEVEL=debug renovate --labels=renovate,dependency --ignore-unstable=false singapore/lint-condo',
     );
     console.log('    $ renovate singapore/lint-condo singapore/package-test');
     console.log(
-      `    $ renovate singapore/lint-condo --onboarding-config='{"extends":["config:recommended"]}'`
+      `    $ renovate singapore/lint-condo --onboarding-config='{"extends":["config:recommended"]}'`,
     );
     /* eslint-enable no-console */
   }
@@ -88,12 +88,12 @@ export function getConfig(input: string[]): AllConfig {
             if (option.name === 'dryRun') {
               if (config[option.name] === 'true') {
                 logger.warn(
-                  'cli config dryRun property has been changed to full'
+                  'cli config dryRun property has been changed to full',
                 );
                 config[option.name] = 'full';
               } else if (config[option.name] === 'false') {
                 logger.warn(
-                  'cli config dryRun property has been changed to null'
+                  'cli config dryRun property has been changed to null',
                 );
                 config[option.name] = null;
               } else if (config[option.name] === 'null') {
@@ -103,12 +103,12 @@ export function getConfig(input: string[]): AllConfig {
             if (option.name === 'requireConfig') {
               if (config[option.name] === 'true') {
                 logger.warn(
-                  'cli config requireConfig property has been changed to required'
+                  'cli config requireConfig property has been changed to required',
                 );
                 config[option.name] = 'required';
               } else if (config[option.name] === 'false') {
                 logger.warn(
-                  'cli config requireConfig property has been changed to optional'
+                  'cli config requireConfig property has been changed to optional',
                 );
                 config[option.name] = 'optional';
               }
diff --git a/lib/workers/global/config/parse/coersions.ts b/lib/workers/global/config/parse/coersions.ts
index 199b80afa5b8a2aeb4d31e693de34015b35bde53..88fe1a7778b78bee3cdcd04fec7f222980fc85e4 100644
--- a/lib/workers/global/config/parse/coersions.ts
+++ b/lib/workers/global/config/parse/coersions.ts
@@ -10,7 +10,9 @@ export const coersions: Record<string, (arg: string) => unknown> = {
       return false;
     }
     throw new Error(
-      "Invalid boolean value: expected 'true' or 'false', but got '" + val + "'"
+      "Invalid boolean value: expected 'true' or 'false', but got '" +
+        val +
+        "'",
     );
   },
   array: (val: string): string[] => {
diff --git a/lib/workers/global/config/parse/env.spec.ts b/lib/workers/global/config/parse/env.spec.ts
index 784c219b2c4787b28c2501062fe7f4a9e272c519..767624090e850a0b6fbf4fbb60cb3d2d784fdd75 100644
--- a/lib/workers/global/config/parse/env.spec.ts
+++ b/lib/workers/global/config/parse/env.spec.ts
@@ -27,8 +27,8 @@ describe('workers/global/config/parse/env', () => {
       };
       expect(() => env.getConfig(envParam)).toThrow(
         Error(
-          "Invalid boolean value: expected 'true' or 'false', but got 'badvalue'"
-        )
+          "Invalid boolean value: expected 'true' or 'false', but got 'badvalue'",
+        ),
       );
     });
 
@@ -106,7 +106,7 @@ describe('workers/global/config/parse/env', () => {
       expect(res).toEqual({ hostRules: [] });
       expect(logger.debug).toHaveBeenLastCalledWith(
         { val, envName },
-        'Could not parse object array'
+        'Could not parse object array',
       );
     });
 
@@ -120,7 +120,7 @@ describe('workers/global/config/parse/env', () => {
       expect(res).toEqual({ hostRules: [] });
       expect(logger.debug).toHaveBeenLastCalledWith(
         { val, envName },
-        'Could not parse environment variable'
+        'Could not parse environment variable',
       );
     });
 
diff --git a/lib/workers/global/config/parse/env.ts b/lib/workers/global/config/parse/env.ts
index 81f7d3384dd365bfaee8576c73e041c141629361..4db7bf7e9cac114203aed71251450f4d322fec5a 100644
--- a/lib/workers/global/config/parse/env.ts
+++ b/lib/workers/global/config/parse/env.ts
@@ -8,7 +8,7 @@ import type { ParseConfigOptions } from './types';
 
 function normalizePrefixes(
   env: NodeJS.ProcessEnv,
-  prefix: string | undefined
+  prefix: string | undefined,
 ): NodeJS.ProcessEnv {
   const result = { ...env };
   if (prefix) {
@@ -118,13 +118,13 @@ export function getConfig(inputEnv: NodeJS.ProcessEnv): AllConfig {
             } else {
               logger.debug(
                 { val: envVal, envName },
-                'Could not parse object array'
+                'Could not parse object array',
               );
             }
           } catch (err) {
             logger.debug(
               { val: envVal, envName },
-              'Could not parse environment variable'
+              'Could not parse environment variable',
             );
           }
         } else {
@@ -133,12 +133,12 @@ export function getConfig(inputEnv: NodeJS.ProcessEnv): AllConfig {
           if (option.name === 'dryRun') {
             if ((config[option.name] as string) === 'true') {
               logger.warn(
-                'env config dryRun property has been changed to full'
+                'env config dryRun property has been changed to full',
               );
               config[option.name] = 'full';
             } else if ((config[option.name] as string) === 'false') {
               logger.warn(
-                'env config dryRun property has been changed to null'
+                'env config dryRun property has been changed to null',
               );
               delete config[option.name];
             } else if ((config[option.name] as string) === 'null') {
@@ -148,12 +148,12 @@ export function getConfig(inputEnv: NodeJS.ProcessEnv): AllConfig {
           if (option.name === 'requireConfig') {
             if ((config[option.name] as string) === 'true') {
               logger.warn(
-                'env config requireConfig property has been changed to required'
+                'env config requireConfig property has been changed to required',
               );
               config[option.name] = 'required';
             } else if ((config[option.name] as string) === 'false') {
               logger.warn(
-                'env config requireConfig property has been changed to optional'
+                'env config requireConfig property has been changed to optional',
               );
               config[option.name] = 'optional';
             }
diff --git a/lib/workers/global/config/parse/file.spec.ts b/lib/workers/global/config/parse/file.spec.ts
index 05fcc01fb88162f922f7ad81685981906834797b..96ad95cbeab0f50ceaa4266dea996da9c2ae1611 100644
--- a/lib/workers/global/config/parse/file.spec.ts
+++ b/lib/workers/global/config/parse/file.spec.ts
@@ -41,7 +41,7 @@ describe('workers/global/config/parse/file', () => {
     ])('parses %s', async (_fileType, filePath) => {
       const configFile = upath.resolve(__dirname, './__fixtures__/', filePath);
       expect(
-        await file.getConfig({ RENOVATE_CONFIG_FILE: configFile })
+        await file.getConfig({ RENOVATE_CONFIG_FILE: configFile }),
       ).toEqual(customConfig);
     });
 
@@ -82,7 +82,7 @@ describe('workers/global/config/parse/file', () => {
         await file.getConfig({ RENOVATE_CONFIG_FILE: configFile });
         expect(processExitSpy).toHaveBeenCalledWith(1);
         fs.unlinkSync(configFile);
-      }
+      },
     );
 
     it('fatal error and exit if custom config file does not exist', async () => {
@@ -101,7 +101,7 @@ describe('workers/global/config/parse/file', () => {
 
       const configFile = upath.resolve(
         __dirname,
-        './__fixtures__/config-ref-error.js-invalid'
+        './__fixtures__/config-ref-error.js-invalid',
       );
       const tmpDir = tmp.path;
       await fsExtra.ensureDir(tmpDir);
@@ -112,7 +112,7 @@ describe('workers/global/config/parse/file', () => {
       await file.getConfig({ RENOVATE_CONFIG_FILE: tmpConfigFile });
 
       expect(logger.fatal).toHaveBeenCalledWith(
-        `Error parsing config file due to unresolved variable(s): CI_API_V4_URL is not defined`
+        `Error parsing config file due to unresolved variable(s): CI_API_V4_URL is not defined`,
       );
       expect(processExitSpy).toHaveBeenCalledWith(1);
     });
@@ -159,7 +159,7 @@ describe('workers/global/config/parse/file', () => {
         await file.deleteNonDefaultConfig({ RENOVATE_CONFIG_FILE: configFile });
 
         expect(fsRemoveSpy).toHaveBeenCalledTimes(0);
-      }
+      },
     );
 
     it('skip when config file does not exist', async () => {
@@ -184,7 +184,7 @@ describe('workers/global/config/parse/file', () => {
         });
 
         expect(fsRemoveSpy).toHaveBeenCalledTimes(0);
-      }
+      },
     );
 
     it('removes the specified config file', async () => {
@@ -203,7 +203,7 @@ describe('workers/global/config/parse/file', () => {
       expect(fsRemoveSpy).toHaveBeenCalledWith(configFile);
       expect(logger.trace).toHaveBeenCalledWith(
         expect.anything(),
-        'config file successfully deleted'
+        'config file successfully deleted',
       );
     });
 
@@ -223,7 +223,7 @@ describe('workers/global/config/parse/file', () => {
       expect(fsRemoveSpy).toHaveBeenCalledWith(configFile);
       expect(logger.warn).toHaveBeenCalledWith(
         expect.anything(),
-        'error deleting config file'
+        'error deleting config file',
       );
     });
   });
diff --git a/lib/workers/global/config/parse/file.ts b/lib/workers/global/config/parse/file.ts
index 679fdbb0c22f1a82933f4826a837ffd15ce370ee..3d0ecb7d9d42eea5d00fd6f88779526c701523cf 100644
--- a/lib/workers/global/config/parse/file.ts
+++ b/lib/workers/global/config/parse/file.ts
@@ -23,7 +23,7 @@ export async function getParsedContent(file: string): Promise<RenovateConfig> {
     case '.json':
       return parseJson(
         await readSystemFile(file, 'utf8'),
-        file
+        file,
       ) as RenovateConfig;
     case '.js': {
       const tmpConfig = await import(file);
@@ -50,7 +50,7 @@ export async function getConfig(env: NodeJS.ProcessEnv): Promise<AllConfig> {
   if (env.RENOVATE_CONFIG_FILE && !(await fs.pathExists(configFile))) {
     logger.fatal(
       { configFile },
-      `Custom config file specified in RENOVATE_CONFIG_FILE must exist`
+      `Custom config file specified in RENOVATE_CONFIG_FILE must exist`,
     );
     process.exit(1);
   }
@@ -65,7 +65,7 @@ export async function getConfig(env: NodeJS.ProcessEnv): Promise<AllConfig> {
       process.exit(1);
     } else if (err instanceof ReferenceError) {
       logger.fatal(
-        `Error parsing config file due to unresolved variable(s): ${err.message}`
+        `Error parsing config file due to unresolved variable(s): ${err.message}`,
       );
       process.exit(1);
     } else if (err.message === 'Unsupported file type') {
@@ -85,7 +85,7 @@ export async function getConfig(env: NodeJS.ProcessEnv): Promise<AllConfig> {
   if (isMigrated) {
     logger.warn(
       { originalConfig: config, migratedConfig },
-      'Config needs migrating'
+      'Config needs migrating',
     );
     config = migratedConfig;
   }
@@ -93,7 +93,7 @@ export async function getConfig(env: NodeJS.ProcessEnv): Promise<AllConfig> {
 }
 
 export async function deleteNonDefaultConfig(
-  env: NodeJS.ProcessEnv
+  env: NodeJS.ProcessEnv,
 ): Promise<void> {
   const configFile = env.RENOVATE_CONFIG_FILE;
 
diff --git a/lib/workers/global/config/parse/host-rules-from-env.ts b/lib/workers/global/config/parse/host-rules-from-env.ts
index 42b0c2928e1cb503ee7ca46ea4af044b283a1314..58936a5ac3aaf8c74f37eefa2d3c9c6f953f3fb1 100644
--- a/lib/workers/global/config/parse/host-rules-from-env.ts
+++ b/lib/workers/global/config/parse/host-rules-from-env.ts
@@ -37,7 +37,7 @@ function restoreHttpsAuthField(x: HttpsAuthField | AuthField): string {
 function setHostRuleValue(
   rule: HostRule,
   key: string,
-  value: string | undefined
+  value: string | undefined,
 ): void {
   if (value !== undefined) {
     switch (key) {
@@ -88,7 +88,7 @@ export function hostRulesFromEnv(env: NodeJS.ProcessEnv): HostRule[] {
           matchHost = splitEnv.join('.');
         }
         const existingRule = hostRules.find(
-          (hr) => hr.hostType === hostType && hr.matchHost === matchHost
+          (hr) => hr.hostType === hostType && hr.matchHost === matchHost,
         );
         logger.debug(`Converting ${envName} into a global host rule`);
         if (existingRule) {
diff --git a/lib/workers/global/config/parse/index.spec.ts b/lib/workers/global/config/parse/index.spec.ts
index 6301ecc3f24d2660e7f701f93131e0e0f3274079..64b0543b15431030365c6276eb5e98f75ee6c4e4 100644
--- a/lib/workers/global/config/parse/index.spec.ts
+++ b/lib/workers/global/config/parse/index.spec.ts
@@ -23,7 +23,7 @@ describe('workers/global/config/parse/index', () => {
       defaultEnv = {
         RENOVATE_CONFIG_FILE: upath.resolve(
           __dirname,
-          './__fixtures__/default.js'
+          './__fixtures__/default.js',
         ),
       };
     });
@@ -42,7 +42,7 @@ describe('workers/global/config/parse/index', () => {
       ]);
       const parsedConfig = await configParser.parseConfigs(
         defaultEnv,
-        defaultArgv
+        defaultArgv,
       );
       expect(parsedConfig).toContainEntries([
         ['token', 'abc'],
@@ -87,7 +87,7 @@ describe('workers/global/config/parse/index', () => {
       const privateKeyPath = upath.join(__dirname, '__fixtures__/private.pem');
       const privateKeyPathOld = upath.join(
         __dirname,
-        '__fixtures__/private.pem'
+        '__fixtures__/private.pem',
       );
       const env: NodeJS.ProcessEnv = {
         ...defaultEnv,
@@ -108,7 +108,7 @@ describe('workers/global/config/parse/index', () => {
       ]);
       const parsedConfig = await configParser.parseConfigs(
         defaultEnv,
-        defaultArgv
+        defaultArgv,
       );
       expect(parsedConfig).toContainEntries([
         ['platform', 'bitbucket'],
diff --git a/lib/workers/global/config/parse/index.ts b/lib/workers/global/config/parse/index.ts
index beb03cb895da40366ba4cbfd928f6968848120cc..859ca93a9b8f29e625c59bde9a0456a1bc61b56b 100644
--- a/lib/workers/global/config/parse/index.ts
+++ b/lib/workers/global/config/parse/index.ts
@@ -15,7 +15,7 @@ import { hostRulesFromEnv } from './host-rules-from-env';
 
 export async function parseConfigs(
   env: NodeJS.ProcessEnv,
-  argv: string[]
+  argv: string[],
 ): Promise<AllConfig> {
   logger.debug('Parsing configs');
 
@@ -53,7 +53,7 @@ export async function parseConfigs(
   if (!config.privateKeyOld && config.privateKeyPathOld) {
     config.privateKeyOld = await readSystemFile(
       config.privateKeyPathOld,
-      'utf8'
+      'utf8',
     );
     delete config.privateKeyPathOld;
   }
@@ -71,7 +71,7 @@ export async function parseConfigs(
   if (config.logFile) {
     logger.debug(
       // TODO: types (#22198)
-      `Enabling ${config.logFileLevel!} logging to ${config.logFile}`
+      `Enabling ${config.logFileLevel!} logging to ${config.logFile}`,
     );
     await ensureDir(getParentDir(config.logFile));
     addStream({
diff --git a/lib/workers/global/index.spec.ts b/lib/workers/global/index.spec.ts
index f950d5b4b0536a0c1e22ebc58b7130069bab6fe2..cd740eba5686156be842dbbb20a0873f8d92378d 100644
--- a/lib/workers/global/index.spec.ts
+++ b/lib/workers/global/index.spec.ts
@@ -77,7 +77,7 @@ describe('workers/global/index', () => {
       throw new Error('some-error');
     });
     await expect(
-      globalWorker.resolveGlobalExtends(['some-preset'])
+      globalWorker.resolveGlobalExtends(['some-preset']),
     ).rejects.toThrow(CONFIG_PRESETS_INVALID);
     expect(presets.resolveConfigPresets).toHaveBeenCalled();
     expect(parseConfigs).not.toHaveBeenCalled();
@@ -211,7 +211,7 @@ describe('workers/global/index', () => {
       expect(fs.writeFile).toHaveBeenCalledTimes(1);
       expect(fs.writeFile).toHaveBeenCalledWith(
         '/tmp/renovate-output.json',
-        '["myOrg/myRepo"]'
+        '["myOrg/myRepo"]',
       );
       expect(parseConfigs).toHaveBeenCalledTimes(1);
       expect(repositoryWorker.renovateRepository).toHaveBeenCalledTimes(0);
diff --git a/lib/workers/global/index.ts b/lib/workers/global/index.ts
index bf40c5e674b3cbc46930cf75cab779ecaecc19a2..ddcd3ac97c32cdd23f302b8f695d667bb4271eec 100644
--- a/lib/workers/global/index.ts
+++ b/lib/workers/global/index.ts
@@ -29,11 +29,11 @@ import { isLimitReached } from './limits';
 
 export async function getRepositoryConfig(
   globalConfig: RenovateConfig,
-  repository: RenovateRepository
+  repository: RenovateRepository,
 ): Promise<RenovateConfig> {
   const repoConfig = configParser.mergeChildConfig(
     globalConfig,
-    is.string(repository) ? { repository } : repository
+    is.string(repository) ? { repository } : repository,
   );
   // TODO: types (#22198)
   const platform = GlobalConfig.get('platform')!;
@@ -42,7 +42,7 @@ export async function getRepositoryConfig(
       ? process.cwd()
       : upath.join(
           repoConfig.baseDir,
-          `./repos/${platform}/${repoConfig.repository}`
+          `./repos/${platform}/${repoConfig.repository}`,
         );
   await fs.ensureDir(repoConfig.localDir);
   delete repoConfig.baseDir;
@@ -68,12 +68,12 @@ function checkEnv(): void {
   if (process.release?.name !== 'node' || !process.versions?.node) {
     logger[process.env.RENOVATE_X_IGNORE_NODE_WARN ? 'info' : 'warn'](
       { release: process.release, versions: process.versions },
-      'Unknown node environment detected.'
+      'Unknown node environment detected.',
     );
   } else if (!semver.satisfies(process.versions?.node, range)) {
     logger.error(
       { versions: process.versions, range },
-      'Unsupported node environment detected. Please update your node version.'
+      'Unsupported node environment detected. Please update your node version.',
     );
   } else if (
     rangeNext &&
@@ -81,7 +81,7 @@ function checkEnv(): void {
   ) {
     logger[process.env.RENOVATE_X_IGNORE_NODE_WARN ? 'info' : 'warn'](
       { versions: process.versions },
-      `Please upgrade the version of Node.js used to run Renovate to satisfy "${rangeNext}". Support for your current version will be removed in Renovate's next major release.`
+      `Please upgrade the version of Node.js used to run Renovate to satisfy "${rangeNext}". Support for your current version will be removed in Renovate's next major release.`,
     );
   }
 }
@@ -97,7 +97,7 @@ export async function validatePresets(config: AllConfig): Promise<void> {
 }
 
 export async function resolveGlobalExtends(
-  globalExtends: string[]
+  globalExtends: string[],
 ): Promise<AllConfig> {
   try {
     // Make a "fake" config to pass to resolveConfigPresets and resolve globalPresets
@@ -127,7 +127,7 @@ export async function start(): Promise<number> {
         // resolve global presets immediately
         config = mergeChildConfig(
           config,
-          await resolveGlobalExtends(config.globalExtends)
+          await resolveGlobalExtends(config.globalExtends),
         );
       }
       // initialize all submodules
@@ -149,14 +149,14 @@ export async function start(): Promise<number> {
 
     // autodiscover repositories (needs to come after platform initialization)
     config = await instrument('discover', () =>
-      autodiscoverRepositories(config)
+      autodiscoverRepositories(config),
     );
 
     if (is.nonEmptyString(config.writeDiscoveredRepos)) {
       const content = JSON.stringify(config.repositories);
       await fs.writeFile(config.writeDiscoveredRepos, content);
       logger.info(
-        `Written discovered repositories to ${config.writeDiscoveredRepos}`
+        `Written discovered repositories to ${config.writeDiscoveredRepos}`,
       );
       return 0;
     }
@@ -191,7 +191,7 @@ export async function start(): Promise<number> {
                 ? repository
                 : repository.repository,
           },
-        }
+        },
       );
     }
   } catch (err) /* istanbul ignore next */ {
@@ -213,7 +213,7 @@ export async function start(): Promise<number> {
   if (loggerErrors.length) {
     logger.info(
       { loggerErrors },
-      'Renovate is exiting with a non-zero code due to the following logged errors'
+      'Renovate is exiting with a non-zero code due to the following logged errors',
     );
     return 1;
   }
diff --git a/lib/workers/global/initialize.ts b/lib/workers/global/initialize.ts
index df57af8756a50f12867cae6562997d50f61819f1..60a73476c521c692d1eae174fac625184e10e4c5 100644
--- a/lib/workers/global/initialize.ts
+++ b/lib/workers/global/initialize.ts
@@ -32,7 +32,7 @@ async function setDirectories(input: AllConfig): Promise<AllConfig> {
   if (config.binarySource === 'docker' || config.binarySource === 'install') {
     if (config.containerbaseDir) {
       logger.debug(
-        'Using configured containerbaseDir: ' + config.containerbaseDir
+        'Using configured containerbaseDir: ' + config.containerbaseDir,
       );
     } else {
       config.containerbaseDir = upath.join(config.cacheDir, 'containerbase');
@@ -65,7 +65,7 @@ function setGlobalHostRules(config: RenovateConfig): void {
 }
 
 export async function globalInitialize(
-  config_: AllConfig
+  config_: AllConfig,
 ): Promise<RenovateConfig> {
   let config = config_;
   await checkVersions();
diff --git a/lib/workers/repository/cache.ts b/lib/workers/repository/cache.ts
index 1f21b0c5d97d6ae9a258d0e10593b676709faf6f..7f23e4e088a24f127ec26ada60ae8143e39be40b 100644
--- a/lib/workers/repository/cache.ts
+++ b/lib/workers/repository/cache.ts
@@ -17,7 +17,7 @@ import type { BranchConfig, BranchUpgradeConfig } from '../types';
 import { getPrCache } from './update/pr/pr-cache';
 
 function generateBranchUpgradeCache(
-  upgrade: BranchUpgradeConfig
+  upgrade: BranchUpgradeConfig,
 ): BranchUpgradeCache {
   const {
     datasource,
@@ -61,7 +61,7 @@ function generateBranchUpgradeCache(
 }
 
 async function generateBranchCache(
-  branch: BranchConfig
+  branch: BranchConfig,
 ): Promise<BranchCache | null> {
   const { baseBranch, branchName, prBlockedBy, prTitle, result } = branch;
   try {
@@ -83,14 +83,14 @@ async function generateBranchCache(
           branchName,
           branchSha,
           baseBranch,
-          baseBranchSha
+          baseBranchSha,
         ) ?? undefined;
       isConflicted =
         getCachedConflictResult(
           branchName,
           branchSha,
           baseBranch,
-          baseBranchSha
+          baseBranchSha,
         ) ?? undefined;
     }
 
diff --git a/lib/workers/repository/changelog/index.ts b/lib/workers/repository/changelog/index.ts
index f6c6c6ca4f68811df3c886cd282d2be29368c4c8..ebdc8ec8f5d857c2fc6802d301cf7ebaa39df9a9 100644
--- a/lib/workers/repository/changelog/index.ts
+++ b/lib/workers/repository/changelog/index.ts
@@ -3,7 +3,7 @@ import type { BranchUpgradeConfig } from '../../types';
 import { getChangeLogJSON } from '../update/pr/changelog';
 
 export async function embedChangelog(
-  upgrade: BranchUpgradeConfig
+  upgrade: BranchUpgradeConfig,
 ): Promise<void> {
   // getChangeLogJSON returns null on error, so don't try again
   if (upgrade.logJSON !== undefined) {
@@ -13,7 +13,7 @@ export async function embedChangelog(
 }
 
 export async function embedChangelogs(
-  branches: BranchUpgradeConfig[]
+  branches: BranchUpgradeConfig[],
 ): Promise<void> {
   await p.map(branches, embedChangelog, { concurrency: 10 });
 }
diff --git a/lib/workers/repository/common.ts b/lib/workers/repository/common.ts
index f477430bd589e0df4f19a5c2ad719141f43ada1e..758d7fb5597b5056a05c4091708b770e31e9fffe 100644
--- a/lib/workers/repository/common.ts
+++ b/lib/workers/repository/common.ts
@@ -2,17 +2,17 @@ import { nameFromLevel } from 'bunyan';
 import { getProblems } from '../../logger';
 
 export function extractRepoProblems(
-  repository: string | undefined
+  repository: string | undefined,
 ): Set<string> {
   return new Set(
     getProblems()
       .filter(
         (problem) =>
-          problem.repository === repository && !problem.artifactErrors
+          problem.repository === repository && !problem.artifactErrors,
       )
       .map(
         (problem) =>
-          `${nameFromLevel[problem.level].toUpperCase()}: ${problem.msg}`
-      )
+          `${nameFromLevel[problem.level].toUpperCase()}: ${problem.msg}`,
+      ),
   );
 }
diff --git a/lib/workers/repository/config-migration/branch/commit-message.spec.ts b/lib/workers/repository/config-migration/branch/commit-message.spec.ts
index a2ab74b131f6e421b8c2c0acf18ac82af645db23..2ace202a078a0da1bcf93510042a0764d6eca93e 100644
--- a/lib/workers/repository/config-migration/branch/commit-message.spec.ts
+++ b/lib/workers/repository/config-migration/branch/commit-message.spec.ts
@@ -9,10 +9,10 @@ describe('workers/repository/config-migration/branch/commit-message', () => {
     config.semanticCommits = 'enabled';
     const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
       config,
-      fileName
+      fileName,
     );
     expect(commitMessageFactory.getCommitMessage()).toBe(
-      'chore(config): migrate config renovate.json'
+      'chore(config): migrate config renovate.json',
     );
   });
 
@@ -20,10 +20,10 @@ describe('workers/repository/config-migration/branch/commit-message', () => {
     config.semanticCommits = 'enabled';
     const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
       config,
-      fileName
+      fileName,
     );
     expect(commitMessageFactory.getPrTitle()).toBe(
-      'chore(config): migrate renovate config'
+      'chore(config): migrate renovate config',
     );
   });
 
@@ -31,10 +31,10 @@ describe('workers/repository/config-migration/branch/commit-message', () => {
     config.semanticCommits = 'disabled';
     const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
       config,
-      fileName
+      fileName,
     );
     expect(commitMessageFactory.getCommitMessage()).toBe(
-      'Migrate config renovate.json'
+      'Migrate config renovate.json',
     );
   });
 
@@ -42,7 +42,7 @@ describe('workers/repository/config-migration/branch/commit-message', () => {
     config.semanticCommits = 'disabled';
     const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
       config,
-      fileName
+      fileName,
     );
     expect(commitMessageFactory.getPrTitle()).toBe('Migrate renovate config');
   });
@@ -52,7 +52,7 @@ describe('workers/repository/config-migration/branch/commit-message', () => {
     config.commitMessage = '';
     const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
       config,
-      fileName
+      fileName,
     );
     expect(commitMessageFactory.getPrTitle()).toBe('Migrate renovate config');
   });
diff --git a/lib/workers/repository/config-migration/branch/commit-message.ts b/lib/workers/repository/config-migration/branch/commit-message.ts
index 57b467d0889de3f8d1864c4b72e430665f085174..161daa2c3a3d3e516f5807df223c8aa288a8017f 100644
--- a/lib/workers/repository/config-migration/branch/commit-message.ts
+++ b/lib/workers/repository/config-migration/branch/commit-message.ts
@@ -6,7 +6,7 @@ import { CommitMessageFactory } from '../../model/commit-message-factory';
 export class ConfigMigrationCommitMessageFactory {
   constructor(
     private readonly config: RenovateConfig,
-    private readonly configFile: string
+    private readonly configFile: string,
   ) {}
 
   private create(commitMessageTopic: string): CommitMessage {
diff --git a/lib/workers/repository/config-migration/branch/create.spec.ts b/lib/workers/repository/config-migration/branch/create.spec.ts
index 8a1d1aab545d18179ba284a910beff5c964eed65..175a91d57f46cef633ccd007b0dd4c0ee50c0ae0 100644
--- a/lib/workers/repository/config-migration/branch/create.spec.ts
+++ b/lib/workers/repository/config-migration/branch/create.spec.ts
@@ -16,7 +16,7 @@ describe('workers/repository/config-migration/branch/create', () => {
   const filename = 'renovate.json';
   const prettierSpy = jest.spyOn(
     MigratedDataFactory,
-    'applyPrettierFormatting'
+    'applyPrettierFormatting',
   );
 
   let config: RenovateConfig;
diff --git a/lib/workers/repository/config-migration/branch/create.ts b/lib/workers/repository/config-migration/branch/create.ts
index e188e212f5a82e0a4308dad50e2e353b609e6138..c762b797435c4adccf6451170710e90fdbfb8ca9 100644
--- a/lib/workers/repository/config-migration/branch/create.ts
+++ b/lib/workers/repository/config-migration/branch/create.ts
@@ -9,7 +9,7 @@ import type { MigratedData } from './migrated-data';
 
 export async function createConfigMigrationBranch(
   config: Partial<RenovateConfig>,
-  migratedConfigData: MigratedData
+  migratedConfigData: MigratedData,
 ): Promise<string | null> {
   logger.debug('createConfigMigrationBranch()');
   const configFileName = migratedConfigData.filename;
@@ -17,7 +17,7 @@ export async function createConfigMigrationBranch(
 
   const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
     config,
-    configFileName
+    configFileName,
   );
 
   const commitMessage = commitMessageFactory.getCommitMessage();
@@ -30,7 +30,7 @@ export async function createConfigMigrationBranch(
 
   await scm.checkoutBranch(config.defaultBranch!);
   const contents = await MigratedDataFactory.applyPrettierFormatting(
-    migratedConfigData
+    migratedConfigData,
   );
   return scm.commitAndPush({
     baseBranch: config.baseBranch,
diff --git a/lib/workers/repository/config-migration/branch/index.spec.ts b/lib/workers/repository/config-migration/branch/index.spec.ts
index 3d4754978b4ae8d4b3025d1964400777679ab93b..3e524e79c74591681a0047838df87343f88e4023 100644
--- a/lib/workers/repository/config-migration/branch/index.spec.ts
+++ b/lib/workers/repository/config-migration/branch/index.spec.ts
@@ -39,10 +39,10 @@ describe('workers/repository/config-migration/branch/index', () => {
 
     it('Exits when Migration is not needed', async () => {
       await expect(
-        checkConfigMigrationBranch(config, null)
+        checkConfigMigrationBranch(config, null),
       ).resolves.toBeNull();
       expect(logger.debug).toHaveBeenCalledWith(
-        'checkConfigMigrationBranch() Config does not need migration'
+        'checkConfigMigrationBranch() Config does not need migration',
       );
     });
 
@@ -58,7 +58,7 @@ describe('workers/repository/config-migration/branch/index', () => {
       expect(scm.checkoutBranch).toHaveBeenCalledTimes(1);
       expect(git.commitFiles).toHaveBeenCalledTimes(0);
       expect(logger.debug).toHaveBeenCalledWith(
-        'Config Migration PR already exists'
+        'Config Migration PR already exists',
       );
     });
 
@@ -77,7 +77,7 @@ describe('workers/repository/config-migration/branch/index', () => {
 
     it('Creates migration PR', async () => {
       mockedFunction(createConfigMigrationBranch).mockResolvedValueOnce(
-        'committed'
+        'committed',
       );
       const res = await checkConfigMigrationBranch(config, migratedData);
       // TODO: types (#22198)
@@ -92,7 +92,7 @@ describe('workers/repository/config-migration/branch/index', () => {
         dryRun: 'full',
       });
       mockedFunction(createConfigMigrationBranch).mockResolvedValueOnce(
-        'committed'
+        'committed',
       );
       const res = await checkConfigMigrationBranch(config, migratedData);
       // TODO: types (#22198)
@@ -116,7 +116,7 @@ describe('workers/repository/config-migration/branch/index', () => {
         expect(scm.deleteBranch).toHaveBeenCalledTimes(1);
         expect(logger.debug).toHaveBeenCalledWith(
           { prTitle: title },
-          'Closed PR already exists. Skipping branch.'
+          'Closed PR already exists. Skipping branch.',
         );
         expect(platform.ensureComment).toHaveBeenCalledTimes(1);
         expect(platform.ensureComment).toHaveBeenCalledWith({
@@ -135,14 +135,14 @@ describe('workers/repository/config-migration/branch/index', () => {
         const res = await checkConfigMigrationBranch(config, migratedData);
         expect(res).toBeNull();
         expect(logger.info).toHaveBeenCalledWith(
-          `DRY-RUN: Would ensure closed PR comment in PR #${pr.number}`
+          `DRY-RUN: Would ensure closed PR comment in PR #${pr.number}`,
         );
         expect(logger.info).toHaveBeenCalledWith(
-          'DRY-RUN: Would delete branch ' + pr.sourceBranch
+          'DRY-RUN: Would delete branch ' + pr.sourceBranch,
         );
         expect(logger.debug).toHaveBeenCalledWith(
           { prTitle: title },
-          'Closed PR already exists. Skipping branch.'
+          'Closed PR already exists. Skipping branch.',
         );
         expect(platform.ensureComment).toHaveBeenCalledTimes(0);
       });
diff --git a/lib/workers/repository/config-migration/branch/index.ts b/lib/workers/repository/config-migration/branch/index.ts
index c5827006fc5696bd4ce494f14aaa488a503b73f4..2862c8ef507a075957cc2874105a97cb3665dc41 100644
--- a/lib/workers/repository/config-migration/branch/index.ts
+++ b/lib/workers/repository/config-migration/branch/index.ts
@@ -12,7 +12,7 @@ import { rebaseMigrationBranch } from './rebase';
 
 export async function checkConfigMigrationBranch(
   config: RenovateConfig,
-  migratedConfigData: MigratedData | null
+  migratedConfigData: MigratedData | null,
 ): Promise<string | null> {
   logger.debug('checkConfigMigrationBranch()');
   if (!migratedConfigData) {
@@ -23,13 +23,13 @@ export async function checkConfigMigrationBranch(
 
   const branchPr = await migrationPrExists(
     configMigrationBranch,
-    config.baseBranch
+    config.baseBranch,
   ); // handles open/autoClosed PRs
 
   if (!branchPr) {
     const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
       config,
-      migratedConfigData.filename
+      migratedConfigData.filename,
     );
     const prTitle = commitMessageFactory.getPrTitle();
     const closedPrConfig: FindPRConfig = {
@@ -46,7 +46,7 @@ export async function checkConfigMigrationBranch(
     if (closedPr) {
       logger.debug(
         { prTitle: closedPr.title },
-        'Closed PR already exists. Skipping branch.'
+        'Closed PR already exists. Skipping branch.',
       );
       await handlePr(config, closedPr);
       return null;
@@ -59,7 +59,7 @@ export async function checkConfigMigrationBranch(
     if (platform.refreshPr) {
       const configMigrationPr = await platform.getBranchPr(
         configMigrationBranch,
-        config.baseBranch
+        config.baseBranch,
       );
       if (configMigrationPr) {
         await platform.refreshPr(configMigrationPr.number);
@@ -78,7 +78,7 @@ export async function checkConfigMigrationBranch(
 
 export async function migrationPrExists(
   branchName: string,
-  targetBranch?: string
+  targetBranch?: string,
 ): Promise<boolean> {
   return !!(await platform.getBranchPr(branchName, targetBranch));
 }
@@ -90,7 +90,7 @@ async function handlePr(config: RenovateConfig, pr: Pr): Promise<void> {
   ) {
     if (GlobalConfig.get('dryRun')) {
       logger.info(
-        `DRY-RUN: Would ensure closed PR comment in PR #${pr.number}`
+        `DRY-RUN: Would ensure closed PR comment in PR #${pr.number}`,
       );
     } else {
       const content =
diff --git a/lib/workers/repository/config-migration/branch/migrated-data.spec.ts b/lib/workers/repository/config-migration/branch/migrated-data.spec.ts
index 8e892b8409206c0d8504b6b12d5da6c4eee7ed16..acafa26226c39a3d5c2b1008bb4fa217824ed93c 100644
--- a/lib/workers/repository/config-migration/branch/migrated-data.spec.ts
+++ b/lib/workers/repository/config-migration/branch/migrated-data.spec.ts
@@ -20,7 +20,7 @@ const migratedData = Fixtures.getJson('./migrated-data.json');
 const migratedDataJson5 = Fixtures.getJson('./migrated-data.json5');
 const migratedConfigObj = Fixtures.getJson('./migrated.json');
 const formattedMigratedData = Fixtures.getJson(
-  './migrated-data-formatted.json'
+  './migrated-data-formatted.json',
 );
 
 describe('workers/repository/config-migration/branch/migrated-data', () => {
@@ -51,14 +51,14 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
 
     it('Calls getAsync a first time to initialize the factory', async () => {
       await expect(MigratedDataFactory.getAsync()).resolves.toEqual(
-        migratedData
+        migratedData,
       );
       expect(detectRepoFileConfig).toHaveBeenCalledTimes(1);
     });
 
     it('Calls getAsync a second time to get the saved data from before', async () => {
       await expect(MigratedDataFactory.getAsync()).resolves.toEqual(
-        migratedData
+        migratedData,
       );
       expect(detectRepoFileConfig).toHaveBeenCalledTimes(0);
     });
@@ -78,7 +78,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
     it('Resets the factory and gets a new value', async () => {
       MigratedDataFactory.reset();
       await expect(MigratedDataFactory.getAsync()).resolves.toEqual(
-        migratedData
+        migratedData,
       );
     });
 
@@ -104,7 +104,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       });
       MigratedDataFactory.reset();
       await expect(MigratedDataFactory.getAsync()).resolves.toEqual(
-        migratedDataJson5
+        migratedDataJson5,
       );
     });
 
@@ -115,7 +115,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       await expect(MigratedDataFactory.getAsync()).resolves.toBeNull();
       expect(logger.debug).toHaveBeenCalledWith(
         { err },
-        'MigratedDataFactory.getAsync() Error initializing renovate MigratedData'
+        'MigratedDataFactory.getAsync() Error initializing renovate MigratedData',
       );
     });
   });
@@ -147,7 +147,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       mockedFunction(readLocalFile).mockResolvedValueOnce(null);
       await MigratedDataFactory.getAsync();
       await expect(
-        MigratedDataFactory.applyPrettierFormatting(migratedData)
+        MigratedDataFactory.applyPrettierFormatting(migratedData),
       ).resolves.toEqual(unformatted);
     });
 
@@ -156,7 +156,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       mockedFunction(readLocalFile).mockRejectedValueOnce(null);
       await MigratedDataFactory.getAsync();
       await expect(
-        MigratedDataFactory.applyPrettierFormatting(migratedData)
+        MigratedDataFactory.applyPrettierFormatting(migratedData),
       ).resolves.toEqual(unformatted);
     });
 
@@ -165,7 +165,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       mockedFunction(readLocalFile).mockResolvedValueOnce('invalid json');
       await MigratedDataFactory.getAsync();
       await expect(
-        MigratedDataFactory.applyPrettierFormatting(migratedData)
+        MigratedDataFactory.applyPrettierFormatting(migratedData),
       ).resolves.toEqual(unformatted);
     });
 
@@ -174,7 +174,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       mockedFunction(scm.getFileList).mockResolvedValue(['.prettierrc']);
       await MigratedDataFactory.getAsync();
       await expect(
-        MigratedDataFactory.applyPrettierFormatting(migratedData)
+        MigratedDataFactory.applyPrettierFormatting(migratedData),
       ).resolves.toEqual(formatted);
     });
 
@@ -186,7 +186,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
       mockedFunction(readLocalFile).mockResolvedValueOnce('{"prettier":{}}');
       await MigratedDataFactory.getAsync();
       await expect(
-        MigratedDataFactory.applyPrettierFormatting(migratedData)
+        MigratedDataFactory.applyPrettierFormatting(migratedData),
       ).resolves.toEqual(formatted);
     });
 
@@ -196,7 +196,7 @@ describe('workers/repository/config-migration/branch/migrated-data', () => {
         applyPrettierFormatting(migratedData.content, 'json', {
           amount: 0,
           indent: '  ',
-        })
+        }),
       ).resolves.toEqual(formattedMigratedData.content);
     });
   });
diff --git a/lib/workers/repository/config-migration/branch/migrated-data.ts b/lib/workers/repository/config-migration/branch/migrated-data.ts
index fc74fc1722a67ba2f31406cbe20b21a405d2c948..84d91607357de985ff7ada8dc7fd38d30ef0e395 100644
--- a/lib/workers/repository/config-migration/branch/migrated-data.ts
+++ b/lib/workers/repository/config-migration/branch/migrated-data.ts
@@ -39,13 +39,13 @@ export type PrettierParser = BuiltInParserName;
 export async function applyPrettierFormatting(
   content: string,
   parser: PrettierParser,
-  indent?: Indent
+  indent?: Indent,
 ): Promise<string> {
   try {
     logger.trace('applyPrettierFormatting - START');
     const fileList = await scm.getFileList();
     let prettierExists = fileList.some((file) =>
-      prettierConfigFilenames.has(file)
+      prettierConfigFilenames.has(file),
     );
 
     if (!prettierExists) {
@@ -55,7 +55,7 @@ export async function applyPrettierFormatting(
           packageJsonContent && JSON.parse(packageJsonContent).prettier;
       } catch {
         logger.warn(
-          'applyPrettierFormatting - Error processing package.json file'
+          'applyPrettierFormatting - Error processing package.json file',
         );
       }
     }
@@ -145,7 +145,7 @@ export class MigratedDataFactory {
     } catch (err) {
       logger.debug(
         { err },
-        'MigratedDataFactory.getAsync() Error initializing renovate MigratedData'
+        'MigratedDataFactory.getAsync() Error initializing renovate MigratedData',
       );
     }
     return res;
diff --git a/lib/workers/repository/config-migration/branch/rebase.spec.ts b/lib/workers/repository/config-migration/branch/rebase.spec.ts
index b09a7654cb655ffea49265fdaa8c6adbc0e4b927..2ad91a731f0344e687b2d5b2c9dba53a0b6a6eaf 100644
--- a/lib/workers/repository/config-migration/branch/rebase.spec.ts
+++ b/lib/workers/repository/config-migration/branch/rebase.spec.ts
@@ -11,13 +11,13 @@ import { jsonStripWhitespaces, rebaseMigrationBranch } from './rebase';
 jest.mock('../../../../util/git');
 
 const formattedMigratedData = Fixtures.getJson(
-  './migrated-data-formatted.json'
+  './migrated-data-formatted.json',
 );
 
 describe('workers/repository/config-migration/branch/rebase', () => {
   const prettierSpy = jest.spyOn(
     MigratedDataFactory,
-    'applyPrettierFormatting'
+    'applyPrettierFormatting',
   );
 
   beforeEach(() => {
@@ -73,7 +73,7 @@ describe('workers/repository/config-migration/branch/rebase', () => {
         expect(scm.checkoutBranch).toHaveBeenCalledTimes(0);
         expect(scm.commitAndPush).toHaveBeenCalledTimes(0);
         expect(git.getFile).toHaveBeenCalledTimes(1);
-      }
+      },
     );
 
     it.each([
@@ -120,7 +120,7 @@ describe('workers/repository/config-migration/branch/rebase', () => {
           platformCommit: false,
           baseBranch: 'dev',
         });
-      }
+      },
     );
 
     it.each([
@@ -140,7 +140,7 @@ describe('workers/repository/config-migration/branch/rebase', () => {
 
         expect(scm.checkoutBranch).toHaveBeenCalledTimes(0);
         expect(scm.commitAndPush).toHaveBeenCalledTimes(0);
-      }
+      },
     );
   });
 
diff --git a/lib/workers/repository/config-migration/branch/rebase.ts b/lib/workers/repository/config-migration/branch/rebase.ts
index eb082c893a6a4f9528c881650098d1251ed1b8e9..da1d9095649b2afa37f27552d80a529d6dd0b945 100644
--- a/lib/workers/repository/config-migration/branch/rebase.ts
+++ b/lib/workers/repository/config-migration/branch/rebase.ts
@@ -12,7 +12,7 @@ import type { MigratedData } from './migrated-data';
 
 export async function rebaseMigrationBranch(
   config: RenovateConfig,
-  migratedConfigData: MigratedData
+  migratedConfigData: MigratedData,
 ): Promise<string | null> {
   logger.debug('Checking if migration branch needs rebasing');
   const branchName = getMigrationBranchName(config);
@@ -38,13 +38,13 @@ export async function rebaseMigrationBranch(
 
   const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
     config,
-    configFileName
+    configFileName,
   );
   const commitMessage = commitMessageFactory.getCommitMessage();
 
   await scm.checkoutBranch(config.defaultBranch!);
   contents = await MigratedDataFactory.applyPrettierFormatting(
-    migratedConfigData
+    migratedConfigData,
   );
   return scm.commitAndPush({
     baseBranch: config.baseBranch,
diff --git a/lib/workers/repository/config-migration/index.ts b/lib/workers/repository/config-migration/index.ts
index 33a401d063575070fa2edf29664032076b2e0341..0746ccdf8d77bee40b291f15d4c88c7973eac319 100644
--- a/lib/workers/repository/config-migration/index.ts
+++ b/lib/workers/repository/config-migration/index.ts
@@ -5,13 +5,13 @@ import { ensureConfigMigrationPr } from './pr';
 
 export async function configMigration(
   config: RenovateConfig,
-  branchList: string[]
+  branchList: string[],
 ): Promise<void> {
   if (config.configMigration) {
     const migratedConfigData = await MigratedDataFactory.getAsync();
     const migrationBranch = await checkConfigMigrationBranch(
       config,
-      migratedConfigData
+      migratedConfigData,
     ); // null if migration not needed
     if (migrationBranch) {
       branchList.push(migrationBranch);
diff --git a/lib/workers/repository/config-migration/pr/index.spec.ts b/lib/workers/repository/config-migration/pr/index.spec.ts
index d577ac254f34686cffa2336296c5c7b4efc95cc3..7792174a60a70fd92c64a11050ad699261496ace 100644
--- a/lib/workers/repository/config-migration/pr/index.spec.ts
+++ b/lib/workers/repository/config-migration/pr/index.spec.ts
@@ -20,11 +20,11 @@ import { ensureConfigMigrationPr } from '.';
 describe('workers/repository/config-migration/pr/index', () => {
   const spy = jest.spyOn(platform, 'massageMarkdown');
   const { configFileName, migratedContent } = Fixtures.getJson(
-    './migrated-data.json'
+    './migrated-data.json',
   );
   const prTitle = new ConfigMigrationCommitMessageFactory(
     {},
-    configFileName
+    configFileName,
   ).getPrTitle();
   const migratedData: MigratedData = {
     content: migratedContent,
@@ -65,7 +65,7 @@ describe('workers/repository/config-migration/pr/index', () => {
     it('creates PR with default PR title', async () => {
       await ensureConfigMigrationPr(
         { ...config, onboardingPrTitle: '' },
-        migratedData
+        migratedData,
       );
       expect(platform.getBranchPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr).toHaveBeenCalledTimes(1);
@@ -75,7 +75,7 @@ describe('workers/repository/config-migration/pr/index', () => {
     it('Founds an open PR and as it is up to date and returns', async () => {
       hash = hashBody(createPrBody);
       platform.getBranchPr.mockResolvedValueOnce(
-        mock<Pr>({ bodyStruct: { hash }, title: prTitle })
+        mock<Pr>({ bodyStruct: { hash }, title: prTitle }),
       );
       await ensureConfigMigrationPr(config, migratedData);
       expect(platform.updatePr).toHaveBeenCalledTimes(0);
@@ -84,7 +84,7 @@ describe('workers/repository/config-migration/pr/index', () => {
 
     it('Founds an open PR and updates it', async () => {
       platform.getBranchPr.mockResolvedValueOnce(
-        mock<Pr>({ bodyStruct: { hash: '' } })
+        mock<Pr>({ bodyStruct: { hash: '' } }),
       );
       await ensureConfigMigrationPr(config, migratedData);
       expect(platform.updatePr).toHaveBeenCalledTimes(1);
@@ -94,7 +94,7 @@ describe('workers/repository/config-migration/pr/index', () => {
     it('updates an open PR with unexpected PR title', async () => {
       hash = hashBody(createPrBody);
       platform.getBranchPr.mockResolvedValueOnce(
-        mock<Pr>({ bodyStruct: { hash }, title: 'unexpected PR title' })
+        mock<Pr>({ bodyStruct: { hash }, title: 'unexpected PR title' }),
       );
       await ensureConfigMigrationPr(config, migratedData);
       expect(platform.updatePr).toHaveBeenCalledTimes(1);
@@ -107,17 +107,17 @@ describe('workers/repository/config-migration/pr/index', () => {
         dryRun: 'full',
       });
       platform.getBranchPr.mockResolvedValueOnce(
-        mock<Pr>({ bodyStruct: { hash: '' } })
+        mock<Pr>({ bodyStruct: { hash: '' } }),
       );
       await ensureConfigMigrationPr(config, migratedData);
       expect(platform.updatePr).toHaveBeenCalledTimes(0);
       expect(platform.createPr).toHaveBeenCalledTimes(0);
       expect(logger.debug).toHaveBeenCalledWith('Found open migration PR');
       expect(logger.debug).not.toHaveBeenLastCalledWith(
-        `does not need updating`
+        `does not need updating`,
       );
       expect(logger.info).toHaveBeenLastCalledWith(
-        'DRY-RUN: Would update migration PR'
+        'DRY-RUN: Would update migration PR',
       );
     });
 
@@ -129,7 +129,7 @@ describe('workers/repository/config-migration/pr/index', () => {
       expect(platform.getBranchPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr).toHaveBeenCalledTimes(0);
       expect(logger.info).toHaveBeenLastCalledWith(
-        'DRY-RUN: Would create migration PR'
+        'DRY-RUN: Would create migration PR',
       );
     });
 
@@ -140,7 +140,7 @@ describe('workers/repository/config-migration/pr/index', () => {
           labels: ['label'],
           addLabels: ['label', 'additional-label'],
         },
-        migratedData
+        migratedData,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].labels).toEqual([
@@ -156,7 +156,7 @@ describe('workers/repository/config-migration/pr/index', () => {
           prHeader: '',
           prFooter: '',
         },
-        migratedData
+        migratedData,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].prBody).toMatchSnapshot();
@@ -180,7 +180,7 @@ describe('workers/repository/config-migration/pr/index', () => {
           prFooter:
             'There should be several empty lines at the end of the PR\r\n\n\n',
         },
-        migratedData
+        migratedData,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].prBody).toMatchSnapshot();
@@ -194,11 +194,11 @@ describe('workers/repository/config-migration/pr/index', () => {
           prFooter:
             'There should be several empty lines at the end of the PR\r\n\n\n',
         },
-        migratedData
+        migratedData,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].prTitle).toBe(
-        'Migrate renovate config'
+        'Migrate renovate config',
       );
     });
 
@@ -212,11 +212,11 @@ describe('workers/repository/config-migration/pr/index', () => {
           prFooter:
             'There should be several empty lines at the end of the PR\r\n\n\n',
         },
-        migratedData
+        migratedData,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].prTitle).toBe(
-        'chore(config): migrate renovate config'
+        'chore(config): migrate renovate config',
       );
     });
 
@@ -230,17 +230,17 @@ describe('workers/repository/config-migration/pr/index', () => {
           prFooter:
             'And this is a footer for repository:{{repository}} baseBranch:{{baseBranch}}',
         },
-        migratedData
+        migratedData,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].prBody).toMatch(
-        /platform:github/
+        /platform:github/,
       );
       expect(platform.createPr.mock.calls[0][0].prBody).toMatch(
-        /repository:test/
+        /repository:test/,
       );
       expect(platform.createPr.mock.calls[0][0].prBody).toMatch(
-        /baseBranch:some-branch/
+        /baseBranch:some-branch/,
       );
       expect(platform.createPr.mock.calls[0][0].prBody).toMatchSnapshot();
     });
@@ -269,7 +269,7 @@ describe('workers/repository/config-migration/pr/index', () => {
       await expect(ensureConfigMigrationPr(config, migratedData)).toResolve();
       expect(logger.warn).toHaveBeenCalledWith(
         { err },
-        'Migration PR already exists but cannot find it. It was probably created by a different user.'
+        'Migration PR already exists but cannot find it. It was probably created by a different user.',
       );
       expect(scm.deleteBranch).toHaveBeenCalledTimes(1);
     });
diff --git a/lib/workers/repository/config-migration/pr/index.ts b/lib/workers/repository/config-migration/pr/index.ts
index 9eb0355783f40ac486bab75ffb938fd0ae643bab..aaf28923789c15437447c862e649efe349a9c9f7 100644
--- a/lib/workers/repository/config-migration/pr/index.ts
+++ b/lib/workers/repository/config-migration/pr/index.ts
@@ -18,17 +18,17 @@ import { getMigrationBranchName } from '../common';
 
 export async function ensureConfigMigrationPr(
   config: RenovateConfig,
-  migratedConfigData: MigratedData
+  migratedConfigData: MigratedData,
 ): Promise<void> {
   logger.debug('ensureConfigMigrationPr()');
   const docsLink = joinUrlParts(
     coerceString(config.productLinks?.documentation),
-    'configuration-options/#configmigration'
+    'configuration-options/#configmigration',
   );
   const branchName = getMigrationBranchName(config);
   const commitMessageFactory = new ConfigMigrationCommitMessageFactory(
     config,
-    migratedConfigData.filename
+    migratedConfigData.filename,
   );
 
   const prTitle = commitMessageFactory.getPrTitle();
@@ -52,7 +52,7 @@ ${
 
 :question: Got questions? Does something look wrong to you? Please don't hesitate to [request help here](${
       config.productLinks?.help
-    }).\n\n`
+    }).\n\n`,
   );
 
   if (is.string(config.prHeader)) {
@@ -116,12 +116,12 @@ ${
     if (
       err.response?.statusCode === 422 &&
       err.response?.body?.errors?.[0]?.message?.startsWith(
-        'A pull request already exists'
+        'A pull request already exists',
       )
     ) {
       logger.warn(
         { err },
-        'Migration PR already exists but cannot find it. It was probably created by a different user.'
+        'Migration PR already exists but cannot find it. It was probably created by a different user.',
       );
       await scm.deleteBranch(branchName);
       return;
diff --git a/lib/workers/repository/dependency-dashboard.spec.ts b/lib/workers/repository/dependency-dashboard.spec.ts
index d43bb4fcb16a5ba4399bf30434e548831b50157e..55cc25997c81cb69c1c2f1435ff2f33382e56172 100644
--- a/lib/workers/repository/dependency-dashboard.spec.ts
+++ b/lib/workers/repository/dependency-dashboard.spec.ts
@@ -64,7 +64,7 @@ function genRandString(length: number): string {
 
 function genRandPackageFile(
   depsNum: number,
-  depNameLen: number
+  depNameLen: number,
 ): Record<string, PackageFile[]> {
   const deps: PackageDependency[] = [];
   for (let i = 0; i < depsNum; i++) {
@@ -80,12 +80,12 @@ async function dryRun(
   branches: BranchConfig[],
   platform: jest.MockedObject<Platform>,
   ensureIssueClosingCalls: number,
-  ensureIssueCalls: number
+  ensureIssueCalls: number,
 ) {
   GlobalConfig.set({ dryRun: 'full' });
   await dependencyDashboard.ensureDependencyDashboard(config, branches);
   expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(
-    ensureIssueClosingCalls
+    ensureIssueClosingCalls,
   );
   expect(platform.ensureIssue).toHaveBeenCalledTimes(ensureIssueCalls);
 }
@@ -121,7 +121,7 @@ describe('workers/repository/dependency-dashboard', () => {
         body:
           Fixtures.get('dependency-dashboard-with-8-PR.txt').replace(
             '- [ ]',
-            '- [x]'
+            '- [x]',
           ) + '\n\n - [x] <!-- rebase-all-open-prs -->',
       });
       await dependencyDashboard.readDashboardBody(conf);
@@ -171,7 +171,7 @@ describe('workers/repository/dependency-dashboard', () => {
         number: 1,
         body: Fixtures.get('dependency-dashboard-with-8-PR.txt').replace(
           '- [ ] <!-- approve-all-pending-prs -->',
-          '- [x] <!-- approve-all-pending-prs -->'
+          '- [x] <!-- approve-all-pending-prs -->',
         ),
       });
       await dependencyDashboard.readDashboardBody(conf);
@@ -197,7 +197,7 @@ describe('workers/repository/dependency-dashboard', () => {
         number: 1,
         body: Fixtures.get('dependency-dashboard-with-8-PR.txt').replace(
           '- [ ] <!-- create-all-rate-limited-prs -->',
-          '- [x] <!-- create-all-rate-limited-prs -->'
+          '- [x] <!-- create-all-rate-limited-prs -->',
         ),
       });
       await dependencyDashboard.readDashboardBody(conf);
@@ -260,7 +260,7 @@ describe('workers/repository/dependency-dashboard', () => {
       await dependencyDashboard.ensureDependencyDashboard(config, branches);
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssueClosing.mock.calls[0][0]).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue).toHaveBeenCalledTimes(0);
 
@@ -287,7 +287,7 @@ describe('workers/repository/dependency-dashboard', () => {
       await dependencyDashboard.ensureDependencyDashboard(config, branches);
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssueClosing.mock.calls[0][0]).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue).toHaveBeenCalledTimes(0);
 
@@ -304,7 +304,7 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toMatchSnapshot();
 
@@ -329,13 +329,13 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toMatch(
-        /platform:github/
+        /platform:github/,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toMatch(
-        /repository:test/
+        /repository:test/,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toMatchSnapshot();
 
@@ -415,10 +415,10 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toBe(
-        Fixtures.get('dependency-dashboard-with-8-PR.txt')
+        Fixtures.get('dependency-dashboard-with-8-PR.txt'),
       );
 
       // same with dry run
@@ -452,10 +452,10 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toBe(
-        Fixtures.get('dependency-dashboard-with-2-PR-edited.txt')
+        Fixtures.get('dependency-dashboard-with-2-PR-edited.txt'),
       );
 
       // same with dry run
@@ -497,10 +497,10 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toBe(
-        Fixtures.get('dependency-dashboard-with-3-PR-in-progress.txt')
+        Fixtures.get('dependency-dashboard-with-3-PR-in-progress.txt'),
       );
 
       // same with dry run
@@ -532,10 +532,10 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toBe(
-        Fixtures.get('dependency-dashboard-with-2-PR-closed-ignored.txt')
+        Fixtures.get('dependency-dashboard-with-2-PR-closed-ignored.txt'),
       );
 
       // same with dry run
@@ -582,10 +582,10 @@ describe('workers/repository/dependency-dashboard', () => {
       expect(platform.ensureIssueClosing).toHaveBeenCalledTimes(0);
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].title).toBe(
-        config.dependencyDashboardTitle
+        config.dependencyDashboardTitle,
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toBe(
-        Fixtures.get('dependency-dashboard-with-3-PR-in-approval.txt')
+        Fixtures.get('dependency-dashboard-with-3-PR-in-approval.txt'),
       );
 
       // same with dry run
@@ -668,7 +668,7 @@ describe('workers/repository/dependency-dashboard', () => {
 
       expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
       expect(platform.ensureIssue.mock.calls[0][0].body).toContain(
-        'platform is github'
+        'platform is github',
       );
       expect(platform.ensureIssue.mock.calls[0][0].body).toMatchSnapshot();
     });
@@ -710,28 +710,28 @@ describe('workers/repository/dependency-dashboard', () => {
       });
       await dependencyDashboard.ensureDependencyDashboard(config, branches);
       const checkApprovePendingSelectAll = regEx(
-        / - \[ ] <!-- approve-all-pending-prs -->/g
+        / - \[ ] <!-- approve-all-pending-prs -->/g,
       );
       const checkApprovePendingBranch1 = regEx(
-        / - \[ ] <!-- approve-branch=branchName1 -->pr1/g
+        / - \[ ] <!-- approve-branch=branchName1 -->pr1/g,
       );
       const checkApprovePendingBranch2 = regEx(
-        / - \[ ] <!-- approve-branch=branchName2 -->pr2/g
+        / - \[ ] <!-- approve-branch=branchName2 -->pr2/g,
       );
       expect(
         checkApprovePendingSelectAll.test(
-          platform.ensureIssue.mock.calls[0][0].body
-        )
+          platform.ensureIssue.mock.calls[0][0].body,
+        ),
       ).toBeTrue();
       expect(
         checkApprovePendingBranch1.test(
-          platform.ensureIssue.mock.calls[0][0].body
-        )
+          platform.ensureIssue.mock.calls[0][0].body,
+        ),
       ).toBeTrue();
       expect(
         checkApprovePendingBranch2.test(
-          platform.ensureIssue.mock.calls[0][0].body
-        )
+          platform.ensureIssue.mock.calls[0][0].body,
+        ),
       ).toBeTrue();
     });
 
@@ -769,24 +769,28 @@ describe('workers/repository/dependency-dashboard', () => {
       });
       await dependencyDashboard.ensureDependencyDashboard(config, branches);
       const checkRateLimitedSelectAll = regEx(
-        / - \[ ] <!-- create-all-rate-limited-prs -->/g
+        / - \[ ] <!-- create-all-rate-limited-prs -->/g,
       );
       const checkRateLimitedBranch1 = regEx(
-        / - \[ ] <!-- unlimit-branch=branchName1 -->pr1/g
+        / - \[ ] <!-- unlimit-branch=branchName1 -->pr1/g,
       );
       const checkRateLimitedBranch2 = regEx(
-        / - \[ ] <!-- unlimit-branch=branchName2 -->pr2/g
+        / - \[ ] <!-- unlimit-branch=branchName2 -->pr2/g,
       );
       expect(
         checkRateLimitedSelectAll.test(
-          platform.ensureIssue.mock.calls[0][0].body
-        )
+          platform.ensureIssue.mock.calls[0][0].body,
+        ),
       ).toBeTrue();
       expect(
-        checkRateLimitedBranch1.test(platform.ensureIssue.mock.calls[0][0].body)
+        checkRateLimitedBranch1.test(
+          platform.ensureIssue.mock.calls[0][0].body,
+        ),
       ).toBeTrue();
       expect(
-        checkRateLimitedBranch2.test(platform.ensureIssue.mock.calls[0][0].body)
+        checkRateLimitedBranch2.test(
+          platform.ensureIssue.mock.calls[0][0].body,
+        ),
       ).toBeTrue();
     });
 
@@ -859,7 +863,7 @@ describe('workers/repository/dependency-dashboard', () => {
     describe('checks detected dependencies section', () => {
       const packageFiles = Fixtures.getJson('./package-files.json');
       const packageFilesWithDigest = Fixtures.getJson(
-        './package-files-digest.json'
+        './package-files-digest.json',
       );
       let config: RenovateConfig;
 
@@ -969,7 +973,7 @@ describe('workers/repository/dependency-dashboard', () => {
           expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
           expect(
             platform.ensureIssue.mock.calls[0][0].body.length <
-              GitHubMaxPrBodyLen
+              GitHubMaxPrBodyLen,
           ).toBeTrue();
 
           // same with dry run
@@ -1003,7 +1007,7 @@ describe('workers/repository/dependency-dashboard', () => {
           await dependencyDashboard.ensureDependencyDashboard(
             config,
             branches,
-            packageFiles
+            packageFiles,
           );
           expect(platform.ensureIssue).toHaveBeenCalledTimes(1);
           expect(platform.ensureIssue.mock.calls[0][0].body).toMatchSnapshot();
@@ -1084,13 +1088,13 @@ describe('workers/repository/dependency-dashboard', () => {
 
   describe('getDashboardMarkdownVulnerabilities()', () => {
     const packageFiles = Fixtures.getJson<Record<string, PackageFile[]>>(
-      './package-files.json'
+      './package-files.json',
     );
 
     it('return empty string if summary is empty', async () => {
       const result = await getDashboardMarkdownVulnerabilities(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(result).toBeEmpty();
     });
@@ -1101,7 +1105,7 @@ describe('workers/repository/dependency-dashboard', () => {
           ...config,
           dependencyDashboardOSVVulnerabilitySummary: 'none',
         },
-        packageFiles
+        packageFiles,
       );
       expect(result).toBeEmpty();
     });
@@ -1118,10 +1122,10 @@ describe('workers/repository/dependency-dashboard', () => {
           ...config,
           dependencyDashboardOSVVulnerabilitySummary: 'all',
         },
-        {}
+        {},
       );
       expect(result).toBe(
-        `## Vulnerabilities\n\nRenovate has not found any CVEs on [osv.dev](https://osv.dev).\n\n`
+        `## Vulnerabilities\n\nRenovate has not found any CVEs on [osv.dev](https://osv.dev).\n\n`,
       );
     });
 
@@ -1160,7 +1164,7 @@ describe('workers/repository/dependency-dashboard', () => {
           dependencyDashboardOSVVulnerabilitySummary: 'all',
           osvVulnerabilityAlerts: true,
         },
-        packageFiles
+        packageFiles,
       );
       expect(result.trimEnd()).toBe(codeBlock`## Vulnerabilities
 
@@ -1226,7 +1230,7 @@ describe('workers/repository/dependency-dashboard', () => {
           ...config,
           dependencyDashboardOSVVulnerabilitySummary: 'unresolved',
         },
-        packageFiles
+        packageFiles,
       );
       expect(result.trimEnd()).toBe(codeBlock`## Vulnerabilities
 
diff --git a/lib/workers/repository/dependency-dashboard.ts b/lib/workers/repository/dependency-dashboard.ts
index aac1ada612d0453a810729564360d436e3de173d..1f5f0be36f3d8a6474017235b6945299d3f959d1 100644
--- a/lib/workers/repository/dependency-dashboard.ts
+++ b/lib/workers/repository/dependency-dashboard.ts
@@ -24,16 +24,16 @@ interface DependencyDashboard {
 
 const rateLimitedRe = regEx(
   ' - \\[ \\] <!-- unlimit-branch=([^\\s]+) -->',
-  'g'
+  'g',
 );
 const pendingApprovalRe = regEx(
   ' - \\[ \\] <!-- approve-branch=([^\\s]+) -->',
-  'g'
+  'g',
 );
 const generalBranchRe = regEx(' <!-- ([a-zA-Z]+)-branch=([^\\s]+) -->');
 const markedBranchesRe = regEx(
   ' - \\[x\\] <!-- ([a-zA-Z]+)-branch=([^\\s]+) -->',
-  'g'
+  'g',
 );
 
 function checkOpenAllRateLimitedPR(issueBody: string): boolean {
@@ -65,7 +65,7 @@ function selectAllRelevantBranches(issueBody: string): string[] {
 
 function getAllSelectedBranches(
   issueBody: string,
-  dependencyDashboardChecks: Record<string, string>
+  dependencyDashboardChecks: Record<string, string>,
 ): Record<string, string> {
   const allRelevantBranches = selectAllRelevantBranches(issueBody);
   for (const branch of allRelevantBranches) {
@@ -82,7 +82,7 @@ function getCheckedBranches(issueBody: string): Record<string, string> {
   }
   dependencyDashboardChecks = getAllSelectedBranches(
     issueBody,
-    dependencyDashboardChecks
+    dependencyDashboardChecks,
   );
   return dependencyDashboardChecks;
 }
@@ -102,7 +102,7 @@ function parseDashboardIssue(issueBody: string): DependencyDashboard {
 }
 
 export async function readDashboardBody(
-  config: SelectAllConfig
+  config: SelectAllConfig,
 ): Promise<void> {
   config.dependencyDashboardChecks = {};
   const stringifiedConfig = JSON.stringify(config);
@@ -123,7 +123,7 @@ export async function readDashboardBody(
           config.checkedBranches.map((branchName) => [
             branchName,
             'global-config',
-          ])
+          ]),
         );
         dashboardChecks.dependencyDashboardChecks = {
           ...dashboardChecks.dependencyDashboardChecks,
@@ -176,7 +176,7 @@ function appendRepoProblems(config: RenovateConfig, issueBody: string): string {
 export async function ensureDependencyDashboard(
   config: SelectAllConfig,
   allBranches: BranchConfig[],
-  packageFiles: Record<string, PackageFile[]> = {}
+  packageFiles: Record<string, PackageFile[]> = {},
 ): Promise<void> {
   logger.debug('ensureDependencyDashboard()');
   // legacy/migrated issue
@@ -184,7 +184,7 @@ export async function ensureDependencyDashboard(
   const branches = allBranches.filter(
     (branch) =>
       branch.result !== 'automerged' &&
-      !branch.upgrades?.every((upgrade) => upgrade.remediationNotPossible)
+      !branch.upgrades?.every((upgrade) => upgrade.remediationNotPossible),
   );
   if (
     !(
@@ -195,14 +195,14 @@ export async function ensureDependencyDashboard(
       branches.some(
         (branch) =>
           !!branch.dependencyDashboardApproval ||
-          !!branch.dependencyDashboardPrApproval
+          !!branch.dependencyDashboardPrApproval,
       )
     )
   ) {
     if (GlobalConfig.get('dryRun')) {
       logger.info(
         { title: config.dependencyDashboardTitle },
-        'DRY-RUN: Would close Dependency Dashboard'
+        'DRY-RUN: Would close Dependency Dashboard',
       );
     } else {
       logger.debug('Closing Dependency Dashboard');
@@ -221,7 +221,7 @@ export async function ensureDependencyDashboard(
     if (GlobalConfig.get('dryRun')) {
       logger.info(
         { title: config.dependencyDashboardTitle },
-        'DRY-RUN: Would close Dependency Dashboard'
+        'DRY-RUN: Would close Dependency Dashboard',
       );
     } else {
       logger.debug('Closing Dependency Dashboard');
@@ -238,7 +238,7 @@ export async function ensureDependencyDashboard(
   issueBody = appendRepoProblems(config, issueBody);
 
   const pendingApprovals = branches.filter(
-    (branch) => branch.result === 'needs-approval'
+    (branch) => branch.result === 'needs-approval',
   );
   if (pendingApprovals.length) {
     issueBody += '## Pending Approval\n\n';
@@ -254,7 +254,7 @@ export async function ensureDependencyDashboard(
     issueBody += '\n';
   }
   const awaitingSchedule = branches.filter(
-    (branch) => branch.result === 'not-scheduled'
+    (branch) => branch.result === 'not-scheduled',
   );
   if (awaitingSchedule.length) {
     issueBody += '## Awaiting Schedule\n\n';
@@ -269,7 +269,7 @@ export async function ensureDependencyDashboard(
     (branch) =>
       branch.result === 'branch-limit-reached' ||
       branch.result === 'pr-limit-reached' ||
-      branch.result === 'commit-limit-reached'
+      branch.result === 'commit-limit-reached',
   );
   if (rateLimited.length) {
     issueBody += '## Rate-Limited\n\n';
@@ -296,7 +296,7 @@ export async function ensureDependencyDashboard(
     issueBody += '\n';
   }
   const awaitingPr = branches.filter(
-    (branch) => branch.result === 'needs-pr-approval'
+    (branch) => branch.result === 'needs-pr-approval',
   );
   if (awaitingPr.length) {
     issueBody += '## PR Creation Approval Required\n\n';
@@ -326,7 +326,7 @@ export async function ensureDependencyDashboard(
     issueBody += '\n';
   }
   const prPendingBranchAutomerge = branches.filter(
-    (branch) => branch.prBlockedBy === 'BranchAutomerge'
+    (branch) => branch.prBlockedBy === 'BranchAutomerge',
   );
   if (prPendingBranchAutomerge.length) {
     issueBody += '## Pending Branch Automerge\n\n';
@@ -359,10 +359,10 @@ export async function ensureDependencyDashboard(
   let inProgress = branches.filter(
     (branch) =>
       !otherRes.includes(branch.result!) &&
-      branch.prBlockedBy !== 'BranchAutomerge'
+      branch.prBlockedBy !== 'BranchAutomerge',
   );
   const otherBranches = inProgress.filter(
-    (branch) => !!branch.prBlockedBy || !branch.prNo
+    (branch) => !!branch.prBlockedBy || !branch.prNo,
   );
   // istanbul ignore if
   if (otherBranches.length) {
@@ -374,7 +374,7 @@ export async function ensureDependencyDashboard(
     issueBody += '\n';
   }
   inProgress = inProgress.filter(
-    (branch) => branch.prNo && !branch.prBlockedBy
+    (branch) => branch.prNo && !branch.prBlockedBy,
   );
   if (inProgress.length) {
     issueBody += '## Open\n\n';
@@ -392,7 +392,7 @@ export async function ensureDependencyDashboard(
     issueBody += '\n';
   }
   const alreadyExisted = branches.filter(
-    (branch) => branch.result === 'already-existed'
+    (branch) => branch.result === 'already-existed',
   );
   if (alreadyExisted.length) {
     issueBody += '## Ignored or Blocked\n\n';
@@ -415,7 +415,7 @@ export async function ensureDependencyDashboard(
   // fit the detected dependencies section
   const footer = getFooter(config);
   issueBody += PackageFiles.getDashboardMarkdown(
-    GitHubMaxPrBodyLen - issueBody.length - footer.length
+    GitHubMaxPrBodyLen - issueBody.length - footer.length,
   );
 
   issueBody += footer;
@@ -423,11 +423,11 @@ export async function ensureDependencyDashboard(
   if (config.dependencyDashboardIssue) {
     const updatedIssue = await platform.getIssue?.(
       config.dependencyDashboardIssue,
-      false
+      false,
     );
     if (updatedIssue) {
       const { dependencyDashboardChecks } = parseDashboardIssue(
-        coerceString(updatedIssue.body)
+        coerceString(updatedIssue.body),
       );
       for (const branchName of Object.keys(config.dependencyDashboardChecks!)) {
         delete dependencyDashboardChecks[branchName];
@@ -436,7 +436,7 @@ export async function ensureDependencyDashboard(
         const checkText = `- [ ] <!-- ${dependencyDashboardChecks[branchName]}-branch=${branchName} -->`;
         issueBody = issueBody.replace(
           checkText,
-          checkText.replace('[ ]', '[x]')
+          checkText.replace('[ ]', '[x]'),
         );
       }
     }
@@ -445,7 +445,7 @@ export async function ensureDependencyDashboard(
   if (GlobalConfig.get('dryRun')) {
     logger.info(
       { title: config.dependencyDashboardTitle },
-      'DRY-RUN: Would ensure Dependency Dashboard'
+      'DRY-RUN: Would ensure Dependency Dashboard',
     );
   } else {
     await platform.ensureIssue({
@@ -472,7 +472,7 @@ function getFooter(config: RenovateConfig): string {
 
 export async function getDashboardMarkdownVulnerabilities(
   config: RenovateConfig,
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): Promise<string> {
   let result = '';
 
@@ -488,7 +488,7 @@ export async function getDashboardMarkdownVulnerabilities(
   const vulnerabilityFetcher = await Vulnerabilities.create();
   const vulnerabilities = await vulnerabilityFetcher.fetchVulnerabilities(
     config,
-    packageFiles
+    packageFiles,
   );
 
   if (vulnerabilities.length === 0) {
@@ -498,7 +498,7 @@ export async function getDashboardMarkdownVulnerabilities(
   }
 
   const unresolvedVulnerabilities = vulnerabilities.filter((value) =>
-    is.nullOrUndefined(value.fixedVersion)
+    is.nullOrUndefined(value.fixedVersion),
   );
   const resolvedVulnerabilitiesLength =
     vulnerabilities.length - unresolvedVulnerabilities.length;
@@ -535,20 +535,20 @@ export async function getDashboardMarkdownVulnerabilities(
     }
     if (
       is.nullOrUndefined(
-        managerRecords[manager!][packageFile][vulnerability.packageName]
+        managerRecords[manager!][packageFile][vulnerability.packageName],
       )
     ) {
       managerRecords[manager!][packageFile][vulnerability.packageName] = [];
     }
     managerRecords[manager!][packageFile][vulnerability.packageName].push(
-      vulnerability
+      vulnerability,
     );
   }
 
   for (const [manager, packageFileRecords] of Object.entries(managerRecords)) {
     result += `<details><summary>${manager}</summary>\n<blockquote>\n\n`;
     for (const [packageFile, packageNameRecords] of Object.entries(
-      packageFileRecords
+      packageFileRecords,
     )) {
       result += `<details><summary>${packageFile}</summary>\n<blockquote>\n\n`;
       for (const [packageName, cves] of Object.entries(packageNameRecords)) {
diff --git a/lib/workers/repository/error-config.spec.ts b/lib/workers/repository/error-config.spec.ts
index b37abdd4be65d48ffe3b0500a86b4f235f246d79..f08d0fad23d6e34c516065b7282c092779ccb844 100644
--- a/lib/workers/repository/error-config.spec.ts
+++ b/lib/workers/repository/error-config.spec.ts
@@ -47,10 +47,10 @@ Message: some-message
       expect(res).toBeUndefined();
       expect(logger.warn).toHaveBeenCalledWith(
         { configError: error, res: 'created' },
-        'Configuration Warning'
+        'Configuration Warning',
       );
       expect(platform.ensureIssue).toHaveBeenCalledWith(
-        expect.objectContaining({ body: expectedBody })
+        expect.objectContaining({ body: expectedBody }),
       );
     });
 
@@ -66,7 +66,7 @@ Message: some-message
       expect(res).toBeUndefined();
       expect(logger.info).toHaveBeenCalledWith(
         { configError: error },
-        'DRY-RUN: Would ensure configuration error issue'
+        'DRY-RUN: Would ensure configuration error issue',
       );
     });
 
@@ -85,7 +85,7 @@ Message: some-message
 
       expect(res).toBeUndefined();
       expect(platform.updatePr).toHaveBeenCalledWith(
-        expect.objectContaining({ prTitle: pr.title, number: pr.number })
+        expect.objectContaining({ prTitle: pr.title, number: pr.number }),
       );
     });
 
@@ -104,7 +104,7 @@ Message: some-message
 
       expect(res).toBeUndefined();
       expect(logger.info).toHaveBeenCalledWith(
-        `DRY-RUN: Would update PR #${pr.number}`
+        `DRY-RUN: Would update PR #${pr.number}`,
       );
     });
 
@@ -126,7 +126,7 @@ Message: some-message
       expect(res).toBeUndefined();
       expect(logger.info).toHaveBeenCalledWith(
         { notificationName },
-        'Configuration failure, issues will be suppressed'
+        'Configuration failure, issues will be suppressed',
       );
     });
   });
diff --git a/lib/workers/repository/error-config.ts b/lib/workers/repository/error-config.ts
index 24f04c4e3a3130714c43481a7fc722f51199d7a8..3654ef90db65bbb72264f220601cc749ce15186a 100644
--- a/lib/workers/repository/error-config.ts
+++ b/lib/workers/repository/error-config.ts
@@ -6,7 +6,7 @@ import { Pr, platform } from '../../modules/platform';
 
 export function raiseConfigWarningIssue(
   config: RenovateConfig,
-  error: Error
+  error: Error,
 ): Promise<void> {
   logger.debug('raiseConfigWarningIssue()');
   const title = `Action Required: Fix Renovate Configuration`;
@@ -17,7 +17,7 @@ export function raiseConfigWarningIssue(
 
 export function raiseCredentialsWarningIssue(
   config: RenovateConfig,
-  error: Error
+  error: Error,
 ): Promise<void> {
   logger.debug('raiseCredentialsWarningIssue()');
   const title = `Action Required: Add missing credentials`;
@@ -31,7 +31,7 @@ async function raiseWarningIssue(
   notificationName: string,
   title: string,
   initialBody: string,
-  error: Error
+  error: Error,
 ): Promise<void> {
   let body = initialBody;
   if (error.validationSource) {
@@ -46,7 +46,7 @@ async function raiseWarningIssue(
 
   const pr = await platform.getBranchPr(
     config.onboardingBranch!,
-    config.baseBranch
+    config.baseBranch,
   );
   if (pr?.state === 'open') {
     await handleOnboardingPr(pr, body);
@@ -56,7 +56,7 @@ async function raiseWarningIssue(
   if (GlobalConfig.get('dryRun')) {
     logger.info(
       { configError: error },
-      'DRY-RUN: Would ensure configuration error issue'
+      'DRY-RUN: Would ensure configuration error issue',
     );
     return;
   }
@@ -64,7 +64,7 @@ async function raiseWarningIssue(
   if (config.suppressNotifications?.includes(notificationName)) {
     logger.info(
       { notificationName },
-      'Configuration failure, issues will be suppressed'
+      'Configuration failure, issues will be suppressed',
     );
     return;
   }
diff --git a/lib/workers/repository/error.spec.ts b/lib/workers/repository/error.spec.ts
index f643a7f5401a9eb723b0fc895c7819a342c679c7..6b2ed7a118299c9101e43c6508376ba7dc84ef02 100644
--- a/lib/workers/repository/error.spec.ts
+++ b/lib/workers/repository/error.spec.ts
@@ -78,14 +78,14 @@ describe('workers/repository/error', () => {
     it(`handles ExternalHostError`, async () => {
       const res = await handleError(
         config,
-        new ExternalHostError(new Error(), 'some-host-type')
+        new ExternalHostError(new Error(), 'some-host-type'),
       );
       expect(res).toEqual(EXTERNAL_HOST_ERROR);
     });
 
     it('rewrites git 5xx error', async () => {
       const gitError = new Error(
-        "fatal: unable to access 'https://**redacted**@gitlab.com/learnox/learnox.git/': The requested URL returned error: 500\n"
+        "fatal: unable to access 'https://**redacted**@gitlab.com/learnox/learnox.git/': The requested URL returned error: 500\n",
       );
       const res = await handleError(config, gitError);
       expect(res).toEqual(EXTERNAL_HOST_ERROR);
@@ -93,7 +93,7 @@ describe('workers/repository/error', () => {
 
     it('rewrites git remote error', async () => {
       const gitError = new Error(
-        'fatal: remote error: access denied or repository not exported: /b/nw/bd/27/47/159945428/108610112.git\n'
+        'fatal: remote error: access denied or repository not exported: /b/nw/bd/27/47/159945428/108610112.git\n',
       );
       const res = await handleError(config, gitError);
       expect(res).toEqual(EXTERNAL_HOST_ERROR);
@@ -101,7 +101,7 @@ describe('workers/repository/error', () => {
 
     it('rewrites git fatal error', async () => {
       const gitError = new Error(
-        'fatal: not a git repository (or any parent up to mount point /mnt)\nStopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).\n'
+        'fatal: not a git repository (or any parent up to mount point /mnt)\nStopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).\n',
       );
       const res = await handleError(config, gitError);
       expect(res).toEqual(TEMPORARY_ERROR);
diff --git a/lib/workers/repository/error.ts b/lib/workers/repository/error.ts
index bcb0cffc9122817c1d97c48c0df6b6378f7d1637..0ec3d982cc7b99998a5253a6e3966542ccdb61b9 100644
--- a/lib/workers/repository/error.ts
+++ b/lib/workers/repository/error.ts
@@ -41,7 +41,7 @@ import {
 
 export default async function handleError(
   config: RenovateConfig,
-  err: Error
+  err: Error,
 ): Promise<string> {
   if (err.message === REPOSITORY_UNINITIATED) {
     logger.info('Repository is uninitiated - skipping');
@@ -95,7 +95,7 @@ export default async function handleError(
   }
   if (err.message === REPOSITORY_FORKED) {
     logger.info(
-      'Repository is a fork and not manually configured - skipping - did you want to run with flag --include-forks?'
+      'Repository is a fork and not manually configured - skipping - did you want to run with flag --include-forks?',
     );
     return err.message;
   }
@@ -132,14 +132,14 @@ export default async function handleError(
     delete config.branchList;
     logger.warn(
       { error: err },
-      'Repository aborted due to potential secrets exposure'
+      'Repository aborted due to potential secrets exposure',
     );
     return err.message;
   }
   if (err instanceof ExternalHostError) {
     logger.warn(
       { hostType: err.hostType, packageName: err.packageName, err: err.err },
-      'Host error'
+      'Host error',
     );
     logger.info('External host error causing abort - skipping');
     delete config.branchList;
diff --git a/lib/workers/repository/errors-warnings.ts b/lib/workers/repository/errors-warnings.ts
index e60487e5f51f2bdd7761b9840b6aeaa8b440e72d..1bbc25cf7325035118b93350a3592b8fb1918ca9 100644
--- a/lib/workers/repository/errors-warnings.ts
+++ b/lib/workers/repository/errors-warnings.ts
@@ -34,7 +34,7 @@ export function getErrors(config: RenovateConfig): string {
 }
 
 function getDepWarnings(
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): DepWarnings {
   const warnings: string[] = [];
   const warningFiles: string[] = [];
@@ -64,7 +64,7 @@ function getDepWarnings(
 
 export function getDepWarningsOnboardingPR(
   packageFiles: Record<string, PackageFile[]>,
-  config: RenovateConfig
+  config: RenovateConfig,
 ): string {
   const { warnings, warningFiles } = getDepWarnings(packageFiles);
   if (config.suppressNotifications?.includes('dependencyLookupWarnings')) {
@@ -89,7 +89,7 @@ export function getDepWarningsOnboardingPR(
 export function getDepWarningsPR(
   packageFiles: Record<string, PackageFile[]>,
   config: RenovateConfig,
-  dependencyDashboard?: boolean
+  dependencyDashboard?: boolean,
 ): string {
   const { warnings } = getDepWarnings(packageFiles);
   if (config.suppressNotifications?.includes('dependencyLookupWarnings')) {
@@ -111,7 +111,7 @@ export function getDepWarningsPR(
 
 export function getDepWarningsDashboard(
   packageFiles: Record<string, PackageFile[]>,
-  config: RenovateConfig
+  config: RenovateConfig,
 ): string {
   if (config.suppressNotifications?.includes('dependencyLookupWarnings')) {
     return '';
@@ -123,13 +123,13 @@ export function getDepWarningsDashboard(
 
   const depWarnings = warnings
     .map((w) =>
-      w.replace(regEx(/^Failed to look up(?: [-\w]+)? dependency /), '')
+      w.replace(regEx(/^Failed to look up(?: [-\w]+)? dependency /), ''),
     )
     .map((dep) => '`' + dep + '`')
     .join(', ');
 
   let warningText = emojify(
-    `\n---\n\n> :warning: **Warning**\n> \n> Renovate failed to look up the following dependencies: `
+    `\n---\n\n> :warning: **Warning**\n> \n> Renovate failed to look up the following dependencies: `,
   );
   warningText += depWarnings;
   warningText += '.\n> \n> Files affected: ';
diff --git a/lib/workers/repository/extract/extract-fingerprint-config.spec.ts b/lib/workers/repository/extract/extract-fingerprint-config.spec.ts
index ade8d1b42d2c3cc17b7896f40794e7533f9e0f2d..685c799cb3f0fbabe3c6d6b4db14943e26af9eea 100644
--- a/lib/workers/repository/extract/extract-fingerprint-config.spec.ts
+++ b/lib/workers/repository/extract/extract-fingerprint-config.spec.ts
@@ -35,7 +35,7 @@ describe('workers/repository/extract/extract-fingerprint-config', () => {
 
     expect(fingerprintConfig.managerList).toEqual(new Set(['npm', 'regex']));
     expect(
-      fingerprintConfig.managers.find((manager) => manager.manager === 'npm')
+      fingerprintConfig.managers.find((manager) => manager.manager === 'npm'),
     ).toEqual({
       enabled: true,
       fileList: [],
@@ -51,7 +51,7 @@ describe('workers/repository/extract/extract-fingerprint-config', () => {
       skipInstalls: null,
     });
     expect(
-      fingerprintConfig.managers.find((manager) => manager.manager === 'regex')
+      fingerprintConfig.managers.find((manager) => manager.manager === 'regex'),
     ).toEqual({
       fileMatch: ['js', '***$}{]]['],
       ignorePaths: ['ignore-path-1'],
@@ -80,7 +80,7 @@ describe('workers/repository/extract/extract-fingerprint-config', () => {
     const fingerprintConfig = generateFingerprintConfig(config);
     expect(fingerprintConfig.managerList).toEqual(new Set(allManagersList));
     expect(
-      fingerprintConfig.managers.find((manager) => manager.manager === 'npm')
+      fingerprintConfig.managers.find((manager) => manager.manager === 'npm'),
     ).toEqual({
       enabled: true,
       fileList: [],
@@ -95,8 +95,8 @@ describe('workers/repository/extract/extract-fingerprint-config', () => {
     });
     expect(
       fingerprintConfig.managers.find(
-        (manager) => manager.manager === 'dockerfile'
-      )
+        (manager) => manager.manager === 'dockerfile',
+      ),
     ).toEqual({
       enabled: true,
       fileList: [],
@@ -113,7 +113,7 @@ describe('workers/repository/extract/extract-fingerprint-config', () => {
       skipInstalls: null,
     });
     expect(
-      fingerprintConfig.managers.find((manager) => manager.manager === 'regex')
+      fingerprintConfig.managers.find((manager) => manager.manager === 'regex'),
     ).toBeUndefined();
   });
 });
diff --git a/lib/workers/repository/extract/extract-fingerprint-config.ts b/lib/workers/repository/extract/extract-fingerprint-config.ts
index c30139c874413838dd8a26c4e78d060128061bb7..be5cccd079b95e6539e5057385e2ca9715b7dc9a 100644
--- a/lib/workers/repository/extract/extract-fingerprint-config.ts
+++ b/lib/workers/repository/extract/extract-fingerprint-config.ts
@@ -14,11 +14,11 @@ export interface FingerprintExtractConfig {
 
 // checks for regex manager fields
 function getCustomManagerFields(
-  config: WorkerExtractConfig
+  config: WorkerExtractConfig,
 ): CustomExtractConfig {
   const regexFields = {} as CustomExtractConfig;
   for (const field of validMatchFields.map(
-    (f) => `${f}Template` as keyof RegexManagerTemplates
+    (f) => `${f}Template` as keyof RegexManagerTemplates,
   )) {
     if (config[field]) {
       regexFields[field] = config[field];
@@ -34,7 +34,7 @@ function getCustomManagerFields(
 }
 
 function getFilteredManagerConfig(
-  config: WorkerExtractConfig
+  config: WorkerExtractConfig,
 ): WorkerExtractConfig {
   return {
     ...(isCustomManager(config.manager) && getCustomManagerFields(config)),
@@ -52,7 +52,7 @@ function getFilteredManagerConfig(
 }
 
 export function generateFingerprintConfig(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): FingerprintExtractConfig {
   const managerExtractConfigs: WorkerExtractConfig[] = [];
   let managerList: Set<string>;
@@ -67,7 +67,7 @@ export function generateFingerprintConfig(
     const managerConfig = getManagerConfig(config, manager);
     if (isCustomManager(manager)) {
       const filteredCustomManagers = (config.customManagers ?? []).filter(
-        (mgr) => mgr.customType === manager
+        (mgr) => mgr.customType === manager,
       );
       for (const customManager of filteredCustomManagers) {
         managerExtractConfigs.push({
diff --git a/lib/workers/repository/extract/file-match.ts b/lib/workers/repository/extract/file-match.ts
index c9a078ecbf6263aaa5ad5f161c3a6b3ca75f14aa..9f2e6045dcaec4f94d3d9a3f6fd435b0c7b6792f 100644
--- a/lib/workers/repository/extract/file-match.ts
+++ b/lib/workers/repository/extract/file-match.ts
@@ -5,7 +5,7 @@ import { regEx } from '../../../util/regex';
 
 export function getIncludedFiles(
   fileList: string[],
-  includePaths: string[]
+  includePaths: string[],
 ): string[] {
   if (!includePaths?.length) {
     return [...fileList];
@@ -14,14 +14,14 @@ export function getIncludedFiles(
     includePaths.some(
       (includePath) =>
         file === includePath ||
-        minimatch(includePath, { dot: true }).match(file)
-    )
+        minimatch(includePath, { dot: true }).match(file),
+    ),
   );
 }
 
 export function filterIgnoredFiles(
   fileList: string[],
-  ignorePaths: string[]
+  ignorePaths: string[],
 ): string[] {
   if (!ignorePaths?.length) {
     return [...fileList];
@@ -31,14 +31,14 @@ export function filterIgnoredFiles(
       !ignorePaths.some(
         (ignorePath) =>
           file.includes(ignorePath) ||
-          minimatch(ignorePath, { dot: true }).match(file)
-      )
+          minimatch(ignorePath, { dot: true }).match(file),
+      ),
   );
 }
 
 export function getFilteredFileList(
   config: RenovateConfig,
-  fileList: string[]
+  fileList: string[],
 ): string[] {
   const { includePaths, ignorePaths } = config;
   // TODO #22198
@@ -50,7 +50,7 @@ export function getFilteredFileList(
 
 export function getMatchingFiles(
   config: RenovateConfig,
-  allFiles: string[]
+  allFiles: string[],
 ): string[] {
   const fileList = getFilteredFileList(config, allFiles);
   const { fileMatch, manager } = config;
@@ -60,7 +60,7 @@ export function getMatchingFiles(
     logger.debug(`Using file match: ${match} for manager ${manager!}`);
     const re = regEx(match);
     matchedFiles = matchedFiles.concat(
-      fileList.filter((file) => re.test(file))
+      fileList.filter((file) => re.test(file)),
     );
   }
   // filter out duplicates
diff --git a/lib/workers/repository/extract/index.ts b/lib/workers/repository/extract/index.ts
index 925d96a6c39a4bc9632cd9e4e0a6fc57cd10148a..9216550726e5c372691ce647353c9f133a6aae42 100644
--- a/lib/workers/repository/extract/index.ts
+++ b/lib/workers/repository/extract/index.ts
@@ -11,14 +11,14 @@ import { getManagerPackageFiles } from './manager-files';
 import { processSupersedesManagers } from './supersedes';
 
 export async function extractAllDependencies(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<ExtractResult> {
   let managerList = allManagersList;
   const { enabledManagers } = config;
   if (is.nonEmptyArray(enabledManagers)) {
     logger.debug('Applying enabledManagers filtering');
     managerList = managerList.filter((manager) =>
-      enabledManagers.includes(manager)
+      enabledManagers.includes(manager),
     );
   }
   const extractList: WorkerExtractConfig[] = [];
@@ -36,7 +36,7 @@ export async function extractAllDependencies(
     managerConfig.manager = manager;
     if (isCustomManager(manager)) {
       const filteredCustomManagers = (config.customManagers ?? []).filter(
-        (mgr) => mgr.customType === manager
+        (mgr) => mgr.customType === manager,
       );
       for (const customManager of filteredCustomManagers) {
         tryConfig(mergeChildConfig(managerConfig, customManager));
@@ -65,7 +65,7 @@ export async function extractAllDependencies(
       const durationMs = Math.round(Date.now() - start);
       extractDurations[managerConfig.manager] = durationMs;
       return { manager: managerConfig.manager, packageFiles };
-    })
+    }),
   );
 
   // De-duplicate results using supersedesManagers
@@ -73,7 +73,7 @@ export async function extractAllDependencies(
 
   logger.debug(
     { managers: extractDurations },
-    'manager extract durations (ms)'
+    'manager extract durations (ms)',
   );
   let fileCount = 0;
   for (const { manager, packageFiles } of extractResults) {
@@ -94,7 +94,7 @@ export async function extractAllDependencies(
       if (!(enabledManager in extractResult.packageFiles)) {
         logger.debug(
           { manager: enabledManager },
-          `Manager explicitly enabled in "enabledManagers" config, but found no results. Possible config error?`
+          `Manager explicitly enabled in "enabledManagers" config, but found no results. Possible config error?`,
         );
       }
     }
diff --git a/lib/workers/repository/extract/manager-files.spec.ts b/lib/workers/repository/extract/manager-files.spec.ts
index 8f2fa4ff5ded473419709d08507c669119fb6362..58d074b7898ae75ad603b9f9ee118126e2e48b6a 100644
--- a/lib/workers/repository/extract/manager-files.spec.ts
+++ b/lib/workers/repository/extract/manager-files.spec.ts
@@ -71,7 +71,7 @@ describe('workers/repository/extract/manager-files', () => {
       };
       fileMatch.getMatchingFiles.mockReturnValue(['package.json']);
       fs.readLocalFile.mockResolvedValueOnce(
-        '{"dependencies":{"chalk":"2.0.0"}}'
+        '{"dependencies":{"chalk":"2.0.0"}}',
       );
       const res = await getManagerPackageFiles(managerConfig);
       expect(res).toMatchObject([
diff --git a/lib/workers/repository/extract/manager-files.ts b/lib/workers/repository/extract/manager-files.ts
index f74196bd6b08356ff76b8857d1cb6268e468b9a1..d247968c8ce72bba26d939820ade43050d66947a 100644
--- a/lib/workers/repository/extract/manager-files.ts
+++ b/lib/workers/repository/extract/manager-files.ts
@@ -10,7 +10,7 @@ import { readLocalFile } from '../../../util/fs';
 import type { WorkerExtractConfig } from '../../types';
 
 export async function getManagerPackageFiles(
-  config: WorkerExtractConfig
+  config: WorkerExtractConfig,
 ): Promise<PackageFile[] | null> {
   const { enabled, manager, fileList } = config;
   logger.trace(`getPackageFiles(${manager})`);
@@ -23,7 +23,7 @@ export async function getManagerPackageFiles(
     logger.debug(
       `Matched ${
         fileList.length
-      } file(s) for manager ${manager}: ${fileList.join(', ')}`
+      } file(s) for manager ${manager}: ${fileList.join(', ')}`,
     );
   } else {
     return [];
@@ -33,7 +33,7 @@ export async function getManagerPackageFiles(
     const allPackageFiles = await extractAllPackageFiles(
       manager,
       config,
-      fileList
+      fileList,
     );
     return allPackageFiles;
   }
@@ -46,7 +46,7 @@ export async function getManagerPackageFiles(
         manager,
         content,
         packageFile,
-        config
+        config,
       );
       if (res) {
         packageFiles.push({
diff --git a/lib/workers/repository/extract/supersedes.ts b/lib/workers/repository/extract/supersedes.ts
index a0b322f9025bf047aafeff93784b6d3e10f6a79a..597329a45d2175d6d02bb7a448d82bbe194e01bd 100644
--- a/lib/workers/repository/extract/supersedes.ts
+++ b/lib/workers/repository/extract/supersedes.ts
@@ -3,7 +3,7 @@ import { get } from '../../../modules/manager';
 import type { ExtractResults } from './types';
 
 export function processSupersedesManagers(
-  extractResults: ExtractResults[]
+  extractResults: ExtractResults[],
 ): void {
   for (const { manager, packageFiles } of extractResults) {
     if (!packageFiles) {
@@ -12,11 +12,11 @@ export function processSupersedesManagers(
     const supersedesManagers = get(manager, 'supersedesManagers');
     if (is.nonEmptyArray(supersedesManagers)) {
       const supercedingPackageFileNames = packageFiles.map(
-        (packageFile) => packageFile.packageFile
+        (packageFile) => packageFile.packageFile,
       );
       for (const supercededManager of supersedesManagers) {
         const supercededManagerResults = extractResults.find(
-          (result) => result.manager === supercededManager
+          (result) => result.manager === supercededManager,
         );
         if (supercededManagerResults?.packageFiles) {
           supercededManagerResults.packageFiles =
diff --git a/lib/workers/repository/finalize/index.ts b/lib/workers/repository/finalize/index.ts
index c4b291b6b1bb2709e31dd204ea1be03c16c2687a..f15803c57a73a4b53c400b77f46075859dd5d7ed 100644
--- a/lib/workers/repository/finalize/index.ts
+++ b/lib/workers/repository/finalize/index.ts
@@ -15,7 +15,7 @@ import {
 // istanbul ignore next
 export async function finalizeRepo(
   config: RenovateConfig,
-  branchList: string[]
+  branchList: string[],
 ): Promise<void> {
   await validateReconfigureBranch(config);
   await configMigration(config, branchList);
@@ -30,7 +30,7 @@ export async function finalizeRepo(
       (pr) =>
         pr.state === 'merged' &&
         pr.title !== 'Configure Renovate' &&
-        pr.title !== config.onboardingPrTitle
+        pr.title !== config.onboardingPrTitle,
     )
   ) {
     logger.debug('Repo is activated');
diff --git a/lib/workers/repository/finalize/prune.spec.ts b/lib/workers/repository/finalize/prune.spec.ts
index 3818b973aea8e74526f924cbff9155c1f54b8ac6..a9087eb2c667cb796e81615a5a726ea589cb77fa 100644
--- a/lib/workers/repository/finalize/prune.spec.ts
+++ b/lib/workers/repository/finalize/prune.spec.ts
@@ -47,7 +47,7 @@ describe('workers/repository/finalize/prune', () => {
       config.branchList = [];
       git.getBranchList.mockReturnValueOnce([]);
       await expect(
-        cleanup.pruneStaleBranches(config, config.branchList)
+        cleanup.pruneStaleBranches(config, config.branchList),
       ).resolves.not.toThrow();
     });
 
@@ -62,7 +62,7 @@ describe('workers/repository/finalize/prune', () => {
     it('renames deletes remaining branch', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.findPr.mockResolvedValueOnce(partial<Pr>({ title: 'foo' }));
       await cleanup.pruneStaleBranches(config, config.branchList);
@@ -74,12 +74,12 @@ describe('workers/repository/finalize/prune', () => {
     it('skips rename but still deletes branch', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.findPr.mockResolvedValueOnce(
         partial<Pr>({
           title: 'foo - autoclosed',
-        })
+        }),
       );
       await cleanup.pruneStaleBranches(config, config.branchList);
       expect(git.getBranchList).toHaveBeenCalledTimes(1);
@@ -91,7 +91,7 @@ describe('workers/repository/finalize/prune', () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       GlobalConfig.set({ dryRun: 'full' });
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.findPr.mockResolvedValueOnce(partial<Pr>({ title: 'foo' }));
       await cleanup.pruneStaleBranches(config, config.branchList);
@@ -104,7 +104,7 @@ describe('workers/repository/finalize/prune', () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       config.pruneStaleBranches = false;
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.findPr.mockResolvedValueOnce(partial<Pr>({ title: 'foo' }));
       await cleanup.pruneStaleBranches(config, config.branchList);
@@ -116,7 +116,7 @@ describe('workers/repository/finalize/prune', () => {
     it('notifies via PR changes if someone pushed to PR', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.getBranchPr.mockResolvedValueOnce(partial<Pr>());
       scm.isBranchModified.mockResolvedValueOnce(true);
@@ -131,12 +131,12 @@ describe('workers/repository/finalize/prune', () => {
     it('skips appending - abandoned to PR title if already present', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.getBranchPr.mockResolvedValueOnce(partial<Pr>());
       scm.isBranchModified.mockResolvedValueOnce(true);
       platform.findPr.mockResolvedValueOnce(
-        partial<Pr>({ title: 'foo - abandoned' })
+        partial<Pr>({ title: 'foo - abandoned' }),
       );
       await cleanup.pruneStaleBranches(config, config.branchList);
       expect(platform.updatePr).toHaveBeenCalledTimes(0);
@@ -146,7 +146,7 @@ describe('workers/repository/finalize/prune', () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       GlobalConfig.set({ dryRun: 'full' });
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.getBranchPr.mockResolvedValueOnce(partial<Pr>());
       scm.isBranchModified.mockResolvedValueOnce(true);
@@ -162,7 +162,7 @@ describe('workers/repository/finalize/prune', () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       GlobalConfig.set({ dryRun: 'full' });
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.findPr.mockResolvedValueOnce(null as never);
       await cleanup.pruneStaleBranches(config, config.branchList);
@@ -174,7 +174,7 @@ describe('workers/repository/finalize/prune', () => {
     it('delete branch no PR', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       platform.findPr.mockResolvedValueOnce(null as never);
       await cleanup.pruneStaleBranches(config, config.branchList);
@@ -186,7 +186,7 @@ describe('workers/repository/finalize/prune', () => {
     it('does not delete modified orphan branch', async () => {
       config.branchList = ['renovate/a', 'renovate/b'];
       git.getBranchList.mockReturnValueOnce(
-        config.branchList.concat(['renovate/c'])
+        config.branchList.concat(['renovate/c']),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       platform.findPr.mockResolvedValueOnce(null as never);
diff --git a/lib/workers/repository/finalize/prune.ts b/lib/workers/repository/finalize/prune.ts
index 15f06d56f216a62058a61055ffde4eeb435af530..e46e24ae7505dd4740b8410e08cf66829b7d69b2 100644
--- a/lib/workers/repository/finalize/prune.ts
+++ b/lib/workers/repository/finalize/prune.ts
@@ -10,7 +10,7 @@ import { getReconfigureBranchName } from '../reconfigure';
 
 async function cleanUpBranches(
   config: RenovateConfig,
-  remainingBranches: string[]
+  remainingBranches: string[],
 ): Promise<void> {
   if (!config.pruneStaleBranches) {
     logger.debug('Branch/PR pruning is disabled - skipping');
@@ -30,7 +30,7 @@ async function cleanUpBranches(
         if (branchIsModified) {
           logger.debug(
             { prNo: pr.number, prTitle: pr.title },
-            'Branch is modified - skipping PR autoclosing'
+            'Branch is modified - skipping PR autoclosing',
           );
           if (GlobalConfig.get('dryRun')) {
             logger.info(`DRY-RUN: Would update PR title and ensure comment.`);
@@ -54,12 +54,12 @@ async function cleanUpBranches(
         } else if (GlobalConfig.get('dryRun')) {
           logger.info(
             { prNo: pr.number, prTitle: pr.title },
-            `DRY-RUN: Would autoclose PR`
+            `DRY-RUN: Would autoclose PR`,
           );
         } else {
           logger.info(
             { branchName, prNo: pr.number, prTitle: pr.title },
-            'Autoclosing PR'
+            'Autoclosing PR',
           );
           let newPrTitle = pr.title;
           if (!pr.title.endsWith('- autoclosed')) {
@@ -83,12 +83,12 @@ async function cleanUpBranches(
     } catch (err) /* istanbul ignore next */ {
       if (err.message === 'config-validation') {
         logger.debug(
-          'Cannot prune branch due to collision between tags and branch names'
+          'Cannot prune branch due to collision between tags and branch names',
         );
       } else if (err.message?.includes("bad revision 'origin/")) {
         logger.debug(
           { branchName },
-          'Branch not found on origin when attempting to prune'
+          'Branch not found on origin when attempting to prune',
         );
       } else if (err.message !== REPOSITORY_CHANGED) {
         logger.warn({ err, branch: branchName }, 'Error pruning branch');
@@ -99,7 +99,7 @@ async function cleanUpBranches(
 
 export async function pruneStaleBranches(
   config: RenovateConfig,
-  branchList: string[] | null | undefined
+  branchList: string[] | null | undefined,
 ): Promise<void> {
   logger.debug('Removing any stale branches');
   logger.trace({ config }, `pruneStaleBranches`);
@@ -113,7 +113,7 @@ export async function pruneStaleBranches(
   let renovateBranches = getBranchList().filter(
     (branchName) =>
       branchName.startsWith(config.branchPrefix!) &&
-      branchName !== getReconfigureBranchName(config.branchPrefix!)
+      branchName !== getReconfigureBranchName(config.branchPrefix!),
   );
   if (!renovateBranches?.length) {
     logger.debug('No renovate branches found');
@@ -124,15 +124,15 @@ export async function pruneStaleBranches(
       branchList: branchList?.sort(),
       renovateBranches: renovateBranches?.sort(),
     },
-    'Branch lists'
+    'Branch lists',
   );
   // TODO: types (#22198)
   const lockFileBranch = `${config.branchPrefix!}lock-file-maintenance`;
   renovateBranches = renovateBranches.filter(
-    (branch) => branch !== lockFileBranch
+    (branch) => branch !== lockFileBranch,
   );
   const remainingBranches = renovateBranches.filter(
-    (branch) => !branchList.includes(branch)
+    (branch) => !branchList.includes(branch),
   );
   logger.debug(`remainingBranches=${String(remainingBranches)}`);
   if (remainingBranches.length === 0) {
diff --git a/lib/workers/repository/finalize/repository-statistics.spec.ts b/lib/workers/repository/finalize/repository-statistics.spec.ts
index 66e05794105833bbb0139219eeb8f4c2e189bae2..e092fb1a20e63f0615deff089f74a92bf2a00095 100644
--- a/lib/workers/repository/finalize/repository-statistics.spec.ts
+++ b/lib/workers/repository/finalize/repository-statistics.spec.ts
@@ -47,7 +47,7 @@ describe('workers/repository/finalize/repository-statistics', () => {
             merged: 1,
           },
         },
-        `Renovate repository PR statistics`
+        `Renovate repository PR statistics`,
       );
     });
   });
@@ -84,7 +84,7 @@ describe('workers/repository/finalize/repository-statistics', () => {
           branches: [],
           inactiveBranches: [],
         },
-        `Branch summary`
+        `Branch summary`,
       );
     });
 
@@ -149,7 +149,7 @@ describe('workers/repository/finalize/repository-statistics', () => {
           defaultBranch,
           inactiveBranches: ['b3'],
         },
-        `Branch summary`
+        `Branch summary`,
       );
     });
 
diff --git a/lib/workers/repository/finalize/repository-statistics.ts b/lib/workers/repository/finalize/repository-statistics.ts
index e2b5224a5be35ccb6f55dbf32be426dde6fb73b6..830dafea93c422bbc8934e032ba8eaf454ef3094 100644
--- a/lib/workers/repository/finalize/repository-statistics.ts
+++ b/lib/workers/repository/finalize/repository-statistics.ts
@@ -14,7 +14,7 @@ import type {
 
 export function runRenovateRepoStats(
   config: RenovateConfig,
-  prList: Pr[]
+  prList: Pr[],
 ): void {
   const prStats = { total: 0, open: 0, closed: 0, merged: 0 };
 
@@ -64,7 +64,7 @@ function branchCacheToMetadata({
 }
 
 function filterDependencyDashboardData(
-  branches: BranchCache[]
+  branches: BranchCache[],
 ): Partial<BranchCache>[] {
   const branchesFiltered: Partial<BranchCache>[] = [];
   for (const branch of branches) {
diff --git a/lib/workers/repository/index.ts b/lib/workers/repository/index.ts
index 809af62f7cee2dd6ac339e62c4c8d077fdaa9330..6e775996ce0b55d1febb90fbbf2b938b36002a79 100644
--- a/lib/workers/repository/index.ts
+++ b/lib/workers/repository/index.ts
@@ -35,11 +35,11 @@ import { printLookupStats, printRequestStats } from './stats';
 // istanbul ignore next
 export async function renovateRepository(
   repoConfig: RenovateConfig,
-  canRetry = true
+  canRetry = true,
 ): Promise<ProcessResult | undefined> {
   splitInit();
   let config = GlobalConfig.set(
-    applySecretsToConfig(repoConfig, undefined, false)
+    applySecretsToConfig(repoConfig, undefined, false),
   );
   await removeDanglingContainers();
   setMeta({ repository: config.repository });
@@ -70,11 +70,11 @@ export async function renovateRepository(
       GlobalConfig.get('dryRun') !== 'extract'
     ) {
       await instrument('onboarding', () =>
-        ensureOnboardingPr(config, packageFiles, branches)
+        ensureOnboardingPr(config, packageFiles, branches),
       );
       addSplit('onboarding');
       const res = await instrument('update', () =>
-        updateRepo(config, branches)
+        updateRepo(config, branches),
       );
       setMeta({ repository: config.repository });
       addSplit('update');
@@ -146,7 +146,7 @@ export function printRepositoryProblems(repository: string | undefined): void {
   if (repoProblems.size) {
     logger.debug(
       { repoProblems: Array.from(repoProblems) },
-      'repository problems'
+      'repository problems',
     );
   }
 }
diff --git a/lib/workers/repository/init/apis.spec.ts b/lib/workers/repository/init/apis.spec.ts
index 5359522d7183151cdc9995d2226be0a9a5703b65..c35030fc98e08a331501030753b34c502dcc6bc3 100644
--- a/lib/workers/repository/init/apis.spec.ts
+++ b/lib/workers/repository/init/apis.spec.ts
@@ -40,7 +40,7 @@ describe('workers/repository/init/apis', () => {
         initApis({
           ...config,
           optimizeForDisabled: true,
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_DISABLED);
     });
 
@@ -57,7 +57,7 @@ describe('workers/repository/init/apis', () => {
         initApis({
           ...config,
           forkProcessing: 'disabled',
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_FORKED);
     });
 
@@ -87,7 +87,7 @@ describe('workers/repository/init/apis', () => {
           optimizeForDisabled: true,
           forkProcessing: 'disabled',
           isFork: true,
-        })
+        }),
       ).resolves.not.toThrow();
     });
 
@@ -107,10 +107,10 @@ describe('workers/repository/init/apis', () => {
       });
       expect(workerPlatformConfig).toBeTruthy();
       expect(workerPlatformConfig.onboardingConfigFileName).toBe(
-        '.github/renovate.json'
+        '.github/renovate.json',
       );
       expect(platform.getJsonFile).toHaveBeenCalledWith(
-        '.github/renovate.json'
+        '.github/renovate.json',
       );
       expect(platform.getJsonFile).not.toHaveBeenCalledWith('renovate.json');
     });
@@ -181,7 +181,7 @@ describe('workers/repository/init/apis', () => {
           ...config,
           optimizeForDisabled: true,
           extends: [':disableRenovate'],
-        })
+        }),
       ).rejects.toThrow(REPOSITORY_DISABLED);
     });
   });
diff --git a/lib/workers/repository/init/apis.ts b/lib/workers/repository/init/apis.ts
index 554e4829dea6d253ef5fdb416bb0c0c8d90c7500..fd392a208c5779d018f57fec295eb0da476e3d53 100644
--- a/lib/workers/repository/init/apis.ts
+++ b/lib/workers/repository/init/apis.ts
@@ -27,7 +27,7 @@ async function getJsonFile(file: string): Promise<RenovateConfig | null> {
 }
 
 async function validateOptimizeForDisabled(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<void> {
   if (config.optimizeForDisabled) {
     const renovateConfig = await getJsonFile(defaultConfigFile(config));
@@ -42,7 +42,7 @@ async function validateOptimizeForDisabled(
      */
     if (config.extends?.includes(':disableRenovate')) {
       logger.debug(
-        'Global config disables Renovate - checking renovate.json to see if it is re-enabled'
+        'Global config disables Renovate - checking renovate.json to see if it is re-enabled',
       );
       if (
         renovateConfig?.extends?.includes(':enableRenovate') ??
@@ -52,7 +52,7 @@ async function validateOptimizeForDisabled(
         logger.debug('Repository config re-enables Renovate - continuing');
       } else {
         logger.debug(
-          'Repository config does not re-enable Renovate - skipping'
+          'Repository config does not re-enable Renovate - skipping',
         );
         throw new Error(REPOSITORY_DISABLED_BY_CONFIG);
       }
@@ -75,7 +75,7 @@ async function validateIncludeForks(config: RenovateConfig): Promise<void> {
 
 // TODO: fix types (#22198)
 async function getPlatformConfig(
-  config: RepoParams
+  config: RepoParams,
 ): Promise<WorkerPlatformConfig> {
   const platformConfig = await platform.initRepo(config);
   return {
@@ -86,7 +86,7 @@ async function getPlatformConfig(
 
 // TODO: fix types (#22198)
 export async function initApis(
-  input: RenovateConfig
+  input: RenovateConfig,
 ): Promise<WorkerPlatformConfig> {
   let config: WorkerPlatformConfig = { ...input } as never;
   config = await getPlatformConfig(config as never);
diff --git a/lib/workers/repository/init/cache.ts b/lib/workers/repository/init/cache.ts
index ee11f0447eeeb0a9469e5600a6a8981f47a53253..c255541aa6e4c86e7bc2b7e966dc22e585000549 100644
--- a/lib/workers/repository/init/cache.ts
+++ b/lib/workers/repository/init/cache.ts
@@ -13,7 +13,7 @@ export async function resetCaches(): Promise<void> {
 }
 
 export async function initializeCaches(
-  config: WorkerPlatformConfig
+  config: WorkerPlatformConfig,
 ): Promise<void> {
   memCache.init();
   await initRepoCache(config);
diff --git a/lib/workers/repository/init/config.ts b/lib/workers/repository/init/config.ts
index da482619a4d85a8e25b11b11e557fee5d86a86a2..96f64a7ba73913a486306bae18ab9492b3e82f57 100644
--- a/lib/workers/repository/init/config.ts
+++ b/lib/workers/repository/init/config.ts
@@ -4,7 +4,7 @@ import { mergeRenovateConfig } from './merge';
 
 // istanbul ignore next
 export async function getRepoConfig(
-  config_: RenovateConfig
+  config_: RenovateConfig,
 ): Promise<RenovateConfig> {
   let config = { ...config_ };
   config.baseBranch = config.defaultBranch;
diff --git a/lib/workers/repository/init/index.spec.ts b/lib/workers/repository/init/index.spec.ts
index cd69dc65bc716c97b67b99d6dc53fd70ede083ab..1927900bcd683f062d1289261a725dd8e235e2f5 100644
--- a/lib/workers/repository/init/index.spec.ts
+++ b/lib/workers/repository/init/index.spec.ts
@@ -41,7 +41,7 @@ describe('workers/repository/init/index', () => {
       config.getRepoConfig.mockResolvedValueOnce({});
       merge.mergeRenovateConfig.mockResolvedValueOnce({});
       secrets.applySecretsToConfig.mockReturnValueOnce(
-        partial<RenovateConfig>()
+        partial<RenovateConfig>(),
       );
       const renovateConfig = await initRepo({});
       expect(renovateConfig).toEqual({});
@@ -56,14 +56,14 @@ describe('workers/repository/init/index', () => {
       });
       merge.mergeRenovateConfig.mockResolvedValueOnce({});
       secrets.applySecretsToConfig.mockReturnValueOnce(
-        partial<RenovateConfig>()
+        partial<RenovateConfig>(),
       );
       await initRepo({});
       expect(logger.logger.warn).toHaveBeenCalledWith(
-        "Configuration option 'filterUnavailableUsers' is not supported on the current platform 'undefined'."
+        "Configuration option 'filterUnavailableUsers' is not supported on the current platform 'undefined'.",
       );
       expect(logger.logger.warn).toHaveBeenCalledWith(
-        "Configuration option 'expandCodeOwnersGroups' is not supported on the current platform 'undefined'."
+        "Configuration option 'expandCodeOwnersGroups' is not supported on the current platform 'undefined'.",
       );
     });
   });
diff --git a/lib/workers/repository/init/index.ts b/lib/workers/repository/init/index.ts
index e67a255d9cd9ccfd66ab0bbbeaffe9e755ea73a0..374b1ec8c81a31ad8359f67735bc1ec29828020a 100644
--- a/lib/workers/repository/init/index.ts
+++ b/lib/workers/repository/init/index.ts
@@ -27,7 +27,7 @@ function warnOnUnsupportedOptions(config: RenovateConfig): void {
     // TODO: types (#22198)
     const platform = GlobalConfig.get('platform')!;
     logger.warn(
-      `Configuration option 'filterUnavailableUsers' is not supported on the current platform '${platform}'.`
+      `Configuration option 'filterUnavailableUsers' is not supported on the current platform '${platform}'.`,
     );
   }
 
@@ -35,13 +35,13 @@ function warnOnUnsupportedOptions(config: RenovateConfig): void {
     // TODO: types (#22198)
     const platform = GlobalConfig.get('platform')!;
     logger.warn(
-      `Configuration option 'expandCodeOwnersGroups' is not supported on the current platform '${platform}'.`
+      `Configuration option 'expandCodeOwnersGroups' is not supported on the current platform '${platform}'.`,
     );
   }
 }
 
 export async function initRepo(
-  config_: RenovateConfig
+  config_: RenovateConfig,
 ): Promise<RenovateConfig> {
   PackageFiles.clear();
   let config: RenovateConfig = initializeConfig(config_);
@@ -59,7 +59,7 @@ export async function initRepo(
   if (config.printConfig) {
     logger.info(
       { config, hostRules: getAll() },
-      'Full resolved config and hostRules including presets'
+      'Full resolved config and hostRules including presets',
     );
   }
   await cloneSubmodules(!!config.cloneSubmodules);
diff --git a/lib/workers/repository/init/merge.spec.ts b/lib/workers/repository/init/merge.spec.ts
index b7a4b5d91cd29c31cd8b6fc8cbfbdac4a7ababba..b8e9bcbbe8fb43d50099b1e4db2d75042682d785 100644
--- a/lib/workers/repository/init/merge.spec.ts
+++ b/lib/workers/repository/init/merge.spec.ts
@@ -59,14 +59,14 @@ describe('workers/repository/init/merge', () => {
       jest
         .spyOn(repoCache, 'getCache')
         .mockReturnValueOnce(
-          partial<RepoCacheData>({ configFileName: 'renovate.json' })
+          partial<RepoCacheData>({ configFileName: 'renovate.json' }),
         );
       platform.getRawFile.mockRejectedValueOnce(new Error());
       scm.getFileList.mockResolvedValue(['package.json']);
       fs.readLocalFile.mockResolvedValue('{}');
       expect(await detectRepoFileConfig()).toEqual({});
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Existing config file no longer exists'
+        'Existing config file no longer exists',
       );
     });
 
@@ -76,7 +76,7 @@ describe('workers/repository/init/merge', () => {
       });
       OnboardingState.onboardingCacheValid = true;
       onboardingCache.getOnboardingFileNameFromCache.mockReturnValueOnce(
-        'package.json'
+        'package.json',
       );
       onboardingCache.getOnboardingConfigFromCache.mockReturnValueOnce(pJson);
       expect(await detectRepoFileConfig()).toEqual({
@@ -88,10 +88,10 @@ describe('workers/repository/init/merge', () => {
     it('clones, if onboarding cache is valid but parsed config is undefined', async () => {
       OnboardingState.onboardingCacheValid = true;
       onboardingCache.getOnboardingFileNameFromCache.mockReturnValueOnce(
-        'package.json'
+        'package.json',
       );
       onboardingCache.getOnboardingConfigFromCache.mockReturnValueOnce(
-        undefined as never
+        undefined as never,
       );
       scm.getFileList.mockResolvedValueOnce(['package.json']);
       const pJson = JSON.stringify({
@@ -114,10 +114,10 @@ describe('workers/repository/init/merge', () => {
       });
       OnboardingState.onboardingCacheValid = true;
       onboardingCache.getOnboardingFileNameFromCache.mockReturnValueOnce(
-        'renovate.json'
+        'renovate.json',
       );
       onboardingCache.getOnboardingConfigFromCache.mockReturnValueOnce(
-        configParsed
+        configParsed,
       );
       expect(await detectRepoFileConfig()).toEqual({
         configFileName: 'renovate.json',
@@ -178,7 +178,7 @@ describe('workers/repository/init/merge', () => {
     it('throws error if duplicate keys', async () => {
       scm.getFileList.mockResolvedValue(['package.json', '.renovaterc']);
       fs.readLocalFile.mockResolvedValue(
-        '{ "enabled": true, "enabled": false }'
+        '{ "enabled": true, "enabled": false }',
       );
       expect(await detectRepoFileConfig()).toEqual({
         configFileName: '.renovaterc',
@@ -275,7 +275,7 @@ describe('workers/repository/init/merge', () => {
       expect(() =>
         checkForRepoConfigError({
           configFileParseError: { validationError: '', validationMessage: '' },
-        })
+        }),
       ).toThrow();
     });
   });
@@ -378,7 +378,7 @@ describe('workers/repository/init/merge', () => {
           configFileParsed: undefined,
           warnings: undefined,
           secrets: undefined,
-        })
+        }),
       ).toBeDefined();
     });
   });
diff --git a/lib/workers/repository/init/merge.ts b/lib/workers/repository/init/merge.ts
index c8a33f23946f4096566e85bed56ef514d091c28e..9bfcf8fe86082dfef4e4b22ac9a4914acae1707b 100644
--- a/lib/workers/repository/init/merge.ts
+++ b/lib/workers/repository/init/merge.ts
@@ -39,7 +39,7 @@ export async function detectConfigFile(): Promise<string | null> {
     if (fileName === 'package.json') {
       try {
         const pJson = JSON.parse(
-          (await readLocalFile('package.json', 'utf8'))!
+          (await readLocalFile('package.json', 'utf8'))!,
         );
         if (pJson.renovate) {
           logger.debug('Using package.json for global renovate config');
@@ -111,7 +111,7 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
     // We already know it parses
     configFileParsed = JSON.parse(
       // TODO #22198
-      (await readLocalFile('package.json', 'utf8'))!
+      (await readLocalFile('package.json', 'utf8'))!,
     ).renovate;
     if (is.string(configFileParsed)) {
       logger.debug('Massaging string renovate config to extends array');
@@ -138,12 +138,12 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { renovateConfig: configFileRaw },
-          'Error parsing renovate config renovate.json5'
+          'Error parsing renovate config renovate.json5',
         );
         const validationError = 'Invalid JSON5 (parsing failed)';
         const validationMessage = `JSON5.parse error: \`${err.message.replaceAll(
           '`',
-          "'"
+          "'",
         )}\``;
         return {
           configFileName,
@@ -154,7 +154,7 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
       let allowDuplicateKeys = true;
       let jsonValidationError = jsonValidator.validate(
         configFileRaw,
-        allowDuplicateKeys
+        allowDuplicateKeys,
       );
       if (jsonValidationError) {
         const validationError = 'Invalid JSON (parsing failed)';
@@ -167,7 +167,7 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
       allowDuplicateKeys = false;
       jsonValidationError = jsonValidator.validate(
         configFileRaw,
-        allowDuplicateKeys
+        allowDuplicateKeys,
       );
       if (jsonValidationError) {
         const validationError = 'Duplicate keys in JSON';
@@ -182,12 +182,12 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
       } catch (err) /* istanbul ignore next */ {
         logger.debug(
           { renovateConfig: configFileRaw },
-          'Error parsing renovate config'
+          'Error parsing renovate config',
         );
         const validationError = 'Invalid JSON (parsing failed)';
         const validationMessage = `JSON.parse error:  \`${err.message.replaceAll(
           '`',
-          "'"
+          "'",
         )}\``;
         return {
           configFileName,
@@ -197,7 +197,7 @@ export async function detectRepoFileConfig(): Promise<RepoFileConfig> {
     }
     logger.debug(
       { fileName: configFileName, config: configFileParsed },
-      'Repository config'
+      'Repository config',
     );
   }
 
@@ -218,7 +218,7 @@ export function checkForRepoConfigError(repoConfig: RepoFileConfig): void {
 
 // Check for repository config
 export async function mergeRenovateConfig(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<RenovateConfig> {
   let returnConfig = { ...config };
   let repoConfig: RepoFileConfig = {};
@@ -267,9 +267,9 @@ export async function mergeRenovateConfig(
     await presets.resolveConfigPresets(
       decryptedConfig,
       config,
-      config.ignorePresets
+      config.ignorePresets,
     ),
-    repository
+    repository,
   );
   logger.trace({ config: resolvedConfig }, 'resolved config');
   const migrationResult = migrateConfig(resolvedConfig);
@@ -281,13 +281,13 @@ export async function mergeRenovateConfig(
   // istanbul ignore if
   if (is.string(resolvedConfig.npmrc)) {
     logger.debug(
-      'Ignoring any .npmrc files in repository due to configured npmrc'
+      'Ignoring any .npmrc files in repository due to configured npmrc',
     );
     npmApi.setNpmrc(resolvedConfig.npmrc);
   }
   resolvedConfig = applySecretsToConfig(
     resolvedConfig,
-    mergeChildConfig(config.secrets ?? {}, resolvedConfig.secrets ?? {})
+    mergeChildConfig(config.secrets ?? {}, resolvedConfig.secrets ?? {}),
   );
   // istanbul ignore if
   if (resolvedConfig.hostRules) {
@@ -298,7 +298,7 @@ export async function mergeRenovateConfig(
       } catch (err) {
         logger.warn(
           { err, config: rule },
-          'Error setting hostRule from config'
+          'Error setting hostRule from config',
         );
       }
     }
@@ -314,7 +314,7 @@ export async function mergeRenovateConfig(
   if (returnConfig.ignorePaths?.length) {
     logger.debug(
       { ignorePaths: returnConfig.ignorePaths },
-      `Found repo ignorePaths`
+      `Found repo ignorePaths`,
     );
   }
   return returnConfig;
diff --git a/lib/workers/repository/init/vulnerability.spec.ts b/lib/workers/repository/init/vulnerability.spec.ts
index de28fe48d93698d1e66df194a8433375091898a1..e6562c86d16e96f4f12436b2b00f7c005e0c36be 100644
--- a/lib/workers/repository/init/vulnerability.spec.ts
+++ b/lib/workers/repository/init/vulnerability.spec.ts
@@ -34,7 +34,7 @@ describe('workers/repository/init/vulnerability', () => {
       config.vulnerabilityAlertsOnly = true;
       platform.getVulnerabilityAlerts.mockResolvedValue([]);
       await expect(detectVulnerabilityAlerts(config)).rejects.toThrow(
-        NO_VULNERABILITY_ALERTS
+        NO_VULNERABILITY_ALERTS,
       );
     });
 
diff --git a/lib/workers/repository/init/vulnerability.ts b/lib/workers/repository/init/vulnerability.ts
index 39bae817a78559d9333d6e5fb89df79c4d7ccefb..4f1c0242dc37f75ffc57c5835ea604e43531b439 100644
--- a/lib/workers/repository/init/vulnerability.ts
+++ b/lib/workers/repository/init/vulnerability.ts
@@ -47,7 +47,7 @@ type CombinedAlert = Record<
 
 // TODO can return `null` and `undefined` (#22198)
 export async function detectVulnerabilityAlerts(
-  input: RenovateConfig
+  input: RenovateConfig,
 ): Promise<RenovateConfig> {
   if (!input?.vulnerabilityAlerts) {
     return input;
@@ -91,7 +91,7 @@ export async function detectVulnerabilityAlerts(
       if (!alert.securityVulnerability.firstPatchedVersion) {
         logger.debug(
           { alert },
-          'Vulnerability alert has no firstPatchedVersion - skipping'
+          'Vulnerability alert has no firstPatchedVersion - skipping',
         );
         continue;
       }
@@ -127,7 +127,7 @@ export async function detectVulnerabilityAlerts(
       if (datasource === PypiDatasource.id) {
         vulnerableRequirements = vulnerableRequirements.replace(
           regEx(/^= /),
-          '== '
+          '== ',
         );
       }
       if (
@@ -153,7 +153,7 @@ export async function detectVulnerabilityAlerts(
           !alertDetails.firstPatchedVersion ||
           version.isGreaterThan(
             firstPatchedVersion,
-            alertDetails.firstPatchedVersion
+            alertDetails.firstPatchedVersion,
           )
         ) {
           alertDetails.firstPatchedVersion = firstPatchedVersion;
@@ -172,7 +172,7 @@ export async function detectVulnerabilityAlerts(
     for (const [datasource, dependencies] of Object.entries(files)) {
       for (const [depName, currentValues] of Object.entries(dependencies)) {
         for (const [matchCurrentVersion, val] of Object.entries(
-          currentValues
+          currentValues,
         )) {
           let prBodyNotes: string[] = [];
           try {
@@ -198,7 +198,7 @@ export async function detectVulnerabilityAlerts(
 
                 content += sanitizeMarkdown(description);
                 return content;
-              })
+              }),
             );
           } catch (err) /* istanbul ignore next */ {
             logger.warn({ err }, 'Error generating vulnerability PR notes');
diff --git a/lib/workers/repository/model/commit-message.ts b/lib/workers/repository/model/commit-message.ts
index 0344e5b6471fc0ee8eb2f321ec58d84ae11edfb6..fe591b4349e47756cee4cdc899566a972c44b59a 100644
--- a/lib/workers/repository/model/commit-message.ts
+++ b/lib/workers/repository/model/commit-message.ts
@@ -64,7 +64,7 @@ export abstract class CommitMessage {
     this._subject = this.normalizeInput(value);
     this._subject = this._subject?.replace(
       CommitMessage.EXTRA_WHITESPACES,
-      ' '
+      ' ',
     );
   }
 
diff --git a/lib/workers/repository/model/custom-commit-message.spec.ts b/lib/workers/repository/model/custom-commit-message.spec.ts
index 18c9fb02740410ac1d721f499ad586c2fb33cded..19c3c2426d35bb9359cd728725c14b6080fc4ac3 100644
--- a/lib/workers/repository/model/custom-commit-message.spec.ts
+++ b/lib/workers/repository/model/custom-commit-message.spec.ts
@@ -25,7 +25,7 @@ describe('workers/repository/model/custom-commit-message', () => {
         commitMessage.prefix = prefix;
 
         expect(commitMessage.toString()).toEqual(result);
-      }
+      },
     );
 
     it('should provide ability to set body and footer', () => {
diff --git a/lib/workers/repository/model/semantic-commit-message.spec.ts b/lib/workers/repository/model/semantic-commit-message.spec.ts
index e2629fcc02edd3575ccca58212b165ba07bd5c46..c12d4c265223147bf2e7455fb564c5094d6b1733 100644
--- a/lib/workers/repository/model/semantic-commit-message.spec.ts
+++ b/lib/workers/repository/model/semantic-commit-message.spec.ts
@@ -49,7 +49,7 @@ describe('workers/repository/model/semantic-commit-message', () => {
 
   it('should create instance from string with scope', () => {
     const instance = SemanticCommitMessage.fromString(
-      'fix(dashboard): ticket 123'
+      'fix(dashboard): ticket 123',
     );
 
     expect(SemanticCommitMessage.is(instance)).toBeTrue();
diff --git a/lib/workers/repository/onboarding/branch/check.spec.ts b/lib/workers/repository/onboarding/branch/check.spec.ts
index 7fd1d200bee61153b945e56457585e30f24ad0be..75e8d2eb612a6c2f47c0bd7244330095b9a0edad 100644
--- a/lib/workers/repository/onboarding/branch/check.spec.ts
+++ b/lib/workers/repository/onboarding/branch/check.spec.ts
@@ -39,7 +39,7 @@ describe('workers/repository/onboarding/branch/check', () => {
     const res = await isOnboarded(config);
     expect(res).toBeFalse();
     expect(logger.debug).toHaveBeenCalledWith(
-      'Onboarding cache is valid. Repo is not onboarded'
+      'Onboarding cache is valid. Repo is not onboarded',
     );
   });
 
@@ -55,7 +55,7 @@ describe('workers/repository/onboarding/branch/check', () => {
     scm.getFileList.mockResolvedValue([]);
     await isOnboarded(config);
     expect(logger.debug).not.toHaveBeenCalledWith(
-      'Onboarding cache is valid. Repo is not onboarded'
+      'Onboarding cache is valid. Repo is not onboarded',
     );
   });
 
@@ -64,7 +64,7 @@ describe('workers/repository/onboarding/branch/check', () => {
     platform.findPr.mockResolvedValue(partial<Pr>());
     scm.getFileList.mockResolvedValue([]);
     await expect(isOnboarded(config)).rejects.toThrow(
-      REPOSITORY_CLOSED_ONBOARDING
+      REPOSITORY_CLOSED_ONBOARDING,
     );
   });
 });
diff --git a/lib/workers/repository/onboarding/branch/check.ts b/lib/workers/repository/onboarding/branch/check.ts
index 540e848aac5a2c70571c4eaeb0eebf3f61872b50..8351c36934c1c918c820080f810ad78fdf0966b3 100644
--- a/lib/workers/repository/onboarding/branch/check.ts
+++ b/lib/workers/repository/onboarding/branch/check.ts
@@ -89,7 +89,7 @@ export async function isOnboarded(config: RenovateConfig): Promise<boolean> {
     logger.debug('Checking cached config file name');
     try {
       const configFileContent = await platform.getJsonFile(
-        cache.configFileName
+        cache.configFileName,
       );
       if (configFileContent) {
         if (
@@ -99,7 +99,7 @@ export async function isOnboarded(config: RenovateConfig): Promise<boolean> {
           logger.debug('Existing config file confirmed');
           logger.debug(
             { fileName: cache.configFileName, config: configFileContent },
-            'Repository config'
+            'Repository config',
           );
           return true;
         }
@@ -149,10 +149,10 @@ export async function isOnboarded(config: RenovateConfig): Promise<boolean> {
 }
 
 export async function getOnboardingPr(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<Pr | null> {
   return await platform.getBranchPr(
     config.onboardingBranch!,
-    config.baseBranch
+    config.baseBranch,
   );
 }
diff --git a/lib/workers/repository/onboarding/branch/config.spec.ts b/lib/workers/repository/onboarding/branch/config.spec.ts
index 297fbb50b2bad3b060765c9e27945268696c805a..c1b0f4df4b266caea682d58ec84da38c4a8d6df2 100644
--- a/lib/workers/repository/onboarding/branch/config.spec.ts
+++ b/lib/workers/repository/onboarding/branch/config.spec.ts
@@ -39,7 +39,7 @@ describe('workers/repository/onboarding/branch/config', () => {
           '  "extends": [\n' +
           '    "local>some/renovate-config"\n' +
           '  ]\n' +
-          '}\n'
+          '}\n',
       );
     });
   });
@@ -57,7 +57,7 @@ describe('workers/repository/onboarding/branch/config', () => {
 
     it('handles finding an organization dot platform preset', async () => {
       mockedPresets.getPreset.mockRejectedValueOnce(
-        new Error(PRESET_DEP_NOT_FOUND)
+        new Error(PRESET_DEP_NOT_FOUND),
       );
       mockedPresets.getPreset.mockResolvedValueOnce({ enabled: true });
       const onboardingConfig = await getOnboardingConfig(config);
@@ -70,7 +70,7 @@ describe('workers/repository/onboarding/branch/config', () => {
 
     it('handles not finding an organization preset', async () => {
       mockedPresets.getPreset.mockRejectedValue(
-        new Error(PRESET_DEP_NOT_FOUND)
+        new Error(PRESET_DEP_NOT_FOUND),
       );
       const onboardingConfig = await getOnboardingConfig(config);
       expect(mockedPresets.getPreset).toHaveBeenCalledTimes(2);
@@ -79,7 +79,7 @@ describe('workers/repository/onboarding/branch/config', () => {
 
     it('ignores an unknown error', async () => {
       mockedPresets.getPreset.mockRejectedValue(
-        new Error('unknown error for test')
+        new Error('unknown error for test'),
       );
       const onboardingConfig = await getOnboardingConfig(config);
       expect(mockedPresets.getPreset).toHaveBeenCalledTimes(2);
@@ -88,7 +88,7 @@ describe('workers/repository/onboarding/branch/config', () => {
 
     it('ignores unsupported platform', async () => {
       mockedPresets.getPreset.mockRejectedValue(
-        new Error(`Unsupported platform 'dummy' for local preset.`)
+        new Error(`Unsupported platform 'dummy' for local preset.`),
       );
       const onboardingConfig = await getOnboardingConfig(config);
       expect(mockedPresets.getPreset).toHaveBeenCalledTimes(2);
diff --git a/lib/workers/repository/onboarding/branch/config.ts b/lib/workers/repository/onboarding/branch/config.ts
index b6cabbe4abda14aacdbd2850b87992b0964d48a0..f4db5d0aa21ff7de7dc928b606b8b648d2868e65 100644
--- a/lib/workers/repository/onboarding/branch/config.ts
+++ b/lib/workers/repository/onboarding/branch/config.ts
@@ -10,14 +10,14 @@ import { clone } from '../../../../util/clone';
 import { EditorConfig, JSONWriter } from '../../../../util/json-writer';
 
 async function getOnboardingConfig(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<RenovateSharedConfig | undefined> {
   let onboardingConfig = clone(config.onboardingConfig);
 
   let orgPreset: string | undefined;
 
   logger.debug(
-    'Checking if this org/owner has a default Renovate preset which can be used.'
+    'Checking if this org/owner has a default Renovate preset which can be used.',
   );
 
   // TODO #22198
@@ -65,7 +65,7 @@ async function getOnboardingConfig(
 
   if (orgPreset) {
     logger.debug(
-      `Found org preset ${orgPreset} - using it in onboarding config`
+      `Found org preset ${orgPreset} - using it in onboarding config`,
     );
     onboardingConfig = {
       $schema: 'https://docs.renovatebot.com/renovate-schema.json',
@@ -74,7 +74,7 @@ async function getOnboardingConfig(
   } else {
     // Organization preset did not exist
     logger.debug(
-      'No default org/owner preset found, so the default onboarding config will be used instead. Note: do not be concerned with any 404 messages that preceded this.'
+      'No default org/owner preset found, so the default onboarding config will be used instead. Note: do not be concerned with any 404 messages that preceded this.',
     );
   }
 
@@ -84,7 +84,7 @@ async function getOnboardingConfig(
 
 async function getOnboardingConfigContents(
   config: RenovateConfig,
-  fileName: string
+  fileName: string,
 ): Promise<string> {
   const codeFormat = await EditorConfig.getCodeFormat(fileName);
   const jsonWriter = new JSONWriter(codeFormat);
diff --git a/lib/workers/repository/onboarding/branch/create.spec.ts b/lib/workers/repository/onboarding/branch/create.spec.ts
index af05137a06a8911b52c86ea759300bae786cf69b..e58bd7aec5ef1b0c002aa09e221b9ee368452d71 100644
--- a/lib/workers/repository/onboarding/branch/create.spec.ts
+++ b/lib/workers/repository/onboarding/branch/create.spec.ts
@@ -86,7 +86,7 @@ describe('workers/repository/onboarding/branch/create', () => {
         const text =
           "Cause your deps need an update and if they dont update, well they're no deps of mine";
         const message = `${prefix}: ${text.charAt(0).toLowerCase()}${text.slice(
-          1
+          1,
         )}`;
 
         config.commitMessagePrefix = prefix;
@@ -139,7 +139,7 @@ describe('workers/repository/onboarding/branch/create', () => {
         const text =
           'I say, we can update when we want to, a commit they will never mind';
         const message = `${prefix}: ${text.charAt(0).toLowerCase()}${text.slice(
-          1
+          1,
         )}`;
 
         config.semanticCommits = 'enabled';
diff --git a/lib/workers/repository/onboarding/branch/create.ts b/lib/workers/repository/onboarding/branch/create.ts
index e9a8e41e5b3d85c2bc0cc6ef48ec5b2f15b73f2d..e90459e3100c8bbf5d43fcfb4620123c58cca251 100644
--- a/lib/workers/repository/onboarding/branch/create.ts
+++ b/lib/workers/repository/onboarding/branch/create.ts
@@ -9,7 +9,7 @@ import { getOnboardingConfigContents } from './config';
 const defaultConfigFile = configFileNames[0];
 
 export async function createOnboardingBranch(
-  config: Partial<RenovateConfig>
+  config: Partial<RenovateConfig>,
 ): Promise<string | null> {
   // TODO #22198
   const configFile = configFileNames.includes(config.onboardingConfigFileName!)
@@ -23,7 +23,7 @@ export async function createOnboardingBranch(
 
   const commitMessageFactory = new OnboardingCommitMessageFactory(
     config,
-    configFile!
+    configFile!,
   );
   const commitMessage = commitMessageFactory.create();
 
diff --git a/lib/workers/repository/onboarding/branch/index.spec.ts b/lib/workers/repository/onboarding/branch/index.spec.ts
index bc6e748928daa819b92b770a4d97d23409418194..a54b013cbdcbad66eafc55939acc69495d22b90a 100644
--- a/lib/workers/repository/onboarding/branch/index.spec.ts
+++ b/lib/workers/repository/onboarding/branch/index.spec.ts
@@ -51,21 +51,21 @@ describe('workers/repository/onboarding/branch/index', () => {
 
     it('throws if no package files', async () => {
       await expect(checkOnboardingBranch(config)).rejects.toThrow(
-        REPOSITORY_NO_PACKAGE_FILES
+        REPOSITORY_NO_PACKAGE_FILES,
       );
     });
 
     it("doesn't throw if there are no package files and onboardingNoDeps config option is set", async () => {
       config.onboardingNoDeps = true;
       await expect(checkOnboardingBranch(config)).resolves.not.toThrow(
-        REPOSITORY_NO_PACKAGE_FILES
+        REPOSITORY_NO_PACKAGE_FILES,
       );
     });
 
     it('throws if fork', async () => {
       config.isFork = true;
       await expect(checkOnboardingBranch(config)).rejects.toThrow(
-        REPOSITORY_FORKED
+        REPOSITORY_FORKED,
       );
     });
 
@@ -79,12 +79,12 @@ describe('workers/repository/onboarding/branch/index', () => {
       async ({ checkboxEnabled, expected }) => {
         config.onboardingRebaseCheckbox = checkboxEnabled;
         configModule.getOnboardingConfig.mockResolvedValue(
-          config.onboardingConfig
+          config.onboardingConfig,
         );
         configModule.getOnboardingConfigContents.mockResolvedValue(
           '{\n' +
             '  "$schema": "https://docs.renovatebot.com/renovate-schema.json"\n' +
-            '}\n'
+            '}\n',
         );
         scm.getFileList.mockResolvedValue(['package.json']);
         fs.readLocalFile.mockResolvedValue('{}');
@@ -98,7 +98,7 @@ describe('workers/repository/onboarding/branch/index', () => {
           $schema: 'https://docs.renovatebot.com/renovate-schema.json',
         });
         expect(OnboardingState.prUpdateRequested).toBe(expected);
-      }
+      },
     );
 
     it('uses discovered onboarding config', async () => {
@@ -109,7 +109,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         '{\n' +
           '  "$schema": "https://docs.renovatebot.com/renovate-schema.json",\n' +
           '  "extends": ["some/renovate-config"]\n' +
-          '}\n'
+          '}\n',
       );
       scm.getFileList.mockResolvedValue(['package.json']);
       fs.readLocalFile.mockResolvedValue('{}');
@@ -124,7 +124,7 @@ describe('workers/repository/onboarding/branch/index', () => {
       delete expectConfig.ignorePresets;
       expect(configModule.getOnboardingConfigContents).toHaveBeenCalledWith(
         expectConfig,
-        configFileNames[0]
+        configFileNames[0],
       );
       const file = scm.commitAndPush.mock.calls[0][0].files[0] as FileAddition;
       const contents = file.contents?.toString();
@@ -186,17 +186,17 @@ describe('workers/repository/onboarding/branch/index', () => {
       platform.getJsonFile.mockResolvedValueOnce({});
       const res = await checkOnboardingBranch(config);
       expect(logger.debug).toHaveBeenCalledWith(
-        'Checking cached config file name'
+        'Checking cached config file name',
       );
       expect(logger.debug).toHaveBeenCalledWith(
-        'Existing config file confirmed'
+        'Existing config file confirmed',
       );
       expect(logger.debug).toHaveBeenCalledWith(
         {
           fileName: '.renovaterc',
           config: {},
         },
-        'Repository config'
+        'Repository config',
       );
       expect(res.repoIsOnboarded).toBeTrue();
     });
@@ -207,10 +207,10 @@ describe('workers/repository/onboarding/branch/index', () => {
       fs.readLocalFile.mockResolvedValueOnce('{}');
       const res = await checkOnboardingBranch(config);
       expect(logger.debug).toHaveBeenCalledWith(
-        'Checking cached config file name'
+        'Checking cached config file name',
       );
       expect(logger.debug).toHaveBeenCalledWith(
-        'Existing config file confirmed'
+        'Existing config file confirmed',
       );
       expect(logger.debug).toHaveBeenCalledWith(
         {
@@ -219,7 +219,7 @@ describe('workers/repository/onboarding/branch/index', () => {
             renovate: {},
           },
         },
-        'Repository config'
+        'Repository config',
       );
       expect(res.repoIsOnboarded).toBeTrue();
     });
@@ -278,7 +278,7 @@ describe('workers/repository/onboarding/branch/index', () => {
       scm.getFileList.mockResolvedValue(['package.json']);
       platform.findPr.mockResolvedValue(null); // finds closed onboarding pr
       platform.getBranchPr.mockResolvedValueOnce(
-        mock<Pr>({ bodyStruct: { rebaseRequested: false } })
+        mock<Pr>({ bodyStruct: { rebaseRequested: false } }),
       ); // finds open onboarding pr
       git.getBranchCommit
         .mockReturnValueOnce('default-sha')
@@ -316,7 +316,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         'default-sha',
         'new-onboarding-sha',
         false,
-        true
+        true,
       );
       expect(dummyCache).toMatchObject({
         scan: {},
@@ -340,7 +340,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         'default-sha',
         'onboarding-sha',
         true,
-        true
+        true,
       );
     });
 
@@ -358,7 +358,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         'default-sha',
         'onboarding-sha',
         false,
-        false
+        false,
       );
     });
 
@@ -379,7 +379,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         await checkOnboardingBranch(config);
 
         expect(logger.trace).toHaveBeenCalledWith(
-          `Platform '${pl}' does not support extended markdown`
+          `Platform '${pl}' does not support extended markdown`,
         );
         expect(OnboardingState.prUpdateRequested).toBeTrue();
         expect(scm.mergeToLocal).toHaveBeenCalledOnce();
@@ -393,7 +393,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         await checkOnboardingBranch(config);
 
         expect(logger.debug).toHaveBeenCalledWith(
-          `No rebase checkbox was found in the onboarding PR`
+          `No rebase checkbox was found in the onboarding PR`,
         );
         expect(OnboardingState.prUpdateRequested).toBeTrue();
         expect(scm.mergeToLocal).toHaveBeenCalledOnce();
@@ -407,7 +407,7 @@ describe('workers/repository/onboarding/branch/index', () => {
         await checkOnboardingBranch(config);
 
         expect(logger.debug).toHaveBeenCalledWith(
-          `Manual onboarding PR update requested`
+          `Manual onboarding PR update requested`,
         );
         expect(OnboardingState.prUpdateRequested).toBeTrue();
         expect(scm.mergeToLocal).toHaveBeenCalledOnce();
diff --git a/lib/workers/repository/onboarding/branch/index.ts b/lib/workers/repository/onboarding/branch/index.ts
index fc4ddbe2a87f94d75ae4fe5b19adca07edd0b6b0..fcb432bcb23a21db644d360d10eb08eb05e519d6 100644
--- a/lib/workers/repository/onboarding/branch/index.ts
+++ b/lib/workers/repository/onboarding/branch/index.ts
@@ -26,7 +26,7 @@ import {
 } from './onboarding-branch-cache';
 
 export async function checkOnboardingBranch(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<RenovateConfig> {
   logger.debug('checkOnboarding()');
   logger.trace({ config });
@@ -62,7 +62,7 @@ export async function checkOnboardingBranch(
       !(config.onboardingRebaseCheckbox && OnboardingState.prUpdateRequested)
     ) {
       logger.debug(
-        'Skip processing since the onboarding branch is up to date and default branch has not changed'
+        'Skip processing since the onboarding branch is up to date and default branch has not changed',
       );
       OnboardingState.onboardingCacheValid = true;
       return { ...config, repoIsOnboarded, onboardingBranch, branchList };
@@ -76,7 +76,7 @@ export async function checkOnboardingBranch(
       }
       isConflicted = await isOnboardingBranchConflicted(
         config.baseBranch!,
-        config.onboardingBranch!
+        config.onboardingBranch!,
       );
     }
   } else {
@@ -103,7 +103,7 @@ export async function checkOnboardingBranch(
     if (commit) {
       logger.info(
         { branch: onboardingBranch, commit, onboarding: true },
-        'Branch created'
+        'Branch created',
       );
     }
   }
@@ -118,7 +118,7 @@ export async function checkOnboardingBranch(
     getBranchCommit(config.defaultBranch!)!,
     getBranchCommit(onboardingBranch!)!,
     isConflicted,
-    isModified
+    isModified,
   );
 
   return { ...config, repoIsOnboarded, onboardingBranch, branchList };
@@ -150,7 +150,7 @@ function invalidateExtractCache(baseBranch: string): void {
 
 function isOnboardingCacheValid(
   defaultBranch: string,
-  onboardingBranch: string
+  onboardingBranch: string,
 ): boolean {
   const cache = getCache();
   const onboardingBranchCache = cache?.onboardingBranchCache;
diff --git a/lib/workers/repository/onboarding/branch/onboarding-branch-cache.spec.ts b/lib/workers/repository/onboarding/branch/onboarding-branch-cache.spec.ts
index aabb3d30c021cdf90332bcec953a635fa019c093..85fb37e28637a4db6ab35142f400546ad1f4b33b 100644
--- a/lib/workers/repository/onboarding/branch/onboarding-branch-cache.spec.ts
+++ b/lib/workers/repository/onboarding/branch/onboarding-branch-cache.spec.ts
@@ -136,7 +136,7 @@ describe('workers/repository/onboarding/branch/onboarding-branch-cache', () => {
       git.getBranchCommit.mockReturnValueOnce('onboarding-sha');
       scm.isBranchModified.mockResolvedValueOnce(false);
       expect(
-        await isOnboardingBranchModified('configure/renovate')
+        await isOnboardingBranchModified('configure/renovate'),
       ).toBeFalse();
     });
 
@@ -178,7 +178,7 @@ describe('workers/repository/onboarding/branch/onboarding-branch-cache', () => {
         .mockReturnValueOnce('default-sha');
       scm.isBranchConflicted.mockResolvedValueOnce(false);
       expect(
-        await isOnboardingBranchConflicted('master', 'configure/renovate')
+        await isOnboardingBranchConflicted('master', 'configure/renovate'),
       ).toBeFalse();
     });
 
@@ -197,7 +197,7 @@ describe('workers/repository/onboarding/branch/onboarding-branch-cache', () => {
         .mockReturnValueOnce('new-default-sha');
       scm.isBranchConflicted.mockResolvedValueOnce(false);
       expect(
-        await isOnboardingBranchConflicted('master', 'configure/renovate')
+        await isOnboardingBranchConflicted('master', 'configure/renovate'),
       ).toBeFalse();
     });
 
@@ -216,7 +216,7 @@ describe('workers/repository/onboarding/branch/onboarding-branch-cache', () => {
         .mockReturnValueOnce('default-sha');
       scm.isBranchConflicted.mockResolvedValueOnce(false);
       expect(
-        await isOnboardingBranchConflicted('master', 'configure/renovate')
+        await isOnboardingBranchConflicted('master', 'configure/renovate'),
       ).toBeFalse();
     });
 
@@ -234,7 +234,7 @@ describe('workers/repository/onboarding/branch/onboarding-branch-cache', () => {
         .mockReturnValueOnce('onboarding-sha')
         .mockReturnValueOnce('default-sha');
       expect(
-        await isOnboardingBranchConflicted('master', 'configure/renovate')
+        await isOnboardingBranchConflicted('master', 'configure/renovate'),
       ).toBeTrue();
     });
   });
diff --git a/lib/workers/repository/onboarding/branch/onboarding-branch-cache.ts b/lib/workers/repository/onboarding/branch/onboarding-branch-cache.ts
index 7917b124ab05508c21323ffa7f333481f06a5d25..d3ca81028b7d28112b653cc6f3114ae9ed08b205 100644
--- a/lib/workers/repository/onboarding/branch/onboarding-branch-cache.ts
+++ b/lib/workers/repository/onboarding/branch/onboarding-branch-cache.ts
@@ -8,7 +8,7 @@ export function setOnboardingCache(
   defaultBranchSha: string,
   onboardingBranchSha: string,
   isConflicted: boolean,
-  isModified: boolean
+  isModified: boolean,
 ): void {
   // do not update cache if commit is null/undefined
   if (
@@ -60,7 +60,7 @@ export function hasOnboardingBranchChanged(onboardingBranch: string): boolean {
 // checks if onboarding branch has been modified by user
 // once set to true it stays true as we do not rebase onboarding branches anymore (this feature will be added in future though)
 export async function isOnboardingBranchModified(
-  onboardingBranch: string
+  onboardingBranch: string,
 ): Promise<boolean> {
   const cache = getCache();
   const onboardingCache = cache.onboardingBranchCache;
@@ -92,7 +92,7 @@ export function getOnboardingConfigFromCache(): string | undefined {
 
 export function setOnboardingConfigDetails(
   configFileName: string,
-  configFileParsed: string
+  configFileParsed: string,
 ): void {
   const cache = getCache();
   if (cache.onboardingBranchCache) {
@@ -103,7 +103,7 @@ export function setOnboardingConfigDetails(
 
 export async function isOnboardingBranchConflicted(
   defaultBranch: string,
-  onboardingBranch: string
+  onboardingBranch: string,
 ): Promise<boolean> {
   const cache = getCache();
   const onboardingCache = cache.onboardingBranchCache;
@@ -121,7 +121,7 @@ export async function isOnboardingBranchConflicted(
   } else {
     isConflicted = await scm.isBranchConflicted(
       defaultBranch,
-      onboardingBranch
+      onboardingBranch,
     );
   }
 
diff --git a/lib/workers/repository/onboarding/common.ts b/lib/workers/repository/onboarding/common.ts
index d19da1ca0a492644312db6bed7f1e2a91a6fab65..f4a03a98892a49483ba5566c6f80c90b92f41cc9 100644
--- a/lib/workers/repository/onboarding/common.ts
+++ b/lib/workers/repository/onboarding/common.ts
@@ -15,11 +15,11 @@ export class OnboardingState {
 
   static get prUpdateRequested(): boolean {
     const updateRequested = !!memCache.get<boolean | undefined>(
-      OnboardingState.cacheKey
+      OnboardingState.cacheKey,
     );
     logger.trace(
       { value: updateRequested },
-      'Get OnboardingState.prUpdateRequested'
+      'Get OnboardingState.prUpdateRequested',
     );
     return updateRequested;
   }
@@ -31,11 +31,11 @@ export class OnboardingState {
 
   static get onboardingCacheValid(): boolean {
     const cacheValid = !!memCache.get<boolean | undefined>(
-      OnboardingState.skipKey
+      OnboardingState.skipKey,
     );
     logger.trace(
       { value: cacheValid },
-      'Get OnboardingState.onboardingCacheValid'
+      'Get OnboardingState.onboardingCacheValid',
     );
     return cacheValid;
   }
diff --git a/lib/workers/repository/onboarding/pr/base-branch.spec.ts b/lib/workers/repository/onboarding/pr/base-branch.spec.ts
index a499bc13d62bd6f552adaee200c72856ce4f9e0b..52019e65ab06adc43b6d0d6f26a7c7916ed2a8ec 100644
--- a/lib/workers/repository/onboarding/pr/base-branch.spec.ts
+++ b/lib/workers/repository/onboarding/pr/base-branch.spec.ts
@@ -18,7 +18,7 @@ describe('workers/repository/onboarding/pr/base-branch', () => {
       config.baseBranches = ['some-branch'];
       const res = getBaseBranchDesc(config);
       expect(res.trim()).toBe(
-        'You have configured Renovate to use branch `some-branch` as base branch.'
+        'You have configured Renovate to use branch `some-branch` as base branch.',
       );
     });
 
@@ -26,7 +26,7 @@ describe('workers/repository/onboarding/pr/base-branch', () => {
       config.baseBranches = ['some-branch', 'some-other-branch'];
       const res = getBaseBranchDesc(config);
       expect(res.trim()).toBe(
-        'You have configured Renovate to use the following baseBranches: `some-branch`, `some-other-branch`.'
+        'You have configured Renovate to use the following baseBranches: `some-branch`, `some-other-branch`.',
       );
     });
   });
diff --git a/lib/workers/repository/onboarding/pr/config-description.ts b/lib/workers/repository/onboarding/pr/config-description.ts
index 2209cc3d87b212b79bb2d97fd8921e5303cfa0d8..16e8142d46bda923a989bec479bc98a10d68daec 100644
--- a/lib/workers/repository/onboarding/pr/config-description.ts
+++ b/lib/workers/repository/onboarding/pr/config-description.ts
@@ -31,7 +31,7 @@ function getDescriptionArray(config: RenovateConfig): string[] {
 
 export function getConfigDesc(
   config: RenovateConfig,
-  packageFiles?: Record<string, PackageFile[]>
+  packageFiles?: Record<string, PackageFile[]>,
 ): string {
   // TODO: type (#22198)
   const configFile = configFileNames.includes(config.onboardingConfigFileName!)
@@ -52,7 +52,7 @@ export function getConfigDesc(
   });
   desc += '\n';
   desc += emojify(
-    `:abcd: Would you like to change the way Renovate is upgrading your dependencies?`
+    `:abcd: Would you like to change the way Renovate is upgrading your dependencies?`,
   );
   desc += ` Simply edit the \`${configFile}\` in this branch with your custom config and the list of Pull Requests in the "What to Expect" section below will be updated the next time Renovate runs.`;
   desc += '\n\n---\n';
diff --git a/lib/workers/repository/onboarding/pr/index.spec.ts b/lib/workers/repository/onboarding/pr/index.spec.ts
index 24095e9ec05ca0e43ee5acf326eda460504bcd3c..b9a29bbbdc5c78ecc5646419dd0c59780f5a98a4 100644
--- a/lib/workers/repository/onboarding/pr/index.spec.ts
+++ b/lib/workers/repository/onboarding/pr/index.spec.ts
@@ -45,7 +45,7 @@ describe('workers/repository/onboarding/pr/index', () => {
     it('returns if onboarded', async () => {
       config.repoIsOnboarded = true;
       await expect(
-        ensureOnboardingPr(config, packageFiles, branches)
+        ensureOnboardingPr(config, packageFiles, branches),
       ).resolves.not.toThrow();
       expect(platform.createPr).toHaveBeenCalledTimes(0);
       expect(platform.updatePr).toHaveBeenCalledTimes(0);
@@ -65,11 +65,11 @@ describe('workers/repository/onboarding/pr/index', () => {
         config.onboardingRebaseCheckbox = onboardingRebaseCheckbox;
         OnboardingState.prUpdateRequested = prUpdateRequested;
         await expect(
-          ensureOnboardingPr(config, packageFiles, branches)
+          ensureOnboardingPr(config, packageFiles, branches),
         ).resolves.not.toThrow();
         expect(platform.updatePr).toHaveBeenCalledTimes(0);
         expect(platform.createPr).toHaveBeenCalledTimes(expected);
-      }
+      },
     );
 
     it('creates PR', async () => {
@@ -85,7 +85,7 @@ describe('workers/repository/onboarding/pr/index', () => {
           addLabels: ['label', 'additional-label'],
         },
         packageFiles,
-        branches
+        branches,
       );
       expect(platform.createPr).toHaveBeenCalledTimes(1);
       expect(platform.createPr.mock.calls[0][0].labels).toEqual([
@@ -111,11 +111,11 @@ describe('workers/repository/onboarding/pr/index', () => {
             prFooter: '',
           },
           packageFiles,
-          branches
+          branches,
         );
         expect(platform.createPr).toHaveBeenCalledTimes(1);
         expect(platform.createPr.mock.calls[0][0].prBody).toMatchSnapshot();
-      }
+      },
     );
 
     it.each`
@@ -136,11 +136,11 @@ describe('workers/repository/onboarding/pr/index', () => {
               'There should be several empty lines at the end of the PR\r\n\n\n',
           },
           packageFiles,
-          branches
+          branches,
         );
         expect(platform.createPr).toHaveBeenCalledTimes(1);
         expect(platform.createPr.mock.calls[0][0].prBody).toMatchSnapshot();
-      }
+      },
     );
 
     it.each`
@@ -164,17 +164,17 @@ describe('workers/repository/onboarding/pr/index', () => {
               'And this is a footer for repository:{{repository}} baseBranch:{{baseBranch}}',
           },
           packageFiles,
-          branches
+          branches,
         );
         expect(platform.createPr).toHaveBeenCalledTimes(1);
         expect(platform.createPr.mock.calls[0][0].prBody).toMatch(
-          /platform:github/
+          /platform:github/,
         );
         expect(platform.createPr.mock.calls[0][0].prBody).toMatch(
-          /repository:test/
+          /repository:test/,
         );
         expect(platform.createPr.mock.calls[0][0].prBody).toMatchSnapshot();
-      }
+      },
     );
 
     it.each`
@@ -193,12 +193,12 @@ describe('workers/repository/onboarding/pr/index', () => {
           partial<Pr>({
             title: 'Configure Renovate',
             bodyStruct: onboardingRebaseCheckbox ? bodyStruct : { hash },
-          })
+          }),
         );
         await ensureOnboardingPr(config, packageFiles, branches);
         expect(platform.createPr).toHaveBeenCalledTimes(0);
         expect(platform.updatePr).toHaveBeenCalledTimes(0);
-      }
+      },
     );
 
     it('ensures comment, when PR is conflicted', async () => {
@@ -207,7 +207,7 @@ describe('workers/repository/onboarding/pr/index', () => {
         partial<Pr>({
           title: 'Configure Renovate',
           bodyStruct,
-        })
+        }),
       );
       scm.isBranchConflicted.mockResolvedValueOnce(true);
       await ensureOnboardingPr(config, {}, branches);
@@ -222,7 +222,7 @@ describe('workers/repository/onboarding/pr/index', () => {
         partial<Pr>({
           title: 'Configure Renovate',
           bodyStruct,
-        })
+        }),
       );
       await ensureOnboardingPr(config, {}, branches);
       expect(platform.createPr).toHaveBeenCalledTimes(0);
@@ -245,10 +245,10 @@ describe('workers/repository/onboarding/pr/index', () => {
       GlobalConfig.set({ dryRun: 'full' });
       await ensureOnboardingPr(config, packageFiles, branches);
       expect(logger.info).toHaveBeenCalledWith(
-        'DRY-RUN: Would check branch renovate/configure'
+        'DRY-RUN: Would check branch renovate/configure',
       );
       expect(logger.info).toHaveBeenLastCalledWith(
-        'DRY-RUN: Would create onboarding PR'
+        'DRY-RUN: Would create onboarding PR',
       );
     });
 
@@ -258,14 +258,14 @@ describe('workers/repository/onboarding/pr/index', () => {
         partial<Pr>({
           title: 'Configure Renovate',
           bodyStruct,
-        })
+        }),
       );
       await ensureOnboardingPr(config, packageFiles, branches);
       expect(logger.info).toHaveBeenCalledWith(
-        'DRY-RUN: Would check branch renovate/configure'
+        'DRY-RUN: Would check branch renovate/configure',
       );
       expect(logger.info).toHaveBeenLastCalledWith(
-        'DRY-RUN: Would update onboarding PR'
+        'DRY-RUN: Would update onboarding PR',
       );
     });
 
@@ -282,7 +282,7 @@ describe('workers/repository/onboarding/pr/index', () => {
       it('throws when trying to create a new PR', async () => {
         platform.createPr.mockRejectedValueOnce(err);
         await expect(
-          ensureOnboardingPr(config, packageFiles, branches)
+          ensureOnboardingPr(config, packageFiles, branches),
         ).toReject();
         expect(scm.deleteBranch).toHaveBeenCalledTimes(0);
       });
@@ -293,10 +293,10 @@ describe('workers/repository/onboarding/pr/index', () => {
         };
         platform.createPr.mockRejectedValueOnce(err);
         await expect(
-          ensureOnboardingPr(config, packageFiles, branches)
+          ensureOnboardingPr(config, packageFiles, branches),
         ).toResolve();
         expect(logger.warn).toHaveBeenCalledWith(
-          'Onboarding PR already exists but cannot find it. It was probably created by a different user.'
+          'Onboarding PR already exists but cannot find it. It was probably created by a different user.',
         );
         expect(scm.deleteBranch).toHaveBeenCalledTimes(1);
       });
diff --git a/lib/workers/repository/onboarding/pr/index.ts b/lib/workers/repository/onboarding/pr/index.ts
index d11dca54ca081ce17b46302c614123cda97fac58..6810cd4b24878d90dd607369061658385f5bcab8 100644
--- a/lib/workers/repository/onboarding/pr/index.ts
+++ b/lib/workers/repository/onboarding/pr/index.ts
@@ -29,7 +29,7 @@ import { getPrList } from './pr-list';
 export async function ensureOnboardingPr(
   config: RenovateConfig,
   packageFiles: Record<string, PackageFile[]> | null,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<void> {
   if (
     config.repoIsOnboarded === true ||
@@ -43,28 +43,28 @@ export async function ensureOnboardingPr(
   // TODO #22198
   const existingPr = await platform.getBranchPr(
     config.onboardingBranch!,
-    config.defaultBranch
+    config.defaultBranch,
   );
   if (existingPr) {
     // skip pr-update if branch is conflicted
     if (
       await isOnboardingBranchConflicted(
         config.defaultBranch!,
-        config.onboardingBranch!
+        config.onboardingBranch!,
       )
     ) {
       await ensureComment({
         number: existingPr.number,
         topic: 'Branch Conflicted',
         content: emojify(
-          `:warning: This PR has a merge conflict which Renovate is unable to automatically resolve, so updates to this PR description are now paused. Please resolve the merge conflict manually.\n\n`
+          `:warning: This PR has a merge conflict which Renovate is unable to automatically resolve, so updates to this PR description are now paused. Please resolve the merge conflict manually.\n\n`,
         ),
       });
       return;
     }
   }
   const onboardingConfigHashComment = await getOnboardingConfigHashComment(
-    config
+    config,
   );
   const rebaseCheckBox = getRebaseCheckbox(config.onboardingRebaseCheckbox);
   logger.debug('Filling in onboarding PR template');
@@ -74,10 +74,10 @@ export async function ensureOnboardingPr(
   prTemplate +=
     config.requireConfig === 'required'
       ? emojify(
-          `:vertical_traffic_light: To activate Renovate, merge this Pull Request. To disable Renovate, simply close this Pull Request unmerged.\n\n`
+          `:vertical_traffic_light: To activate Renovate, merge this Pull Request. To disable Renovate, simply close this Pull Request unmerged.\n\n`,
         )
       : emojify(
-          `:vertical_traffic_light: Renovate will begin keeping your dependencies up-to-date only once you merge or close this Pull Request.\n\n`
+          `:vertical_traffic_light: Renovate will begin keeping your dependencies up-to-date only once you merge or close this Pull Request.\n\n`,
         );
   // TODO #22198
   prTemplate += emojify(
@@ -99,7 +99,7 @@ export async function ensureOnboardingPr(
 If you need any further assistance then you can also [request help here](${
       config.productLinks!.help
     }).
-`
+`,
   );
   prTemplate += rebaseCheckBox;
   let prBody = prTemplate;
@@ -107,13 +107,13 @@ If you need any further assistance then you can also [request help here](${
     let files: string[] = [];
     for (const [manager, managerFiles] of Object.entries(packageFiles)) {
       files = files.concat(
-        managerFiles.map((file) => ` * \`${file.packageFile}\` (${manager})`)
+        managerFiles.map((file) => ` * \`${file.packageFile}\` (${manager})`),
       );
     }
     prBody =
       prBody.replace(
         '{{PACKAGE FILES}}',
-        '### Detected Package Files\n\n' + files.join('\n')
+        '### Detected Package Files\n\n' + files.join('\n'),
       ) + '\n';
   } else {
     prBody = prBody.replace('{{PACKAGE FILES}}\n', '');
@@ -128,7 +128,7 @@ If you need any further assistance then you can also [request help here](${
   prBody = prBody.replace('{{CONFIG}}\n', configDesc);
   prBody = prBody.replace(
     '{{WARNINGS}}\n',
-    getWarnings(config) + getDepWarningsOnboardingPR(packageFiles!, config)
+    getWarnings(config) + getDepWarningsOnboardingPR(packageFiles!, config),
   );
   prBody = prBody.replace('{{ERRORS}}\n', getErrors(config));
   prBody = prBody.replace('{{BASEBRANCH}}\n', getBaseBranchDesc(config));
@@ -187,7 +187,7 @@ If you need any further assistance then you can also [request help here](${
       });
       logger.info(
         { pr: `Pull Request #${pr!.number}` },
-        'Onboarding PR created'
+        'Onboarding PR created',
       );
       await addParticipants(config, pr!);
     }
@@ -195,11 +195,11 @@ If you need any further assistance then you can also [request help here](${
     if (
       err.response?.statusCode === 422 &&
       err.response?.body?.errors?.[0]?.message?.startsWith(
-        'A pull request already exists'
+        'A pull request already exists',
       )
     ) {
       logger.warn(
-        'Onboarding PR already exists but cannot find it. It was probably created by a different user.'
+        'Onboarding PR already exists but cannot find it. It was probably created by a different user.',
       );
       await scm.deleteBranch(config.onboardingBranch!);
       return;
@@ -219,7 +219,7 @@ function getRebaseCheckbox(onboardingRebaseCheckbox?: boolean): string {
 }
 
 async function getOnboardingConfigHashComment(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<string> {
   const configFile = defaultConfigFile(config);
   const existingContents =
diff --git a/lib/workers/repository/onboarding/pr/pr-list.ts b/lib/workers/repository/onboarding/pr/pr-list.ts
index 6c596e74baf95df4f2a87d3b13c7f15fb07a329c..24f231c7d701455a38bdbffecda8e5cc08fe82e7 100644
--- a/lib/workers/repository/onboarding/pr/pr-list.ts
+++ b/lib/workers/repository/onboarding/pr/pr-list.ts
@@ -6,7 +6,7 @@ import type { BranchConfig } from '../../../types';
 
 export function getPrList(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): string {
   logger.debug('getPrList()');
   logger.trace({ config });
@@ -22,7 +22,7 @@ export function getPrList(
     // TODO #22198
     prDesc += `<details>\n<summary>${branch.prTitle!.replace(
       prTitleRe,
-      '@&#8203;$1'
+      '@&#8203;$1',
     )}</summary>\n\n`;
     if (branch.schedule?.length) {
       prDesc += `  - Schedule: ${JSON.stringify(branch.schedule)}\n`;
@@ -70,7 +70,7 @@ export function getPrList(
     prHourlyLimit < branches.length
   ) {
     prDesc += emojify(
-      `<br />\n\n:children_crossing: Branch creation will be limited to maximum ${prHourlyLimit} per hour, so it doesn't swamp any CI resources or overwhelm the project. See docs for \`prhourlylimit\` for details.\n\n`
+      `<br />\n\n:children_crossing: Branch creation will be limited to maximum ${prHourlyLimit} per hour, so it doesn't swamp any CI resources or overwhelm the project. See docs for \`prhourlylimit\` for details.\n\n`,
     );
   }
   return prDesc;
diff --git a/lib/workers/repository/package-files.ts b/lib/workers/repository/package-files.ts
index a99357526a19a970e2482d1a9c331f54f727a6f9..3dd16047f9d3319375e287609578f26f59f716ec 100644
--- a/lib/workers/repository/package-files.ts
+++ b/lib/workers/repository/package-files.ts
@@ -8,11 +8,11 @@ export class PackageFiles {
 
   static add(
     baseBranch: string,
-    packageFiles: Record<string, PackageFile[]> | null
+    packageFiles: Record<string, PackageFile[]> | null,
   ): void {
     logger.debug(
       { baseBranch },
-      `PackageFiles.add() - Package file saved for base branch`
+      `PackageFiles.add() - Package file saved for base branch`,
     );
     this.data.set(baseBranch, packageFiles);
   }
@@ -79,7 +79,7 @@ export class PackageFiles {
    * @param data
    */
   private static getDashboardMarkdownInternal(
-    data: Map<string, Record<string, PackageFile[]> | null>
+    data: Map<string, Record<string, PackageFile[]> | null>,
   ): string {
     const none = 'None detected\n\n';
     const pad = data.size > 1; // padding condition for a multi base branch repo
@@ -132,7 +132,7 @@ export class PackageFiles {
    *          otherwise false is returned
    */
   private static pop(
-    data: Map<string, Record<string, PackageFile[]> | null>
+    data: Map<string, Record<string, PackageFile[]> | null>,
   ): boolean {
     // get detected managers list of the last listed base branch
     const [branch, managers] = Array.from(data).pop() ?? [];
diff --git a/lib/workers/repository/process/deprecated.ts b/lib/workers/repository/process/deprecated.ts
index 40d6d6e445bd82dbcc74f8c1da6edd1c6d554846..43c699cbf43386a2bd0154ecce5357957b623dd0 100644
--- a/lib/workers/repository/process/deprecated.ts
+++ b/lib/workers/repository/process/deprecated.ts
@@ -7,7 +7,7 @@ import { platform } from '../../../modules/platform';
 
 export async function raiseDeprecationWarnings(
   config: RenovateConfig,
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): Promise<void> {
   if (!config.repoIsOnboarded) {
     return;
@@ -31,7 +31,7 @@ export async function raiseDeprecationWarnings(
             depPackageFiles: [],
           };
           deprecatedPackages[dep.depName!].depPackageFiles.push(
-            packageFile.packageFile
+            packageFile.packageFile,
           );
         }
       }
@@ -48,7 +48,7 @@ export async function raiseDeprecationWarnings(
           deprecationMessage,
           packageFiles: depPackageFiles,
         },
-        'dependency is deprecated'
+        'dependency is deprecated',
       );
       const issueTitle = `${issueTitlePrefix} ${depName} (${manager})`;
       issueTitleList.push(issueTitle);
@@ -71,12 +71,12 @@ export async function raiseDeprecationWarnings(
       }
     }
     logger.debug(
-      'Checking for existing deprecated package issues missing in current deprecatedPackages'
+      'Checking for existing deprecated package issues missing in current deprecatedPackages',
     );
     const issueList = await platform.getIssueList();
     if (issueList?.length) {
       const deprecatedIssues = issueList.filter(
-        (i) => i.title!.startsWith(issueTitlePrefix) && i.state === 'open'
+        (i) => i.title!.startsWith(issueTitlePrefix) && i.state === 'open',
       );
       for (const i of deprecatedIssues) {
         if (!issueTitleList.includes(i.title!)) {
diff --git a/lib/workers/repository/process/extract-update.spec.ts b/lib/workers/repository/process/extract-update.spec.ts
index eadb2c2d74758c1cc80eee876b78cfb6aa1d7681..2b87fc007fb4c094e746a03ee0a5ef686aeefcfd 100644
--- a/lib/workers/repository/process/extract-update.spec.ts
+++ b/lib/workers/repository/process/extract-update.spec.ts
@@ -175,7 +175,7 @@ describe('workers/repository/process/extract-update', () => {
       cachedExtract.configHash = 'hash';
       expect(isCacheExtractValid('new_sha', 'hash', cachedExtract)).toBe(false);
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        `Cached extract result cannot be used due to base branch SHA change (old=sha, new=new_sha)`
+        `Cached extract result cannot be used due to base branch SHA change (old=sha, new=new_sha)`,
       );
       expect(logger.logger.debug).toHaveBeenCalledTimes(1);
     });
@@ -184,7 +184,7 @@ describe('workers/repository/process/extract-update', () => {
       cachedExtract.configHash = 'hash';
       expect(isCacheExtractValid('sha', 'new_hash', cachedExtract)).toBe(false);
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Cached extract result cannot be used due to config change'
+        'Cached extract result cannot be used due to config change',
       );
       expect(logger.logger.debug).toHaveBeenCalledTimes(1);
     });
@@ -196,11 +196,11 @@ describe('workers/repository/process/extract-update', () => {
         isCacheExtractValid(
           'sha',
           'hash',
-          restOfCache as never as BaseBranchCache
-        )
+          restOfCache as never as BaseBranchCache,
+        ),
       ).toBe(false);
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Cached extract is missing extractionFingerprints, so cannot be used'
+        'Cached extract is missing extractionFingerprints, so cannot be used',
       );
       expect(logger.logger.debug).toHaveBeenCalledTimes(1);
     });
@@ -216,7 +216,7 @@ describe('workers/repository/process/extract-update', () => {
       cachedExtract.configHash = 'hash';
       expect(isCacheExtractValid('sha', 'hash', cachedExtract)).toBe(true);
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Cached extract for sha=sha is valid and can be used'
+        'Cached extract for sha=sha is valid and can be used',
       );
       expect(logger.logger.debug).toHaveBeenCalledTimes(1);
     });
diff --git a/lib/workers/repository/process/extract-update.ts b/lib/workers/repository/process/extract-update.ts
index 85bb18cbf1d6b9513ad41cf7d56e9b90e5b6fc56..aceaab7f407fde236e50990ef7c787c8c2baa6a2 100644
--- a/lib/workers/repository/process/extract-update.ts
+++ b/lib/workers/repository/process/extract-update.ts
@@ -36,7 +36,7 @@ export interface Stats {
 
 // istanbul ignore next
 function extractStats(
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): Stats | null {
   if (!packageFiles) {
     return null;
@@ -67,14 +67,14 @@ function extractStats(
 export function isCacheExtractValid(
   baseBranchSha: string,
   configHash: string,
-  cachedExtract?: BaseBranchCache
+  cachedExtract?: BaseBranchCache,
 ): boolean {
   if (!(cachedExtract?.sha && cachedExtract.configHash)) {
     return false;
   }
   if (cachedExtract.sha !== baseBranchSha) {
     logger.debug(
-      `Cached extract result cannot be used due to base branch SHA change (old=${cachedExtract.sha}, new=${baseBranchSha})`
+      `Cached extract result cannot be used due to base branch SHA change (old=${cachedExtract.sha}, new=${baseBranchSha})`,
     );
     return false;
   }
@@ -84,13 +84,13 @@ export function isCacheExtractValid(
   }
   if (!cachedExtract.extractionFingerprints) {
     logger.debug(
-      'Cached extract is missing extractionFingerprints, so cannot be used'
+      'Cached extract is missing extractionFingerprints, so cannot be used',
     );
     return false;
   }
   const changedManagers = new Set();
   for (const [manager, fingerprint] of Object.entries(
-    cachedExtract.extractionFingerprints
+    cachedExtract.extractionFingerprints,
   )) {
     if (fingerprint !== hashMap.get(manager)) {
       changedManagers.add(manager);
@@ -99,18 +99,18 @@ export function isCacheExtractValid(
   if (changedManagers.size > 0) {
     logger.debug(
       { changedManagers: [...changedManagers] },
-      'Manager fingerprint(s) have changed, extract cache cannot be reused'
+      'Manager fingerprint(s) have changed, extract cache cannot be reused',
     );
     return false;
   }
   logger.debug(
-    `Cached extract for sha=${baseBranchSha} is valid and can be used`
+    `Cached extract for sha=${baseBranchSha} is valid and can be used`,
   );
   return true;
 }
 
 export async function extract(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<Record<string, PackageFile[]>> {
   logger.debug('extract()');
   const { baseBranch } = config;
@@ -160,7 +160,7 @@ export async function extract(
   const stats = extractStats(packageFiles);
   logger.info(
     { baseBranch: config.baseBranch, stats },
-    `Dependency extraction complete`
+    `Dependency extraction complete`,
   );
   logger.trace({ config: packageFiles }, 'packageFiles');
   ensureGithubToken(packageFiles);
@@ -169,7 +169,7 @@ export async function extract(
 
 async function fetchVulnerabilities(
   config: RenovateConfig,
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): Promise<void> {
   if (config.osvVulnerabilityAlerts) {
     logger.debug('fetchVulnerabilities() - osvVulnerabilityAlerts=true');
@@ -177,7 +177,7 @@ async function fetchVulnerabilities(
       const vulnerabilities = await Vulnerabilities.create();
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
     } catch (err) {
       logger.warn({ err }, 'Unable to read vulnerability information');
@@ -187,18 +187,18 @@ async function fetchVulnerabilities(
 
 export async function lookup(
   config: RenovateConfig,
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): Promise<ExtractResult> {
   await fetchVulnerabilities(config, packageFiles);
   await fetchUpdates(config, packageFiles);
   await raiseDeprecationWarnings(config, packageFiles);
   const { branches, branchList } = await branchifyUpgrades(
     config,
-    packageFiles
+    packageFiles,
   );
   logger.debug(
     { baseBranch: config.baseBranch, config: packageFiles },
-    'packageFiles with updates'
+    'packageFiles with updates',
   );
   sortBranches(branches);
   return { branches, branchList, packageFiles };
@@ -206,7 +206,7 @@ export async function lookup(
 
 export async function update(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<WriteUpdateResult | undefined> {
   let res: WriteUpdateResult | undefined;
   // istanbul ignore else
diff --git a/lib/workers/repository/process/fetch.spec.ts b/lib/workers/repository/process/fetch.spec.ts
index d701e08868d9bbce6f4c82d7db77490130018e8c..c8d4090c5321050fa7a42efceb8882baa8d8c218 100644
--- a/lib/workers/repository/process/fetch.spec.ts
+++ b/lib/workers/repository/process/fetch.spec.ts
@@ -118,7 +118,7 @@ describe('workers/repository/process/fetch', () => {
       };
       await fetchUpdates(config, packageFiles);
       expect(packageFiles.docker[0].deps[0].skipReason).toBe(
-        'internal-package'
+        'internal-package',
       );
       expect(packageFiles.docker[0].deps[0].updates).toHaveLength(0);
     });
@@ -158,7 +158,7 @@ describe('workers/repository/process/fetch', () => {
       lookupUpdates.mockRejectedValueOnce(new Error('some error'));
 
       await expect(
-        fetchUpdates({ ...config, repoIsOnboarded: true }, packageFiles)
+        fetchUpdates({ ...config, repoIsOnboarded: true }, packageFiles),
       ).rejects.toThrow();
     });
 
@@ -175,7 +175,7 @@ describe('workers/repository/process/fetch', () => {
       lookupUpdates.mockRejectedValueOnce(new Error('some error'));
 
       await expect(
-        fetchUpdates({ ...config, repoIsOnboarded: true }, packageFiles)
+        fetchUpdates({ ...config, repoIsOnboarded: true }, packageFiles),
       ).rejects.toThrow();
     });
 
diff --git a/lib/workers/repository/process/fetch.ts b/lib/workers/repository/process/fetch.ts
index e60fdf05f857dc21c9cafbbd923db7e845578305..59b7af29a8e12c59a3920a076de9d8b88ff818ae 100644
--- a/lib/workers/repository/process/fetch.ts
+++ b/lib/workers/repository/process/fetch.ts
@@ -21,7 +21,7 @@ import type { LookupUpdateConfig } from './lookup/types';
 
 async function withLookupStats<T>(
   datasource: string,
-  callback: () => Promise<T>
+  callback: () => Promise<T>,
 ): Promise<T> {
   const start = Date.now();
   const result = await callback();
@@ -34,7 +34,7 @@ async function withLookupStats<T>(
 
 async function fetchDepUpdates(
   packageFileConfig: RenovateConfig & PackageFile,
-  indep: PackageDependency
+  indep: PackageDependency,
 ): Promise<PackageDependency> {
   const dep = clone(indep);
   dep.updates = [];
@@ -70,7 +70,7 @@ async function fetchDepUpdates(
     if (depConfig.datasource) {
       try {
         const updateResult = await withLookupStats(depConfig.datasource, () =>
-          lookupUpdates(depConfig as LookupUpdateConfig)
+          lookupUpdates(depConfig as LookupUpdateConfig),
         );
         Object.assign(dep, updateResult);
       } catch (err) {
@@ -98,7 +98,7 @@ async function fetchDepUpdates(
 async function fetchManagerPackagerFileUpdates(
   config: RenovateConfig,
   managerConfig: RenovateConfig,
-  pFile: PackageFile
+  pFile: PackageFile,
 ): Promise<void> {
   const { packageFile } = pFile;
   const packageFileConfig = mergeChildConfig(managerConfig, pFile);
@@ -111,11 +111,11 @@ async function fetchManagerPackagerFileUpdates(
   const { manager } = packageFileConfig;
   const queue = pFile.deps.map(
     (dep) => (): Promise<PackageDependency> =>
-      fetchDepUpdates(packageFileConfig, dep)
+      fetchDepUpdates(packageFileConfig, dep),
   );
   logger.trace(
     { manager, packageFile, queueLength: queue.length },
-    'fetchManagerPackagerFileUpdates starting with concurrency'
+    'fetchManagerPackagerFileUpdates starting with concurrency',
   );
 
   pFile.deps = await p.all(queue);
@@ -125,16 +125,16 @@ async function fetchManagerPackagerFileUpdates(
 async function fetchManagerUpdates(
   config: RenovateConfig,
   packageFiles: Record<string, PackageFile[]>,
-  manager: string
+  manager: string,
 ): Promise<void> {
   const managerConfig = getManagerConfig(config, manager);
   const queue = packageFiles[manager].map(
     (pFile) => (): Promise<void> =>
-      fetchManagerPackagerFileUpdates(config, managerConfig, pFile)
+      fetchManagerPackagerFileUpdates(config, managerConfig, pFile),
   );
   logger.trace(
     { manager, queueLength: queue.length },
-    'fetchManagerUpdates starting'
+    'fetchManagerUpdates starting',
   );
   await p.all(queue);
   logger.trace({ manager }, 'fetchManagerUpdates finished');
@@ -142,16 +142,16 @@ async function fetchManagerUpdates(
 
 export async function fetchUpdates(
   config: RenovateConfig,
-  packageFiles: Record<string, PackageFile[]>
+  packageFiles: Record<string, PackageFile[]>,
 ): Promise<void> {
   const managers = Object.keys(packageFiles);
   const allManagerJobs = managers.map((manager) =>
-    fetchManagerUpdates(config, packageFiles, manager)
+    fetchManagerUpdates(config, packageFiles, manager),
   );
   await Promise.all(allManagerJobs);
   PackageFiles.add(config.baseBranch!, { ...packageFiles });
   logger.debug(
     { baseBranch: config.baseBranch },
-    'Package releases lookups complete'
+    'Package releases lookups complete',
   );
 }
diff --git a/lib/workers/repository/process/index.spec.ts b/lib/workers/repository/process/index.spec.ts
index d6cd8c148ffa800cbf8f956a2f7fa5a0ca3612a9..4bcf83e03c551579fa5ad2623df206538cbd22ed 100644
--- a/lib/workers/repository/process/index.spec.ts
+++ b/lib/workers/repository/process/index.spec.ts
@@ -64,7 +64,7 @@ describe('workers/repository/process/index', () => {
       expect(platform.getJsonFile).not.toHaveBeenCalledWith(
         'renovate.json',
         undefined,
-        'dev'
+        'dev',
       );
     });
 
@@ -85,7 +85,7 @@ describe('workers/repository/process/index', () => {
       expect(platform.getJsonFile).toHaveBeenCalledWith(
         'renovate.json',
         undefined,
-        'dev'
+        'dev',
       );
       expect(addMeta).toHaveBeenNthCalledWith(1, { baseBranch: 'master' });
       expect(addMeta).toHaveBeenNthCalledWith(2, { baseBranch: 'dev' });
@@ -105,7 +105,7 @@ describe('workers/repository/process/index', () => {
       config.baseBranches = ['master', 'dev'];
       config.useBaseBranchConfig = 'merge';
       await expect(extractDependencies(config)).rejects.toThrow(
-        CONFIG_VALIDATION
+        CONFIG_VALIDATION,
       );
       expect(addMeta).toHaveBeenNthCalledWith(1, { baseBranch: 'master' });
       expect(addMeta).toHaveBeenNthCalledWith(2, { baseBranch: 'dev' });
@@ -144,7 +144,7 @@ describe('workers/repository/process/index', () => {
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { baseBranches: ['release/v1', 'release/v2', 'dev', 'some-other'] },
-        'baseBranches'
+        'baseBranches',
       );
       expect(addMeta).toHaveBeenCalledWith({ baseBranch: 'release/v1' });
       expect(addMeta).toHaveBeenCalledWith({ baseBranch: 'release/v2' });
diff --git a/lib/workers/repository/process/index.ts b/lib/workers/repository/process/index.ts
index ff10492c10c7f5febd87b2883e56371d3badc19f..cef8b546c1886a7bf3c90a217a3b335456c55b90 100644
--- a/lib/workers/repository/process/index.ts
+++ b/lib/workers/repository/process/index.ts
@@ -20,7 +20,7 @@ import type { WriteUpdateResult } from './write';
 
 async function getBaseBranchConfig(
   baseBranch: string,
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<RenovateConfig> {
   logger.debug(`baseBranch: ${baseBranch}`);
 
@@ -32,7 +32,7 @@ async function getBaseBranchConfig(
   ) {
     logger.debug(
       { baseBranch },
-      `Merging config from base branch because useBaseBranchConfig=merge`
+      `Merging config from base branch because useBaseBranchConfig=merge`,
     );
 
     // Retrieve config file name autodetected for this repo
@@ -44,13 +44,13 @@ async function getBaseBranchConfig(
       baseBranchConfig = await platform.getJsonFile(
         configFileName,
         config.repository,
-        baseBranch
+        baseBranch,
       );
       logger.debug({ config: baseBranchConfig }, 'Base branch config raw');
     } catch (err) {
       logger.error(
         { configFileName, baseBranch },
-        `Error fetching config file from base branch - possible config name mismatch between branches?`
+        `Error fetching config file from base branch - possible config name mismatch between branches?`,
       );
 
       const error = new Error(CONFIG_VALIDATION);
@@ -67,7 +67,7 @@ async function getBaseBranchConfig(
     if (config.printConfig) {
       logger.info(
         { config: baseBranchConfig },
-        'Base branch config after merge'
+        'Base branch config after merge',
       );
     }
 
@@ -87,7 +87,7 @@ async function getBaseBranchConfig(
 
 function unfoldBaseBranches(
   defaultBranch: string,
-  baseBranches: string[]
+  baseBranches: string[],
 ): string[] {
   const unfoldedList: string[] = [];
 
@@ -97,7 +97,7 @@ function unfoldBaseBranches(
     if (isAllowedPred) {
       const matchingBranches = allBranches.filter(isAllowedPred);
       logger.debug(
-        `baseBranches regex "${baseBranch}" matches [${matchingBranches.join()}]`
+        `baseBranches regex "${baseBranch}" matches [${matchingBranches.join()}]`,
       );
       unfoldedList.push(...matchingBranches);
     } else if (baseBranch === '$default') {
@@ -112,7 +112,7 @@ function unfoldBaseBranches(
 }
 
 export async function extractDependencies(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<ExtractResult> {
   await readDashboardBody(config);
   let res: ExtractResult = {
@@ -123,7 +123,7 @@ export async function extractDependencies(
   if (GlobalConfig.get('platform') !== 'local' && config.baseBranches?.length) {
     config.baseBranches = unfoldBaseBranches(
       config.defaultBranch!,
-      config.baseBranches
+      config.baseBranches,
     );
     logger.debug({ baseBranches: config.baseBranches }, 'baseBranches');
     const extracted: Record<string, Record<string, PackageFile[]>> = {};
@@ -166,7 +166,7 @@ export async function extractDependencies(
 
 export function updateRepo(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<WriteUpdateResult | undefined> {
   logger.debug('processRepo()');
 
diff --git a/lib/workers/repository/process/limits.spec.ts b/lib/workers/repository/process/limits.spec.ts
index 9f0252dc5e753964d9632635439f4da3a4d34592..3fc3f786662494cfc007c714b768c1f45c1aeed9 100644
--- a/lib/workers/repository/process/limits.spec.ts
+++ b/lib/workers/repository/process/limits.spec.ts
@@ -62,9 +62,9 @@ describe('workers/repository/process/limits', () => {
               partial<Pr>({
                 sourceBranch: branchName,
                 state: 'open',
-              })
+              }),
             )
-          : Promise.reject('some error')
+          : Promise.reject('some error'),
       );
       const branches: BranchConfig[] = [
         { branchName: 'test' },
@@ -133,7 +133,7 @@ describe('workers/repository/process/limits', () => {
       // TODO: #22198
       const res = await limits.getConcurrentBranchesRemaining(
         config,
-        null as never
+        null as never,
       );
       expect(res).toBe(2);
     });
@@ -150,8 +150,8 @@ describe('workers/repository/process/limits', () => {
             prHourlyLimit: 3,
             branchConcurrentLimit: 5,
           },
-          []
-        )
+          [],
+        ),
       ).resolves.toBe(3);
 
       await expect(
@@ -161,8 +161,8 @@ describe('workers/repository/process/limits', () => {
             prHourlyLimit: 11,
             branchConcurrentLimit: 7,
           },
-          []
-        )
+          [],
+        ),
       ).resolves.toBe(7);
     });
   });
diff --git a/lib/workers/repository/process/limits.ts b/lib/workers/repository/process/limits.ts
index 3cfedafc23ee16533524fb6363c6bb99dd5843b1..e5e9b68b3c85fdbe746dead74bf55215656cf78e 100644
--- a/lib/workers/repository/process/limits.ts
+++ b/lib/workers/repository/process/limits.ts
@@ -7,7 +7,7 @@ import { ExternalHostError } from '../../../types/errors/external-host-error';
 import type { BranchConfig } from '../../types';
 
 export async function getPrHourlyRemaining(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<number> {
   if (config.prHourlyLimit) {
     try {
@@ -19,11 +19,11 @@ export async function getPrHourlyRemaining(
         (pr) =>
           pr.sourceBranch !== config.onboardingBranch &&
           pr.sourceBranch.startsWith(config.branchPrefix!) &&
-          DateTime.fromISO(pr.createdAt!) > currentHourStart
+          DateTime.fromISO(pr.createdAt!) > currentHourStart,
       );
       const prsRemaining = Math.max(
         0,
-        config.prHourlyLimit - soFarThisHour.length
+        config.prHourlyLimit - soFarThisHour.length,
       );
       logger.debug(`PR hourly limit remaining: ${prsRemaining}`);
       return prsRemaining;
@@ -41,7 +41,7 @@ export async function getPrHourlyRemaining(
 
 export async function getConcurrentPrsRemaining(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<number> {
   if (config.prConcurrentLimit) {
     logger.debug(`Calculating prConcurrentLimit (${config.prConcurrentLimit})`);
@@ -69,7 +69,7 @@ export async function getConcurrentPrsRemaining(
       logger.debug(`${openPrs.length} PRs are currently open`);
       const concurrentRemaining = Math.max(
         0,
-        config.prConcurrentLimit - openPrs.length
+        config.prConcurrentLimit - openPrs.length,
       );
       logger.debug(`PR concurrent limit remaining: ${concurrentRemaining}`);
       return concurrentRemaining;
@@ -83,7 +83,7 @@ export async function getConcurrentPrsRemaining(
 
 export async function getPrsRemaining(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<number> {
   const hourlyRemaining = await getPrHourlyRemaining(config);
   const concurrentRemaining = await getConcurrentPrsRemaining(config, branches);
@@ -92,7 +92,7 @@ export async function getPrsRemaining(
 
 export async function getConcurrentBranchesRemaining(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<number> {
   const { branchConcurrentLimit, prConcurrentLimit } = config;
   const limit =
@@ -111,7 +111,7 @@ export async function getConcurrentBranchesRemaining(
 
       const existingCount = existingBranches.length;
       logger.debug(
-        `${existingCount} already existing branches found: ${existingBranches.join()}`
+        `${existingCount} already existing branches found: ${existingBranches.join()}`,
       );
 
       const concurrentRemaining = Math.max(0, limit - existingCount);
@@ -129,12 +129,12 @@ export async function getConcurrentBranchesRemaining(
 
 export async function getBranchesRemaining(
   config: RenovateConfig,
-  branches: BranchConfig[]
+  branches: BranchConfig[],
 ): Promise<number> {
   const hourlyRemaining = await getPrHourlyRemaining(config);
   const concurrentRemaining = await getConcurrentBranchesRemaining(
     config,
-    branches
+    branches,
   );
   return Math.min(hourlyRemaining, concurrentRemaining);
 }
diff --git a/lib/workers/repository/process/lookup/bucket.ts b/lib/workers/repository/process/lookup/bucket.ts
index ba12098d001249833a924788e4a704b9dffd8a3b..4cc4295673f8911e1a64ee41dfc3152db067294c 100644
--- a/lib/workers/repository/process/lookup/bucket.ts
+++ b/lib/workers/repository/process/lookup/bucket.ts
@@ -10,7 +10,7 @@ export function getBucket(
   config: BucketConfig,
   currentVersion: string,
   newVersion: string,
-  versioning: VersioningApi
+  versioning: VersioningApi,
 ): string | null {
   const { separateMajorMinor, separateMultipleMajor, separateMinorPatch } =
     config;
diff --git a/lib/workers/repository/process/lookup/current.ts b/lib/workers/repository/process/lookup/current.ts
index e987d7cd9b0fe511268bf435e3ac86b34bb0da7f..96fa560d51af2bd56a8696f7aebbb967cb74519d 100644
--- a/lib/workers/repository/process/lookup/current.ts
+++ b/lib/workers/repository/process/lookup/current.ts
@@ -9,7 +9,7 @@ export function getCurrentVersion(
   versioning: VersioningApi,
   rangeStrategy: string,
   latestVersion: string,
-  allVersions: string[]
+  allVersions: string[],
 ): string | null {
   // istanbul ignore if
   if (!is.string(currentValue)) {
@@ -23,11 +23,11 @@ export function getCurrentVersion(
   }
   logger.trace(`currentValue ${currentValue} is range`);
   let useVersions = allVersions.filter((v) =>
-    versioning.matches(v, currentValue)
+    versioning.matches(v, currentValue),
   );
   if (latestVersion && versioning.matches(latestVersion, currentValue)) {
     useVersions = useVersions.filter(
-      (v) => !versioning.isGreaterThan(v, latestVersion)
+      (v) => !versioning.isGreaterThan(v, latestVersion),
     );
   }
   if (rangeStrategy === 'pin') {
diff --git a/lib/workers/repository/process/lookup/filter-checks.spec.ts b/lib/workers/repository/process/lookup/filter-checks.spec.ts
index c07531057fc2e267b7fce2f49ab9883f5b3a2abb..f0f01e38496a777bcf6f73cd6b178c590453fe20 100644
--- a/lib/workers/repository/process/lookup/filter-checks.spec.ts
+++ b/lib/workers/repository/process/lookup/filter-checks.spec.ts
@@ -56,7 +56,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
@@ -71,7 +71,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
@@ -86,7 +86,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeTrue();
@@ -101,7 +101,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
@@ -116,7 +116,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
@@ -134,7 +134,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
@@ -149,7 +149,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
@@ -169,7 +169,7 @@ describe('workers/repository/process/lookup/filter-checks', () => {
         config,
         versioning,
         'patch',
-        sortedReleases
+        sortedReleases,
       );
       expect(res).toMatchSnapshot();
       expect(res.pendingChecks).toBeFalse();
diff --git a/lib/workers/repository/process/lookup/filter-checks.ts b/lib/workers/repository/process/lookup/filter-checks.ts
index 3959e36f77e509d1945492ec656b6a81c758f21e..d8f3f631cd2540bebd2f52f01b9ffd49a8d3ada2 100644
--- a/lib/workers/repository/process/lookup/filter-checks.ts
+++ b/lib/workers/repository/process/lookup/filter-checks.ts
@@ -25,7 +25,7 @@ export async function filterInternalChecks(
   config: Partial<LookupUpdateConfig & UpdateResult>,
   versioning: VersioningApi,
   bucket: string,
-  sortedReleases: Release[]
+  sortedReleases: Release[],
 ): Promise<InternalChecksResult> {
   const { currentVersion, datasource, depName, internalChecksFilter } = config;
   let release: Release | undefined = undefined;
@@ -45,11 +45,11 @@ export async function filterInternalChecks(
         versioning,
         // TODO #22198
         currentVersion!,
-        candidateRelease.version
+        candidateRelease.version,
       );
       releaseConfig = mergeChildConfig(
         releaseConfig,
-        releaseConfig[releaseConfig.updateType]!
+        releaseConfig[releaseConfig.updateType]!,
       );
       // Apply packageRules in case any apply to updateType
       releaseConfig = applyPackageRules(releaseConfig);
@@ -69,7 +69,7 @@ export async function filterInternalChecks(
           // Skip it if it doesn't pass checks
           logger.trace(
             { depName, check: 'minimumReleaseAge' },
-            `Release ${candidateRelease.version} is pending status checks`
+            `Release ${candidateRelease.version} is pending status checks`,
           );
           pendingReleases.unshift(candidateRelease);
           continue;
@@ -84,13 +84,13 @@ export async function filterInternalChecks(
             depName!,
             currentVersion!,
             newVersion,
-            updateType!
+            updateType!,
           )) ?? 'neutral';
         // TODO #22198
         if (!satisfiesConfidenceLevel(confidenceLevel, minimumConfidence!)) {
           logger.trace(
             { depName, check: 'minimumConfidence' },
-            `Release ${candidateRelease.version} is pending status checks`
+            `Release ${candidateRelease.version} is pending status checks`,
           );
           pendingReleases.unshift(candidateRelease);
           continue;
@@ -105,7 +105,7 @@ export async function filterInternalChecks(
         // If all releases were pending then just take the highest
         logger.trace(
           { depName, bucket },
-          'All releases are pending - using latest'
+          'All releases are pending - using latest',
         );
         release = pendingReleases.pop();
         // None are pending anymore because we took the latest, so empty the array
diff --git a/lib/workers/repository/process/lookup/filter.spec.ts b/lib/workers/repository/process/lookup/filter.spec.ts
index 2ba090b5b49cd9b4db807b4a48c30ad86b4ee5e7..0d75534e35ec27de9d3e0b2151b240f8348d9228 100644
--- a/lib/workers/repository/process/lookup/filter.spec.ts
+++ b/lib/workers/repository/process/lookup/filter.spec.ts
@@ -49,7 +49,7 @@ describe('workers/repository/process/lookup/filter', () => {
         currentVersion,
         latestVersion,
         releases,
-        versioning
+        versioning,
       );
 
       expect(filteredVersions).toEqual([
diff --git a/lib/workers/repository/process/lookup/filter.ts b/lib/workers/repository/process/lookup/filter.ts
index d8e31fc3581eb8c21b071ad055e12a38c6254c2b..bdcc4371e9d02c3a98c1a469e6ca9151c9f89a4d 100644
--- a/lib/workers/repository/process/lookup/filter.ts
+++ b/lib/workers/repository/process/lookup/filter.ts
@@ -14,7 +14,7 @@ export function filterVersions(
   currentVersion: string,
   latestVersion: string,
   releases: Release[],
-  versioning: VersioningApi
+  versioning: VersioningApi,
 ): Release[] {
   const { ignoreUnstable, ignoreDeprecated, respectLatest, allowedVersions } =
     config;
@@ -38,22 +38,22 @@ export function filterVersions(
   let filteredVersions = releases.filter(
     (v) =>
       versioning.isVersion(v.version) &&
-      versioning.isGreaterThan(v.version, currentVersion)
+      versioning.isGreaterThan(v.version, currentVersion),
   );
 
   // Don't upgrade from non-deprecated to deprecated
   const fromRelease = releases.find(
-    (release) => release.version === currentVersion
+    (release) => release.version === currentVersion,
   );
   if (ignoreDeprecated && fromRelease && !fromRelease.isDeprecated) {
     filteredVersions = filteredVersions.filter((v) => {
       const versionRelease = releases.find(
-        (release) => release.version === v.version
+        (release) => release.version === v.version,
       );
       // TODO: types (#22198)
       if (versionRelease!.isDeprecated) {
         logger.trace(
-          `Skipping ${config.depName!}@${v.version} because it is deprecated`
+          `Skipping ${config.depName!}@${v.version} because it is deprecated`,
         );
         return false;
       }
@@ -65,11 +65,11 @@ export function filterVersions(
     const isAllowedPred = configRegexPredicate(allowedVersions);
     if (isAllowedPred) {
       filteredVersions = filteredVersions.filter(({ version }) =>
-        isAllowedPred(version)
+        isAllowedPred(version),
       );
     } else if (versioning.isValid(allowedVersions)) {
       filteredVersions = filteredVersions.filter((v) =>
-        versioning.matches(v.version, allowedVersions)
+        versioning.matches(v.version, allowedVersions),
       );
     } else if (
       config.versioning !== npmVersioning.id &&
@@ -77,13 +77,13 @@ export function filterVersions(
     ) {
       logger.debug(
         { depName: config.depName },
-        'Falling back to npm semver syntax for allowedVersions'
+        'Falling back to npm semver syntax for allowedVersions',
       );
       filteredVersions = filteredVersions.filter((v) =>
         semver.satisfies(
           semver.valid(v.version) ? v.version : semver.coerce(v.version)!,
-          allowedVersions
-        )
+          allowedVersions,
+        ),
       );
     } else if (
       config.versioning === poetryVersioning.id &&
@@ -91,10 +91,10 @@ export function filterVersions(
     ) {
       logger.debug(
         { depName: config.depName },
-        'Falling back to pypi syntax for allowedVersions'
+        'Falling back to pypi syntax for allowedVersions',
       );
       filteredVersions = filteredVersions.filter((v) =>
-        pep440.matches(v.version, allowedVersions)
+        pep440.matches(v.version, allowedVersions),
       );
     } else {
       const error = new Error(CONFIG_VALIDATION);
@@ -117,7 +117,7 @@ export function filterVersions(
     !versioning.isGreaterThan(currentVersion, latestVersion)
   ) {
     filteredVersions = filteredVersions.filter(
-      (v) => !versioning.isGreaterThan(v.version, latestVersion)
+      (v) => !versioning.isGreaterThan(v.version, latestVersion),
     );
   }
 
diff --git a/lib/workers/repository/process/lookup/generate.ts b/lib/workers/repository/process/lookup/generate.ts
index 81c46edb3eed0e116582c5de32dbf8a4192fbe80..7d89efc976a2cb55baff20ea810222b5e32aa379 100644
--- a/lib/workers/repository/process/lookup/generate.ts
+++ b/lib/workers/repository/process/lookup/generate.ts
@@ -15,7 +15,7 @@ export async function generateUpdate(
   rangeStrategy: RangeStrategy,
   currentVersion: string,
   bucket: string,
-  release: Release
+  release: Release,
 ): Promise<LookupUpdate> {
   const newVersion = release.version;
   const update: LookupUpdate = {
@@ -61,7 +61,7 @@ export async function generateUpdate(
     } catch (err) /* istanbul ignore next */ {
       logger.warn(
         { err, currentValue, rangeStrategy, currentVersion, newVersion },
-        'getNewValue error'
+        'getNewValue error',
       );
       update.newValue = currentValue;
     }
@@ -86,7 +86,7 @@ export async function generateUpdate(
       packageName,
       currentVersion,
       newVersion,
-      update.updateType
+      update.updateType,
     );
   }
   if (!versioning.isVersion(update.newValue)) {
diff --git a/lib/workers/repository/process/lookup/index.spec.ts b/lib/workers/repository/process/lookup/index.spec.ts
index 3c4b49507f366b9506cc15dbee9877c31b7390c0..e9001458c7177b8e0c1cb294ee53f9741d9fe17e 100644
--- a/lib/workers/repository/process/lookup/index.spec.ts
+++ b/lib/workers/repository/process/lookup/index.spec.ts
@@ -43,17 +43,17 @@ let config: LookupUpdateConfig;
 describe('workers/repository/process/lookup/index', () => {
   const getGithubReleases = jest.spyOn(
     GithubReleasesDatasource.prototype,
-    'getReleases'
+    'getReleases',
   );
 
   const getGithubTags = jest.spyOn(
     GithubTagsDatasource.prototype,
-    'getReleases'
+    'getReleases',
   );
 
   const getDockerReleases = jest.spyOn(
     DockerDatasource.prototype,
-    'getReleases'
+    'getReleases',
   );
 
   const getDockerDigest = jest.spyOn(DockerDatasource.prototype, 'getDigest');
@@ -85,7 +85,7 @@ describe('workers/repository/process/lookup/index', () => {
       // @ts-expect-error: testing invalid currentValue
       config.currentValue = 3;
       expect((await lookup.lookupUpdates(config)).skipReason).toBe(
-        'invalid-value'
+        'invalid-value',
       );
     });
 
@@ -261,7 +261,7 @@ describe('workers/repository/process/lookup/index', () => {
       config.datasource = NpmDatasource.id;
       httpMock.scope('https://registry.npmjs.org').get('/q').reply(200, qJson);
       await expect(lookup.lookupUpdates(config)).rejects.toThrow(
-        Error(CONFIG_VALIDATION)
+        Error(CONFIG_VALIDATION),
       );
     });
 
@@ -385,7 +385,7 @@ describe('workers/repository/process/lookup/index', () => {
           .get('/q')
           .reply(200, qJson);
         expect(await lookup.lookupUpdates(config)).toMatchObject({ updates });
-      }
+      },
     );
 
     it.each`
@@ -406,7 +406,7 @@ describe('workers/repository/process/lookup/index', () => {
           .get('/q')
           .reply(200, qJson);
         expect((await lookup.lookupUpdates(config)).updates).toEqual([]);
-      }
+      },
     );
 
     it('supports pinning for x-range-all (no lockfile)', async () => {
@@ -447,7 +447,7 @@ describe('workers/repository/process/lookup/index', () => {
           .get('/q')
           .reply(200, qJson);
         expect((await lookup.lookupUpdates(config)).updates).toEqual([]);
-      }
+      },
     );
 
     it('ignores pinning for ranges when other upgrade exists', async () => {
@@ -1228,7 +1228,7 @@ describe('workers/repository/process/lookup/index', () => {
       expect(res.updates).toHaveLength(0);
       expect(res.warnings).toHaveLength(1);
       expect(res.warnings[0].message).toBe(
-        "Can't find version with tag foo for npm package typescript"
+        "Can't find version with tag foo for npm package typescript",
       );
     });
 
@@ -1301,7 +1301,7 @@ describe('workers/repository/process/lookup/index', () => {
       config.datasource = GithubTagsDatasource.id;
 
       getGithubTags.mockRejectedValueOnce(
-        new Error('Not contained in registry')
+        new Error('Not contained in registry'),
       );
       getGithubTags.mockResolvedValueOnce({
         releases: [
@@ -1321,7 +1321,7 @@ describe('workers/repository/process/lookup/index', () => {
         expect.objectContaining({
           registryUrl: 'https://github.com',
         }),
-        'v1.0.0'
+        'v1.0.0',
       );
 
       expect(res.updates).toHaveLength(1);
@@ -2270,7 +2270,7 @@ describe('workers/repository/process/lookup/index', () => {
       const defaultApiBaseUrl = 'https://developer.mend.io/';
       const getMergeConfidenceSpy = jest.spyOn(
         McApi,
-        'getMergeConfidenceLevel'
+        'getMergeConfidenceLevel',
       );
       const hostRule: HostRule = {
         hostType: 'merge-confidence',
@@ -2304,7 +2304,7 @@ describe('workers/repository/process/lookup/index', () => {
         httpMock
           .scope(defaultApiBaseUrl)
           .get(
-            `/api/mc/json/${datasource}/${packageName}/${currentValue}/${newVersion}`
+            `/api/mc/json/${datasource}/${packageName}/${currentValue}/${newVersion}`,
           )
           .reply(200, { confidence: 'high' });
 
diff --git a/lib/workers/repository/process/lookup/index.ts b/lib/workers/repository/process/lookup/index.ts
index d061bf8fec6e24d0dab165b5e28859ebb1ee6b51..d2b4117a4db334cb64c43c29a4195d667f454b86 100644
--- a/lib/workers/repository/process/lookup/index.ts
+++ b/lib/workers/repository/process/lookup/index.ts
@@ -35,7 +35,7 @@ import {
 } from './utils';
 
 export async function lookupUpdates(
-  inconfig: LookupUpdateConfig
+  inconfig: LookupUpdateConfig,
 ): Promise<UpdateResult> {
   let config: LookupUpdateConfig = { ...inconfig };
   config.versioning ??= getDefaultVersioning(config.datasource);
@@ -57,7 +57,7 @@ export async function lookupUpdates(
         dependency: config.packageName,
         currentValue: config.currentValue,
       },
-      'lookupUpdates'
+      'lookupUpdates',
     );
     if (config.currentValue && !is.string(config.currentValue)) {
       res.skipReason = 'invalid-value';
@@ -85,7 +85,7 @@ export async function lookupUpdates(
             packageName: config.packageName,
             groups: regexMatch.groups,
           },
-          'version compatibility regex match'
+          'version compatibility regex match',
         );
         config.currentCompatibility = regexMatch.groups.compatibility;
         compareValue = regexMatch.groups.version;
@@ -96,7 +96,7 @@ export async function lookupUpdates(
             currentValue: config.currentValue,
             packageName: config.packageName,
           },
-          'version compatibility regex mismatch'
+          'version compatibility regex mismatch',
         );
       }
     }
@@ -113,7 +113,7 @@ export async function lookupUpdates(
       }
 
       const { val: releaseResult, err: lookupError } = await getRawPkgReleases(
-        config
+        config,
       )
         .transform((res) => applyDatasourceFilters(res, config))
         .unwrap();
@@ -133,7 +133,7 @@ export async function lookupUpdates(
             dependency: config.packageName,
             packageFile: config.packageFile,
           },
-          warning.message
+          warning.message,
         );
         // TODO: return warnings in own field
         res.warnings.push(warning);
@@ -144,7 +144,7 @@ export async function lookupUpdates(
 
       if (dependency.deprecationMessage) {
         logger.debug(
-          `Found deprecationMessage for ${config.datasource} package ${config.packageName}`
+          `Found deprecationMessage for ${config.datasource} package ${config.packageName}`,
         );
       }
 
@@ -161,7 +161,7 @@ export async function lookupUpdates(
       const latestVersion = dependency.tags?.latest;
       // Filter out any results from datasource that don't comply with our versioning
       let allVersions = dependency.releases.filter((release) =>
-        versioning.isVersion(release.version)
+        versioning.isVersion(release.version),
       );
 
       // istanbul ignore if
@@ -172,7 +172,7 @@ export async function lookupUpdates(
             dependency: config.packageName,
             result: dependency,
           },
-          message
+          message,
         );
         if (!config.currentDigest) {
           return res;
@@ -193,18 +193,18 @@ export async function lookupUpdates(
           (v) =>
             v.version === taggedVersion ||
             (v.version === compareValue &&
-              versioning.isGreaterThan(taggedVersion, compareValue))
+              versioning.isGreaterThan(taggedVersion, compareValue)),
         );
       }
       // Check that existing constraint can be satisfied
       const allSatisfyingVersions = allVersions.filter(
         (v) =>
           // TODO #22198
-          unconstrainedValue || versioning.matches(v.version, compareValue!)
+          unconstrainedValue || versioning.matches(v.version, compareValue!),
       );
       if (!allSatisfyingVersions.length) {
         logger.debug(
-          `Found no satisfying versions with '${config.versioning}' versioning`
+          `Found no satisfying versions with '${config.versioning}' versioning`,
         );
       }
 
@@ -248,7 +248,7 @@ export async function lookupUpdates(
           versioning,
           rangeStrategy!,
           latestVersion!,
-          nonDeprecatedVersions
+          nonDeprecatedVersions,
         ) ??
         getCurrentVersion(
           compareValue!,
@@ -256,7 +256,7 @@ export async function lookupUpdates(
           versioning,
           rangeStrategy!,
           latestVersion!,
-          allVersions.map((v) => v.version)
+          allVersions.map((v) => v.version),
         )!;
       // istanbul ignore if
       if (!currentVersion! && config.lockedVersion) {
@@ -301,11 +301,12 @@ export async function lookupUpdates(
         config.rangeStrategy === 'in-range-only'
           ? allSatisfyingVersions
           : allVersions,
-        versioning
+        versioning,
       ).filter(
         (v) =>
           // Leave only compatible versions
-          unconstrainedValue || versioning.isCompatible(v.version, compareValue)
+          unconstrainedValue ||
+          versioning.isCompatible(v.version, compareValue),
       );
       if (config.isVulnerabilityAlert && !config.osvVulnerabilityAlerts) {
         filteredReleases = filteredReleases.slice(0, 1);
@@ -317,7 +318,7 @@ export async function lookupUpdates(
           // TODO #22198
           currentVersion!,
           release.version,
-          versioning
+          versioning,
         );
         if (is.string(bucket)) {
           if (buckets[bucket]) {
@@ -330,14 +331,14 @@ export async function lookupUpdates(
       const depResultConfig = mergeChildConfig(config, res);
       for (const [bucket, releases] of Object.entries(buckets)) {
         const sortedReleases = releases.sort((r1, r2) =>
-          versioning.sortVersions(r1.version, r2.version)
+          versioning.sortVersions(r1.version, r2.version),
         );
         const { release, pendingChecks, pendingReleases } =
           await filterInternalChecks(
             depResultConfig,
             versioning,
             bucket,
-            sortedReleases
+            sortedReleases,
           );
         // istanbul ignore next
         if (!release) {
@@ -353,7 +354,7 @@ export async function lookupUpdates(
           rangeStrategy!,
           config.lockedVersion ?? currentVersion!,
           bucket,
-          release
+          release,
         );
         if (pendingChecks) {
           update.pendingChecks = pendingChecks;
@@ -376,7 +377,7 @@ export async function lookupUpdates(
                 lockedVersion: config.lockedVersion,
                 newVersion,
               },
-              'Skipping bump because newValue is the same'
+              'Skipping bump because newValue is the same',
             );
             continue;
           }
@@ -389,7 +390,7 @@ export async function lookupUpdates(
       }
     } else if (compareValue) {
       logger.debug(
-        `Dependency ${config.packageName} has unsupported/unversioned value ${compareValue} (versioning=${config.versioning})`
+        `Dependency ${config.packageName} has unsupported/unversioned value ${compareValue} (versioning=${config.versioning})`,
       );
 
       if (!config.pinDigests && !config.currentDigest) {
@@ -457,7 +458,7 @@ export async function lookupUpdates(
           if (is.string(config.currentValue) && is.string(update.newValue)) {
             update.newValue = config.currentValue.replace(
               compareValue,
-              update.newValue
+              update.newValue,
             );
           }
         }
@@ -482,7 +483,7 @@ export async function lookupUpdates(
                 newValue: update.newValue,
                 bucket: update.bucket,
               },
-              'Could not determine new digest for update.'
+              'Could not determine new digest for update.',
             );
 
             // Only report a warning if there is a current digest.
@@ -499,7 +500,7 @@ export async function lookupUpdates(
         }
         if (update.newVersion) {
           const registryUrl = dependency?.releases?.find(
-            (release) => release.version === update.newVersion
+            (release) => release.version === update.newVersion,
           )?.registryUrl;
           if (registryUrl && registryUrl !== res.registryUrl) {
             update.registryUrl = registryUrl;
@@ -513,7 +514,7 @@ export async function lookupUpdates(
     // Strip out any non-changed ones
     res.updates = res.updates
       .filter(
-        (update) => update.newValue !== null || config.currentValue === null
+        (update) => update.newValue !== null || config.currentValue === null,
       )
       .filter((update) => update.newDigest !== null)
       .filter(
@@ -525,12 +526,12 @@ export async function lookupUpdates(
           update.isLockfileUpdate === true ||
           // TODO #22198
           (update.newDigest &&
-            !update.newDigest.startsWith(config.currentDigest!))
+            !update.newDigest.startsWith(config.currentDigest!)),
       );
     // If range strategy specified in config is 'in-range-only', also strip out updates where currentValue !== newValue
     if (config.rangeStrategy === 'in-range-only') {
       res.updates = res.updates.filter(
-        (update) => update.newValue === config.currentValue
+        (update) => update.newValue === config.currentValue,
       );
     }
     // Handle a weird edge case involving followTag and fallbacks
@@ -538,7 +539,7 @@ export async function lookupUpdates(
       res.updates = res.updates.filter(
         (update) =>
           res.updates.length === 1 ||
-          /* istanbul ignore next */ update.updateType !== 'rollback'
+          /* istanbul ignore next */ update.updateType !== 'rollback',
       );
     }
   } catch (err) /* istanbul ignore next */ {
@@ -562,7 +563,7 @@ export async function lookupUpdates(
         unconstrainedValue,
         err,
       },
-      'lookupUpdates error'
+      'lookupUpdates error',
     );
     res.skipReason = 'internal-error';
   }
diff --git a/lib/workers/repository/process/lookup/rollback.ts b/lib/workers/repository/process/lookup/rollback.ts
index 3f6ce4dfb3486b61aba0d1b1fd3f565fc126d449..d725fa764ad2981a31f5aa64ff85921a8169b48e 100644
--- a/lib/workers/repository/process/lookup/rollback.ts
+++ b/lib/workers/repository/process/lookup/rollback.ts
@@ -7,14 +7,14 @@ import type { RollbackConfig } from './types';
 export function getRollbackUpdate(
   config: RollbackConfig,
   versions: Release[],
-  version: VersioningApi
+  version: VersioningApi,
 ): LookupUpdate | null {
   const { packageFile, versioning, depName, currentValue } = config;
   // istanbul ignore if
   if (!('isLessThanRange' in version)) {
     logger.debug(
       { versioning },
-      'Current versioning does not support isLessThanRange()'
+      'Current versioning does not support isLessThanRange()',
     );
     return null;
   }
@@ -29,17 +29,17 @@ export function getRollbackUpdate(
   if (!lessThanVersions.length) {
     logger.debug(
       { packageFile, depName, currentValue },
-      'Missing version has nothing to roll back to'
+      'Missing version has nothing to roll back to',
     );
     return null;
   }
   logger.debug(
     { packageFile, depName, currentValue },
-    `Current version not found - rolling back`
+    `Current version not found - rolling back`,
   );
   logger.debug(
     { dependency: depName, versions },
-    'Versions found before rolling back'
+    'Versions found before rolling back',
   );
 
   lessThanVersions.sort((a, b) => version.sortVersions(a.version, b.version));
diff --git a/lib/workers/repository/process/lookup/update-type.ts b/lib/workers/repository/process/lookup/update-type.ts
index 4da91f3e338ffa2d7fb86a4864b93dbae5e4e337..a47375b4c64e77c5723cf4a0ec1ed7f5ca4037f1 100644
--- a/lib/workers/repository/process/lookup/update-type.ts
+++ b/lib/workers/repository/process/lookup/update-type.ts
@@ -11,7 +11,7 @@ export function getUpdateType(
   config: UpdateTypeConfig,
   versioning: allVersioning.VersioningApi,
   currentVersion: string,
-  newVersion: string
+  newVersion: string,
 ): UpdateType {
   if (versioning.getMajor(newVersion)! > versioning.getMajor(currentVersion)!) {
     return 'major';
diff --git a/lib/workers/repository/process/lookup/utils.ts b/lib/workers/repository/process/lookup/utils.ts
index 2142e3a04e43cc2c7d4476027d3772ab40512f1a..3a63e363e40f2d213e4e007a4d4eab76f84bae25 100644
--- a/lib/workers/repository/process/lookup/utils.ts
+++ b/lib/workers/repository/process/lookup/utils.ts
@@ -8,7 +8,7 @@ import type { LookupUpdateConfig } from './types';
 
 export function addReplacementUpdateIfValid(
   updates: LookupUpdate[],
-  config: LookupUpdateConfig
+  config: LookupUpdateConfig,
 ): void {
   const replacementNewName = determineNewReplacementName(config);
   const replacementNewValue = determineNewReplacementValue(config);
@@ -26,7 +26,7 @@ export function addReplacementUpdateIfValid(
 }
 
 export function isReplacementRulesConfigured(
-  config: LookupUpdateConfig
+  config: LookupUpdateConfig,
 ): boolean {
   return (
     is.nonEmptyString(config.replacementName) ||
@@ -36,7 +36,7 @@ export function isReplacementRulesConfigured(
 }
 
 export function determineNewReplacementName(
-  config: LookupUpdateConfig
+  config: LookupUpdateConfig,
 ): string {
   return (
     config.replacementName ??
@@ -45,7 +45,7 @@ export function determineNewReplacementName(
 }
 
 export function determineNewReplacementValue(
-  config: LookupUpdateConfig
+  config: LookupUpdateConfig,
 ): string | undefined | null {
   const versioning = allVersioning.get(config.versioning);
   const rangeStrategy = getRangeStrategy(config);
diff --git a/lib/workers/repository/process/vulnerabilities.spec.ts b/lib/workers/repository/process/vulnerabilities.spec.ts
index fa370f831a693f02a1957fac3af8d6d35c7a085b..017486b80b693c099a80f43c1357de9a30201fe6 100644
--- a/lib/workers/repository/process/vulnerabilities.spec.ts
+++ b/lib/workers/repository/process/vulnerabilities.spec.ts
@@ -98,7 +98,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       const vulnerabilityList = await vulnerabilities.fetchVulnerabilities(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(vulnerabilityList).toMatchObject([
         {
@@ -166,10 +166,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.trace).toHaveBeenCalledWith(
-        'Cannot map datasource docker to OSV ecosystem'
+        'Cannot map datasource docker to OSV ecosystem',
       );
     });
 
@@ -186,10 +186,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.trace).toHaveBeenCalledWith(
-        'No vulnerabilities found in OSV database for lodash'
+        'No vulnerabilities found in OSV database for lodash',
       );
     });
 
@@ -213,7 +213,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(config.packageRules).toHaveLength(0);
     });
@@ -238,10 +238,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.trace).toHaveBeenCalledWith(
-        'Skipping withdrawn vulnerability GHSA-x5rq-j2xg-h7qm'
+        'Skipping withdrawn vulnerability GHSA-x5rq-j2xg-h7qm',
       );
       expect(config.packageRules).toHaveLength(0);
     });
@@ -265,10 +265,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Skipping vulnerability lookup for package lodash due to unsupported version #4.17.11'
+        'Skipping vulnerability lookup for package lodash due to unsupported version #4.17.11',
       );
     });
 
@@ -292,11 +292,11 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.warn).toHaveBeenCalledWith(
         { err },
-        'Error fetching vulnerability information for lodash'
+        'Error fetching vulnerability information for lodash',
       );
     });
 
@@ -340,11 +340,11 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.debug).toHaveBeenCalledWith(
         { event },
-        'Skipping OSV event with invalid version'
+        'Skipping OSV event with invalid version',
       );
     });
 
@@ -373,7 +373,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(config.packageRules).toHaveLength(0);
     });
@@ -404,10 +404,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.info).toHaveBeenCalledWith(
-        'No fixed version available for vulnerability GHSA-xxxx-yyyy-zzzz in fake 4.17.11'
+        'No fixed version available for vulnerability GHSA-xxxx-yyyy-zzzz in fake 4.17.11',
       );
     });
 
@@ -446,10 +446,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.info).toHaveBeenCalledWith(
-        'No fixed version available for vulnerability GHSA-xxxx-yyyy-zzzz in fake 1.5.1'
+        'No fixed version available for vulnerability GHSA-xxxx-yyyy-zzzz in fake 1.5.1',
       );
     });
 
@@ -495,13 +495,13 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Vulnerability GO-2022-0187 affects stdlib 1.7.5'
+        'Vulnerability GO-2022-0187 affects stdlib 1.7.5',
       );
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Setting allowed version 1.7.6 to fix vulnerability GO-2022-0187 in stdlib 1.7.5'
+        'Setting allowed version 1.7.6 to fix vulnerability GO-2022-0187 in stdlib 1.7.5',
       );
       expect(config.packageRules).toHaveLength(1);
       expect(config.packageRules).toMatchObject([
@@ -573,7 +573,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(config.packageRules).toHaveLength(1);
       expect(config.packageRules).toMatchObject([
@@ -635,7 +635,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(config.packageRules).toHaveLength(2);
       expect(config.packageRules).toMatchObject([
@@ -671,7 +671,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(config.packageRules).toHaveLength(0);
     });
@@ -735,7 +735,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
 
       expect(config.packageRules).toHaveLength(1);
@@ -817,7 +817,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
 
       expect(config.packageRules).toHaveLength(2);
@@ -874,10 +874,10 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(logger.logger.debug).not.toHaveBeenCalledWith(
-        'OSV advisory GHSA-xxxx-yyyy-zzzz lists quokka 1.2.3 as vulnerable'
+        'OSV advisory GHSA-xxxx-yyyy-zzzz lists quokka 1.2.3 as vulnerable',
       );
       expect(config.packageRules).toHaveLength(0);
     });
@@ -921,7 +921,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
       expect(config.packageRules).toHaveLength(1);
       expect(config.packageRules).toMatchObject([
@@ -999,11 +999,11 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
 
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'Error processing CVSS vector some-invalid-score'
+        'Error processing CVSS vector some-invalid-score',
       );
       expect(config.packageRules).toHaveLength(1);
       expect(config.packageRules).toMatchObject([
@@ -1064,7 +1064,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
 
       expect(config.packageRules).toHaveLength(1);
@@ -1149,7 +1149,7 @@ describe('workers/repository/process/vulnerabilities', () => {
 
       await vulnerabilities.appendVulnerabilityPackageRules(
         config,
-        packageFiles
+        packageFiles,
       );
 
       expect(config.packageRules).toHaveLength(1);
diff --git a/lib/workers/repository/process/vulnerabilities.ts b/lib/workers/repository/process/vulnerabilities.ts
index a51ecd0590017c6916b75c715642b2c308f55742..49fb8e637383d12c9b63a51de86a76e2abeed07e 100644
--- a/lib/workers/repository/process/vulnerabilities.ts
+++ b/lib/workers/repository/process/vulnerabilities.ts
@@ -57,11 +57,11 @@ export class Vulnerabilities {
 
   async appendVulnerabilityPackageRules(
     config: RenovateConfig,
-    packageFiles: Record<string, PackageFile[]>
+    packageFiles: Record<string, PackageFile[]>,
   ): Promise<void> {
     const dependencyVulnerabilities = await this.fetchDependencyVulnerabilities(
       config,
-      packageFiles
+      packageFiles,
     );
 
     config.packageRules ??= [];
@@ -85,22 +85,22 @@ export class Vulnerabilities {
 
   async fetchVulnerabilities(
     config: RenovateConfig,
-    packageFiles: Record<string, PackageFile[]>
+    packageFiles: Record<string, PackageFile[]>,
   ): Promise<Vulnerability[]> {
     const groups = await this.fetchDependencyVulnerabilities(
       config,
-      packageFiles
+      packageFiles,
     );
     return groups.flatMap((group) => group.vulnerabilities);
   }
 
   private async fetchDependencyVulnerabilities(
     config: RenovateConfig,
-    packageFiles: Record<string, PackageFile[]>
+    packageFiles: Record<string, PackageFile[]>,
   ): Promise<DependencyVulnerabilities[]> {
     const managers = Object.keys(packageFiles);
     const allManagerJobs = managers.map((manager) =>
-      this.fetchManagerVulnerabilities(config, packageFiles, manager)
+      this.fetchManagerVulnerabilities(config, packageFiles, manager),
     );
     return (await Promise.all(allManagerJobs)).flat();
   }
@@ -108,16 +108,16 @@ export class Vulnerabilities {
   private async fetchManagerVulnerabilities(
     config: RenovateConfig,
     packageFiles: Record<string, PackageFile[]>,
-    manager: string
+    manager: string,
   ): Promise<DependencyVulnerabilities[]> {
     const managerConfig = getManagerConfig(config, manager);
     const queue = packageFiles[manager].map(
       (pFile) => (): Promise<DependencyVulnerabilities[]> =>
-        this.fetchManagerPackageFileVulnerabilities(managerConfig, pFile)
+        this.fetchManagerPackageFileVulnerabilities(managerConfig, pFile),
     );
     logger.trace(
       { manager, queueLength: queue.length },
-      'fetchManagerVulnerabilities starting'
+      'fetchManagerVulnerabilities starting',
     );
     const result = (await p.all(queue)).flat();
     logger.trace({ manager }, 'fetchManagerVulnerabilities finished');
@@ -126,24 +126,24 @@ export class Vulnerabilities {
 
   private async fetchManagerPackageFileVulnerabilities(
     managerConfig: RenovateConfig,
-    pFile: PackageFile
+    pFile: PackageFile,
   ): Promise<DependencyVulnerabilities[]> {
     const { packageFile } = pFile;
     const packageFileConfig = mergeChildConfig(managerConfig, pFile);
     const { manager } = packageFileConfig;
     const queue = pFile.deps.map(
       (dep) => (): Promise<DependencyVulnerabilities | null> =>
-        this.fetchDependencyVulnerability(packageFileConfig, dep)
+        this.fetchDependencyVulnerability(packageFileConfig, dep),
     );
     logger.trace(
       { manager, packageFile, queueLength: queue.length },
-      'fetchManagerPackageFileVulnerabilities starting with concurrency'
+      'fetchManagerPackageFileVulnerabilities starting with concurrency',
     );
 
     const result = await p.all(queue);
     logger.trace(
       { packageFile },
-      'fetchManagerPackageFileVulnerabilities finished'
+      'fetchManagerPackageFileVulnerabilities finished',
     );
 
     return result.filter(is.truthy);
@@ -151,7 +151,7 @@ export class Vulnerabilities {
 
   private async fetchDependencyVulnerability(
     packageFileConfig: RenovateConfig & PackageFile,
-    dep: PackageDependency
+    dep: PackageDependency,
   ): Promise<DependencyVulnerabilities | null> {
     const ecosystem = Vulnerabilities.datasourceEcosystemMap[dep.datasource!];
     if (!ecosystem) {
@@ -168,14 +168,14 @@ export class Vulnerabilities {
     try {
       const osvVulnerabilities = await this.osvOffline?.getVulnerabilities(
         ecosystem,
-        packageName
+        packageName,
       );
       if (
         is.nullOrUndefined(osvVulnerabilities) ||
         is.emptyArray(osvVulnerabilities)
       ) {
         logger.trace(
-          `No vulnerabilities found in OSV database for ${packageName}`
+          `No vulnerabilities found in OSV database for ${packageName}`,
         );
         return null;
       }
@@ -188,7 +188,7 @@ export class Vulnerabilities {
 
       if (!versioningApi.isVersion(depVersion)) {
         logger.debug(
-          `Skipping vulnerability lookup for package ${packageName} due to unsupported version ${depVersion}`
+          `Skipping vulnerability lookup for package ${packageName} due to unsupported version ${depVersion}`,
         );
         return null;
       }
@@ -197,7 +197,7 @@ export class Vulnerabilities {
       for (const osvVulnerability of osvVulnerabilities) {
         if (osvVulnerability.withdrawn) {
           logger.trace(
-            `Skipping withdrawn vulnerability ${osvVulnerability.id}`
+            `Skipping withdrawn vulnerability ${osvVulnerability.id}`,
           );
           continue;
         }
@@ -208,20 +208,20 @@ export class Vulnerabilities {
             packageName,
             depVersion,
             affected,
-            versioningApi
+            versioningApi,
           );
           if (!isVulnerable) {
             continue;
           }
 
           logger.debug(
-            `Vulnerability ${osvVulnerability.id} affects ${packageName} ${depVersion}`
+            `Vulnerability ${osvVulnerability.id} affects ${packageName} ${depVersion}`,
           );
           const fixedVersion = this.getFixedVersion(
             ecosystem,
             depVersion,
             affected,
-            versioningApi
+            versioningApi,
           );
 
           vulnerabilities.push({
@@ -240,7 +240,7 @@ export class Vulnerabilities {
     } catch (err) {
       logger.warn(
         { err },
-        `Error fetching vulnerability information for ${packageName}`
+        `Error fetching vulnerability information for ${packageName}`,
       );
       return null;
     }
@@ -248,7 +248,7 @@ export class Vulnerabilities {
 
   private sortByFixedVersion(
     packageRules: PackageRule[],
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): void {
     const versionsCleaned: Record<string, string> = {};
     for (const rule of packageRules) {
@@ -258,15 +258,15 @@ export class Vulnerabilities {
     packageRules.sort((a, b) =>
       versioningApi.sortVersions(
         versionsCleaned[a.allowedVersions as string],
-        versionsCleaned[b.allowedVersions as string]
-      )
+        versionsCleaned[b.allowedVersions as string],
+      ),
     );
   }
 
   // https://ossf.github.io/osv-schema/#affectedrangesevents-fields
   private sortEvents(
     events: Osv.Event[],
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): Osv.Event[] {
     const sortedCopy: Osv.Event[] = [];
     let zeroEvent: Osv.Event | null = null;
@@ -283,7 +283,7 @@ export class Vulnerabilities {
 
     sortedCopy.sort((a, b) =>
       // no pre-processing, as there are only very few values to sort
-      versioningApi.sortVersions(Object.values(a)[0], Object.values(b)[0])
+      versioningApi.sortVersions(Object.values(a)[0], Object.values(b)[0]),
     );
 
     if (zeroEvent) {
@@ -296,7 +296,7 @@ export class Vulnerabilities {
   private isPackageAffected(
     ecosystem: Ecosystem,
     packageName: string,
-    affected: Osv.Affected
+    affected: Osv.Affected,
   ): boolean {
     return (
       affected.package?.name === packageName &&
@@ -306,7 +306,7 @@ export class Vulnerabilities {
 
   private includedInVersions(
     depVersion: string,
-    affected: Osv.Affected
+    affected: Osv.Affected,
   ): boolean {
     return !!affected.versions?.includes(depVersion);
   }
@@ -314,7 +314,7 @@ export class Vulnerabilities {
   private includedInRanges(
     depVersion: string,
     affected: Osv.Affected,
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): boolean {
     for (const range of affected.ranges ?? []) {
       if (range.type === 'GIT') {
@@ -356,7 +356,7 @@ export class Vulnerabilities {
     packageName: string,
     depVersion: string,
     affected: Osv.Affected,
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): boolean {
     return (
       this.isPackageAffected(ecosystem, packageName, affected) &&
@@ -369,7 +369,7 @@ export class Vulnerabilities {
     ecosystem: Ecosystem,
     depVersion: string,
     affected: Osv.Affected,
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): string | null {
     const fixedVersions: string[] = [];
     const lastAffectedVersions: string[] = [];
@@ -396,7 +396,7 @@ export class Vulnerabilities {
 
     fixedVersions.sort((a, b) => versioningApi.sortVersions(a, b));
     const fixedVersion = fixedVersions.find((version) =>
-      this.isVersionGt(version, depVersion, versioningApi)
+      this.isVersionGt(version, depVersion, versioningApi),
     );
     if (fixedVersion) {
       return ecosystem === 'PyPI' ? `==${fixedVersion}` : fixedVersion;
@@ -404,7 +404,7 @@ export class Vulnerabilities {
 
     lastAffectedVersions.sort((a, b) => versioningApi.sortVersions(a, b));
     const lastAffected = lastAffectedVersions.find((version) =>
-      this.isVersionGtOrEq(version, depVersion, versioningApi)
+      this.isVersionGtOrEq(version, depVersion, versioningApi),
     );
     if (lastAffected) {
       return `> ${lastAffected}`;
@@ -416,7 +416,7 @@ export class Vulnerabilities {
   private isVersionGt(
     version: string,
     other: string,
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): boolean {
     return (
       versioningApi.isVersion(version) &&
@@ -428,7 +428,7 @@ export class Vulnerabilities {
   private isVersionGtOrEq(
     version: string,
     other: string,
-    versioningApi: VersioningApi
+    versioningApi: VersioningApi,
   ): boolean {
     return (
       versioningApi.isVersion(version) &&
@@ -450,18 +450,18 @@ export class Vulnerabilities {
     } = vul;
     if (is.nullOrUndefined(fixedVersion)) {
       logger.info(
-        `No fixed version available for vulnerability ${vulnerability.id} in ${packageName} ${depVersion}`
+        `No fixed version available for vulnerability ${vulnerability.id} in ${packageName} ${depVersion}`,
       );
       return null;
     }
 
     logger.debug(
-      `Setting allowed version ${fixedVersion} to fix vulnerability ${vulnerability.id} in ${packageName} ${depVersion}`
+      `Setting allowed version ${fixedVersion} to fix vulnerability ${vulnerability.id} in ${packageName} ${depVersion}`,
     );
 
     const severityDetails = this.extractSeverityDetails(
       vulnerability,
-      affected
+      affected,
     );
 
     return {
@@ -493,7 +493,7 @@ export class Vulnerabilities {
 
   private generatePrBodyNotes(
     vulnerability: Osv.Vulnerability,
-    affected: Osv.Affected
+    affected: Osv.Affected,
   ): string[] {
     let aliases = [vulnerability.id].concat(vulnerability.aliases ?? []).sort();
     aliases = aliases.map((id) => {
@@ -517,14 +517,14 @@ export class Vulnerabilities {
 
     const details = vulnerability.details?.replace(
       regEx(/^#{1,4} /gm),
-      '##### '
+      '##### ',
     );
     content += `#### Details\n${details ?? 'No details.'}\n`;
 
     content += '#### Severity\n';
     const severityDetails = this.extractSeverityDetails(
       vulnerability,
-      affected
+      affected,
     );
 
     if (severityDetails.cvssVector) {
@@ -560,7 +560,7 @@ export class Vulnerabilities {
 
   private extractSeverityDetails(
     vulnerability: Osv.Vulnerability,
-    affected: Osv.Affected
+    affected: Osv.Affected,
   ): SeverityDetails {
     let severityLevel = 'UNKNOWN';
     let score = 'Unknown';
diff --git a/lib/workers/repository/process/write.spec.ts b/lib/workers/repository/process/write.spec.ts
index 421cfbac554b192fe3d35a8536c35a92b39bfcb1..706857548b3c3dd7a6f5d2478eae13299e879fa6 100644
--- a/lib/workers/repository/process/write.spec.ts
+++ b/lib/workers/repository/process/write.spec.ts
@@ -195,7 +195,7 @@ describe('workers/repository/process/write', () => {
         ...new Set(
           branch.upgrades
             .map((upgrade) => hashMap.get(upgrade.manager) ?? upgrade.manager)
-            .filter(is.string)
+            .filter(is.string),
         ),
       ].sort();
       const commitFingerprint = fingerprint({
@@ -224,7 +224,7 @@ describe('workers/repository/process/write', () => {
         ...new Set(
           branch.upgrades
             .map((upgrade) => hashMap.get(upgrade.manager) ?? upgrade.manager)
-            .filter(is.string)
+            .filter(is.string),
         ),
       ].sort();
 
@@ -269,7 +269,7 @@ describe('workers/repository/process/write', () => {
         ...new Set(
           branch.upgrades
             .map((upgrade) => hashMap.get(upgrade.manager) ?? upgrade.manager)
-            .filter(is.string)
+            .filter(is.string),
         ),
       ].sort();
       const commitFingerprint = fingerprint({
@@ -318,7 +318,7 @@ describe('workers/repository/process/write', () => {
       scm.branchExists.mockResolvedValueOnce(true);
       await writeUpdates(config, branches);
       expect(logger.logger.debug).not.toHaveBeenCalledWith(
-        'No branch cache found for new/some-branch'
+        'No branch cache found for new/some-branch',
       );
       expect(repoCacheObj).toEqual({
         branches: [
@@ -381,7 +381,7 @@ describe('workers/repository/process/write', () => {
       scm.getBranchCommit.mockResolvedValueOnce('sha');
       scm.getBranchCommit.mockResolvedValueOnce('base_sha');
       return expect(
-        syncBranchState('branch_name', 'base_branch')
+        syncBranchState('branch_name', 'base_branch'),
       ).resolves.toEqual({
         branchName: 'branch_name',
         sha: 'sha',
@@ -410,7 +410,7 @@ describe('workers/repository/process/write', () => {
       scm.getBranchCommit.mockResolvedValueOnce('sha');
       scm.getBranchCommit.mockResolvedValueOnce('base_sha');
       return expect(
-        syncBranchState('branch_name', 'new_base_branch')
+        syncBranchState('branch_name', 'new_base_branch'),
       ).resolves.toEqual({
         branchName: 'branch_name',
         sha: 'sha',
@@ -443,7 +443,7 @@ describe('workers/repository/process/write', () => {
       scm.getBranchCommit.mockResolvedValueOnce('sha');
       scm.getBranchCommit.mockResolvedValueOnce('new_base_sha');
       return expect(
-        syncBranchState('branch_name', 'base_branch')
+        syncBranchState('branch_name', 'base_branch'),
       ).resolves.toEqual({
         branchName: 'branch_name',
         sha: 'sha',
@@ -479,7 +479,7 @@ describe('workers/repository/process/write', () => {
       scm.getBranchCommit.mockResolvedValueOnce('new_sha');
       scm.getBranchCommit.mockResolvedValueOnce('base_sha');
       return expect(
-        syncBranchState('branch_name', 'base_branch')
+        syncBranchState('branch_name', 'base_branch'),
       ).resolves.toEqual({
         branchName: 'branch_name',
         sha: 'new_sha',
@@ -515,7 +515,7 @@ describe('workers/repository/process/write', () => {
       scm.getBranchCommit.mockResolvedValueOnce('sha');
       scm.getBranchCommit.mockResolvedValueOnce('base_sha');
       return expect(
-        syncBranchState('branch_name', 'base_branch')
+        syncBranchState('branch_name', 'base_branch'),
       ).resolves.toEqual({
         branchName: 'branch_name',
         sha: 'sha',
diff --git a/lib/workers/repository/process/write.ts b/lib/workers/repository/process/write.ts
index 9ac7b0dd3e1bfa84979cc59872aed8d372ef64f3..b4524f6bd6ea1146ceda2c98c9a060844ff6a8c0 100644
--- a/lib/workers/repository/process/write.ts
+++ b/lib/workers/repository/process/write.ts
@@ -16,7 +16,7 @@ import { getBranchesRemaining, getPrsRemaining } from './limits';
 export type WriteUpdateResult = 'done' | 'automerged';
 
 export function generateCommitFingerprintConfig(
-  branch: BranchConfig
+  branch: BranchConfig,
 ): UpgradeFingerprintConfig[] {
   const res = branch.upgrades.map((upgrade) => {
     const filteredUpgrade = {} as UpgradeFingerprintConfig;
@@ -31,7 +31,7 @@ export function generateCommitFingerprintConfig(
 
 export function canSkipBranchUpdateCheck(
   branchState: BranchCache,
-  commitFingerprint: string
+  commitFingerprint: string,
 ): boolean {
   if (!branchState.commitFingerprint) {
     logger.trace('branch.isUpToDate(): no fingerprint');
@@ -49,7 +49,7 @@ export function canSkipBranchUpdateCheck(
 
 export async function syncBranchState(
   branchName: string,
-  baseBranch: string
+  baseBranch: string,
 ): Promise<BranchCache> {
   logger.debug('syncBranchState()');
   const branchSha = await scm.getBranchCommit(branchName)!;
@@ -61,7 +61,7 @@ export async function syncBranchState(
   let branchState = cachedBranches.find((br) => br.branchName === branchName);
   if (!branchState) {
     logger.debug(
-      'syncBranchState(): Branch cache not found, creating minimal branchState'
+      'syncBranchState(): Branch cache not found, creating minimal branchState',
     );
     // create a minimal branch state
     branchState = {
@@ -110,7 +110,7 @@ export async function syncBranchState(
 
 export async function writeUpdates(
   config: RenovateConfig,
-  allBranches: BranchConfig[]
+  allBranches: BranchConfig[],
 ): Promise<WriteUpdateResult> {
   const branches = allBranches;
   logger.debug(
@@ -119,7 +119,7 @@ export async function writeUpdates(
     }: ${branches
       .map((b) => b.branchName)
       .sort()
-      .join(', ')}`
+      .join(', ')}`,
   );
   const prsRemaining = await getPrsRemaining(config, branches);
   logger.debug(`Calculated maximum PRs remaining this run: ${prsRemaining}`);
@@ -127,7 +127,7 @@ export async function writeUpdates(
 
   const branchesRemaining = await getBranchesRemaining(config, branches);
   logger.debug(
-    `Calculated maximum branches remaining this run: ${branchesRemaining}`
+    `Calculated maximum branches remaining this run: ${branchesRemaining}`,
   );
   setMaxLimit('Branches', branchesRemaining);
 
@@ -145,7 +145,7 @@ export async function writeUpdates(
       ...new Set(
         branch.upgrades
           .map((upgrade) => hashMap.get(upgrade.manager) ?? upgrade.manager)
-          .filter(is.string)
+          .filter(is.string),
       ),
     ].sort();
     const commitFingerprint = fingerprint({
@@ -154,7 +154,7 @@ export async function writeUpdates(
     });
     branch.skipBranchUpdate = canSkipBranchUpdateCheck(
       branchState,
-      commitFingerprint
+      commitFingerprint,
     );
     const res = await processBranch(branch);
     branch.prBlockedBy = res?.prBlockedBy;
diff --git a/lib/workers/repository/reconfigure/index.spec.ts b/lib/workers/repository/reconfigure/index.spec.ts
index 9efbf29065d5e7ba332d67975377852934e18af8..c658edec9777e16d7674d3e574996c01d32dadd1 100644
--- a/lib/workers/repository/reconfigure/index.spec.ts
+++ b/lib/workers/repository/reconfigure/index.spec.ts
@@ -50,7 +50,7 @@ describe('workers/repository/reconfigure/index', () => {
     await validateReconfigureBranch(config);
     expect(logger.error).toHaveBeenCalledWith(
       { err },
-      'Error while searching for config file in reconfigure branch'
+      'Error while searching for config file in reconfigure branch',
     );
   });
 
@@ -58,7 +58,7 @@ describe('workers/repository/reconfigure/index', () => {
     merge.detectConfigFile.mockResolvedValue(null);
     await validateReconfigureBranch(config);
     expect(logger.warn).toHaveBeenCalledWith(
-      'No config file found in reconfigure branch'
+      'No config file found in reconfigure branch',
     );
   });
 
@@ -68,7 +68,7 @@ describe('workers/repository/reconfigure/index', () => {
     await validateReconfigureBranch(config);
     expect(logger.error).toHaveBeenCalledWith(
       { err },
-      'Error while reading config file'
+      'Error while reading config file',
     );
   });
 
@@ -86,7 +86,7 @@ describe('workers/repository/reconfigure/index', () => {
     await validateReconfigureBranch(config);
     expect(logger.error).toHaveBeenCalledWith(
       { err: expect.any(Object) },
-      'Error while parsing config file'
+      'Error while parsing config file',
     );
     expect(platform.setBranchStatus).toHaveBeenCalledWith({
       branchName: 'prefix/reconfigure',
@@ -105,7 +105,7 @@ describe('workers/repository/reconfigure/index', () => {
     await validateReconfigureBranch(config);
     expect(logger.debug).toHaveBeenCalledWith(
       { errors: expect.any(String) },
-      'Validation Errors'
+      'Validation Errors',
     );
     expect(platform.setBranchStatus).toHaveBeenCalledWith({
       branchName: 'prefix/reconfigure',
@@ -125,7 +125,7 @@ describe('workers/repository/reconfigure/index', () => {
     await validateReconfigureBranch(config);
     expect(logger.debug).toHaveBeenCalledWith(
       { errors: expect.any(String) },
-      'Validation Errors'
+      'Validation Errors',
     );
     expect(platform.setBranchStatus).toHaveBeenCalled();
     expect(platform.ensureComment).toHaveBeenCalled();
@@ -159,7 +159,7 @@ describe('workers/repository/reconfigure/index', () => {
     });
     await validateReconfigureBranch(config);
     expect(logger.debug).toHaveBeenCalledWith(
-      'Skipping validation check as branch sha is unchanged'
+      'Skipping validation check as branch sha is unchanged',
     );
   });
 
@@ -173,7 +173,7 @@ describe('workers/repository/reconfigure/index', () => {
     platform.getBranchStatusCheck.mockResolvedValueOnce('green');
     await validateReconfigureBranch(config);
     expect(logger.debug).toHaveBeenCalledWith(
-      'Skipping validation check as status check already exists'
+      'Skipping validation check as status check already exists',
     );
   });
 
diff --git a/lib/workers/repository/reconfigure/index.ts b/lib/workers/repository/reconfigure/index.ts
index 281b2696bbd5aa62e41c9ff1c20251deb085d1c4..e8e4f69c2fa8e8eb9c37a1a7f4a75450d384670a 100644
--- a/lib/workers/repository/reconfigure/index.ts
+++ b/lib/workers/repository/reconfigure/index.ts
@@ -20,7 +20,7 @@ export function getReconfigureBranchName(prefix: string): string {
   return `${prefix}reconfigure`;
 }
 export async function validateReconfigureBranch(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<void> {
   logger.debug('validateReconfigureBranch()');
   const context = `renovate/config-validation`;
@@ -50,7 +50,7 @@ export async function validateReconfigureBranch(
 
   const validationStatus = await platform.getBranchStatusCheck(
     branchName,
-    'renovate/config-validation'
+    'renovate/config-validation',
   );
   // if old status check is present skip validation
   if (is.nonEmptyString(validationStatus)) {
@@ -64,7 +64,7 @@ export async function validateReconfigureBranch(
   } catch (err) {
     logger.error(
       { err },
-      'Error while searching for config file in reconfigure branch'
+      'Error while searching for config file in reconfigure branch',
     );
   }
 
@@ -128,13 +128,13 @@ export async function validateReconfigureBranch(
   if (validationResult.errors.length > 0) {
     logger.debug(
       { errors: validationResult.errors.map((err) => err.message).join(', ') },
-      'Validation Errors'
+      'Validation Errors',
     );
 
     // add comment to reconfigure PR if it exists
     const branchPr = await platform.getBranchPr(
       branchName,
-      config.defaultBranch
+      config.defaultBranch,
     );
     if (branchPr) {
       let body = `There is an error with this repository's Renovate configuration that needs to be fixed.\n\n`;
diff --git a/lib/workers/repository/reconfigure/reconfigure-cache.ts b/lib/workers/repository/reconfigure/reconfigure-cache.ts
index 03bd9678375cc2ab0e6e03b8f7afc42246fcf1d8..a261d3557e0fed68316652f0eca33a561d71eccc 100644
--- a/lib/workers/repository/reconfigure/reconfigure-cache.ts
+++ b/lib/workers/repository/reconfigure/reconfigure-cache.ts
@@ -3,7 +3,7 @@ import { getCache } from '../../../util/cache/repository';
 
 export function setReconfigureBranchCache(
   reconfigureBranchSha: string,
-  isConfigValid: boolean
+  isConfigValid: boolean,
 ): void {
   const cache = getCache();
   const reconfigureBranchCache = {
diff --git a/lib/workers/repository/result.ts b/lib/workers/repository/result.ts
index f5abb9cc70e97c6f840c1d0ca3933dff1ec09c22..83829671885d8c5a1f4405f80767bf003727ea0c 100644
--- a/lib/workers/repository/result.ts
+++ b/lib/workers/repository/result.ts
@@ -37,7 +37,7 @@ export interface ProcessResult {
 
 export function processResult(
   config: RenovateConfig,
-  res: string
+  res: string,
 ): ProcessResult {
   const disabledStatuses = [
     REPOSITORY_ACCESS_FORBIDDEN,
@@ -85,7 +85,7 @@ export function processResult(
   }
   logger.debug(
     // TODO: types (#22198)
-    `Repository result: ${res}, status: ${status}, enabled: ${enabled!}, onboarded: ${onboarded!}`
+    `Repository result: ${res}, status: ${status}, enabled: ${enabled!}, onboarded: ${onboarded!}`,
   );
   return { res, status, enabled, onboarded };
 }
diff --git a/lib/workers/repository/stats.ts b/lib/workers/repository/stats.ts
index 112a7fe9efbb0b6b3943c2afb81522566981ddc5..f23ab29c938ca2b0f7ff6feb1cec354250032ad5 100644
--- a/lib/workers/repository/stats.ts
+++ b/lib/workers/repository/stats.ts
@@ -47,7 +47,7 @@ export function printRequestStats(): void {
   };
   if (packageCacheGets.length) {
     packageCacheStats.get.avgMs = Math.round(
-      packageCacheGets.reduce((a, b) => a + b, 0) / packageCacheGets.length
+      packageCacheGets.reduce((a, b) => a + b, 0) / packageCacheGets.length,
     );
     if (packageCacheGets.length > 1) {
       packageCacheStats.get.medianMs =
@@ -58,7 +58,7 @@ export function printRequestStats(): void {
   }
   if (packageCacheSets.length) {
     packageCacheStats.set.avgMs = Math.round(
-      packageCacheSets.reduce((a, b) => a + b, 0) / packageCacheSets.length
+      packageCacheSets.reduce((a, b) => a + b, 0) / packageCacheSets.length,
     );
     if (packageCacheSets.length > 1) {
       packageCacheStats.set.medianMs =
@@ -96,7 +96,7 @@ export function printRequestStats(): void {
       rawUrls[urlKey] = 1;
     }
     allRequests.push(
-      `${method.toUpperCase()} ${url} ${statusCode} ${duration} ${queueDuration}`
+      `${method.toUpperCase()} ${url} ${statusCode} ${duration} ${queueDuration}`,
     );
     const { hostname } = URL.parse(url);
 
diff --git a/lib/workers/repository/update/branch/artifacts.ts b/lib/workers/repository/update/branch/artifacts.ts
index 9acc2798e8b1f5fbe6a6a147348b1477847b6e7c..0d395e4e770fdbeff4197ff1a71e9b956ba5d264 100644
--- a/lib/workers/repository/update/branch/artifacts.ts
+++ b/lib/workers/repository/update/branch/artifacts.ts
@@ -4,7 +4,7 @@ import { platform } from '../../../../modules/platform';
 import type { BranchConfig } from '../../../types';
 
 export async function setArtifactErrorStatus(
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<void> {
   if (!config.artifactErrors?.length) {
     // no errors
@@ -16,7 +16,7 @@ export async function setArtifactErrorStatus(
   const state = 'red';
   const existingState = await platform.getBranchStatusCheck(
     config.branchName,
-    context
+    context,
   );
 
   // Check if state needs setting
diff --git a/lib/workers/repository/update/branch/auto-replace.spec.ts b/lib/workers/repository/update/branch/auto-replace.spec.ts
index 04f7a440c15bb327789de9aaa86eefb1de56911f..ee0b2c8e8abfb69d1a721b638c33059e70076093 100644
--- a/lib/workers/repository/update/branch/auto-replace.spec.ts
+++ b/lib/workers/repository/update/branch/auto-replace.spec.ts
@@ -9,7 +9,7 @@ import { doAutoReplace } from './auto-replace';
 
 const sampleHtml = Fixtures.get(
   'sample.html',
-  `../../../../modules/manager/html`
+  `../../../../modules/manager/html`,
 );
 
 jest.mock('../../../../util/fs');
@@ -39,7 +39,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         'existing content',
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toBeNull();
     });
@@ -120,7 +120,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         srcAlreadyUpdated,
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toEqual(srcAlreadyUpdated);
     });
@@ -155,7 +155,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         'wrong source',
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toBe('wrong source');
     });
@@ -175,7 +175,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       upgrade.replaceString = script;
       const res = await doAutoReplace(upgrade, script, reuseExistingBranch);
       expect(res).toBe(
-        `<script src="https://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.11.1/katex.min.js" integrity="sha256-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" crossorigin="anonymous">`
+        `<script src="https://cdnjs.cloudflare.com/ajax/libs/KaTeX/0.11.1/katex.min.js" integrity="sha256-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa" crossorigin="anonymous">`,
       );
     });
 
@@ -197,7 +197,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
         '{{depName}}{{#if newValue}}:{{newValue}}{{/if}}{{#if newDigest}}@{{newDigest}}{{/if}}';
       const res = await doAutoReplace(upgrade, dockerfile, reuseExistingBranch);
       expect(res).toBe(
-        `FROM node:8.11.4-alpine@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa AS node`
+        `FROM node:8.11.4-alpine@sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa AS node`,
       );
     });
 
@@ -233,7 +233,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
           "file: 'ci-include-docker-test-base.yml'\n" +
           "- project: 'pipeline-solutions/gitlab/fragments/docker-lint'\n" +
           'ref: 2-4-1\n' +
-          "file: 'ci-include-docker-lint-base.yml'"
+          "file: 'ci-include-docker-lint-base.yml'",
       );
     });
 
@@ -359,7 +359,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -381,7 +381,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -405,7 +405,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -428,7 +428,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -447,7 +447,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -470,7 +470,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -492,7 +492,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         gemfile
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -511,7 +511,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         build
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -532,7 +532,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         cargo
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -555,7 +555,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -573,7 +573,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         podfile
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -597,7 +597,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         json
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -620,7 +620,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         edn
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -643,7 +643,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -662,7 +662,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         dockerfile
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -684,7 +684,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
         dockerfile
           .replace(upgrade.depName, upgrade.newName)
           .replace(upgrade.currentValue, upgrade.newValue)
-          .replace(upgrade.currentDigest, upgrade.newDigest)
+          .replace(upgrade.currentDigest, upgrade.newDigest),
       );
     });
 
@@ -707,7 +707,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -726,7 +726,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -750,7 +750,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -768,7 +768,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         txt
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -794,7 +794,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         js
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -828,7 +828,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
           'xml2js': '0.2.0',
           'connect': '2.7.10'
         });
-      `
+      `,
       );
     });
 
@@ -869,7 +869,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         exs
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -897,7 +897,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         json
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -919,7 +919,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         yml
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -942,7 +942,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         tf
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -965,7 +965,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         tf
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -986,7 +986,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         tf
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -1004,7 +1004,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         tf
           .replace(upgrade.depName, upgrade.newName)
-          .replace(upgrade.currentValue, upgrade.newValue)
+          .replace(upgrade.currentValue, upgrade.newValue),
       );
     });
 
@@ -1028,7 +1028,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
           FROM ubuntu:16.04
           FROM ubuntu:20.04
           FROM alpine:3.16
-        `
+        `,
       );
     });
 
@@ -1052,7 +1052,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
           FROM ubuntu:16.04
           FROM alpine:3.16
           FROM ubuntu:18.04
-        `
+        `,
       );
     });
 
@@ -1076,7 +1076,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
           FROM notUbuntu:18.04
           FROM alsoNotUbuntu:18.04
           FROM alpine:3.16
-        `
+        `,
       );
     });
 
@@ -1100,7 +1100,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         codeBlock`
           FROM alpine:3.16
-        `
+        `,
       );
     });
 
@@ -1124,7 +1124,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         codeBlock`
           FROM alpine:3.16@sha256:p0o9i8u7z6t5r4e3w2q1
-        `
+        `,
       );
     });
 
@@ -1169,7 +1169,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       expect(res).toBe(
         codeBlock`
           FROM eclipse-temurin:11@sha256:p0o9i8u7z6t5r4e3w2q1
-        `
+        `,
       );
     });
 
@@ -1215,7 +1215,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       ];
       const res = await doAutoReplace(upgrade, yml, reuseExistingBranch);
       expect(res).toBe(
-        'image: "some.other.url.com/some-new-repo:3.16@sha256:p0o9i8u7z6t5r4e3w2q1"'
+        'image: "some.other.url.com/some-new-repo:3.16@sha256:p0o9i8u7z6t5r4e3w2q1"',
       );
     });
 
@@ -1243,7 +1243,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         githubAction,
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toBe(
         codeBlock`
@@ -1252,7 +1252,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
               runs-on: ubuntu-latest
               steps:
                 - uses: actions/checkout@v2.0.0
-        `
+        `,
       );
     });
 
@@ -1280,7 +1280,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         githubAction,
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toBe(
         codeBlock`
@@ -1289,7 +1289,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
               runs-on: ubuntu-latest
               steps:
                 - uses: actions/checkout@1cf887 # v2.0.0
-        `
+        `,
       );
     });
 
@@ -1319,7 +1319,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         githubAction,
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toBe(
         codeBlock`
@@ -1328,7 +1328,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
               runs-on: ubuntu-latest
               steps:
                 - uses: some-other-action/checkout@v2.0.0
-        `
+        `,
       );
     });
 
@@ -1357,7 +1357,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
       const res = await doAutoReplace(
         upgrade,
         githubAction,
-        reuseExistingBranch
+        reuseExistingBranch,
       );
       expect(res).toBe(
         codeBlock`
@@ -1366,7 +1366,7 @@ describe('workers/repository/update/branch/auto-replace', () => {
               runs-on: ubuntu-latest
               steps:
                 - uses: some-other-action/checkout@1cf887 # tag=v2.0.0
-        `
+        `,
       );
     });
   });
diff --git a/lib/workers/repository/update/branch/auto-replace.ts b/lib/workers/repository/update/branch/auto-replace.ts
index 6e5bd6e789e4d95a21edf414f11f558be8588c4c..9166e674b367bdcb0bd29abbe46467590fe05fd7 100644
--- a/lib/workers/repository/update/branch/auto-replace.ts
+++ b/lib/workers/repository/update/branch/auto-replace.ts
@@ -12,7 +12,7 @@ import type { BranchUpgradeConfig } from '../../../types';
 
 export async function confirmIfDepUpdated(
   upgrade: BranchUpgradeConfig,
-  newContent: string
+  newContent: string,
 ): Promise<boolean> {
   const { manager, packageFile, depIndex } = upgrade;
   let newUpgrade: PackageDependency;
@@ -21,31 +21,31 @@ export async function confirmIfDepUpdated(
       manager,
       newContent,
       packageFile!,
-      upgrade
+      upgrade,
     );
     // istanbul ignore if
     if (!newExtract) {
       // TODO: fix types (#22198)
       logger.debug(
-        `Could not extract ${packageFile!} (manager=${manager}) after autoreplace. Did the autoreplace make the file unparseable?`
+        `Could not extract ${packageFile!} (manager=${manager}) after autoreplace. Did the autoreplace make the file unparseable?`,
       );
       logger.trace(
         { packageFile, content: newContent },
-        'packageFile content after autoreplace'
+        'packageFile content after autoreplace',
       );
       return false;
     }
     // istanbul ignore if
     if (!newExtract.deps?.length) {
       logger.debug(
-        `Extracted ${packageFile!} after autoreplace has no deps array. Did the autoreplace make the file unparseable?`
+        `Extracted ${packageFile!} after autoreplace has no deps array. Did the autoreplace make the file unparseable?`,
       );
       return false;
     }
     // istanbul ignore if
     if (is.number(depIndex) && depIndex >= newExtract.deps.length) {
       logger.debug(
-        `Extracted ${packageFile!} after autoreplace has fewer deps than expected.`
+        `Extracted ${packageFile!} after autoreplace has fewer deps than expected.`,
       );
       return false;
     }
@@ -70,7 +70,7 @@ export async function confirmIfDepUpdated(
         currentDepName: upgrade.depName,
         newDepName: newUpgrade.depName,
       },
-      'depName mismatch'
+      'depName mismatch',
     );
     return false;
   }
@@ -83,7 +83,7 @@ export async function confirmIfDepUpdated(
         currentDepName: upgrade.depName,
         newDepName: newUpgrade.depName,
       },
-      'depName is not updated'
+      'depName is not updated',
     );
     return false;
   }
@@ -96,7 +96,7 @@ export async function confirmIfDepUpdated(
         expectedValue: upgrade.newValue,
         foundValue: newUpgrade.currentValue,
       },
-      'Value is not updated'
+      'Value is not updated',
     );
     return false;
   }
@@ -113,7 +113,7 @@ export async function confirmIfDepUpdated(
         expectedValue: upgrade.newDigest,
         foundValue: newUpgrade.currentDigest,
       },
-      'Digest is not updated'
+      'Digest is not updated',
     );
     return false;
   }
@@ -127,14 +127,14 @@ function getDepsSignature(deps: PackageDependency[]): string {
     .map(
       (dep) =>
         `${(dep.depName ?? dep.packageName)!}${(dep.packageName ??
-          dep.depName)!}`
+          dep.depName)!}`,
     )
     .join(',');
 }
 
 export async function checkBranchDepsMatchBaseDeps(
   upgrade: BranchUpgradeConfig,
-  branchContent: string
+  branchContent: string,
 ): Promise<boolean> {
   const { baseDeps, manager, packageFile } = upgrade;
   try {
@@ -142,14 +142,14 @@ export async function checkBranchDepsMatchBaseDeps(
       manager,
       branchContent,
       packageFile!,
-      upgrade
+      upgrade,
     )!;
     const branchDeps = res!.deps;
     return getDepsSignature(baseDeps!) === getDepsSignature(branchDeps);
   } catch (err) /* istanbul ignore next */ {
     logger.info(
       { manager, packageFile },
-      'Failed to parse branchContent - rebasing'
+      'Failed to parse branchContent - rebasing',
     );
     return false;
   }
@@ -157,20 +157,20 @@ export async function checkBranchDepsMatchBaseDeps(
 
 async function checkExistingBranch(
   upgrade: BranchUpgradeConfig,
-  existingContent: string
+  existingContent: string,
 ): Promise<string | null> {
   const { packageFile, depName } = upgrade;
   if (!(await checkBranchDepsMatchBaseDeps(upgrade, existingContent))) {
     logger.debug(
       { packageFile, depName },
-      'Rebasing branch after deps list has changed'
+      'Rebasing branch after deps list has changed',
     );
     return null;
   }
   if (!(await confirmIfDepUpdated(upgrade, existingContent))) {
     logger.debug(
       { packageFile, depName },
-      'Rebasing after outdated branch dep found'
+      'Rebasing after outdated branch dep found',
     );
     return null;
   }
@@ -183,7 +183,7 @@ export async function doAutoReplace(
   upgrade: BranchUpgradeConfig,
   existingContent: string,
   reuseExistingBranch: boolean,
-  firstUpdate = true
+  firstUpdate = true,
 ): Promise<string | null> {
   const {
     packageFile,
@@ -223,7 +223,7 @@ export async function doAutoReplace(
   if (searchIndex === -1) {
     logger.info(
       { packageFile, depName, existingContent, replaceString },
-      'Cannot find replaceString in current file content. Was it already updated?'
+      'Cannot find replaceString in current file content. Was it already updated?',
     );
     return existingContent;
   }
@@ -238,37 +238,37 @@ export async function doAutoReplace(
       if (currentValue && newValue) {
         newString = newString.replace(
           regEx(escapeRegExp(currentValue), autoReplaceRegExpFlag),
-          newValue
+          newValue,
         );
       }
       if (depName && newName) {
         newString = newString.replace(
           regEx(escapeRegExp(depName), autoReplaceRegExpFlag),
-          newName
+          newName,
         );
       }
       if (currentDigest && newDigest) {
         newString = newString.replace(
           regEx(escapeRegExp(currentDigest), autoReplaceRegExpFlag),
-          newDigest
+          newDigest,
         );
       } else if (currentDigestShort && newDigest) {
         newString = newString.replace(
           regEx(escapeRegExp(currentDigestShort), autoReplaceRegExpFlag),
-          newDigest
+          newDigest,
         );
       }
     }
     if (!firstUpdate && (await confirmIfDepUpdated(upgrade, existingContent))) {
       logger.debug(
         { packageFile, depName },
-        'Package file is already updated - no work to do'
+        'Package file is already updated - no work to do',
       );
       return existingContent;
     }
     logger.debug(
       { packageFile, depName },
-      `Starting search at index ${searchIndex}`
+      `Starting search at index ${searchIndex}`,
     );
     let newContent = existingContent;
     let nameReplaced = !newName;
@@ -282,7 +282,7 @@ export async function doAutoReplace(
         if (newName && matchAt(newContent, searchIndex, depName!)) {
           logger.debug(
             { packageFile, depName },
-            `Found depName at index ${searchIndex}`
+            `Found depName at index ${searchIndex}`,
           );
           if (nameReplaced) {
             startIndex += 1;
@@ -303,14 +303,14 @@ export async function doAutoReplace(
         ) {
           logger.debug(
             { packageFile, currentValue },
-            `Found currentValue at index ${searchIndex}`
+            `Found currentValue at index ${searchIndex}`,
           );
           // Now test if the result matches
           newContent = replaceAt(
             newContent,
             searchIndex,
             currentValue!,
-            newValue
+            newValue,
           );
           await writeLocalFile(upgrade.packageFile!, newContent);
           valueReplaced = true;
@@ -327,14 +327,14 @@ export async function doAutoReplace(
       } else if (matchAt(newContent, searchIndex, replaceString!)) {
         logger.debug(
           { packageFile, depName },
-          `Found match at index ${searchIndex}`
+          `Found match at index ${searchIndex}`,
         );
         // Now test if the result matches
         newContent = replaceAt(
           newContent,
           searchIndex,
           replaceString!,
-          newString
+          newString,
         );
         await writeLocalFile(upgrade.packageFile!, newContent);
         if (await confirmIfDepUpdated(upgrade, newContent)) {
diff --git a/lib/workers/repository/update/branch/automerge.spec.ts b/lib/workers/repository/update/branch/automerge.spec.ts
index cc016d26471a8ddefc946cecce7e212233614a7e..aad62addb59745fe7f25cd2d4b9b3cf72f45da72 100644
--- a/lib/workers/repository/update/branch/automerge.spec.ts
+++ b/lib/workers/repository/update/branch/automerge.spec.ts
@@ -56,7 +56,7 @@ describe('workers/repository/update/branch/automerge', () => {
       config.automergeType = 'branch';
       platform.getBranchStatus.mockResolvedValueOnce('green');
       expect(await tryBranchAutomerge(config)).toBe(
-        'automerge aborted - PR exists'
+        'automerge aborted - PR exists',
       );
     });
 
diff --git a/lib/workers/repository/update/branch/automerge.ts b/lib/workers/repository/update/branch/automerge.ts
index 3ea24e8ee7711530feb28b09ddd754cece6616ff..40012bb770a3106878f0a2f6be76be273979f6b3 100644
--- a/lib/workers/repository/update/branch/automerge.ts
+++ b/lib/workers/repository/update/branch/automerge.ts
@@ -17,7 +17,7 @@ export type AutomergeResult =
   | 'not ready';
 
 export async function tryBranchAutomerge(
-  config: RenovateConfig
+  config: RenovateConfig,
 ): Promise<AutomergeResult> {
   logger.debug('Checking if we can automerge branch');
   if (!(config.automerge && config.automergeType === 'branch')) {
@@ -28,7 +28,7 @@ export async function tryBranchAutomerge(
   }
   const existingPr = await platform.getBranchPr(
     config.branchName!,
-    config.baseBranch
+    config.baseBranch,
   );
   if (existingPr) {
     return 'automerge aborted - PR exists';
@@ -36,7 +36,7 @@ export async function tryBranchAutomerge(
   const branchStatus = await resolveBranchStatus(
     config.branchName!,
     !!config.internalChecksAsSuccess,
-    config.ignoreTests
+    config.ignoreTests,
   );
   if (branchStatus === 'green') {
     logger.debug(`Automerging branch`);
@@ -59,7 +59,7 @@ export async function tryBranchAutomerge(
         err.message.includes('refusing to merge unrelated histories') ||
         err.message.includes('Not possible to fast-forward') ||
         err.message.includes(
-          'Updates were rejected because the tip of your current branch is behind'
+          'Updates were rejected because the tip of your current branch is behind',
         )
       ) {
         logger.debug({ err }, 'Branch automerge error');
@@ -70,20 +70,20 @@ export async function tryBranchAutomerge(
         if (err.message.includes('status check')) {
           logger.debug(
             { err },
-            'Branch is not ready for automerge: required status checks are remaining'
+            'Branch is not ready for automerge: required status checks are remaining',
           );
           return 'not ready';
         }
         if (err.stack?.includes('reviewers')) {
           logger.info(
             { err },
-            'Branch automerge is not possible due to branch protection (required reviewers)'
+            'Branch automerge is not possible due to branch protection (required reviewers)',
           );
           return 'failed';
         }
         logger.info(
           { err },
-          'Branch automerge is not possible due to branch protection'
+          'Branch automerge is not possible due to branch protection',
         );
         return 'failed';
       }
diff --git a/lib/workers/repository/update/branch/check-existing.spec.ts b/lib/workers/repository/update/branch/check-existing.spec.ts
index 15f347deb8a46b32ca6947704fa026652d234926..8fe37afbc7637e2c510e1ffd575bd8e2f7225e8f 100644
--- a/lib/workers/repository/update/branch/check-existing.spec.ts
+++ b/lib/workers/repository/update/branch/check-existing.spec.ts
@@ -36,7 +36,7 @@ describe('workers/repository/update/branch/check-existing', () => {
         partial<Pr>({
           number: 12,
           state: 'closed',
-        })
+        }),
       );
       expect(await prAlreadyExisted(config)).toEqual({ number: 12 });
       expect(platform.findPr).toHaveBeenCalledTimes(1);
@@ -50,12 +50,12 @@ describe('workers/repository/update/branch/check-existing', () => {
         partial<Pr>({
           number: 12,
           state: 'closed',
-        })
+        }),
       );
       expect(await prAlreadyExisted(config)).toEqual({ number: 12 });
       expect(platform.findPr).toHaveBeenCalledTimes(2);
       expect(logger.debug).toHaveBeenCalledWith(
-        `Found closed PR with current title`
+        `Found closed PR with current title`,
       );
     });
   });
diff --git a/lib/workers/repository/update/branch/check-existing.ts b/lib/workers/repository/update/branch/check-existing.ts
index 1e042068d57e08024f9497b166043d6e8fe2fca2..7c6248d0e362ab7bb7ca9fafd684dc1520bb92c5 100644
--- a/lib/workers/repository/update/branch/check-existing.ts
+++ b/lib/workers/repository/update/branch/check-existing.ts
@@ -5,7 +5,7 @@ import { Pr, platform } from '../../../../modules/platform';
 import type { BranchConfig } from '../../../types';
 
 export async function prAlreadyExisted(
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<Pr | null> {
   logger.trace({ config }, 'prAlreadyExisted');
   if (config.recreateClosed) {
@@ -13,7 +13,7 @@ export async function prAlreadyExisted(
     return null;
   }
   logger.debug(
-    'Check for closed PR because recreating closed PRs is disabled.'
+    'Check for closed PR because recreating closed PRs is disabled.',
   );
   // Return if same PR already existed
   let pr = await platform.findPr({
@@ -27,7 +27,7 @@ export async function prAlreadyExisted(
     pr = await platform.findPr({
       branchName: config.branchName.replace(
         config.branchPrefix!,
-        config.branchPrefixOld!
+        config.branchPrefixOld!,
       ),
       prTitle: config.prTitle,
       state: '!open',
diff --git a/lib/workers/repository/update/branch/commit.ts b/lib/workers/repository/update/branch/commit.ts
index 7a50801e06fec6eb23b4f614f8bcf7fe0e3969c2..d1be2d1654c7d2e9fa6154d20467d4bfcb51a90f 100644
--- a/lib/workers/repository/update/branch/commit.ts
+++ b/lib/workers/repository/update/branch/commit.ts
@@ -9,16 +9,17 @@ import { sanitize } from '../../../../util/sanitize';
 import type { BranchConfig } from '../../../types';
 
 export function commitFilesToBranch(
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<string | null> {
   let updatedFiles = config.updatedPackageFiles!.concat(
-    config.updatedArtifacts!
+    config.updatedArtifacts!,
   );
   // istanbul ignore if
   if (is.nonEmptyArray(config.excludeCommitPaths)) {
     updatedFiles = updatedFiles.filter(({ path: filePath }) => {
       const matchesExcludePaths = config.excludeCommitPaths!.some(
-        (excludedPath) => minimatch(excludedPath, { dot: true }).match(filePath)
+        (excludedPath) =>
+          minimatch(excludedPath, { dot: true }).match(filePath),
       );
       if (matchesExcludePaths) {
         logger.debug(`Excluding ${filePath} from commit`);
@@ -45,7 +46,7 @@ export function commitFilesToBranch(
   ) {
     logger.debug(
       { branchName: config.branchName },
-      'Secrets exposed in branchName or commitMessage'
+      'Secrets exposed in branchName or commitMessage',
     );
     throw new Error(CONFIG_SECRETS_EXPOSED);
   }
diff --git a/lib/workers/repository/update/branch/execute-post-upgrade-commands.spec.ts b/lib/workers/repository/update/branch/execute-post-upgrade-commands.spec.ts
index 82d48d621c6ca41d300e28c15fd948ead37a1a95..da750d0b0a41669eb09cef0ffe2cd1d2c201643c 100644
--- a/lib/workers/repository/update/branch/execute-post-upgrade-commands.spec.ts
+++ b/lib/workers/repository/update/branch/execute-post-upgrade-commands.spec.ts
@@ -43,7 +43,7 @@ describe('workers/repository/update/branch/execute-post-upgrade-commands', () =>
           modified: [],
           not_added: [],
           deleted: [],
-        })
+        }),
       );
       GlobalConfig.set({
         localDir: __dirname,
@@ -60,7 +60,7 @@ describe('workers/repository/update/branch/execute-post-upgrade-commands', () =>
 
       const res = await postUpgradeCommands.postUpgradeCommandsExecutor(
         commands,
-        config
+        config,
       );
 
       expect(res.updatedArtifacts).toHaveLength(3);
@@ -93,7 +93,7 @@ describe('workers/repository/update/branch/execute-post-upgrade-commands', () =>
           modified: [],
           not_added: [],
           deleted: [],
-        })
+        }),
       );
       GlobalConfig.set({
         localDir: __dirname,
@@ -108,7 +108,7 @@ describe('workers/repository/update/branch/execute-post-upgrade-commands', () =>
 
       const res = await postUpgradeCommands.postUpgradeCommandsExecutor(
         commands,
-        config
+        config,
       );
 
       expect(res.updatedArtifacts).toHaveLength(0);
diff --git a/lib/workers/repository/update/branch/execute-post-upgrade-commands.ts b/lib/workers/repository/update/branch/execute-post-upgrade-commands.ts
index bfbf825f80e8801f4303627c3a8c01b690478bf2..a44f1c7906e476a5655d9f39cb4aff6f3010e05f 100644
--- a/lib/workers/repository/update/branch/execute-post-upgrade-commands.ts
+++ b/lib/workers/repository/update/branch/execute-post-upgrade-commands.ts
@@ -26,15 +26,15 @@ export interface PostUpgradeCommandsExecutionResult {
 
 export async function postUpgradeCommandsExecutor(
   filteredUpgradeCommands: BranchUpgradeConfig[],
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<PostUpgradeCommandsExecutionResult> {
   let updatedArtifacts = [...(config.updatedArtifacts ?? [])];
   const artifactErrors = [...(config.artifactErrors ?? [])];
   const allowedPostUpgradeCommands = GlobalConfig.get(
-    'allowedPostUpgradeCommands'
+    'allowedPostUpgradeCommands',
   );
   const allowPostUpgradeCommandTemplating = GlobalConfig.get(
-    'allowPostUpgradeCommandTemplating'
+    'allowPostUpgradeCommandTemplating',
   );
 
   for (const upgrade of filteredUpgradeCommands) {
@@ -44,7 +44,7 @@ export async function postUpgradeCommandsExecutor(
         tasks: upgrade.postUpgradeTasks,
         allowedCommands: allowedPostUpgradeCommands,
       },
-      `Checking for post-upgrade tasks`
+      `Checking for post-upgrade tasks`,
     );
     const commands = upgrade.postUpgradeTasks?.commands;
     const fileFilters = upgrade.postUpgradeTasks?.fileFilters ?? ['**/*'];
@@ -67,7 +67,7 @@ export async function postUpgradeCommandsExecutor(
       for (const cmd of commands) {
         if (
           allowedPostUpgradeCommands!.some((pattern) =>
-            regEx(pattern).test(cmd)
+            regEx(pattern).test(cmd),
           )
         ) {
           try {
@@ -82,7 +82,7 @@ export async function postUpgradeCommandsExecutor(
 
             logger.debug(
               { cmd: compiledCmd, ...execResult },
-              'Executed post-upgrade task'
+              'Executed post-upgrade task',
             );
           } catch (error) {
             artifactErrors.push({
@@ -96,12 +96,12 @@ export async function postUpgradeCommandsExecutor(
               cmd,
               allowedPostUpgradeCommands,
             },
-            'Post-upgrade task did not match any on allowedPostUpgradeCommands list'
+            'Post-upgrade task did not match any on allowedPostUpgradeCommands list',
           );
           artifactErrors.push({
             lockFile: upgrade.packageFile,
             stderr: sanitize(
-              `Post-upgrade command '${cmd}' has not been added to the allowed list in allowedPostUpgradeCommands`
+              `Post-upgrade command '${cmd}' has not been added to the allowed list in allowedPostUpgradeCommands`,
             ),
           });
         }
@@ -114,11 +114,11 @@ export async function postUpgradeCommandsExecutor(
           if (minimatch(pattern, { dot: true }).match(relativePath)) {
             logger.debug(
               { file: relativePath, pattern },
-              'Post-upgrade file saved'
+              'Post-upgrade file saved',
             );
             const existingContent = await readLocalFile(relativePath);
             const existingUpdatedArtifacts = updatedArtifacts.find(
-              (ua) => ua.path === relativePath
+              (ua) => ua.path === relativePath,
             );
             if (existingUpdatedArtifacts?.type === 'addition') {
               existingUpdatedArtifacts.contents = existingContent;
@@ -131,7 +131,7 @@ export async function postUpgradeCommandsExecutor(
             }
             // If the file is deleted by a previous post-update command, remove the deletion from updatedArtifacts
             updatedArtifacts = updatedArtifacts.filter(
-              (ua) => !(ua.type === 'deletion' && ua.path === relativePath)
+              (ua) => !(ua.type === 'deletion' && ua.path === relativePath),
             );
           }
         }
@@ -142,7 +142,7 @@ export async function postUpgradeCommandsExecutor(
           if (minimatch(pattern, { dot: true }).match(relativePath)) {
             logger.debug(
               { file: relativePath, pattern },
-              'Post-upgrade file removed'
+              'Post-upgrade file removed',
             );
             updatedArtifacts.push({
               type: 'deletion',
@@ -150,7 +150,7 @@ export async function postUpgradeCommandsExecutor(
             });
             // If the file is created or modified by a previous post-update command, remove the modification from updatedArtifacts
             updatedArtifacts = updatedArtifacts.filter(
-              (ua) => !(ua.type === 'addition' && ua.path === relativePath)
+              (ua) => !(ua.type === 'addition' && ua.path === relativePath),
             );
           }
         }
@@ -161,7 +161,7 @@ export async function postUpgradeCommandsExecutor(
 }
 
 export default async function executePostUpgradeCommands(
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<PostUpgradeCommandsExecutionResult | null> {
   const hasChangedFiles =
     (is.array(config.updatedPackageFiles) &&
@@ -192,7 +192,7 @@ export default async function executePostUpgradeCommands(
   const updateUpgradeCommands: BranchUpgradeConfig[] = config.upgrades.filter(
     ({ postUpgradeTasks }) =>
       !postUpgradeTasks?.executionMode ||
-      postUpgradeTasks.executionMode === 'update'
+      postUpgradeTasks.executionMode === 'update',
   );
 
   const { updatedArtifacts, artifactErrors } =
diff --git a/lib/workers/repository/update/branch/get-updated.spec.ts b/lib/workers/repository/update/branch/get-updated.spec.ts
index dc47a596f93547c221a78785168bae525b41908e..4a6590790e673477ea11bd61d3c7b313494ab7c4 100644
--- a/lib/workers/repository/update/branch/get-updated.spec.ts
+++ b/lib/workers/repository/update/branch/get-updated.spec.ts
@@ -570,7 +570,7 @@ describe('workers/repository/update/branch/get-updated', () => {
           await getUpdatedPackageFiles(config);
           expect(bundler.updateArtifacts).toHaveBeenCalledOnce();
           expect(bundler.updateArtifacts).toHaveBeenCalledWith(
-            expect.objectContaining({ newPackageFileContent: 'new contents' })
+            expect.objectContaining({ newPackageFileContent: 'new contents' }),
           );
         });
       });
@@ -586,7 +586,7 @@ describe('workers/repository/update/branch/get-updated', () => {
           await getUpdatedPackageFiles(config);
           expect(bundler.updateArtifacts).toHaveBeenCalledOnce();
           expect(bundler.updateArtifacts).toHaveBeenCalledWith(
-            expect.objectContaining({ newPackageFileContent: 'new contents' })
+            expect.objectContaining({ newPackageFileContent: 'new contents' }),
           );
         });
       });
@@ -605,7 +605,7 @@ describe('workers/repository/update/branch/get-updated', () => {
           await getUpdatedPackageFiles(config);
           expect(bundler.updateArtifacts).toHaveBeenCalledOnce();
           expect(bundler.updateArtifacts).toHaveBeenCalledWith(
-            expect.objectContaining({ newPackageFileContent: newContent })
+            expect.objectContaining({ newPackageFileContent: newContent }),
           );
         });
       });
@@ -624,7 +624,7 @@ describe('workers/repository/update/branch/get-updated', () => {
           await getUpdatedPackageFiles(config);
           expect(bundler.updateArtifacts).toHaveBeenCalledOnce();
           expect(bundler.updateArtifacts).toHaveBeenCalledWith(
-            expect.objectContaining({ newPackageFileContent: newContent })
+            expect.objectContaining({ newPackageFileContent: newContent }),
           );
         });
       });
@@ -645,7 +645,7 @@ describe('workers/repository/update/branch/get-updated', () => {
           await getUpdatedPackageFiles(config);
           expect(bundler.updateArtifacts).toHaveBeenCalledOnce();
           expect(bundler.updateArtifacts).toHaveBeenCalledWith(
-            expect.objectContaining({ newPackageFileContent: 'new contents' })
+            expect.objectContaining({ newPackageFileContent: 'new contents' }),
           );
         });
       });
@@ -666,7 +666,7 @@ describe('workers/repository/update/branch/get-updated', () => {
           await getUpdatedPackageFiles(config);
           expect(bundler.updateArtifacts).toHaveBeenCalledOnce();
           expect(bundler.updateArtifacts).toHaveBeenCalledWith(
-            expect.objectContaining({ newPackageFileContent: 'new contents' })
+            expect.objectContaining({ newPackageFileContent: 'new contents' }),
           );
         });
       });
diff --git a/lib/workers/repository/update/branch/get-updated.ts b/lib/workers/repository/update/branch/get-updated.ts
index 94b329e93b184c61531674c4920d0076690ebdd8..648f107c25b2138fd7ba04fc90f1d7dfcdaf1f03 100644
--- a/lib/workers/repository/update/branch/get-updated.ts
+++ b/lib/workers/repository/update/branch/get-updated.ts
@@ -21,12 +21,12 @@ export interface PackageFilesResult {
 }
 
 export async function getUpdatedPackageFiles(
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<PackageFilesResult> {
   logger.trace({ config });
   const reuseExistingBranch = config.reuseExistingBranch!;
   logger.debug(
-    `manager.getUpdatedPackageFiles() reuseExistingBranch=${reuseExistingBranch}`
+    `manager.getUpdatedPackageFiles() reuseExistingBranch=${reuseExistingBranch}`,
   );
   let updatedFileContents: Record<string, string> = {};
   const nonUpdatedFileContents: Record<string, string> = {};
@@ -50,7 +50,7 @@ export async function getUpdatedPackageFiles(
     if (!packageFileContent) {
       packageFileContent = await getFile(
         packageFile,
-        reuseExistingBranch ? config.branchName : config.baseBranch
+        reuseExistingBranch ? config.branchName : config.baseBranch,
       );
     }
     let lockFileContent: string | null = null;
@@ -60,7 +60,7 @@ export async function getUpdatedPackageFiles(
       if (!lockFileContent) {
         lockFileContent = await getFile(
           lockFile,
-          reuseExistingBranch ? config.branchName : config.baseBranch
+          reuseExistingBranch ? config.branchName : config.baseBranch,
         );
       }
     }
@@ -71,7 +71,7 @@ export async function getUpdatedPackageFiles(
     ) {
       logger.debug(
         { packageFile, depName },
-        'Rebasing branch after file not found'
+        'Rebasing branch after file not found',
       );
       return getUpdatedPackageFiles({
         ...config,
@@ -96,7 +96,7 @@ export async function getUpdatedPackageFiles(
       if (reuseExistingBranch && status !== 'already-updated') {
         logger.debug(
           { lockFile, depName, status },
-          'Need to retry branch as it is not already up-to-date'
+          'Need to retry branch as it is not already up-to-date',
         );
         return getUpdatedPackageFiles({
           ...config,
@@ -106,7 +106,7 @@ export async function getUpdatedPackageFiles(
       if (files) {
         updatedFileContents = { ...updatedFileContents, ...files };
         Object.keys(files).forEach(
-          (file) => delete nonUpdatedFileContents[file]
+          (file) => delete nonUpdatedFileContents[file],
         );
       }
       if (status === 'update-failed' || status === 'unsupported') {
@@ -132,14 +132,14 @@ export async function getUpdatedPackageFiles(
           }
         } else if (status === 'already-updated') {
           logger.debug(
-            `Upgrade of ${depName} to ${newVersion} is already done in existing branch`
+            `Upgrade of ${depName} to ${newVersion} is already done in existing branch`,
           );
         } else {
           // something changed
           if (reuseExistingBranch) {
             logger.debug(
               { lockFile, depName, status },
-              'Need to retry branch as upgrade requirements are not mets'
+              'Need to retry branch as upgrade requirements are not mets',
             );
             return getUpdatedPackageFiles({
               ...config,
@@ -149,14 +149,14 @@ export async function getUpdatedPackageFiles(
           if (files) {
             updatedFileContents = { ...updatedFileContents, ...files };
             Object.keys(files).forEach(
-              (file) => delete nonUpdatedFileContents[file]
+              (file) => delete nonUpdatedFileContents[file],
             );
           }
         }
       } else {
         logger.debug(
           { manager },
-          'isLockFileUpdate without updateLockedDependency'
+          'isLockFileUpdate without updateLockedDependency',
         );
         if (!updatedFileContents[packageFile]) {
           nonUpdatedFileContents[packageFile] = packageFileContent!;
@@ -170,7 +170,7 @@ export async function getUpdatedPackageFiles(
           upgrade,
           packageFileContent!,
           reuseExistingBranch,
-          firstUpdate
+          firstUpdate,
         );
         firstUpdate = false;
         if (res) {
@@ -182,7 +182,7 @@ export async function getUpdatedPackageFiles(
             const { bumpedContent } = await bumpPackageVersion(
               res,
               upgrade.packageFileVersion,
-              upgrade.bumpVersion
+              upgrade.bumpVersion,
             );
             res = bumpedContent;
           }
@@ -216,7 +216,7 @@ export async function getUpdatedPackageFiles(
         const { bumpedContent } = await bumpPackageVersion(
           newContent,
           upgrade.packageFileVersion,
-          upgrade.bumpVersion
+          upgrade.bumpVersion,
         );
         newContent = bumpedContent;
       }
@@ -224,7 +224,7 @@ export async function getUpdatedPackageFiles(
         if (reuseExistingBranch) {
           logger.debug(
             { packageFile, depName },
-            'Rebasing branch after error updating content'
+            'Rebasing branch after error updating content',
           );
           return getUpdatedPackageFiles({
             ...config,
@@ -233,7 +233,7 @@ export async function getUpdatedPackageFiles(
         }
         logger.debug(
           { existingContent: packageFileContent, config: upgrade },
-          'Error updating file'
+          'Error updating file',
         );
         throw new Error(WORKER_FILE_UPDATE_FAILED);
       }
@@ -242,7 +242,7 @@ export async function getUpdatedPackageFiles(
           // This ensure it's always 1 commit from the bot
           logger.debug(
             { packageFile, depName },
-            'Need to update package file so will rebase first'
+            'Need to update package file so will rebase first',
           );
           return getUpdatedPackageFiles({
             ...config,
@@ -250,7 +250,7 @@ export async function getUpdatedPackageFiles(
           });
         }
         logger.debug(
-          `Updating ${depName} in ${coerceString(packageFile, lockFile)}`
+          `Updating ${depName} in ${coerceString(packageFile, lockFile)}`,
         );
         updatedFileContents[packageFile] = newContent;
         delete nonUpdatedFileContents[packageFile];
@@ -264,7 +264,7 @@ export async function getUpdatedPackageFiles(
     }
   }
   const updatedPackageFiles: FileAddition[] = Object.keys(
-    updatedFileContents
+    updatedFileContents,
   ).map((name) => ({
     type: 'addition',
     path: name,
@@ -297,7 +297,7 @@ export async function getUpdatedPackageFiles(
     }
   }
   const nonUpdatedPackageFiles: FileAddition[] = Object.keys(
-    nonUpdatedFileContents
+    nonUpdatedFileContents,
   ).map((name) => ({
     type: 'addition',
     path: name,
diff --git a/lib/workers/repository/update/branch/handle-existing.ts b/lib/workers/repository/update/branch/handle-existing.ts
index 3deb228e733a8f9b0caa9da7760512cedd18905d..a8a63ff27617f5fc7e1081dd502adcdc48da8a9c 100644
--- a/lib/workers/repository/update/branch/handle-existing.ts
+++ b/lib/workers/repository/update/branch/handle-existing.ts
@@ -12,7 +12,7 @@ import type { BranchConfig } from '../../../types';
 
 export async function handleClosedPr(
   config: BranchConfig,
-  pr: Pr
+  pr: Pr,
 ): Promise<void> {
   if (pr.state === 'closed') {
     let content;
@@ -30,7 +30,7 @@ export async function handleClosedPr(
     if (!config.suppressNotifications!.includes('prIgnoreNotification')) {
       if (GlobalConfig.get('dryRun')) {
         logger.info(
-          `DRY-RUN: Would ensure closed PR comment in PR #${pr.number}`
+          `DRY-RUN: Would ensure closed PR comment in PR #${pr.number}`,
         );
       } else {
         await ensureComment({
@@ -52,7 +52,7 @@ export async function handleClosedPr(
 
 export async function handleModifiedPr(
   config: BranchConfig,
-  pr: Pr
+  pr: Pr,
 ): Promise<void> {
   if (config.suppressNotifications!.includes('prEditedNotification')) {
     return;
@@ -71,7 +71,7 @@ export async function handleModifiedPr(
     logger.debug('Manual rebase has been requested for PR');
     if (GlobalConfig.get('dryRun')) {
       logger.info(
-        `DRY-RUN: Would remove edited/blocked PR comment in PR #${pr.number}`
+        `DRY-RUN: Would remove edited/blocked PR comment in PR #${pr.number}`,
       );
       return;
     }
@@ -84,7 +84,7 @@ export async function handleModifiedPr(
   } else {
     if (GlobalConfig.get('dryRun')) {
       logger.info(
-        `DRY-RUN: Would ensure edited/blocked PR comment in PR #${pr.number}`
+        `DRY-RUN: Would ensure edited/blocked PR comment in PR #${pr.number}`,
       );
       return;
     }
diff --git a/lib/workers/repository/update/branch/index.spec.ts b/lib/workers/repository/update/branch/index.spec.ts
index 68bcbf57b01d12d33455f8b2b5ae25100a59e6f7..399942f33c8981f42a9729069f7a426da0657206 100644
--- a/lib/workers/repository/update/branch/index.spec.ts
+++ b/lib/workers/repository/update/branch/index.spec.ts
@@ -82,7 +82,7 @@ const adminConfig: RepoGlobalConfig = { localDir: '', cacheDir: '' };
 
 function findFileContent(
   files: FileChange[] | undefined,
-  path: string
+  path: string,
 ): string | null {
   const f = files?.find((file) => file.path === path);
   if (f?.type === 'addition' && f.contents) {
@@ -249,7 +249,7 @@ describe('workers/repository/update/branch/index', () => {
       platform.getBranchPr.mockResolvedValueOnce(
         partial<Pr>({
           state: 'open',
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(false);
       await branchWorker.processBranch(config);
@@ -264,7 +264,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           number: 13,
           state: 'closed',
-        })
+        }),
       );
       await branchWorker.processBranch(config);
       expect(reuse.shouldReuseExistingBranch).toHaveBeenCalledTimes(0);
@@ -279,7 +279,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           number: 13,
           state: 'closed',
-        })
+        }),
       );
       await branchWorker.processBranch(config);
       expect(reuse.shouldReuseExistingBranch).toHaveBeenCalledTimes(0);
@@ -295,7 +295,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           number: 13,
           state: 'merged',
-        })
+        }),
       );
       await branchWorker.processBranch(config);
       expect(reuse.shouldReuseExistingBranch).toHaveBeenCalledTimes(0);
@@ -308,7 +308,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           number: 13,
           state: 'closed',
-        })
+        }),
       );
       await branchWorker.processBranch(config);
       expect(reuse.shouldReuseExistingBranch).toHaveBeenCalledTimes(0);
@@ -326,7 +326,7 @@ describe('workers/repository/update/branch/index', () => {
       await branchWorker.processBranch(config);
       expect(reuse.shouldReuseExistingBranch).toHaveBeenCalledTimes(0);
       expect(logger.debug).toHaveBeenCalledWith(
-        `Matching PR #${pr.number} was merged previously`
+        `Matching PR #${pr.number} was merged previously`,
       );
     });
 
@@ -336,11 +336,11 @@ describe('workers/repository/update/branch/index', () => {
       platform.getBranchPr.mockResolvedValueOnce(
         partial<Pr>({
           state: 'merged',
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       await expect(branchWorker.processBranch(config)).rejects.toThrow(
-        REPOSITORY_CHANGED
+        REPOSITORY_CHANGED,
       );
     });
 
@@ -351,7 +351,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           state: 'open',
           labels: ['rebase'],
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       const res = await branchWorker.processBranch(config);
@@ -382,11 +382,11 @@ describe('workers/repository/update/branch/index', () => {
         result: 'pr-edited',
       });
       expect(logger.debug).toHaveBeenCalledWith(
-        `PR has been edited, PrNo:${pr.number}`
+        `PR has been edited, PrNo:${pr.number}`,
       );
       expect(platform.ensureComment).toHaveBeenCalledTimes(1);
       expect(platform.ensureComment).toHaveBeenCalledWith(
-        expect.objectContaining({ ...ensureCommentConfig })
+        expect.objectContaining({ ...ensureCommentConfig }),
       );
     });
 
@@ -414,11 +414,11 @@ describe('workers/repository/update/branch/index', () => {
         result: 'pr-edited',
       });
       expect(logger.debug).toHaveBeenCalledWith(
-        `PR has been edited, PrNo:${pr.number}`
+        `PR has been edited, PrNo:${pr.number}`,
       );
       expect(platform.ensureComment).toHaveBeenCalledTimes(1);
       expect(platform.ensureComment).toHaveBeenCalledWith(
-        expect.objectContaining({ ...ensureCommentConfig })
+        expect.objectContaining({ ...ensureCommentConfig }),
       );
     });
 
@@ -440,7 +440,7 @@ describe('workers/repository/update/branch/index', () => {
         result: 'pr-edited',
       });
       expect(logger.debug).toHaveBeenCalledWith(
-        `PR has been edited, PrNo:${pr.number}`
+        `PR has been edited, PrNo:${pr.number}`,
       );
       expect(platform.ensureComment).toHaveBeenCalledTimes(0);
     });
@@ -462,7 +462,7 @@ describe('workers/repository/update/branch/index', () => {
           bodyStruct: partial<PrBodyStruct>({
             debugData: partial<PrDebugData>({ targetBranch: 'master' }),
           }),
-        })
+        }),
       );
       config.baseBranch = 'master';
       scm.isBranchModified.mockResolvedValueOnce(false);
@@ -473,11 +473,11 @@ describe('workers/repository/update/branch/index', () => {
         result: 'pr-edited',
       });
       expect(logger.debug).toHaveBeenCalledWith(
-        `PR has been edited, PrNo:${pr.number}`
+        `PR has been edited, PrNo:${pr.number}`,
       );
       expect(platform.ensureComment).toHaveBeenCalledTimes(1);
       expect(platform.ensureComment).toHaveBeenCalledWith(
-        expect.objectContaining({ ...ensureCommentConfig })
+        expect.objectContaining({ ...ensureCommentConfig }),
       );
     });
 
@@ -616,7 +616,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -636,7 +636,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -654,7 +654,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -675,7 +675,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -695,7 +695,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -721,7 +721,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -747,7 +747,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -773,7 +773,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -799,7 +799,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -825,7 +825,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -855,7 +855,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -879,7 +879,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -905,18 +905,18 @@ describe('workers/repository/update/branch/index', () => {
 
     it('skips when automerge is off schedule', async () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
-        partial<PackageFilesResult>()
+        partial<PackageFilesResult>(),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce(
         partial<WriteExistingFilesResult>({
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       scm.branchExists.mockResolvedValue(true);
       automerge.tryBranchAutomerge.mockResolvedValueOnce('off schedule');
       prWorker.ensurePr.mockResolvedValueOnce(
-        partial<ResultWithPr>({ type: 'with-pr' })
+        partial<ResultWithPr>({ type: 'with-pr' }),
       );
       prAutomerge.checkAutoMerge.mockResolvedValueOnce({ automerged: false });
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
@@ -931,7 +931,7 @@ describe('workers/repository/update/branch/index', () => {
         commitSha: null,
       });
       expect(logger.debug).toHaveBeenCalledWith(
-        'Branch cannot automerge now because automergeSchedule is off schedule - skipping'
+        'Branch cannot automerge now because automergeSchedule is off schedule - skipping',
       );
       expect(prWorker.ensurePr).toHaveBeenCalledTimes(0);
       expect(platform.ensureCommentRemoval).toHaveBeenCalledTimes(0);
@@ -942,7 +942,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -966,7 +966,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -991,7 +991,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -1016,7 +1016,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -1031,7 +1031,7 @@ describe('workers/repository/update/branch/index', () => {
       prAutomerge.checkAutoMerge.mockResolvedValueOnce({ automerged: true });
       config.releaseTimestamp = new Date().toISOString();
       await expect(branchWorker.processBranch(config)).rejects.toThrow(
-        Error(MANAGER_LOCKFILE_ERROR)
+        Error(MANAGER_LOCKFILE_ERROR),
       );
     });
 
@@ -1039,7 +1039,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -1077,7 +1077,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -1105,7 +1105,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [partial<ArtifactError>()],
@@ -1120,7 +1120,7 @@ describe('workers/repository/update/branch/index', () => {
         skipBranchUpdate: true,
       });
       expect(logger.debug).toHaveBeenCalledWith(
-        'Base branch changed by user, rebasing the branch onto new base'
+        'Base branch changed by user, rebasing the branch onto new base',
       );
       expect(commit.commitFilesToBranch).toHaveBeenCalled();
       expect(prWorker.ensurePr).toHaveBeenCalledTimes(1);
@@ -1130,7 +1130,7 @@ describe('workers/repository/update/branch/index', () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1157,7 +1157,7 @@ describe('workers/repository/update/branch/index', () => {
       checkExisting.prAlreadyExisted.mockResolvedValueOnce(
         partial<Pr>({
           state: 'closed',
-        })
+        }),
       );
       GlobalConfig.set({ ...adminConfig, dryRun: 'full' });
       expect(await branchWorker.processBranch(config)).toEqual({
@@ -1182,7 +1182,7 @@ describe('workers/repository/update/branch/index', () => {
         result: 'pr-edited',
       });
       expect(logger.info).toHaveBeenCalledWith(
-        `DRY-RUN: Would ensure edited/blocked PR comment in PR #${pr.number}`
+        `DRY-RUN: Would ensure edited/blocked PR comment in PR #${pr.number}`,
       );
       expect(platform.updatePr).toHaveBeenCalledTimes(0);
     });
@@ -1226,7 +1226,7 @@ describe('workers/repository/update/branch/index', () => {
         commitSha: null,
       });
       expect(logger.info).toHaveBeenCalledWith(
-        `DRY-RUN: Would remove edited/blocked PR comment in PR #${pr.number}`
+        `DRY-RUN: Would remove edited/blocked PR comment in PR #${pr.number}`,
       );
     });
 
@@ -1235,7 +1235,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<PackageFilesResult>({
           updatedPackageFiles: [],
           artifactErrors: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1250,7 +1250,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
@@ -1264,7 +1264,7 @@ describe('workers/repository/update/branch/index', () => {
         await branchWorker.processBranch({
           ...config,
           artifactErrors: [],
-        })
+        }),
       ).toEqual({
         branchExists: true,
         updatesVerified: true,
@@ -1273,7 +1273,7 @@ describe('workers/repository/update/branch/index', () => {
         commitSha: null,
       });
       expect(logger.info).toHaveBeenCalledWith(
-        'DRY-RUN: Would ensure comment removal in PR #undefined'
+        'DRY-RUN: Would ensure comment removal in PR #undefined',
       );
     });
 
@@ -1283,7 +1283,7 @@ describe('workers/repository/update/branch/index', () => {
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1298,7 +1298,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
@@ -1324,7 +1324,7 @@ describe('workers/repository/update/branch/index', () => {
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1337,7 +1337,7 @@ describe('workers/repository/update/branch/index', () => {
           state: 'open',
           labels: ['stop-updating'],
           bodyStruct: { hash: hashBody(`- [ ] <!-- rebase-check -->`) },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
@@ -1361,7 +1361,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           sourceBranch: 'old/some-branch',
           state: 'open',
-        })
+        }),
       );
       const inconfig = {
         ...config,
@@ -1388,7 +1388,7 @@ describe('workers/repository/update/branch/index', () => {
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1404,7 +1404,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
@@ -1430,7 +1430,7 @@ describe('workers/repository/update/branch/index', () => {
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1443,7 +1443,7 @@ describe('workers/repository/update/branch/index', () => {
           state: 'open',
           labels: ['stop-updating'],
           bodyStruct: { hash: hashBody(`- [ ] <!-- rebase-check -->`) },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       schedule.isScheduledNow.mockReturnValueOnce(false);
@@ -1493,7 +1493,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       git.getRepoStatus.mockResolvedValueOnce(
@@ -1501,7 +1501,7 @@ describe('workers/repository/update/branch/index', () => {
           modified: ['modified_file'],
           not_added: [],
           deleted: ['deleted_file'],
-        })
+        }),
       );
       fs.readLocalFile.mockResolvedValueOnce('modified file content');
       fs.localPathExists
@@ -1550,12 +1550,12 @@ describe('workers/repository/update/branch/index', () => {
         commitSha: null,
       });
       const errorMessage = expect.stringContaining(
-        "Post-upgrade command 'disallowed task' has not been added to the allowed list in allowedPostUpgradeCommand"
+        "Post-upgrade command 'disallowed task' has not been added to the allowed list in allowedPostUpgradeCommand",
       );
       expect(platform.ensureComment).toHaveBeenCalledWith(
         expect.objectContaining({
           content: errorMessage,
-        })
+        }),
       );
       expect(sanitize.sanitize).toHaveBeenCalledWith(errorMessage);
     });
@@ -1589,7 +1589,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       git.getRepoStatus.mockResolvedValueOnce(
@@ -1597,7 +1597,7 @@ describe('workers/repository/update/branch/index', () => {
           modified: ['modified_file'],
           not_added: [],
           deleted: ['deleted_file'],
-        })
+        }),
       );
 
       fs.readLocalFile.mockResolvedValueOnce('modified file content');
@@ -1639,7 +1639,7 @@ describe('workers/repository/update/branch/index', () => {
       expect(platform.ensureComment).toHaveBeenCalledWith(
         expect.objectContaining({
           content: errorMessage,
-        })
+        }),
       );
       expect(sanitize.sanitize).toHaveBeenCalledWith(errorMessage);
     });
@@ -1674,7 +1674,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       git.getRepoStatus.mockResolvedValueOnce(
@@ -1682,7 +1682,7 @@ describe('workers/repository/update/branch/index', () => {
           modified: ['modified_file'],
           not_added: [],
           deleted: ['deleted_file'],
-        })
+        }),
       );
 
       fs.readLocalFile.mockResolvedValueOnce('modified file content');
@@ -1744,7 +1744,7 @@ describe('workers/repository/update/branch/index', () => {
           updatedPackageFiles: [updatedPackageFile],
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1765,7 +1765,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       git.getRepoStatus
@@ -1774,14 +1774,14 @@ describe('workers/repository/update/branch/index', () => {
             modified: ['modified_file', 'modified_then_deleted_file'],
             not_added: [],
             deleted: ['deleted_file', 'deleted_then_created_file'],
-          })
+          }),
         )
         .mockResolvedValueOnce(
           partial<StatusResult>({
             modified: ['modified_file', 'deleted_then_created_file'],
             not_added: [],
             deleted: ['deleted_file', 'modified_then_deleted_file'],
-          })
+          }),
         );
 
       fs.readLocalFile
@@ -1866,27 +1866,28 @@ describe('workers/repository/update/branch/index', () => {
       const calledWithConfig = commit.commitFilesToBranch.mock.calls[0][0];
       const updatedArtifacts = calledWithConfig.updatedArtifacts;
       expect(findFileContent(updatedArtifacts, 'modified_file')).toBe(
-        'modified file content again'
+        'modified file content again',
       );
       expect(
-        findFileContent(updatedArtifacts, 'deleted_then_created_file')
+        findFileContent(updatedArtifacts, 'deleted_then_created_file'),
       ).toBe('this file was once deleted');
       expect(
         updatedArtifacts?.find(
-          (f) => f.type === 'deletion' && f.path === 'deleted_then_created_file'
-        )
+          (f) =>
+            f.type === 'deletion' && f.path === 'deleted_then_created_file',
+        ),
       ).toBeUndefined();
       expect(
         updatedArtifacts?.find(
           (f) =>
-            f.type === 'addition' && f.path === 'modified_then_deleted_file'
-        )
+            f.type === 'addition' && f.path === 'modified_then_deleted_file',
+        ),
       ).toBeUndefined();
       expect(
         updatedArtifacts?.find(
           (f) =>
-            f.type === 'deletion' && f.path === 'modified_then_deleted_file'
-        )
+            f.type === 'deletion' && f.path === 'modified_then_deleted_file',
+        ),
       ).toBeDefined();
     });
 
@@ -1901,7 +1902,7 @@ describe('workers/repository/update/branch/index', () => {
           updatedPackageFiles: [updatedPackageFile],
           artifactErrors: [],
           updatedArtifacts: [],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -1922,7 +1923,7 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- rebase-check -->`),
             rebaseRequested: true,
           },
-        })
+        }),
       );
       scm.isBranchModified.mockResolvedValueOnce(true);
       git.getRepoStatus.mockResolvedValueOnce(
@@ -1930,7 +1931,7 @@ describe('workers/repository/update/branch/index', () => {
           modified: ['modified_file', 'modified_then_deleted_file'],
           not_added: [],
           deleted: ['deleted_file', 'deleted_then_created_file'],
-        })
+        }),
       );
 
       fs.readLocalFile
@@ -2011,8 +2012,8 @@ describe('workers/repository/update/branch/index', () => {
       expect(
         findFileContent(
           commit.commitFilesToBranch.mock.calls[0][0].updatedArtifacts,
-          'modified_file'
-        )
+          'modified_file',
+        ),
       ).toBe('modified file content');
     });
 
@@ -2027,7 +2028,7 @@ describe('workers/repository/update/branch/index', () => {
       scm.branchExists.mockResolvedValue(true);
       commit.commitFilesToBranch.mockResolvedValueOnce(null);
       expect(
-        await branchWorker.processBranch({ ...config, rebaseWhen: 'never' })
+        await branchWorker.processBranch({ ...config, rebaseWhen: 'never' }),
       ).toMatchObject({ result: 'no-work' });
       expect(commit.commitFilesToBranch).not.toHaveBeenCalled();
     });
@@ -2047,14 +2048,14 @@ describe('workers/repository/update/branch/index', () => {
           ...config,
           rebaseWhen: 'never',
           dependencyDashboardChecks: { 'renovate/some-branch': 'other' },
-        })
+        }),
       ).toMatchObject({ result: 'done' });
       expect(commit.commitFilesToBranch).toHaveBeenCalled();
     });
 
     it('continues when checked by checkedBranches', async () => {
       getUpdated.getUpdatedPackageFiles.mockResolvedValueOnce(
-        updatedPackageFiles
+        updatedPackageFiles,
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -2068,7 +2069,7 @@ describe('workers/repository/update/branch/index', () => {
           dependencyDashboardChecks: {
             'renovate/some-branch': 'global-config',
           },
-        })
+        }),
       ).toMatchObject({ result: 'done' });
       expect(commit.commitFilesToBranch).toHaveBeenCalled();
     });
@@ -2087,7 +2088,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           sourceBranch: 'old/some-branch',
           state: 'open',
-        })
+        }),
       );
       const inconfig = {
         ...config,
@@ -2105,7 +2106,7 @@ describe('workers/repository/update/branch/index', () => {
       });
       expect(logger.debug).toHaveBeenCalledWith('Found existing branch PR');
       expect(logger.debug).toHaveBeenCalledWith(
-        'No package files need updating'
+        'No package files need updating',
       );
     });
 
@@ -2123,7 +2124,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<Pr>({
           sourceBranch: 'old/some-branch',
           state: 'open',
-        })
+        }),
       );
       config.reuseExistingBranch = true;
       config.skipBranchUpdate = true;
@@ -2142,7 +2143,7 @@ describe('workers/repository/update/branch/index', () => {
       });
       expect(logger.debug).toHaveBeenCalledWith('Found existing branch PR');
       expect(logger.debug).not.toHaveBeenCalledWith(
-        'No package files need updating'
+        'No package files need updating',
       );
     });
 
@@ -2151,7 +2152,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [{}],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -2166,14 +2167,14 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- approve-all-pending-prs -->`),
             rebaseRequested: false,
           },
-        })
+        }),
       );
       scm.getBranchCommit.mockResolvedValue('123test'); //TODO:not needed?
       expect(
         await branchWorker.processBranch({
           ...config,
           dependencyDashboardAllPending: true,
-        })
+        }),
       ).toEqual({
         branchExists: true,
         updatesVerified: true,
@@ -2188,7 +2189,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [{}],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -2203,14 +2204,14 @@ describe('workers/repository/update/branch/index', () => {
             hash: hashBody(`- [x] <!-- create-all-rate-limited-prs -->`),
             rebaseRequested: false,
           },
-        })
+        }),
       );
       scm.getBranchCommit.mockResolvedValue('123test'); //TODO:not needed?
       expect(
         await branchWorker.processBranch({
           ...config,
           dependencyDashboardAllRateLimited: true,
-        })
+        }),
       ).toEqual({
         branchExists: true,
         updatesVerified: true,
@@ -2225,7 +2226,7 @@ describe('workers/repository/update/branch/index', () => {
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
           artifactErrors: [{}],
-        })
+        }),
       );
       npmPostExtract.getAdditionalFiles.mockResolvedValueOnce({
         artifactErrors: [],
@@ -2258,12 +2259,12 @@ describe('workers/repository/update/branch/index', () => {
       platform.getBranchPr.mockResolvedValueOnce(
         partial<Pr>({
           state: 'open',
-        })
+        }),
       );
       jest.spyOn(getUpdated, 'getUpdatedPackageFiles').mockResolvedValueOnce(
         partial<PackageFilesResult>({
           updatedPackageFiles: [partial<FileChange>()],
-        })
+        }),
       );
       const inconfig = {
         ...config,
@@ -2294,11 +2295,11 @@ describe('workers/repository/update/branch/index', () => {
       // Check that the last checkoutBranch call is after the only commitFilesToBranch call
       const checkoutBranchCalledTimes = scm.checkoutBranch.mock.calls.length;
       expect(
-        commit.commitFilesToBranch.mock.invocationCallOrder[0]
+        commit.commitFilesToBranch.mock.invocationCallOrder[0],
       ).toBeLessThan(
         scm.checkoutBranch.mock.invocationCallOrder[
           checkoutBranchCalledTimes - 1
-        ]
+        ],
       );
     });
   });
diff --git a/lib/workers/repository/update/branch/index.ts b/lib/workers/repository/update/branch/index.ts
index d7c5541117b51f078b62a3cbb2c228e344ebe908..ed167ba474c9fc45c65309401092aac0cf56594d 100644
--- a/lib/workers/repository/update/branch/index.ts
+++ b/lib/workers/repository/update/branch/index.ts
@@ -51,26 +51,26 @@ import { setConfidence, setStability } from './status-checks';
 
 async function rebaseCheck(
   config: RenovateConfig,
-  branchPr: Pr
+  branchPr: Pr,
 ): Promise<boolean> {
   const titleRebase = branchPr.title?.startsWith('rebase!');
   if (titleRebase) {
     logger.debug(
-      `Manual rebase requested via PR title for #${branchPr.number}`
+      `Manual rebase requested via PR title for #${branchPr.number}`,
     );
     return true;
   }
   const labelRebase = !!branchPr.labels?.includes(config.rebaseLabel!);
   if (labelRebase) {
     logger.debug(
-      `Manual rebase requested via PR labels for #${branchPr.number}`
+      `Manual rebase requested via PR labels for #${branchPr.number}`,
     );
     // istanbul ignore if
     if (GlobalConfig.get('dryRun')) {
       logger.info(
         `DRY-RUN: Would delete label ${config.rebaseLabel!} from #${
           branchPr.number
-        }`
+        }`,
       );
     } else {
       await platform.deleteLabel(branchPr.number, config.rebaseLabel!);
@@ -80,7 +80,7 @@ async function rebaseCheck(
   const prRebaseChecked = !!branchPr.bodyStruct?.rebaseRequested;
   if (prRebaseChecked) {
     logger.debug(
-      `Manual rebase requested via PR checkbox for #${branchPr.number}`
+      `Manual rebase requested via PR checkbox for #${branchPr.number}`,
     );
     return true;
   }
@@ -114,7 +114,7 @@ export interface ProcessBranchResult {
 }
 
 export async function processBranch(
-  branchConfig: BranchConfig
+  branchConfig: BranchConfig,
 ): Promise<ProcessBranchResult> {
   let commitSha: string | null = null;
   let config: BranchConfig = { ...branchConfig };
@@ -124,7 +124,7 @@ export async function processBranch(
   if (!branchExists && config.branchPrefix !== config.branchPrefixOld) {
     const branchName = config.branchName.replace(
       config.branchPrefix!,
-      config.branchPrefixOld!
+      config.branchPrefixOld!,
     );
     branchExists = await scm.branchExists(branchName);
     if (branchExists) {
@@ -135,7 +135,7 @@ export async function processBranch(
 
   let branchPr = await platform.getBranchPr(
     config.branchName,
-    config.baseBranch
+    config.baseBranch,
   );
   logger.debug(`branchExists=${branchExists}`);
   const dependencyDashboardCheck =
@@ -162,7 +162,7 @@ export async function processBranch(
     } else if (!branchPr && existingPr && !dependencyDashboardCheck) {
       logger.debug(
         { prTitle: config.prTitle },
-        'Closed PR already exists. Skipping branch.'
+        'Closed PR already exists. Skipping branch.',
       );
       await handleClosedPr(config, existingPr);
       return {
@@ -223,7 +223,7 @@ export async function processBranch(
     if (branchExists) {
       // check if branch is labelled to stop
       config.stopUpdating = branchPr?.labels?.includes(
-        config.stopUpdatingLabel!
+        config.stopUpdatingLabel!,
       );
 
       const prRebaseChecked = !!branchPr?.bodyStruct?.rebaseRequested;
@@ -231,7 +231,7 @@ export async function processBranch(
       if (branchExists && !dependencyDashboardCheck && config.stopUpdating) {
         if (!prRebaseChecked) {
           logger.info(
-            'Branch updating is skipped because stopUpdatingLabel is present in config'
+            'Branch updating is skipped because stopUpdatingLabel is present in config',
           );
           return {
             branchExists: true,
@@ -247,7 +247,7 @@ export async function processBranch(
         logger.debug('Found existing branch PR');
         if (branchPr.state !== 'open') {
           logger.debug(
-            'PR has been closed or merged since this run started - aborting'
+            'PR has been closed or merged since this run started - aborting',
           );
           throw new Error(REPOSITORY_CHANGED);
         }
@@ -280,12 +280,12 @@ export async function processBranch(
         if (!oldPrSha || oldPrSha === branchSha) {
           logger.debug(
             { oldPrNumber: oldPr.number, oldPrSha, branchSha },
-            'Found old PR matching this branch - will override it'
+            'Found old PR matching this branch - will override it',
           );
         } else {
           logger.debug(
             { oldPrNumber: oldPr.number, oldPrSha, branchSha },
-            'Found old PR but the SHA is different'
+            'Found old PR but the SHA is different',
           );
           return {
             branchExists,
@@ -325,7 +325,7 @@ export async function processBranch(
         };
       }
       logger.debug(
-        'Branch + PR exists but is not scheduled -- will update if necessary'
+        'Branch + PR exists but is not scheduled -- will update if necessary',
       );
     }
     //stability checks
@@ -334,7 +334,7 @@ export async function processBranch(
         (upgrade) =>
           (is.nonEmptyString(upgrade.minimumReleaseAge) &&
             is.nonEmptyString(upgrade.releaseTimestamp)) ||
-          isActiveConfidenceLevel(upgrade.minimumConfidence!)
+          isActiveConfidenceLevel(upgrade.minimumConfidence!),
       )
     ) {
       // Only set a stability status check if one or more of the updates contain
@@ -354,7 +354,7 @@ export async function processBranch(
                 timeElapsed,
                 minimumReleaseAge: upgrade.minimumReleaseAge,
               },
-              'Update has not passed minimum release age'
+              'Update has not passed minimum release age',
             );
             config.stabilityStatus = 'yellow';
             continue;
@@ -373,14 +373,14 @@ export async function processBranch(
               depName,
               currentVersion,
               newVersion,
-              updateType
+              updateType,
             )) ?? 'neutral';
           if (satisfiesConfidenceLevel(confidence, minimumConfidence)) {
             config.confidenceStatus = 'green';
           } else {
             logger.debug(
               { depName, confidence, minimumConfidence },
-              'Update does not meet minimum confidence scores'
+              'Update does not meet minimum confidence scores',
             );
             config.confidenceStatus = 'yellow';
             continue;
@@ -395,7 +395,7 @@ export async function processBranch(
         ['not-pending', 'status-success'].includes(config.prCreation!)
       ) {
         logger.debug(
-          'Skipping branch creation due to internal status checks not met'
+          'Skipping branch creation due to internal status checks not met',
         );
         return {
           branchExists,
@@ -420,11 +420,11 @@ export async function processBranch(
       userRebaseRequested = true;
     } else if (userApproveAllPendingPR) {
       logger.debug(
-        'A user manually approved all pending PRs via the Dependency Dashboard.'
+        'A user manually approved all pending PRs via the Dependency Dashboard.',
       );
     } else if (userOpenAllRateLimtedPR) {
       logger.debug(
-        'A user manually approved all rate-limited PRs via the Dependency Dashboard.'
+        'A user manually approved all rate-limited PRs via the Dependency Dashboard.',
       );
     } else if (
       branchExists &&
@@ -445,7 +445,7 @@ export async function processBranch(
       branchPr.targetBranch !== config.baseBranch
     ) {
       logger.debug(
-        'Base branch changed by user, rebasing the branch onto new base'
+        'Base branch changed by user, rebasing the branch onto new base',
       );
       config.reuseExistingBranch = false;
     } else {
@@ -463,29 +463,29 @@ export async function processBranch(
       config = { ...config, ...res };
       if (config.updatedPackageFiles?.length) {
         logger.debug(
-          `Updated ${config.updatedPackageFiles.length} package files`
+          `Updated ${config.updatedPackageFiles.length} package files`,
         );
       } else {
         logger.debug('No package files need updating');
       }
       const additionalFiles = await getAdditionalFiles(
         config,
-        branchConfig.packageFiles!
+        branchConfig.packageFiles!,
       );
       config.artifactErrors = (config.artifactErrors ?? []).concat(
-        additionalFiles.artifactErrors
+        additionalFiles.artifactErrors,
       );
       config.updatedArtifacts = (config.updatedArtifacts ?? []).concat(
-        additionalFiles.updatedArtifacts
+        additionalFiles.updatedArtifacts,
       );
       if (config.updatedArtifacts?.length) {
         logger.debug(
           {
             updatedArtifacts: config.updatedArtifacts.map((f) =>
-              f.type === 'deletion' ? `${f.path} (delete)` : f.path
+              f.type === 'deletion' ? `${f.path} (delete)` : f.path,
             ),
           },
-          `Updated ${config.updatedArtifacts.length} lock files`
+          `Updated ${config.updatedArtifacts.length} lock files`,
         );
       } else {
         logger.debug('No updated lock files in branch');
@@ -495,7 +495,7 @@ export async function processBranch(
       }
 
       const postUpgradeCommandResults = await executePostUpgradeCommands(
-        config
+        config,
       );
 
       if (postUpgradeCommandResults !== null) {
@@ -512,15 +512,15 @@ export async function processBranch(
           const releaseTimestamp = DateTime.fromISO(config.releaseTimestamp);
           if (releaseTimestamp.plus({ hours: 2 }) < DateTime.local()) {
             logger.debug(
-              'PR is older than 2 hours, raise PR with lock file errors'
+              'PR is older than 2 hours, raise PR with lock file errors',
             );
           } else if (branchExists) {
             logger.debug(
-              'PR is less than 2 hours old but branchExists so updating anyway'
+              'PR is less than 2 hours old but branchExists so updating anyway',
             );
           } else {
             logger.debug(
-              'PR is less than 2 hours old - raise error instead of PR'
+              'PR is less than 2 hours old - raise error instead of PR',
             );
             throw new Error(MANAGER_LOCKFILE_ERROR);
           }
@@ -532,7 +532,7 @@ export async function processBranch(
         // istanbul ignore if
         if (GlobalConfig.get('dryRun')) {
           logger.info(
-            `DRY-RUN: Would ensure comment removal in PR #${branchPr.number}`
+            `DRY-RUN: Would ensure comment removal in PR #${branchPr.number}`,
           );
         } else {
           // Remove artifacts error comment only if this run has successfully updated artifacts
@@ -559,7 +559,7 @@ export async function processBranch(
             ...config,
             logJSON: config.upgrades[0].logJSON,
             releases: config.upgrades[0].releases,
-          }
+          },
         )}`;
 
         logger.trace(`commitMessage: ` + JSON.stringify(config.commitMessage));
@@ -627,7 +627,7 @@ export async function processBranch(
       }
       if (mergeStatus === 'off schedule') {
         logger.debug(
-          'Branch cannot automerge now because automergeSchedule is off schedule - skipping'
+          'Branch cannot automerge now because automergeSchedule is off schedule - skipping',
         );
         return {
           branchExists,
@@ -640,7 +640,7 @@ export async function processBranch(
         ['conflicted', 'never'].includes(config.rebaseWhen!)
       ) {
         logger.warn(
-          'Branch cannot automerge because it is behind base branch and rebaseWhen setting disallows rebasing - raising a PR instead'
+          'Branch cannot automerge because it is behind base branch and rebaseWhen setting disallows rebasing - raising a PR instead',
         );
         config.forcePr = true;
         config.branchAutomergeFailureMessage = mergeStatus;
@@ -651,7 +651,7 @@ export async function processBranch(
         mergeStatus === 'failed'
       ) {
         logger.debug(
-          `Branch automerge not possible, mergeStatus:${mergeStatus}`
+          `Branch automerge not possible, mergeStatus:${mergeStatus}`,
         );
         config.forcePr = true;
         config.branchAutomergeFailureMessage = mergeStatus;
@@ -676,7 +676,7 @@ export async function processBranch(
     }
     if (
       err.message?.startsWith(
-        'ssh_exchange_identification: Connection closed by remote host'
+        'ssh_exchange_identification: Connection closed by remote host',
       )
     ) {
       logger.debug('Throwing bad credentials');
@@ -746,7 +746,7 @@ export async function processBranch(
     logger.debug(
       `There are ${config.errors!.length} errors and ${
         config.warnings!.length
-      } warnings`
+      } warnings`,
     );
     const ensurePrResult = await ensurePr(config);
     if (ensurePrResult.type === 'without-pr') {
@@ -808,7 +808,7 @@ export async function processBranch(
       if (config.artifactErrors?.length) {
         logger.warn(
           { artifactErrors: config.artifactErrors },
-          'artifactErrors'
+          'artifactErrors',
         );
         let content = `Renovate failed to update `;
         content +=
@@ -816,7 +816,7 @@ export async function processBranch(
         content +=
           ' related to this branch. You probably do not want to merge this PR as-is.';
         content += emojify(
-          `\n\n:recycle: Renovate will retry this branch, including artifacts, only when one of the following happens:\n\n`
+          `\n\n:recycle: Renovate will retry this branch, including artifacts, only when one of the following happens:\n\n`,
         );
         content +=
           ' - any of the package files in this branch needs updating, or \n';
@@ -840,7 +840,7 @@ export async function processBranch(
         ) {
           if (GlobalConfig.get('dryRun')) {
             logger.info(
-              `DRY-RUN: Would ensure lock file error comment in PR #${pr.number}`
+              `DRY-RUN: Would ensure lock file error comment in PR #${pr.number}`,
             );
           } else {
             await ensureComment({
diff --git a/lib/workers/repository/update/branch/reuse.ts b/lib/workers/repository/update/branch/reuse.ts
index d2b3a7b1acc3558660608e0c45038ae49683909e..146723c95640e4c7049436ec7d881917d02c8e72 100644
--- a/lib/workers/repository/update/branch/reuse.ts
+++ b/lib/workers/repository/update/branch/reuse.ts
@@ -11,7 +11,7 @@ type ParentBranch = {
 };
 
 export async function shouldReuseExistingBranch(
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<ParentBranch> {
   const { baseBranch, branchName } = config;
   const result: ParentBranch = { reuseExistingBranch: false };
@@ -41,7 +41,7 @@ export async function shouldReuseExistingBranch(
     logger.debug('Branch is up-to-date');
   } else {
     logger.debug(
-      `Skipping behind base branch check due to rebaseWhen=${config.rebaseWhen!}`
+      `Skipping behind base branch check due to rebaseWhen=${config.rebaseWhen!}`,
     );
   }
 
@@ -82,7 +82,7 @@ export async function shouldReuseExistingBranch(
       groupedByPackageFile[packageFile].has('update-lockfile')
     ) {
       logger.debug(
-        `Detected multiple rangeStrategies along with update-lockfile`
+        `Detected multiple rangeStrategies along with update-lockfile`,
       );
       result.reuseExistingBranch = false;
       result.isModified = false;
diff --git a/lib/workers/repository/update/branch/schedule.spec.ts b/lib/workers/repository/update/branch/schedule.spec.ts
index 5b39e3729d2f27c4b0db86cf751c30a208431325..1e7628b02667af3bcd01d51445d218b3d249e5f0 100644
--- a/lib/workers/repository/update/branch/schedule.spec.ts
+++ b/lib/workers/repository/update/branch/schedule.spec.ts
@@ -31,7 +31,7 @@ describe('workers/repository/update/branch/schedule', () => {
 
     it('returns false if using minutes', () => {
       expect(
-        schedule.hasValidSchedule(['every 15 mins every weekday'])[0]
+        schedule.hasValidSchedule(['every 15 mins every weekday'])[0],
       ).toBeFalse();
     });
 
@@ -49,7 +49,7 @@ describe('workers/repository/update/branch/schedule', () => {
 
     it('returns false if any schedule has no days or time range', () => {
       expect(schedule.hasValidSchedule(['at 5:00pm', 'on saturday'])[0]).toBe(
-        false
+        false,
       );
     });
 
@@ -59,13 +59,15 @@ describe('workers/repository/update/branch/schedule', () => {
 
     it('returns true if schedule has days of week', () => {
       expect(schedule.hasValidSchedule(['on friday and saturday'])[0]).toBe(
-        true
+        true,
       );
     });
 
     it('returns true for multi day schedules', () => {
       expect(
-        schedule.hasValidSchedule(['after 5:00pm on wednesday and thursday'])[0]
+        schedule.hasValidSchedule([
+          'after 5:00pm on wednesday and thursday',
+        ])[0],
       ).toBeTrue();
     });
 
@@ -75,7 +77,7 @@ describe('workers/repository/update/branch/schedule', () => {
 
     it('returns true for first day of the month', () => {
       expect(
-        schedule.hasValidSchedule(['on the first day of the month'])[0]
+        schedule.hasValidSchedule(['on the first day of the month'])[0],
       ).toBeTrue();
     });
 
@@ -91,7 +93,7 @@ describe('workers/repository/update/branch/schedule', () => {
 
     it('returns true if schedule has a start and end time', () => {
       expect(
-        schedule.hasValidSchedule(['after 11:00pm and before 6:00am'])[0]
+        schedule.hasValidSchedule(['after 11:00pm and before 6:00am'])[0],
       ).toBeTrue();
     });
 
@@ -99,7 +101,7 @@ describe('workers/repository/update/branch/schedule', () => {
       expect(
         schedule.hasValidSchedule([
           'after 11:00pm and before 6:00am every weekday',
-        ])[0]
+        ])[0],
       ).toBeTrue();
     });
 
@@ -113,7 +115,7 @@ describe('workers/repository/update/branch/schedule', () => {
       expect(
         schedule.hasValidSchedule([
           'before 5am on the first day of the month',
-        ])[0]
+        ])[0],
       ).toBeTrue();
       expect(schedule.hasValidSchedule(['every month'])[0]).toBeTrue();
     });
diff --git a/lib/workers/repository/update/branch/schedule.ts b/lib/workers/repository/update/branch/schedule.ts
index 7e26589f214d4c50de8b5abf1b8d6ebd036c94db..43ed2bc2981f297d22f99c35012ce5bd0f5454cf 100644
--- a/lib/workers/repository/update/branch/schedule.ts
+++ b/lib/workers/repository/update/branch/schedule.ts
@@ -22,7 +22,7 @@ const scheduleMappings: Record<string, string> = {
 
 function parseCron(
   scheduleText: string,
-  timezone?: string
+  timezone?: string,
 ): CronExpression | undefined {
   try {
     return parseExpression(scheduleText, { tz: timezone });
@@ -39,7 +39,7 @@ export function hasValidTimezone(timezone: string): [true] | [false, string] {
 }
 
 export function hasValidSchedule(
-  schedule: string[] | null | 'at any time'
+  schedule: string[] | null | 'at any time',
 ): [true] | [false, string] {
   let message = '';
   if (
@@ -66,7 +66,7 @@ export function hasValidSchedule(
     }
 
     const massagedText = fixShortHours(
-      scheduleMappings[scheduleText] || scheduleText
+      scheduleMappings[scheduleText] || scheduleText,
     );
 
     const parsedSchedule = later.parse.text(massagedText);
@@ -82,7 +82,7 @@ export function hasValidSchedule(
     if (
       !parsedSchedule.schedules.some(
         (s) =>
-          !!s.M || s.d !== undefined || !!s.D || s.t_a !== undefined || !!s.t_b
+          !!s.M || s.d !== undefined || !!s.D || s.t_a !== undefined || !!s.t_b,
       )
     ) {
       message = `Invalid schedule: "${scheduleText}" has no months, days of week or time of day`;
@@ -121,7 +121,7 @@ function cronMatches(cron: string, now: DateTime, timezone?: string): boolean {
 
   if (
     !parsedCron.fields.dayOfWeek.includes(
-      (now.weekday % 7) as DayOfTheWeekRange
+      (now.weekday % 7) as DayOfTheWeekRange,
     )
   ) {
     // Weekdays mismatch
@@ -139,12 +139,12 @@ function cronMatches(cron: string, now: DateTime, timezone?: string): boolean {
 
 export function isScheduledNow(
   config: RenovateConfig,
-  scheduleKey: 'schedule' | 'automergeSchedule' = 'schedule'
+  scheduleKey: 'schedule' | 'automergeSchedule' = 'schedule',
 ): boolean {
   let configSchedule = config[scheduleKey];
   logger.debug(
     // TODO: types (#22198)
-    `Checking schedule(${String(configSchedule)}, ${config.timezone!})`
+    `Checking schedule(${String(configSchedule)}, ${config.timezone!})`,
   );
   if (
     !configSchedule ||
@@ -157,7 +157,7 @@ export function isScheduledNow(
   }
   if (!is.array(configSchedule)) {
     logger.warn(
-      `config schedule is not an array: ${JSON.stringify(configSchedule)}`
+      `config schedule is not an array: ${JSON.stringify(configSchedule)}`,
     );
     configSchedule = [configSchedule];
   }
diff --git a/lib/workers/repository/update/branch/status-checks.ts b/lib/workers/repository/update/branch/status-checks.ts
index 6fe93414f0f6bef8df44e273525cd295e8d266aa..ecb0167dc2d9d62df1dc2e1a082f4318a722bc4b 100644
--- a/lib/workers/repository/update/branch/status-checks.ts
+++ b/lib/workers/repository/update/branch/status-checks.ts
@@ -8,10 +8,10 @@ import type { MergeConfidence } from '../../../../util/merge-confidence/types';
 export async function resolveBranchStatus(
   branchName: string,
   internalChecksAsSuccess: boolean,
-  ignoreTests = false
+  ignoreTests = false,
 ): Promise<BranchStatus> {
   logger.debug(
-    `resolveBranchStatus(branchName=${branchName}, ignoreTests=${ignoreTests})`
+    `resolveBranchStatus(branchName=${branchName}, ignoreTests=${ignoreTests})`,
   );
 
   if (ignoreTests) {
@@ -21,7 +21,7 @@ export async function resolveBranchStatus(
 
   const status = await platform.getBranchStatus(
     branchName,
-    internalChecksAsSuccess
+    internalChecksAsSuccess,
   );
   logger.debug(`Branch status ${status}`);
 
@@ -33,11 +33,11 @@ async function setStatusCheck(
   context: string,
   description: string,
   state: BranchStatus,
-  url?: string
+  url?: string,
 ): Promise<void> {
   const existingState = await platform.getBranchStatusCheck(
     branchName,
-    context
+    context,
   );
   if (existingState === state) {
     logger.debug(`Status check ${context} is already up-to-date`);
@@ -72,7 +72,7 @@ export async function setStability(config: StabilityConfig): Promise<void> {
     context,
     description,
     config.stabilityStatus,
-    config.productLinks?.documentation
+    config.productLinks?.documentation,
   );
 }
 
@@ -100,6 +100,6 @@ export async function setConfidence(config: ConfidenceConfig): Promise<void> {
     context,
     description,
     config.confidenceStatus,
-    config.productLinks?.documentation
+    config.productLinks?.documentation,
   );
 }
diff --git a/lib/workers/repository/update/pr/automerge.ts b/lib/workers/repository/update/pr/automerge.ts
index 2e9465ea9725e3a56bc1958f88e38629912dffd6..b26089677b5c4d87b1eb0d3966461c6f31b4fc71 100644
--- a/lib/workers/repository/update/pr/automerge.ts
+++ b/lib/workers/repository/update/pr/automerge.ts
@@ -28,7 +28,7 @@ export interface AutomergePrResult {
 
 export async function checkAutoMerge(
   pr: Pr,
-  config: BranchConfig
+  config: BranchConfig,
 ): Promise<AutomergePrResult> {
   logger.trace({ config }, 'checkAutoMerge');
   const {
@@ -60,7 +60,7 @@ export async function checkAutoMerge(
   }
   if (!ignoreTests && pr.cannotMergeReason) {
     logger.debug(
-      `Platform reported that PR is not ready for merge. Reason: [${pr.cannotMergeReason}]`
+      `Platform reported that PR is not ready for merge. Reason: [${pr.cannotMergeReason}]`,
     );
     return {
       automerged: false,
@@ -70,11 +70,11 @@ export async function checkAutoMerge(
   const branchStatus = await resolveBranchStatus(
     config.branchName,
     !!config.internalChecksAsSuccess,
-    config.ignoreTests
+    config.ignoreTests,
   );
   if (branchStatus !== 'green') {
     logger.debug(
-      `PR is not ready for merge (branch status is ${branchStatus})`
+      `PR is not ready for merge (branch status is ${branchStatus})`,
     );
     return {
       automerged: false,
@@ -95,7 +95,7 @@ export async function checkAutoMerge(
     // istanbul ignore if
     if (GlobalConfig.get('dryRun')) {
       logger.info(
-        `DRY-RUN: Would add PR automerge comment to PR #${pr.number}`
+        `DRY-RUN: Would add PR automerge comment to PR #${pr.number}`,
       );
       return {
         automerged: false,
@@ -123,7 +123,7 @@ export async function checkAutoMerge(
     logger.info(
       `DRY-RUN: Would merge PR #${
         pr.number
-      } with strategy "${automergeStrategy!}"`
+      } with strategy "${automergeStrategy!}"`,
     );
     return {
       automerged: false,
diff --git a/lib/workers/repository/update/pr/body/config-description.spec.ts b/lib/workers/repository/update/pr/body/config-description.spec.ts
index 4f86453848dd44119f72120554421f8a8da7bbde..25e835bde61f042feee36484d9809daa7fabc634 100644
--- a/lib/workers/repository/update/pr/body/config-description.spec.ts
+++ b/lib/workers/repository/update/pr/body/config-description.spec.ts
@@ -17,7 +17,7 @@ describe('workers/repository/update/pr/body/config-description', () => {
       });
 
       expect(res).toContain(
-        `**Rebasing**: Never, or you tick the rebase/retry checkbox.`
+        `**Rebasing**: Never, or you tick the rebase/retry checkbox.`,
       );
     });
 
@@ -28,7 +28,7 @@ describe('workers/repository/update/pr/body/config-description', () => {
       });
 
       expect(res).toContain(
-        `**Rebasing**: Never, or you tick the rebase/retry checkbox.`
+        `**Rebasing**: Never, or you tick the rebase/retry checkbox.`,
       );
     });
 
@@ -103,7 +103,7 @@ describe('workers/repository/update/pr/body/config-description', () => {
         automergedPreviously: true,
       });
       expect(res).toContain(
-        `**Automerge**: Disabled because a matching PR was automerged previously.`
+        `**Automerge**: Disabled because a matching PR was automerged previously.`,
       );
     });
   });
diff --git a/lib/workers/repository/update/pr/body/config-description.ts b/lib/workers/repository/update/pr/body/config-description.ts
index dbbc3c0033fd45c55586315993aacc1211691c66..9249247ad2569658d8bc2f2d99eddb130bf34d8e 100644
--- a/lib/workers/repository/update/pr/body/config-description.ts
+++ b/lib/workers/repository/update/pr/body/config-description.ts
@@ -33,13 +33,13 @@ export function getPrConfigDescription(config: BranchConfig): string {
   prBody += `, or you tick the rebase/retry checkbox.\n\n`;
   if (config.recreateClosed) {
     prBody += emojify(
-      `:ghost: **Immortal**: This PR will be recreated if closed unmerged. Get [config help](${config.productLinks?.help}) if that's undesired.\n\n`
+      `:ghost: **Immortal**: This PR will be recreated if closed unmerged. Get [config help](${config.productLinks?.help}) if that's undesired.\n\n`,
     );
   } else {
     prBody += emojify(
       `:no_bell: **Ignore**: Close this PR and you won't be reminded about ${
         config.upgrades.length === 1 ? 'this update' : 'these updates'
-      } again.\n\n`
+      } again.\n\n`,
     );
   }
   return prBody;
@@ -47,7 +47,7 @@ export function getPrConfigDescription(config: BranchConfig): string {
 
 function scheduleToString(
   schedule: string[] | undefined,
-  timezone: string | undefined
+  timezone: string | undefined,
 ): string {
   let scheduleString = '';
   if (schedule && schedule[0] !== 'at any time') {
diff --git a/lib/workers/repository/update/pr/body/controls.spec.ts b/lib/workers/repository/update/pr/body/controls.spec.ts
index 1c0abb9d70e84284608f81b78480f98abfe921c7..b53ec1c9d82945ecfd6efc3692bf9512e9b5bfe8 100644
--- a/lib/workers/repository/update/pr/body/controls.spec.ts
+++ b/lib/workers/repository/update/pr/body/controls.spec.ts
@@ -3,7 +3,7 @@ import { getControls } from './controls';
 describe('workers/repository/update/pr/body/controls', () => {
   it('calls getControls', () => {
     expect(getControls()).toBe(
-      `\n\n---\n\n - [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check this box\n\n`
+      `\n\n---\n\n - [ ] <!-- rebase-check -->If you want to rebase/retry this PR, check this box\n\n`,
     );
   });
 });
diff --git a/lib/workers/repository/update/pr/body/footer.spec.ts b/lib/workers/repository/update/pr/body/footer.spec.ts
index 3b8e8de4227f85af7ac2e02b24bc386b4a971466..178e2b2dfee1d76272760028c133fe1ea5efead7 100644
--- a/lib/workers/repository/update/pr/body/footer.spec.ts
+++ b/lib/workers/repository/update/pr/body/footer.spec.ts
@@ -13,7 +13,7 @@ describe('workers/repository/update/pr/body/footer', () => {
         baseBranch: 'base',
         branchName: 'branch',
         upgrades: [],
-      })
+      }),
     ).toBe('');
   });
 
@@ -26,7 +26,7 @@ describe('workers/repository/update/pr/body/footer', () => {
         branchName: 'branch',
         upgrades: [],
         prFooter: 'FOOTER',
-      })
+      }),
     ).toMatchInlineSnapshot(`
       "
       ---
diff --git a/lib/workers/repository/update/pr/body/header.spec.ts b/lib/workers/repository/update/pr/body/header.spec.ts
index 4618598e958eb2fd65a9fe6635c705e077ae0916..b4ed57f5451ab1caa954338faf0f044f5d2494aa 100644
--- a/lib/workers/repository/update/pr/body/header.spec.ts
+++ b/lib/workers/repository/update/pr/body/header.spec.ts
@@ -13,7 +13,7 @@ describe('workers/repository/update/pr/body/header', () => {
         baseBranch: 'base',
         branchName: 'branch',
         upgrades: [],
-      })
+      }),
     ).toBe('');
   });
 
@@ -26,7 +26,7 @@ describe('workers/repository/update/pr/body/header', () => {
         baseBranch: 'base',
         upgrades: [],
         prHeader: 'HEADER',
-      })
+      }),
     ).toMatchInlineSnapshot(`
       "HEADER
 
diff --git a/lib/workers/repository/update/pr/body/index.spec.ts b/lib/workers/repository/update/pr/body/index.spec.ts
index a60caf1a6d226f74562475379b8e015f0bdf9a97..f01e31c357869cd2f596eb452e0fb772d561ffec 100644
--- a/lib/workers/repository/update/pr/body/index.spec.ts
+++ b/lib/workers/repository/update/pr/body/index.spec.ts
@@ -40,7 +40,7 @@ describe('workers/repository/update/pr/body/index', () => {
     beforeEach(() => {
       changelogs.getChangelogs.mockReturnValueOnce('getChangelogs');
       configDescription.getPrConfigDescription.mockReturnValueOnce(
-        'getPrConfigDescription'
+        'getPrConfigDescription',
       );
       controls.getControls.mockReturnValueOnce('getControls');
       footer.getPrFooter.mockReturnValueOnce('getPrFooter');
@@ -65,7 +65,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
       expect(res).toBeEmptyString();
     });
@@ -96,7 +96,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
 
       expect(upgrade).toMatchObject({
@@ -135,7 +135,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
 
       expect(upgrade).toMatchObject({
@@ -164,7 +164,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
       expect(res).toContain('PR BODY');
       expect(res).toContain(`<!--renovate-debug`);
@@ -189,7 +189,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
       expect(res).toContain(['aaa', '**Rebasing**: BAR', 'bbb'].join('\n'));
     });
@@ -212,7 +212,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
 
       const match = prDebugDataRe.exec(res);
@@ -262,7 +262,7 @@ describe('workers/repository/update/pr/body/index', () => {
             targetBranch: 'base',
           },
         },
-        {}
+        {},
       );
       const expected =
         '---\n\n### âš  Dependency Lookup Warnings âš ' +
diff --git a/lib/workers/repository/update/pr/body/index.ts b/lib/workers/repository/update/pr/body/index.ts
index 8e07a77a0afb1dc372f551823fa4b6fa22478399..c05e880f4584bb325007e074117426d0f257f226 100644
--- a/lib/workers/repository/update/pr/body/index.ts
+++ b/lib/workers/repository/update/pr/body/index.ts
@@ -69,7 +69,7 @@ const rebasingRegex = regEx(/\*\*Rebasing\*\*: .*/);
 export function getPrBody(
   branchConfig: BranchConfig,
   prBodyConfig: PrBodyConfig,
-  config: RenovateConfig
+  config: RenovateConfig,
 ): string {
   massageUpdateMetadata(branchConfig);
   let warnings = '';
@@ -78,7 +78,7 @@ export function getPrBody(
     warnings += getDepWarningsPR(
       branchConfig.packageFiles,
       config,
-      branchConfig.dependencyDashboard
+      branchConfig.dependencyDashboard,
     );
   }
   const content = {
@@ -105,7 +105,7 @@ export function getPrBody(
     if (prBodyConfig?.rebasingNotice) {
       prBody = prBody.replace(
         rebasingRegex,
-        `**Rebasing**: ${prBodyConfig.rebasingNotice}`
+        `**Rebasing**: ${prBodyConfig.rebasingNotice}`,
       );
     }
   }
diff --git a/lib/workers/repository/update/pr/body/notes.spec.ts b/lib/workers/repository/update/pr/body/notes.spec.ts
index 730f9788a204330274ed7846be6960db31e030a9..3345fc0facde196d8e3bd54fb67c145a714964cb 100644
--- a/lib/workers/repository/update/pr/body/notes.spec.ts
+++ b/lib/workers/repository/update/pr/body/notes.spec.ts
@@ -54,13 +54,13 @@ describe('workers/repository/update/pr/body/notes', () => {
       isPin: true,
     });
     expect(res).toContain(
-      'If you wish to disable git hash updates, add `":disableDigestUpdates"` to the extends array in your config.'
+      'If you wish to disable git hash updates, add `":disableDigestUpdates"` to the extends array in your config.',
     );
     expect(res).toContain(
-      'This Pull Request updates lock files to use the latest dependency versions.'
+      'This Pull Request updates lock files to use the latest dependency versions.',
     );
     expect(res).toContain(
-      "Add the preset `:preserveSemverRanges` to your config if you don't want to pin your dependencies."
+      "Add the preset `:preserveSemverRanges` to your config if you don't want to pin your dependencies.",
     );
   });
 });
diff --git a/lib/workers/repository/update/pr/body/notes.ts b/lib/workers/repository/update/pr/body/notes.ts
index 5b82ca21b56340329072536274b0ce67cefe6c16..3aac4af21d4f23dddee344bc48f83e66addef26c 100644
--- a/lib/workers/repository/update/pr/body/notes.ts
+++ b/lib/workers/repository/update/pr/body/notes.ts
@@ -29,19 +29,19 @@ export function getPrExtraNotes(config: BranchConfig): string {
   let res = '';
   if (config.upgrades.some((upgrade) => upgrade.gitRef)) {
     res += emojify(
-      ':abcd: If you wish to disable git hash updates, add `":disableDigestUpdates"` to the extends array in your config.\n\n'
+      ':abcd: If you wish to disable git hash updates, add `":disableDigestUpdates"` to the extends array in your config.\n\n',
     );
   }
 
   if (config.updateType === 'lockFileMaintenance') {
     res += emojify(
-      ':wrench: This Pull Request updates lock files to use the latest dependency versions.\n\n'
+      ':wrench: This Pull Request updates lock files to use the latest dependency versions.\n\n',
     );
   }
 
   if (config.isPin) {
     res += emojify(
-      `Add the preset \`:preserveSemverRanges\` to your config if you don't want to pin your dependencies.\n\n`
+      `Add the preset \`:preserveSemverRanges\` to your config if you don't want to pin your dependencies.\n\n`,
     );
   }
 
diff --git a/lib/workers/repository/update/pr/body/updates-table.spec.ts b/lib/workers/repository/update/pr/body/updates-table.spec.ts
index eb65cf39536b922ee6b0dc1ff71af6d3eb689d0e..db9823035d90b04bc99ec0c771056dc30432a9dd 100644
--- a/lib/workers/repository/update/pr/body/updates-table.spec.ts
+++ b/lib/workers/repository/update/pr/body/updates-table.spec.ts
@@ -119,7 +119,7 @@ describe('workers/repository/update/pr/body/updates-table', () => {
         '| [koa](https://github.com/koajs/koa) | dependencies | pin | [`^1.7.0` -> `1.7.0`](https://renovatebot.com/diffs/npm/koa/1.7.0/1.7.0) |\n' +
         '| [mocha](https://mochajs.org/) ([source](https://github.com/mochajs/mocha)) | devDependencies | pin | [`^6.2.3` -> `6.2.3`](https://renovatebot.com/diffs/npm/mocha/6.2.3/6.2.3) |\n' +
         '\n' +
-        '\n'
+        '\n',
     );
   });
 });
diff --git a/lib/workers/repository/update/pr/body/updates-table.ts b/lib/workers/repository/update/pr/body/updates-table.ts
index 568e3c2f28a641d3cb036d745e87d49e091f145c..a70828289eab7df002a50f08a7a245cfdadf7d0e 100644
--- a/lib/workers/repository/update/pr/body/updates-table.ts
+++ b/lib/workers/repository/update/pr/body/updates-table.ts
@@ -10,7 +10,7 @@ type TableDefinition = {
 
 function getRowDefinition(
   prBodyColumns: string[],
-  upgrade: BranchUpgradeConfig
+  upgrade: BranchUpgradeConfig,
 ): TableDefinition[] {
   const res: TableDefinition[] = [];
   if (upgrade.prBodyDefinitions) {
@@ -24,7 +24,7 @@ function getRowDefinition(
 
 function getNonEmptyColumns(
   prBodyColumns: string[],
-  rows: Record<string, string>[]
+  rows: Record<string, string>[],
 ): string[] {
   const res: string[] = [];
   for (const header of prBodyColumns) {
diff --git a/lib/workers/repository/update/pr/changelog/bitbucket/index.spec.ts b/lib/workers/repository/update/pr/changelog/bitbucket/index.spec.ts
index a0ba419de79d14f0c4b81388abb17bf51d842d18..481281f57d6545eac3b2211424f95f279a0d9c43 100644
--- a/lib/workers/repository/update/pr/changelog/bitbucket/index.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/bitbucket/index.spec.ts
@@ -89,7 +89,7 @@ describe('workers/repository/update/pr/changelog/bitbucket/index', () => {
   it('handles release list', async () => {
     const res = await getReleaseList(
       bitbucketProject,
-      partial<ChangeLogRelease>({})
+      partial<ChangeLogRelease>({}),
     );
     expect(res).toBeEmptyArray();
   });
@@ -103,9 +103,9 @@ describe('workers/repository/update/pr/changelog/bitbucket/index', () => {
     it('returns get ref comparison url', () => {
       const source = new BitbucketChangeLogSource();
       expect(
-        source.getCompareURL(baseUrl, 'some-org/some-repo', 'abc', 'xzy')
+        source.getCompareURL(baseUrl, 'some-org/some-repo', 'abc', 'xzy'),
       ).toBe(
-        'https://bitbucket.org/some-org/some-repo/branches/compare/xzy%0Dabc'
+        'https://bitbucket.org/some-org/some-repo/branches/compare/xzy%0Dabc',
       );
     });
   });
diff --git a/lib/workers/repository/update/pr/changelog/bitbucket/index.ts b/lib/workers/repository/update/pr/changelog/bitbucket/index.ts
index 594f32befa65f092cbad75a455cfea9c4849983c..5093e2c03d5d080a809a759988e1ecb5d3a515b8 100644
--- a/lib/workers/repository/update/pr/changelog/bitbucket/index.ts
+++ b/lib/workers/repository/update/pr/changelog/bitbucket/index.ts
@@ -17,7 +17,7 @@ const bitbucketHttp = new BitbucketHttp(id);
 export async function getReleaseNotesMd(
   repository: string,
   apiBaseUrl: string,
-  _sourceDirectory?: string
+  _sourceDirectory?: string,
 ): Promise<ChangeLogFile | null> {
   logger.trace('bitbucket.getReleaseNotesMd()');
 
@@ -25,7 +25,7 @@ export async function getReleaseNotesMd(
     apiBaseUrl,
     `2.0/repositories`,
     repository,
-    'src'
+    'src',
   );
 
   const rootFiles = (
@@ -34,7 +34,7 @@ export async function getReleaseNotesMd(
       {
         paginate: true,
       },
-      PagedSourceResultsSchema
+      PagedSourceResultsSchema,
     )
   ).body.values;
 
@@ -50,7 +50,7 @@ export async function getReleaseNotesMd(
 
   if (files.length !== 0) {
     logger.debug(
-      `Multiple candidates for changelog file, using ${changelogFile.path}`
+      `Multiple candidates for changelog file, using ${changelogFile.path}`,
     );
   }
 
@@ -58,8 +58,8 @@ export async function getReleaseNotesMd(
     joinUrlParts(
       repositorySourceURl,
       changelogFile.commit.hash,
-      changelogFile.path
-    )
+      changelogFile.path,
+    ),
   );
 
   const changelogMd = `${fileRes.body}\n#\n##`;
@@ -68,11 +68,11 @@ export async function getReleaseNotesMd(
 
 export function getReleaseList(
   _project: ChangeLogProject,
-  _release: ChangeLogRelease
+  _release: ChangeLogRelease,
 ): ChangeLogNotes[] {
   logger.trace('bitbucket.getReleaseList()');
   logger.info(
-    'Unsupported Bitbucket Cloud feature.  Skipping release fetching.'
+    'Unsupported Bitbucket Cloud feature.  Skipping release fetching.',
   );
   return [];
 }
diff --git a/lib/workers/repository/update/pr/changelog/bitbucket/source.ts b/lib/workers/repository/update/pr/changelog/bitbucket/source.ts
index bfbc12ea6dd85d6eb3ebd28b93d33de248ef959b..295b95ae4a506f1e69c88bbb27d2dc0d6de347d6 100644
--- a/lib/workers/repository/update/pr/changelog/bitbucket/source.ts
+++ b/lib/workers/repository/update/pr/changelog/bitbucket/source.ts
@@ -14,7 +14,7 @@ export class BitbucketChangeLogSource extends ChangeLogSource {
     baseUrl: string,
     repository: string,
     prevHead: string,
-    nextHead: string
+    nextHead: string,
   ): string {
     return `${baseUrl}${repository}/branches/compare/${nextHead}%0D${prevHead}`;
   }
diff --git a/lib/workers/repository/update/pr/changelog/gitea/index.spec.ts b/lib/workers/repository/update/pr/changelog/gitea/index.spec.ts
index a9316d67d937b8edfa2c093bb9a565d26b18a7f6..77e82d4d325e6f87afa9a13c56facbfa5c4df1cd 100644
--- a/lib/workers/repository/update/pr/changelog/gitea/index.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/gitea/index.spec.ts
@@ -55,7 +55,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           currentVersion: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -65,7 +65,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
           ...upgrade,
           currentVersion: '1.0.0',
           newVersion: '1.0.0',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -74,7 +74,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://gitea.com/help',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -82,7 +82,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchObject({
         hasReleaseNotes: false,
         project: {
@@ -187,7 +187,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchObject({
         hasReleaseNotes: true,
         project: {
@@ -232,7 +232,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchObject({
         hasReleaseNotes: false,
         project: {
@@ -267,7 +267,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchObject({
         hasReleaseNotes: false,
         project: {
@@ -293,7 +293,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -302,7 +302,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'http://example.com',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -311,7 +311,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -320,7 +320,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [{ version: '0.9.0' }],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -335,7 +335,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
           ...upgrade,
           sourceUrl: 'https://gitea-enterprise.example.com/meno/dropzone/',
           endpoint: 'https://gitea-enterprise.example.com/',
-        })
+        }),
       ).toMatchObject({
         hasReleaseNotes: false,
         project: {
@@ -372,7 +372,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
           platform: 'gitea',
           sourceUrl: 'https://git.test.com/meno/dropzone/',
           endpoint: 'https://git.test.com/api/v1/',
-        })
+        }),
       ).toMatchObject({
         hasReleaseNotes: false,
         project: {
@@ -454,7 +454,7 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
           { name: 'v5.5.0' },
         ]);
       expect(
-        await changelogSource.getAllTags('https://git.test.com/', 'some/repo')
+        await changelogSource.getAllTags('https://git.test.com/', 'some/repo'),
       ).toEqual([]);
     });
   });
@@ -483,8 +483,8 @@ describe('workers/repository/update/pr/changelog/gitea/index', () => {
         await getReleaseNotesMd(
           'some/repo',
           'https://git.test.com/api/v1/',
-          'charts/some'
-        )
+          'charts/some',
+        ),
       ).toEqual({
         changelogFile: 'charts/some/CHANGELOG.md',
         changelogMd: 'some content\n#\n##',
diff --git a/lib/workers/repository/update/pr/changelog/gitea/index.ts b/lib/workers/repository/update/pr/changelog/gitea/index.ts
index fe99e2dfc025e138d7c7ce4c69f4d745a09f05f5..ba1b345da8ec745c1ab25110c1b0efe864b71e79 100644
--- a/lib/workers/repository/update/pr/changelog/gitea/index.ts
+++ b/lib/workers/repository/update/pr/changelog/gitea/index.ts
@@ -21,7 +21,7 @@ const http = new GiteaHttp(id);
 export async function getReleaseNotesMd(
   repository: string,
   apiBaseUrl: string,
-  sourceDirectory?: string
+  sourceDirectory?: string,
 ): Promise<ChangeLogFile | null> {
   logger.trace('gitea.getReleaseNotesMd()');
   const apiPrefix = `${apiBaseUrl}repos/${repository}/contents`;
@@ -33,7 +33,7 @@ export async function getReleaseNotesMd(
       {
         paginate: false, // no pagination yet
       },
-      ContentsListResponseSchema
+      ContentsListResponseSchema,
     )
   ).body;
   const allFiles = tree.filter((f) => f.type === 'file');
@@ -50,13 +50,13 @@ export async function getReleaseNotesMd(
   /* istanbul ignore if */
   if (files.length !== 0) {
     logger.debug(
-      `Multiple candidates for changelog file, using ${changelogFile}`
+      `Multiple candidates for changelog file, using ${changelogFile}`,
     );
   }
 
   const fileRes = await http.getJson(
     `${apiPrefix}/${changelogFile}`,
-    ContentsResponseSchema
+    ContentsResponseSchema,
   );
   // istanbul ignore if: should never happen
   if (!fileRes.body.content) {
@@ -70,7 +70,7 @@ export async function getReleaseNotesMd(
 
 export async function getReleaseList(
   project: ChangeLogProject,
-  _release: ChangeLogRelease
+  _release: ChangeLogRelease,
 ): Promise<ChangeLogNotes[]> {
   logger.trace('gitea.getReleaseNotesMd()');
   const apiUrl = `${project.apiBaseUrl}repos/${project.repository}/releases`;
@@ -80,7 +80,7 @@ export async function getReleaseList(
     {
       paginate: true,
     },
-    ReleasesSchema
+    ReleasesSchema,
   );
   return res.body.map((release) => ({
     url: `${project.baseUrl}${project.repository}/releases/tag/${release.tag_name}`,
diff --git a/lib/workers/repository/update/pr/changelog/gitea/source.ts b/lib/workers/repository/update/pr/changelog/gitea/source.ts
index fd78892d78232728404c12502301093d1bd4b319..85b595ae992c1ddd196fd0d9b2582dd4a4dfbe61 100644
--- a/lib/workers/repository/update/pr/changelog/gitea/source.ts
+++ b/lib/workers/repository/update/pr/changelog/gitea/source.ts
@@ -14,7 +14,7 @@ export class GiteaChangeLogSource extends ChangeLogSource {
     baseUrl: string,
     repository: string,
     prevHead: string,
-    nextHead: string
+    nextHead: string,
   ): string {
     return `${baseUrl}${repository}/compare/${prevHead}...${nextHead}`;
   }
diff --git a/lib/workers/repository/update/pr/changelog/github/index.spec.ts b/lib/workers/repository/update/pr/changelog/github/index.spec.ts
index 57d01aa95302f1461ad7c1c8724ce9bc3605cdda..71c496728ede888cb40681e0d93012b520db336a 100644
--- a/lib/workers/repository/update/pr/changelog/github/index.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/github/index.spec.ts
@@ -54,7 +54,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           currentVersion: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -63,7 +63,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com/DefinitelyTyped/DefinitelyTyped',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -73,7 +73,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
           ...upgrade,
           currentVersion: '1.0.0',
           newVersion: '1.0.0',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -82,7 +82,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com/about',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -90,7 +90,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -115,7 +115,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -141,7 +141,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           packageName: '@renovate/no',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -167,7 +167,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           depType: 'engines',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -193,7 +193,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -202,7 +202,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'http://example.com',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -212,7 +212,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com',
-        })
+        }),
       ).toEqual({ error: 'MissingGithubToken' });
     });
 
@@ -222,7 +222,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -231,7 +231,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -240,7 +240,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [{ version: '0.9.0' }],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -254,7 +254,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           endpoint: 'https://github-enterprise.example.com/',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -315,7 +315,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
           ...upgrade,
           sourceUrl: 'https://github-enterprise.example.com/chalk/chalk',
           endpoint: 'https://github-enterprise.example.com/',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -379,7 +379,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
           { version: '1.0.2' },
           { version: 'correctPrefix/target-1.0.2' },
           { version: 'wrongPrefix/target@1.0.2' },
-        ])
+        ]),
       );
 
       const upgradeData = partial<BranchUpgradeConfig>({
@@ -400,7 +400,7 @@ describe('workers/repository/update/pr/changelog/github/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgradeData,
-        })
+        }),
       ).toMatchObject({
         project: {
           apiBaseUrl: 'https://api.github.com/',
diff --git a/lib/workers/repository/update/pr/changelog/github/index.ts b/lib/workers/repository/update/pr/changelog/github/index.ts
index 4bc1432afefb705f7ad5bc73e017cccf9823f3c2..a21a097f59d89f62f5e562a73e8c5855840859e0 100644
--- a/lib/workers/repository/update/pr/changelog/github/index.ts
+++ b/lib/workers/repository/update/pr/changelog/github/index.ts
@@ -22,7 +22,7 @@ const http = new GithubHttp(id);
 export async function getReleaseNotesMd(
   repository: string,
   apiBaseUrl: string,
-  sourceDirectory: string
+  sourceDirectory: string,
 ): Promise<ChangeLogFile | null> {
   logger.trace('github.getReleaseNotesMd()');
   const apiPrefix = `${ensureTrailingSlash(apiBaseUrl)}repos/${repository}`;
@@ -34,7 +34,7 @@ export async function getReleaseNotesMd(
   const res = await http.getJson<GithubGitTree>(
     `${apiPrefix}/git/trees/${defaultBranch}${
       sourceDirectory ? '?recursive=1' : ''
-    }`
+    }`,
   );
 
   // istanbul ignore if
@@ -49,8 +49,8 @@ export async function getReleaseNotesMd(
       .filter((f) => f.path.startsWith(sourceDirectory))
       .filter((f) =>
         changelogFilenameRegex.test(
-          f.path.replace(ensureTrailingSlash(sourceDirectory), '')
-        )
+          f.path.replace(ensureTrailingSlash(sourceDirectory), ''),
+        ),
       );
   }
   if (!files.length) {
@@ -64,13 +64,13 @@ export async function getReleaseNotesMd(
   /* istanbul ignore if */
   if (files.length !== 0) {
     logger.debug(
-      `Multiple candidates for changelog file, using ${changelogFile}`
+      `Multiple candidates for changelog file, using ${changelogFile}`,
     );
   }
 
   // https://docs.github.com/en/rest/reference/git#get-a-blob
   const fileRes = await http.getJson<GithubGitBlob>(
-    `${apiPrefix}/git/blobs/${sha}`
+    `${apiPrefix}/git/blobs/${sha}`,
   );
 
   const changelogMd = fromBase64(fileRes.body.content) + '\n#\n##';
@@ -79,7 +79,7 @@ export async function getReleaseNotesMd(
 
 export async function getReleaseList(
   project: ChangeLogProject,
-  _release: ChangeLogRelease
+  _release: ChangeLogRelease,
 ): Promise<ChangeLogNotes[]> {
   logger.trace('github.getReleaseList()');
   const apiBaseUrl = project.apiBaseUrl;
@@ -88,14 +88,14 @@ export async function getReleaseList(
     apiBaseUrl,
     'repos',
     repository,
-    'releases'
+    'releases',
   );
   const releases = await queryReleases(
     {
       registryUrl: apiBaseUrl,
       packageName: repository,
     },
-    http
+    http,
   );
 
   const result = releases.map(
@@ -106,7 +106,7 @@ export async function getReleaseList(
       tag,
       name,
       body,
-    })
+    }),
   );
   return result;
 }
diff --git a/lib/workers/repository/update/pr/changelog/github/source.ts b/lib/workers/repository/update/pr/changelog/github/source.ts
index 07eb52d2e557ef31578386d5ce7a811418ea367a..e1efda0f7350ea6d7ac82e09c49602e5bc11c1f5 100644
--- a/lib/workers/repository/update/pr/changelog/github/source.ts
+++ b/lib/workers/repository/update/pr/changelog/github/source.ts
@@ -21,7 +21,7 @@ export class GitHubChangeLogSource extends ChangeLogSource {
     baseUrl: string,
     repository: string,
     prevHead: string,
-    nextHead: string
+    nextHead: string,
   ): string {
     return `${baseUrl}${repository}/compare/${prevHead}...${nextHead}`;
   }
@@ -60,19 +60,19 @@ export class GitHubChangeLogSource extends ChangeLogSource {
         if (!GlobalConfig.get('githubTokenWarn')) {
           logger.debug(
             { manager, packageName, sourceUrl },
-            'GitHub token warning has been suppressed. Skipping release notes retrieval'
+            'GitHub token warning has been suppressed. Skipping release notes retrieval',
           );
           return { isValid: false };
         }
         logger.warn(
           { manager, packageName, sourceUrl },
-          'No github.com token has been configured. Skipping release notes retrieval'
+          'No github.com token has been configured. Skipping release notes retrieval',
         );
         return { isValid: false, error: 'MissingGithubToken' };
       }
       logger.debug(
         { manager, packageName, sourceUrl },
-        'Repository URL does not match any known github hosts - skipping changelog retrieval'
+        'Repository URL does not match any known github hosts - skipping changelog retrieval',
       );
       return { isValid: false };
     }
diff --git a/lib/workers/repository/update/pr/changelog/gitlab/index.spec.ts b/lib/workers/repository/update/pr/changelog/gitlab/index.spec.ts
index bce831dde1d34212770e141c65f546a27674ddc9..6691b0241b0b9183f88994ac1c93129b8dc40dd4 100644
--- a/lib/workers/repository/update/pr/changelog/gitlab/index.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/gitlab/index.spec.ts
@@ -53,7 +53,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           currentVersion: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -63,7 +63,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
           ...upgrade,
           currentVersion: '1.0.0',
           newVersion: '1.0.0',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -72,7 +72,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://gitlab.com/help',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -80,7 +80,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: false,
         project: {
@@ -122,7 +122,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -157,7 +157,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: false,
         project: {
@@ -192,7 +192,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: false,
         project: {
@@ -218,7 +218,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -227,7 +227,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'http://example.com',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -236,7 +236,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -245,7 +245,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [{ version: '0.9.0' }],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -261,7 +261,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
           ...upgrade,
           sourceUrl: 'https://gitlab-enterprise.example.com/meno/dropzone/',
           endpoint: 'https://gitlab-enterprise.example.com/',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: false,
         project: {
@@ -296,7 +296,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
           platform: 'gitlab',
           sourceUrl: 'https://git.test.com/meno/dropzone/',
           endpoint: 'https://git.test.com/api/v4/',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: false,
         project: {
@@ -373,7 +373,7 @@ describe('workers/repository/update/pr/changelog/gitlab/index', () => {
           { name: 'v5.5.0' },
         ]);
       expect(
-        await changelogSource.getAllTags('https://git.test.com/', 'some/repo')
+        await changelogSource.getAllTags('https://git.test.com/', 'some/repo'),
       ).toEqual(['v5.2.0', 'v5.4.0', 'v5.5.0']);
     });
   });
diff --git a/lib/workers/repository/update/pr/changelog/gitlab/index.ts b/lib/workers/repository/update/pr/changelog/gitlab/index.ts
index 889d7eb0d7cd684aafbfa2ee0b98989c1f340f95..f507fc91f239e66468e0182ad5ede512d8a946a7 100644
--- a/lib/workers/repository/update/pr/changelog/gitlab/index.ts
+++ b/lib/workers/repository/update/pr/changelog/gitlab/index.ts
@@ -16,7 +16,7 @@ const http = new GitlabHttp(id);
 export async function getReleaseNotesMd(
   repository: string,
   apiBaseUrl: string,
-  sourceDirectory?: string
+  sourceDirectory?: string,
 ): Promise<ChangeLogFile | null> {
   logger.trace('gitlab.getReleaseNotesMd()');
   const urlEncodedRepo = encodeURIComponent(repository);
@@ -30,7 +30,7 @@ export async function getReleaseNotesMd(
       }`,
       {
         paginate: true,
-      }
+      },
     )
   ).body;
   const allFiles = tree.filter((f) => f.type === 'blob');
@@ -46,7 +46,7 @@ export async function getReleaseNotesMd(
   /* istanbul ignore if */
   if (files.length !== 0) {
     logger.debug(
-      `Multiple candidates for changelog file, using ${changelogFile}`
+      `Multiple candidates for changelog file, using ${changelogFile}`,
     );
   }
 
@@ -58,7 +58,7 @@ export async function getReleaseNotesMd(
 
 export async function getReleaseList(
   project: ChangeLogProject,
-  _release: ChangeLogRelease
+  _release: ChangeLogRelease,
 ): Promise<ChangeLogNotes[]> {
   logger.trace('gitlab.getReleaseNotesMd()');
   const apiBaseUrl = project.apiBaseUrl;
diff --git a/lib/workers/repository/update/pr/changelog/gitlab/source.ts b/lib/workers/repository/update/pr/changelog/gitlab/source.ts
index 1127f7bd354206d9016eeed80f5f6aa4523a849c..adcef9cd07fb3a7028ecdd210ca5031ca0823861 100644
--- a/lib/workers/repository/update/pr/changelog/gitlab/source.ts
+++ b/lib/workers/repository/update/pr/changelog/gitlab/source.ts
@@ -14,7 +14,7 @@ export class GitLabChangeLogSource extends ChangeLogSource {
     baseUrl: string,
     repository: string,
     prevHead: string,
-    nextHead: string
+    nextHead: string,
   ): string {
     return `${baseUrl}${repository}/compare/${prevHead}...${nextHead}`;
   }
diff --git a/lib/workers/repository/update/pr/changelog/index.spec.ts b/lib/workers/repository/update/pr/changelog/index.spec.ts
index f54d0761af0a524315f5b1bc08df1db2360a675a..5dcfa186fcc60c1b8e94ad8ff07f2781dacd5550 100644
--- a/lib/workers/repository/update/pr/changelog/index.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/index.spec.ts
@@ -53,7 +53,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           currentVersion: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -62,7 +62,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://dev.azure.com/unknown-repo',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -71,7 +71,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com/DefinitelyTyped/DefinitelyTyped',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -81,7 +81,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
           ...upgrade,
           currentVersion: '1.0.0',
           newVersion: '1.0.0',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -90,7 +90,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com/about',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -110,7 +110,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -145,7 +145,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
       expect(
         await getChangeLogJSON({
           ...upgrade,
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -169,7 +169,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
     it('filters unnecessary warns', async () => {
       githubTagsMock.mockRejectedValue(new Error('Unknown Github Repo'));
       githubReleasesMock.mockRejectedValueOnce(
-        new Error('Unknown Github Repo')
+        new Error('Unknown Github Repo'),
       );
       httpMock.scope(githubApiHost).get(/.*/).reply(200, []).persist();
       const res = await getChangeLogJSON({
@@ -204,7 +204,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           depType: 'engines',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -230,7 +230,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: undefined,
-        })
+        }),
       ).toBeNull();
     });
 
@@ -239,7 +239,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'http://example.com',
-        })
+        }),
       ).toBeNull();
     });
 
@@ -249,7 +249,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github.com',
-        })
+        }),
       ).toEqual({ error: 'MissingGithubToken' });
     });
 
@@ -258,7 +258,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -267,7 +267,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           releases: [{ version: '0.9.0' }],
-        })
+        }),
       ).toBeNull();
     });
 
@@ -292,7 +292,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           endpoint: 'https://github-enterprise.example.com/',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -332,7 +332,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
           ...upgrade,
           sourceUrl: 'https://github-enterprise.example.com/chalk/chalk',
           endpoint: 'https://github-enterprise.example.com/',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
@@ -370,7 +370,7 @@ describe('workers/repository/update/pr/changelog/index', () => {
         await getChangeLogJSON({
           ...upgrade,
           sourceUrl: 'https://github-enterprise.example.com/chalk/chalk',
-        })
+        }),
       ).toMatchSnapshot({
         hasReleaseNotes: true,
         project: {
diff --git a/lib/workers/repository/update/pr/changelog/index.ts b/lib/workers/repository/update/pr/changelog/index.ts
index 229c40c0e9513619953ec3eb3f0c875a8e99bf20..1f46930a0f0b596ad8ab01efa7136e2ebeac1e46 100644
--- a/lib/workers/repository/update/pr/changelog/index.ts
+++ b/lib/workers/repository/update/pr/changelog/index.ts
@@ -10,7 +10,7 @@ import type { ChangeLogResult } from './types';
 export * from './types';
 
 export async function getChangeLogJSON(
-  _config: BranchUpgradeConfig
+  _config: BranchUpgradeConfig,
 ): Promise<ChangeLogResult | null> {
   const sourceUrl = _config.customChangelogUrl ?? _config.sourceUrl!;
   const config: BranchUpgradeConfig = { ..._config, sourceUrl };
@@ -24,7 +24,7 @@ export async function getChangeLogJSON(
       return null;
     }
     logger.debug(
-      `Fetching changelog: ${sourceUrl} (${currentVersion} -> ${newVersion})`
+      `Fetching changelog: ${sourceUrl} (${currentVersion} -> ${newVersion})`,
     );
 
     const platform = detectPlatform(sourceUrl);
@@ -32,7 +32,7 @@ export async function getChangeLogJSON(
     if (is.nullOrUndefined(platform)) {
       logger.info(
         { sourceUrl, hostType: platform },
-        'Unknown platform, skipping changelog fetching.'
+        'Unknown platform, skipping changelog fetching.',
       );
       return null;
     }
@@ -42,7 +42,7 @@ export async function getChangeLogJSON(
     if (is.nullOrUndefined(changeLogSource)) {
       logger.info(
         { sourceUrl, hostType: platform },
-        'Unknown changelog source, skipping changelog fetching.'
+        'Unknown changelog source, skipping changelog fetching.',
       );
       return null;
     }
@@ -55,7 +55,7 @@ export async function getChangeLogJSON(
 }
 
 export function getChangeLogSourceFor(
-  platform: string
+  platform: string,
 ): ChangeLogSource | null {
   return api.get(platform) ?? null;
 }
diff --git a/lib/workers/repository/update/pr/changelog/release-notes.spec.ts b/lib/workers/repository/update/pr/changelog/release-notes.spec.ts
index d3b72ae73836f7bfd1693d7b35a56f9649e9d23b..3044df0675f8306d9c5f28a7b76e781bf00d8037 100644
--- a/lib/workers/repository/update/pr/changelog/release-notes.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/release-notes.spec.ts
@@ -92,23 +92,23 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
   describe('addReleaseNotes()', () => {
     it('returns null if input is null/undefined', async () => {
       expect(
-        await addReleaseNotes(null, partial<BranchUpgradeConfig>())
+        await addReleaseNotes(null, partial<BranchUpgradeConfig>()),
       ).toBeNull();
       expect(
-        await addReleaseNotes(undefined, partial<BranchUpgradeConfig>())
+        await addReleaseNotes(undefined, partial<BranchUpgradeConfig>()),
       ).toBeNull();
     });
 
     it('returns input if invalid', async () => {
       const input = { a: 1 };
       expect(
-        await addReleaseNotes(input as never, partial<BranchUpgradeConfig>())
+        await addReleaseNotes(input as never, partial<BranchUpgradeConfig>()),
       ).toEqual(input);
       expect(
-        await addReleaseNotes(null, partial<BranchUpgradeConfig>())
+        await addReleaseNotes(null, partial<BranchUpgradeConfig>()),
       ).toBeNull();
       expect(
-        await addReleaseNotes({ versions: [] }, partial<BranchUpgradeConfig>())
+        await addReleaseNotes({ versions: [] }, partial<BranchUpgradeConfig>()),
       ).toStrictEqual({ versions: [] });
     });
 
@@ -121,7 +121,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         versions: [{ version: '3.10.0', compare: { url: '' } }],
       };
       expect(
-        await addReleaseNotes(input as never, partial<BranchUpgradeConfig>())
+        await addReleaseNotes(input as never, partial<BranchUpgradeConfig>()),
       ).toEqual({
         hasReleaseNotes: false,
         project: {
@@ -143,7 +143,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
     it('returns ChangeLogResult without release notes', async () => {
       httpMock
         .scope(
-          'https://gitlab.com/api/v4/projects/gitlab-org%2Fgitter%2Fwebapp'
+          'https://gitlab.com/api/v4/projects/gitlab-org%2Fgitter%2Fwebapp',
         )
         .get('/repository/tree?per_page=100&path=lib')
         .reply(200, [])
@@ -165,7 +165,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         ],
       } satisfies ChangeLogResult;
       expect(
-        await addReleaseNotes(input, partial<BranchUpgradeConfig>())
+        await addReleaseNotes(input, partial<BranchUpgradeConfig>()),
       ).toEqual({
         hasReleaseNotes: false,
         project: {
@@ -192,7 +192,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
     it('should return empty array if no apiBaseUrl', async () => {
       const res = await getReleaseList(
         partial<ChangeLogProject>(),
-        partial<ChangeLogRelease>()
+        partial<ChangeLogRelease>(),
       );
       expect(res).toBeEmptyArray();
     });
@@ -222,7 +222,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           ...githubProject,
           repository: 'some/yet-other-repository',
         },
-        partial<ChangeLogRelease>()
+        partial<ChangeLogRelease>(),
       );
       expect(res).toMatchObject([
         {
@@ -243,7 +243,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
       httpMock
         .scope('https://gitlab.com/')
         .get(
-          '/api/v4/projects/some%2Fyet-other-repository/releases?per_page=100'
+          '/api/v4/projects/some%2Fyet-other-repository/releases?per_page=100',
         )
         .reply(200, [
           { tag_name: `v1.0.0` },
@@ -257,7 +257,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           ...gitlabProject,
           repository: 'some/yet-other-repository',
         },
-        partial<ChangeLogRelease>()
+        partial<ChangeLogRelease>(),
       );
       expect(res).toMatchObject([
         {
@@ -280,7 +280,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
       httpMock
         .scope('https://my.custom.domain/')
         .get(
-          '/api/v4/projects/some%2Fyet-other-repository/releases?per_page=100'
+          '/api/v4/projects/some%2Fyet-other-repository/releases?per_page=100',
         )
         .reply(200, [
           { tag_name: `v1.0.0` },
@@ -296,7 +296,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           apiBaseUrl: 'https://my.custom.domain/api/v4/',
           baseUrl: 'https://my.custom.domain/',
         },
-        partial<ChangeLogRelease>()
+        partial<ChangeLogRelease>(),
       );
       expect(res).toMatchObject([
         {
@@ -345,7 +345,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.0',
           gitRef: '1.0.0',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toBeNull();
     });
@@ -380,7 +380,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body [#123](https://github.com/some/other-repository/issues/123), [#124](https://github.com/some/yet-other-repository/issues/124)\n',
@@ -422,7 +422,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: '',
@@ -464,7 +464,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body\n',
@@ -506,7 +506,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body\n',
@@ -548,7 +548,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toBeNull();
     });
@@ -583,7 +583,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body [#123](https://github.com/some/other-repository/issues/123), [#124](https://github.com/some/yet-other-repository/issues/124)\n',
@@ -627,7 +627,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body [#123](https://github.com/some/other-repository/issues/123), [#124](https://github.com/some/yet-other-repository/issues/124)\n',
@@ -671,7 +671,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body [#123](https://github.com/some/other-repository/issues/123), [#124](https://github.com/some/yet-other-repository/issues/124)\n',
@@ -714,7 +714,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body [#123](https://github.com/some/other-repository/issues/123), [#124](https://github.com/some/yet-other-repository/issues/124)\n',
@@ -752,7 +752,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
@@ -789,7 +789,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
@@ -826,7 +826,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         body: 'some body #123, [#124](https://gitlab.com/some/yet-other-repository/issues/124)',
@@ -850,7 +850,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.1',
           gitRef: '1.0.1',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toBeNull();
     });
@@ -893,7 +893,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           version: '1.0.0',
           gitRef: '1.0.0',
         }),
-        partial<BranchUpgradeConfig>()
+        partial<BranchUpgradeConfig>(),
       );
       expect(res).toEqual({
         url: 'correct/url/tag.com',
@@ -929,7 +929,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         }),
         partial<BranchUpgradeConfig>({
           extractVersion: 'app-(?<version>[0-9.]*)',
-        })
+        }),
       );
       expect(res).toEqual({
         url: 'correct/url/tag.com',
@@ -954,7 +954,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '2.0.0',
           gitRef: '2.0.0',
-        })
+        }),
       );
       expect(res).toBeNull();
     });
@@ -979,7 +979,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '2.0.0',
           gitRef: '2.0.0',
-        })
+        }),
       );
       expect(res).toBeNull();
     });
@@ -1003,7 +1003,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '1.0.0',
           gitRef: '1.0.0',
-        })
+        }),
       );
       expect(res).toBeNull();
     });
@@ -1027,7 +1027,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '1.0.0',
           gitRef: '1.0.0',
-        })
+        }),
       );
       expect(res).toBeNull();
     });
@@ -1051,7 +1051,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '1.6.9',
           gitRef: '1.6.9',
-        })
+        }),
       );
       expect(res).toMatchSnapshot({
         notesSourceUrl:
@@ -1065,7 +1065,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
       httpMock
         .scope('https://api.gitlab.com/')
         .get(
-          '/projects/gitlab-org%2Fgitter%2Fwebapp/repository/tree?per_page=100'
+          '/projects/gitlab-org%2Fgitter%2Fwebapp/repository/tree?per_page=100',
         )
         .reply(200, gitlabTreeResponse)
         .get('/projects/gitlab-org%2Fgitter%2Fwebapp/repository/blobs/abcd/raw')
@@ -1079,7 +1079,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '20.26.0',
           gitRef: '20.26.0',
-        })
+        }),
       );
 
       expect(res).toMatchSnapshot({
@@ -1095,7 +1095,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
       httpMock
         .scope('https://my.custom.domain/')
         .get(
-          '/projects/gitlab-org%2Fgitter%2Fwebapp/repository/tree?per_page=100'
+          '/projects/gitlab-org%2Fgitter%2Fwebapp/repository/tree?per_page=100',
         )
         .reply(200, gitlabTreeResponse)
         .get('/projects/gitlab-org%2Fgitter%2Fwebapp/repository/blobs/abcd/raw')
@@ -1110,7 +1110,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '20.26.0',
           gitRef: '20.26.0',
-        })
+        }),
       );
 
       expect(res).toMatchSnapshot({
@@ -1139,7 +1139,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '22.0.0',
           gitRef: '22.0.0',
-        })
+        }),
       );
 
       expect(res).toMatchSnapshot({
@@ -1174,7 +1174,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '3.10.0',
           gitRef: '3.10.0',
-        })
+        }),
       );
 
       expect(res).toMatchSnapshot({
@@ -1203,7 +1203,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '3.10.0',
           gitRef: '3.10.0',
-        })
+        }),
       );
 
       expect(res).toMatchSnapshot({
@@ -1221,7 +1221,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         partial<ChangeLogRelease>({
           version: '3.10.0',
           gitRef: '3.10.0',
-        })
+        }),
       );
       expect(res).toBeNull();
     });
@@ -1249,7 +1249,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           partial<ChangeLogRelease>({
             version: '15.3.0',
             gitRef: '15.3.0',
-          })
+          }),
         );
         versionOneNotes = res!;
 
@@ -1279,7 +1279,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           partial<ChangeLogRelease>({
             version: '15.2.0',
             gitRef: '15.2.0',
-          })
+          }),
         );
         versionTwoNotes = res!;
 
@@ -1294,11 +1294,11 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         httpMock
           .scope('https://gitlab.com/')
           .get(
-            '/api/v4/projects/itentialopensource%2Fadapter-utils/repository/tree?per_page=100'
+            '/api/v4/projects/itentialopensource%2Fadapter-utils/repository/tree?per_page=100',
           )
           .reply(200, gitlabTreeResponse)
           .get(
-            '/api/v4/projects/itentialopensource%2Fadapter-utils/repository/blobs/abcd/raw'
+            '/api/v4/projects/itentialopensource%2Fadapter-utils/repository/blobs/abcd/raw',
           )
           .reply(200, adapterutilsChangelogMd);
         const res = await getReleaseNotesMd(
@@ -1309,7 +1309,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           partial<ChangeLogRelease>({
             version: '4.33.0',
             gitRef: '4.33.0',
-          })
+          }),
         );
         versionTwoNotes = res!;
 
@@ -1329,11 +1329,11 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
         httpMock
           .scope('https://gitlab.com/')
           .get(
-            `/api/v4/projects/itentialopensource%2Fadapter-utils/repository/tree?per_page=100&path=${sourceDirectory}`
+            `/api/v4/projects/itentialopensource%2Fadapter-utils/repository/tree?per_page=100&path=${sourceDirectory}`,
           )
           .reply(200, response)
           .get(
-            '/api/v4/projects/itentialopensource%2Fadapter-utils/repository/blobs/abcd/raw'
+            '/api/v4/projects/itentialopensource%2Fadapter-utils/repository/blobs/abcd/raw',
           )
           .reply(200, adapterutilsChangelogMd);
         const res = await getReleaseNotesMd(
@@ -1345,7 +1345,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           partial<ChangeLogRelease>({
             version: '4.33.0',
             gitRef: '4.33.0',
-          })
+          }),
         );
         versionTwoNotes = res!;
 
@@ -1365,7 +1365,7 @@ describe('workers/repository/update/pr/changelog/release-notes', () => {
           partial<ChangeLogRelease>({
             version: '0.72.3',
             gitRef: '0.72.3',
-          })
+          }),
         );
         expect(res).toBeNull();
       });
diff --git a/lib/workers/repository/update/pr/changelog/release-notes.ts b/lib/workers/repository/update/pr/changelog/release-notes.ts
index 1ca943247c2a09f1a567cc368c64d981b5b6ffbe..4d78fba79e89f9817b7206f61b58eb9869ce607b 100644
--- a/lib/workers/repository/update/pr/changelog/release-notes.ts
+++ b/lib/workers/repository/update/pr/changelog/release-notes.ts
@@ -29,7 +29,7 @@ const repositoriesToSkipMdFetching = ['facebook/react-native'];
 
 export async function getReleaseList(
   project: ChangeLogProject,
-  release: ChangeLogRelease
+  release: ChangeLogRelease,
 ): Promise<ChangeLogNotes[]> {
   logger.trace('getReleaseList()');
   const { apiBaseUrl, repository, type } = project;
@@ -53,7 +53,7 @@ export async function getReleaseList(
     } else {
       logger.debug(
         { repository, type, apiBaseUrl, err },
-        'getReleaseList error'
+        'getReleaseList error',
       );
     }
   }
@@ -62,7 +62,7 @@ export async function getReleaseList(
 
 export function getCachedReleaseList(
   project: ChangeLogProject,
-  release: ChangeLogRelease
+  release: ChangeLogRelease,
 ): Promise<ChangeLogNotes[]> {
   const { repository, apiBaseUrl } = project;
   // TODO: types (#22198)
@@ -79,7 +79,7 @@ export function getCachedReleaseList(
 
 export function massageBody(
   input: string | undefined | null,
-  baseUrl: string
+  baseUrl: string,
 ): string {
   let body = coerceString(input);
   // Convert line returns
@@ -90,14 +90,14 @@ export function massageBody(
     regEx(
       `^##? \\[[^\\]]*\\]\\(${baseUrl}[^/]*/[^/]*/compare/.*?\\n`,
       undefined,
-      false
+      false,
     ),
-    ''
+    '',
   );
   // Clean-up unnecessary commits link
   body = `\n${body}\n`.replace(
     regEx(`\\n${baseUrl}[^/]+/[^/]+/compare/[^\\n]+(\\n|$)`),
-    '\n'
+    '\n',
   );
   // Reduce headings size
   body = body
@@ -110,7 +110,7 @@ export function massageBody(
 
 export function massageName(
   input: string | undefined | null,
-  version: string | undefined
+  version: string | undefined,
 ): string | undefined {
   let name = input ?? '';
 
@@ -129,7 +129,7 @@ export function massageName(
 export async function getReleaseNotes(
   project: ChangeLogProject,
   release: ChangeLogRelease,
-  config: BranchUpgradeConfig
+  config: BranchUpgradeConfig,
 ): Promise<ChangeLogNotes | null> {
   const { packageName, repository } = project;
   const { version, gitRef } = release;
@@ -147,7 +147,7 @@ export async function getReleaseNotes(
         r.tag === version ||
         r.tag === `v${version}` ||
         r.tag === gitRef ||
-        r.tag === `v${gitRef}`
+        r.tag === `v${gitRef}`,
     );
   }
   if (is.undefined(matchedRelease) && config.extractVersion) {
@@ -166,19 +166,19 @@ export async function getReleaseNotes(
 function getExactReleaseMatch(
   packageName: string,
   version: string,
-  releases: ChangeLogNotes[]
+  releases: ChangeLogNotes[],
 ): ChangeLogNotes | undefined {
   const exactReleaseReg = regEx(`${packageName}[@_-]v?${version}`);
   const candidateReleases = releases.filter((r) => r.tag?.endsWith(version));
   const matchedRelease = candidateReleases.find((r) =>
-    exactReleaseReg.test(r.tag!)
+    exactReleaseReg.test(r.tag!),
   );
   return matchedRelease;
 }
 
 async function releaseNotesResult(
   releaseMatch: ChangeLogNotes | undefined,
-  project: ChangeLogProject
+  project: ChangeLogProject,
 ): Promise<ChangeLogNotes | null> {
   if (!releaseMatch) {
     return null;
@@ -238,7 +238,7 @@ function sectionize(text: string, level: number): string[] {
 }
 
 export async function getReleaseNotesMdFileInner(
-  project: ChangeLogProject
+  project: ChangeLogProject,
 ): Promise<ChangeLogFile | null> {
   const { repository, type } = project;
   const apiBaseUrl = project.apiBaseUrl;
@@ -249,25 +249,25 @@ export async function getReleaseNotesMdFileInner(
         return await gitea.getReleaseNotesMd(
           repository,
           apiBaseUrl,
-          sourceDirectory
+          sourceDirectory,
         );
       case 'gitlab':
         return await gitlab.getReleaseNotesMd(
           repository,
           apiBaseUrl,
-          sourceDirectory
+          sourceDirectory,
         );
       case 'github':
         return await github.getReleaseNotesMd(
           repository,
           apiBaseUrl,
-          sourceDirectory
+          sourceDirectory,
         );
       case 'bitbucket':
         return await bitbucket.getReleaseNotesMd(
           repository,
           apiBaseUrl,
-          sourceDirectory
+          sourceDirectory,
         );
       default:
         logger.warn({ apiBaseUrl, repository, type }, 'Invalid project type');
@@ -277,12 +277,12 @@ export async function getReleaseNotesMdFileInner(
     if (err.statusCode === 404) {
       logger.debug(
         { repository, type, apiBaseUrl },
-        'Error 404 getting changelog md'
+        'Error 404 getting changelog md',
       );
     } else {
       logger.debug(
         { err, repository, type, apiBaseUrl },
-        'Error getting changelog md'
+        'Error getting changelog md',
       );
     }
   }
@@ -290,7 +290,7 @@ export async function getReleaseNotesMdFileInner(
 }
 
 export function getReleaseNotesMdFile(
-  project: ChangeLogProject
+  project: ChangeLogProject,
 ): Promise<ChangeLogFile | null> {
   const { sourceDirectory, repository, apiBaseUrl } = project;
   // TODO: types (#22198)
@@ -309,7 +309,7 @@ export function getReleaseNotesMdFile(
 
 export async function getReleaseNotesMd(
   project: ChangeLogProject,
-  release: ChangeLogRelease
+  release: ChangeLogRelease,
 ): Promise<ChangeLogNotes | null> {
   const { baseUrl, repository } = project;
   const version = release.version;
@@ -326,7 +326,7 @@ export async function getReleaseNotesMd(
   const { changelogFile } = changelog;
   const changelogMd = changelog.changelogMd.replace(
     regEx(/\n\s*<a name="[^"]*">.*?<\/a>\n/g),
-    '\n'
+    '\n',
   );
   for (const level of [1, 2, 3, 4, 5, 6, 7]) {
     const changelogParsed = sectionize(changelogMd, level);
@@ -336,7 +336,7 @@ export async function getReleaseNotesMd(
           // replace brackets and parenthesis with space
           const deParenthesizedSection = section.replace(
             regEx(/[[\]()]/g),
-            ' '
+            ' ',
           );
           const [heading] = deParenthesizedSection.split(newlineRegex);
           const title = heading
@@ -374,7 +374,7 @@ export async function getReleaseNotesMd(
         } catch (err) /* istanbul ignore next */ {
           logger.warn(
             { file: changelogFile, err },
-            `Error parsing changelog file`
+            `Error parsing changelog file`,
           );
         }
       }
@@ -412,7 +412,7 @@ export function releaseNotesCacheMinutes(releaseDate?: string | Date): number {
 
 export async function addReleaseNotes(
   input: ChangeLogResult | null | undefined,
-  config: BranchUpgradeConfig
+  config: BranchUpgradeConfig,
 ): Promise<ChangeLogResult | null> {
   if (!input?.versions || !input.project?.type) {
     logger.debug('Missing project or versions');
@@ -447,7 +447,7 @@ export async function addReleaseNotes(
       cacheNamespace,
       cacheKey,
       releaseNotes,
-      cacheMinutes
+      cacheMinutes,
     );
     output.versions!.push({
       ...v,
diff --git a/lib/workers/repository/update/pr/changelog/releases.ts b/lib/workers/repository/update/pr/changelog/releases.ts
index ca8f95f313eeea86a14fd97450d7f8b8670a25a5..08cfbfaf4a1586064d60d804d186cc9fad7427f4 100644
--- a/lib/workers/repository/update/pr/changelog/releases.ts
+++ b/lib/workers/repository/update/pr/changelog/releases.ts
@@ -20,13 +20,13 @@ function matchesMMP(version: VersioningApi, v1: string, v2: string): boolean {
 function matchesUnstable(
   version: VersioningApi,
   v1: string,
-  v2: string
+  v2: string,
 ): boolean {
   return !version.isStable(v1) && matchesMMP(version, v1, v2);
 }
 
 export async function getInRangeReleases(
-  config: BranchUpgradeConfig
+  config: BranchUpgradeConfig,
 ): Promise<Release[] | null> {
   const versioning = config.versioning!;
   const currentVersion = config.currentVersion!;
@@ -43,20 +43,20 @@ export async function getInRangeReleases(
 
     const previousReleases = pkgReleases
       .filter((release) =>
-        version.isCompatible(release.version, currentVersion)
+        version.isCompatible(release.version, currentVersion),
       )
       .filter((release) => !version.isGreaterThan(release.version, newVersion))
       .filter(
         (release) =>
           version.isStable(release.version) ||
           matchesUnstable(version, currentVersion, release.version) ||
-          matchesUnstable(version, newVersion, release.version)
+          matchesUnstable(version, newVersion, release.version),
       );
 
     const releases = previousReleases.filter(
       (release) =>
         version.equals(release.version, currentVersion) ||
-        version.isGreaterThan(release.version, currentVersion)
+        version.isGreaterThan(release.version, currentVersion),
     );
 
     /**
diff --git a/lib/workers/repository/update/pr/changelog/source.spec.ts b/lib/workers/repository/update/pr/changelog/source.spec.ts
index 9e22bffe980fc7c0e5b0fd28657c21f9f5bfaa0a..f7ad2920eb71a4554e23ff01755dc949523696c8 100644
--- a/lib/workers/repository/update/pr/changelog/source.spec.ts
+++ b/lib/workers/repository/update/pr/changelog/source.spec.ts
@@ -16,7 +16,7 @@ describe('workers/repository/update/pr/changelog/source', () => {
         changelogSource.getBaseUrl({
           ...upgrade,
           sourceUrl: undefined,
-        })
+        }),
       ).toBeEmptyString();
     });
 
@@ -31,13 +31,13 @@ describe('workers/repository/update/pr/changelog/source', () => {
         changelogSource.getRepositoryFromUrl({
           ...upgrade,
           sourceUrl: undefined,
-        })
+        }),
       ).toBeEmptyString();
     });
 
     it('handles sourceUrl', () => {
       expect(changelogSource.getRepositoryFromUrl(upgrade)).toBe(
-        'renovatebot/renovate'
+        'renovatebot/renovate',
       );
     });
   });
diff --git a/lib/workers/repository/update/pr/changelog/source.ts b/lib/workers/repository/update/pr/changelog/source.ts
index 92c5f610efe7cb0316903a99792b27d0df308b28..1444e0d6f2f91d591e28baa43f6309f9647878f0 100644
--- a/lib/workers/repository/update/pr/changelog/source.ts
+++ b/lib/workers/repository/update/pr/changelog/source.ts
@@ -27,7 +27,7 @@ export abstract class ChangeLogSource {
       | 'bitbucket-tags'
       | 'gitea-tags'
       | 'github-tags'
-      | 'gitlab-tags'
+      | 'gitlab-tags',
   ) {
     this.cacheNamespace = `changelog-${platform}-release`;
   }
@@ -36,7 +36,7 @@ export abstract class ChangeLogSource {
     baseUrl: string,
     repository: string,
     prevHead: string,
-    nextHead: string
+    nextHead: string,
   ): string;
 
   abstract getAPIBaseUrl(config: BranchUpgradeConfig): string;
@@ -54,7 +54,7 @@ export abstract class ChangeLogSource {
 
     if (is.nullOrUndefined(tags) || is.emptyArray(tags)) {
       logger.debug(
-        `No ${this.datasource} tags found for repository: ${repository}`
+        `No ${this.datasource} tags found for repository: ${repository}`,
       );
 
       return [];
@@ -64,7 +64,7 @@ export abstract class ChangeLogSource {
   }
 
   public async getChangeLogJSON(
-    config: BranchUpgradeConfig
+    config: BranchUpgradeConfig,
   ): Promise<ChangeLogResult | null> {
     logger.trace(`getChangeLogJSON for ${this.platform}`);
 
@@ -130,7 +130,7 @@ export abstract class ChangeLogSource {
       }
       let release = await packageCache.get(
         this.cacheNamespace,
-        this.getCacheKey(sourceUrl, packageName, prev.version, next.version)
+        this.getCacheKey(sourceUrl, packageName, prev.version, next.version),
       );
       if (!release) {
         release = {
@@ -149,7 +149,7 @@ export abstract class ChangeLogSource {
             baseUrl,
             repository,
             prevHead,
-            nextHead
+            nextHead,
           );
         }
         const cacheMinutes = 55;
@@ -157,7 +157,7 @@ export abstract class ChangeLogSource {
           this.cacheNamespace,
           this.getCacheKey(sourceUrl, packageName, prev.version, next.version),
           release,
-          cacheMinutes
+          cacheMinutes,
         );
       }
       changelogReleases.unshift(release);
@@ -185,7 +185,7 @@ export abstract class ChangeLogSource {
     version: allVersioning.VersioningApi,
     packageName: string,
     depNewVersion: string,
-    tags: string[]
+    tags: string[],
   ): string | undefined {
     const regex = regEx(`(?:${packageName}|release)[@-]`, undefined, false);
     const exactReleaseRegex = regEx(`${packageName}[@\\-_]v?${depNewVersion}`);
@@ -202,13 +202,13 @@ export abstract class ChangeLogSource {
     version: allVersioning.VersioningApi,
     packageName: string,
     release: Release,
-    tags: string[]
+    tags: string[],
   ): string | null {
     const tagName = this.findTagOfRelease(
       version,
       packageName,
       release.version,
-      tags
+      tags,
     );
     if (is.nonEmptyString(tagName)) {
       return tagName;
@@ -223,7 +223,7 @@ export abstract class ChangeLogSource {
     sourceUrl: string,
     packageName: string,
     prev: string,
-    next: string
+    next: string,
   ): string {
     return `${slugifyUrl(sourceUrl)}:${packageName}:${prev}:${next}`;
   }
diff --git a/lib/workers/repository/update/pr/code-owners.spec.ts b/lib/workers/repository/update/pr/code-owners.spec.ts
index 2b110cee785fc42d260ffc4b0ceea55a0637300b..4e45521a2d5906ae3e2f3953ae635961ea2e21e0 100644
--- a/lib/workers/repository/update/pr/code-owners.spec.ts
+++ b/lib/workers/repository/update/pr/code-owners.spec.ts
@@ -27,7 +27,7 @@ describe('workers/repository/update/pr/code-owners', () => {
         codeBlock`
           * @jimmy
           yarn.lock
-        `
+        `,
       );
       git.getBranchFiles.mockResolvedValueOnce(['yarn.lock']);
       const codeOwners = await codeOwnersForPr(pr);
@@ -43,7 +43,7 @@ describe('workers/repository/update/pr/code-owners', () => {
 
     it('returns more specific code owners', async () => {
       fs.readLocalFile.mockResolvedValueOnce(
-        ['* @jimmy', 'package.json @john @maria'].join('\n')
+        ['* @jimmy', 'package.json @john @maria'].join('\n'),
       );
       git.getBranchFiles.mockResolvedValueOnce(['package.json']);
       const codeOwners = await codeOwnersForPr(pr);
@@ -178,7 +178,7 @@ describe('workers/repository/update/pr/code-owners', () => {
           server/pom.xml @reviewer-1
           client/package.json @reviewer-1
           client/package-lock.json @reviewer-1
-        `
+        `,
       );
       git.getBranchFiles.mockResolvedValueOnce(['server/pom.xml']);
       const codeOwners = await codeOwnersForPr(pr);
@@ -197,7 +197,7 @@ describe('workers/repository/update/pr/code-owners', () => {
           server/pom.xml @reviewer-1
           client/package.json @reviewer-1
           client/package-lock.json @reviewer-1
-        `
+        `,
       );
       git.getBranchFiles.mockResolvedValueOnce([
         'client/package.json',
@@ -221,7 +221,7 @@ describe('workers/repository/update/pr/code-owners', () => {
           '   * @jimmy     # inline comment     ',
           '        # comment line with leading whitespace',
           ' package.json @john @maria#inline comment without leading whitespace  ',
-        ].join('\n')
+        ].join('\n'),
       );
       git.getBranchFiles.mockResolvedValueOnce(['package.json']);
       const codeOwners = await codeOwnersForPr(pr);
@@ -237,7 +237,7 @@ describe('workers/repository/update/pr/code-owners', () => {
 
     it('returns empty array when no code owners match', async () => {
       fs.readLocalFile.mockResolvedValueOnce(
-        ['package-lock.json @mike'].join('\n')
+        ['package-lock.json @mike'].join('\n'),
       );
       git.getBranchFiles.mockResolvedValueOnce(['yarn.lock']);
       const codeOwners = await codeOwnersForPr(pr);
diff --git a/lib/workers/repository/update/pr/code-owners.ts b/lib/workers/repository/update/pr/code-owners.ts
index 93886333f4cb12f3a6bb3fbfafb86805ce857c16..f45373c063cc10c7d95c202f4e19f05a86f48f31 100644
--- a/lib/workers/repository/update/pr/code-owners.ts
+++ b/lib/workers/repository/update/pr/code-owners.ts
@@ -31,7 +31,7 @@ interface FileOwnersScore {
 
 function matchFileToOwners(
   file: string,
-  rules: FileOwnerRule[]
+  rules: FileOwnerRule[],
 ): FileOwnersScore {
   const usernames = new Map<string, number>();
 
@@ -113,12 +113,12 @@ export async function codeOwnersForPr(pr: Pr): Promise<string[]> {
 
     logger.debug(
       { prFiles, fileOwnerRules },
-      'PR files and rules to match for CODEOWNERS'
+      'PR files and rules to match for CODEOWNERS',
     );
 
     // Apply rules & get list of owners for each prFile
     const emptyRules = fileOwnerRules.filter(
-      (rule) => rule.usernames.length === 0
+      (rule) => rule.usernames.length === 0,
     );
     const fileOwners =
       // Map through all prFiles and match said file(s) with all the rules
@@ -128,7 +128,7 @@ export async function codeOwnersForPr(pr: Pr): Promise<string[]> {
         // Match file again but this time only with emptyRules, to ensure that files which have no owner set remain owner-less
         .map((fileMatch) => {
           const matchEmpty = emptyRules.find((rule) =>
-            rule.match(fileMatch.file)
+            rule.match(fileMatch.file),
           );
           if (matchEmpty) {
             return { ...fileMatch, userScoreMap: new Map<string, number>() };
@@ -139,7 +139,7 @@ export async function codeOwnersForPr(pr: Pr): Promise<string[]> {
     logger.debug(
       `CODEOWNERS matched the following files: ${fileOwners
         .map((f) => f.file)
-        .join(', ')}`
+        .join(', ')}`,
     );
 
     // Get list of all matched users and the files they own (reverse keys of fileOwners)
@@ -151,13 +151,13 @@ export async function codeOwnersForPr(pr: Pr): Promise<string[]> {
         user: userMatch.username,
         score: Array.from(userMatch.fileScoreMap.values()).reduce(
           (acc, score) => acc + score,
-          0
+          0,
         ),
       }))
       .sort((a, b) => b.score - a.score);
 
     logger.debug(
-      `CODEOWNERS matched the following users: ${JSON.stringify(userScore)}`
+      `CODEOWNERS matched the following users: ${JSON.stringify(userScore)}`,
     );
 
     return userScore.map((u) => u.user);
diff --git a/lib/workers/repository/update/pr/index.spec.ts b/lib/workers/repository/update/pr/index.spec.ts
index 77340a78f9874f3361b4d907cae9e3167f527ab7..6e0f81d0ad648d94bc448fafd02c890237d9494f 100644
--- a/lib/workers/repository/update/pr/index.spec.ts
+++ b/lib/workers/repository/update/pr/index.spec.ts
@@ -92,7 +92,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(limits.incLimitedValue).toHaveBeenCalledWith('PullRequests');
         expect(logger.logger.info).toHaveBeenCalledWith(
           { pr: pr.number, prTitle },
-          'PR created'
+          'PR created',
         );
         expect(prCache.setPrCache).toHaveBeenCalled();
       });
@@ -249,7 +249,7 @@ describe('workers/repository/update/pr/index', () => {
 
           expect(res).toEqual({ type: 'without-pr', prBlockedBy: 'Error' });
           expect(logger.logger.warn).toHaveBeenCalledWith(
-            'A pull requests already exists'
+            'A pull requests already exists',
           );
           expect(prCache.setPrCache).not.toHaveBeenCalled();
         });
@@ -280,7 +280,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(platform.createPr).not.toHaveBeenCalled();
         expect(logger.logger.info).toHaveBeenCalledWith(
           { pr: changedPr.number, prTitle },
-          `PR updated`
+          `PR updated`,
         );
         expect(prCache.setPrCache).toHaveBeenCalled();
       });
@@ -300,7 +300,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(prCache.setPrCache).toHaveBeenCalled();
         expect(logger.logger.info).toHaveBeenCalledWith(
           { pr: changedPr.number, prTitle },
-          `PR updated`
+          `PR updated`,
         );
       });
 
@@ -314,7 +314,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(prCache.setPrCache).toHaveBeenCalled();
         expect(logger.logger.info).toHaveBeenCalledWith(
           { pr: pr.number, prTitle },
-          `PR updated`
+          `PR updated`,
         );
         expect(logger.logger.debug).toHaveBeenCalledWith(
           {
@@ -322,7 +322,7 @@ describe('workers/repository/update/pr/index', () => {
             oldBaseBranch: 'base',
             newBaseBranch: 'new_base',
           },
-          'PR base branch has changed'
+          'PR base branch has changed',
         );
         expect(res).toEqual({
           type: 'with-pr',
@@ -348,7 +348,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(platform.createPr).not.toHaveBeenCalled();
         expect(prCache.setPrCache).toHaveBeenCalled();
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'Pull Request #123 does not need updating'
+          'Pull Request #123 does not need updating',
         );
       });
     });
@@ -370,7 +370,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(platform.updatePr).not.toHaveBeenCalled();
         expect(platform.createPr).not.toHaveBeenCalled();
         expect(logger.logger.info).toHaveBeenCalledWith(
-          `DRY-RUN: Would create PR: ${prTitle}`
+          `DRY-RUN: Would create PR: ${prTitle}`,
         );
       });
 
@@ -384,7 +384,7 @@ describe('workers/repository/update/pr/index', () => {
         expect(platform.updatePr).not.toHaveBeenCalled();
         expect(platform.createPr).not.toHaveBeenCalled();
         expect(logger.logger.info).toHaveBeenCalledWith(
-          `DRY-RUN: Would update PR #${pr.number}`
+          `DRY-RUN: Would update PR #${pr.number}`,
         );
       });
 
@@ -600,7 +600,7 @@ describe('workers/repository/update/pr/index', () => {
 
         expect(logger.logger.error).toHaveBeenCalledWith(
           { err },
-          'Failed to ensure PR: ' + prTitle
+          'Failed to ensure PR: ' + prTitle,
         );
       });
 
@@ -621,7 +621,7 @@ describe('workers/repository/update/pr/index', () => {
             automerge: true,
             automergeType: 'pr',
             assignAutomerge: false,
-          })
+          }),
         ).rejects.toThrow(err);
       });
 
@@ -649,9 +649,9 @@ describe('workers/repository/update/pr/index', () => {
               automerge: true,
               automergeType: 'pr',
               assignAutomerge: false,
-            })
+            }),
           ).rejects.toThrow(err);
-        }
+        },
       );
     });
 
@@ -816,7 +816,7 @@ describe('workers/repository/update/pr/index', () => {
           pr: existingPr,
         });
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'Pull Request #123 does not need updating'
+          'Pull Request #123 does not need updating',
         );
         expect(prCache.setPrCache).toHaveBeenCalledTimes(1);
       });
@@ -834,15 +834,15 @@ describe('workers/repository/update/pr/index', () => {
           pr: existingPr,
         });
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'Pull Request #123 does not need updating'
+          'Pull Request #123 does not need updating',
         );
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'PR cache matches but it has been edited in the past 24hrs, so processing PR'
+          'PR cache matches but it has been edited in the past 24hrs, so processing PR',
         );
         expect(prCache.setPrCache).toHaveBeenCalledWith(
           sourceBranch,
           cachedPr.bodyFingerprint,
-          false
+          false,
         );
       });
 
@@ -859,7 +859,7 @@ describe('workers/repository/update/pr/index', () => {
           pr: existingPr,
         });
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'PR fingerprints mismatch, processing PR'
+          'PR fingerprints mismatch, processing PR',
         );
         expect(prCache.setPrCache).toHaveBeenCalledTimes(1);
       });
@@ -872,7 +872,7 @@ describe('workers/repository/update/pr/index', () => {
             generatePrBodyFingerprintConfig({
               ...config,
               fetchChangeLogs: 'pr',
-            })
+            }),
           ),
           lastEdited: new Date('2020-01-20T00:00:00Z').toISOString(),
         };
@@ -883,7 +883,7 @@ describe('workers/repository/update/pr/index', () => {
           pr: existingPr,
         });
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'PR cache matches and no PR changes in last 24hrs, so skipping PR body check'
+          'PR cache matches and no PR changes in last 24hrs, so skipping PR body check',
         );
         expect(embedChangelogs).toHaveBeenCalledTimes(0);
       });
@@ -905,7 +905,7 @@ describe('workers/repository/update/pr/index', () => {
             generatePrBodyFingerprintConfig({
               ...config,
               fetchChangeLogs: 'pr',
-            })
+            }),
           ),
           lastEdited: new Date('2020-01-20T00:00:00Z').toISOString(),
         };
@@ -923,10 +923,10 @@ describe('workers/repository/update/pr/index', () => {
           },
         });
         expect(logger.logger.debug).toHaveBeenCalledWith(
-          'PR rebase requested, so skipping cache check'
+          'PR rebase requested, so skipping cache check',
         );
         expect(logger.logger.debug).not.toHaveBeenCalledWith(
-          `Pull Request #${number} does not need updating`
+          `Pull Request #${number} does not need updating`,
         );
         expect(embedChangelogs).toHaveBeenCalledTimes(1);
       });
@@ -945,7 +945,7 @@ describe('workers/repository/update/pr/index', () => {
         prCache.getPrCache.mockReturnValueOnce(null);
         await ensurePr(config);
         expect(logger.logger.debug).not.toHaveBeenCalledWith(
-          'PR cache not found'
+          'PR cache not found',
         );
       });
     });
diff --git a/lib/workers/repository/update/pr/index.ts b/lib/workers/repository/update/pr/index.ts
index 36bd01f8a2e960454a30c16ff6145c3a9aafd696..5364a9d108af248975706764f4dfda37d153137c 100644
--- a/lib/workers/repository/update/pr/index.ts
+++ b/lib/workers/repository/update/pr/index.ts
@@ -45,12 +45,12 @@ import {
 } from './pr-fingerprint';
 
 export function getPlatformPrOptions(
-  config: RenovateConfig & PlatformPrOptions
+  config: RenovateConfig & PlatformPrOptions,
 ): PlatformPrOptions {
   const usePlatformAutomerge = Boolean(
     config.automerge &&
       (config.automergeType === 'pr' || config.automergeType === 'branch') &&
-      config.platformAutomerge
+      config.platformAutomerge,
   );
 
   return {
@@ -77,7 +77,7 @@ export type EnsurePrResult = ResultWithPr | ResultWithoutPr;
 
 export function updatePrDebugData(
   targetBranch: string,
-  debugData: PrDebugData | undefined
+  debugData: PrDebugData | undefined,
 ): PrDebugData {
   const createdByRenovateVersion = debugData?.createdInVer ?? pkg.version;
   const updatedByRenovateVersion = pkg.version;
@@ -104,7 +104,7 @@ function hasNotIgnoredReviewers(pr: Pr, config: BranchConfig): boolean {
 
 // Ensures that PR exists with matching title/body
 export async function ensurePr(
-  prConfig: BranchConfig
+  prConfig: BranchConfig,
 ): Promise<EnsurePrResult> {
   const config: BranchConfig = { ...prConfig };
   const filteredPrConfig = generatePrBodyFingerprintConfig(config);
@@ -119,7 +119,7 @@ export async function ensurePr(
     upgrades,
   } = config;
   const getBranchStatus = memoize(() =>
-    resolveBranchStatus(branchName, !!internalChecksAsSuccess, ignoreTests)
+    resolveBranchStatus(branchName, !!internalChecksAsSuccess, ignoreTests),
   );
   const dependencyDashboardCheck =
     config.dependencyDashboardChecks?.[config.branchName];
@@ -169,7 +169,7 @@ export async function ensurePr(
         const lastCommitTime = await getBranchLastCommitTime(branchName);
         if (getElapsedHours(lastCommitTime) >= config.prNotPendingHours) {
           logger.debug(
-            'Branch exceeds prNotPending hours - forcing PR creation'
+            'Branch exceeds prNotPending hours - forcing PR creation',
           );
           config.forcePr = true;
         }
@@ -206,7 +206,7 @@ export async function ensurePr(
               elapsedHours < config.prNotPendingHours))
         ) {
           logger.debug(
-            `Branch is ${elapsedHours} hours old - skipping PR creation`
+            `Branch is ${elapsedHours} hours old - skipping PR creation`,
           );
           return {
             type: 'without-pr',
@@ -215,7 +215,7 @@ export async function ensurePr(
         }
         const prNotPendingHours = String(config.prNotPendingHours);
         logger.debug(
-          `prNotPendingHours=${prNotPendingHours} threshold hit - creating PR`
+          `prNotPendingHours=${prNotPendingHours} threshold hit - creating PR`,
         );
       }
       logger.debug('Branch status success');
@@ -226,7 +226,7 @@ export async function ensurePr(
   const commitRepos: string[] = [];
 
   function getRepoNameWithSourceDirectory(
-    upgrade: BranchUpgradeConfig
+    upgrade: BranchUpgradeConfig,
   ): string {
     // TODO: types (#22198)
     return `${upgrade.repoName!}${
@@ -308,7 +308,7 @@ export async function ensurePr(
       if (releaseNotesSources.includes(notesSourceUrl)) {
         logger.debug(
           { depName: upgrade.depName },
-          'Removing duplicate release notes'
+          'Removing duplicate release notes',
         );
         upgrade.hasReleaseNotes = false;
       } else {
@@ -322,10 +322,10 @@ export async function ensurePr(
     {
       debugData: updatePrDebugData(
         config.baseBranch,
-        existingPr?.bodyStruct?.debugData
+        existingPr?.bodyStruct?.debugData,
       ),
     },
-    config
+    config,
   );
 
   try {
@@ -355,7 +355,7 @@ export async function ensurePr(
         // adds or-cache for existing PRs
         setPrCache(branchName, prBodyFingerprint, false);
         logger.debug(
-          `Pull Request #${existingPr.number} does not need updating`
+          `Pull Request #${existingPr.number} does not need updating`,
         );
         return { type: 'with-pr', pr: existingPr };
       }
@@ -374,7 +374,7 @@ export async function ensurePr(
             oldBaseBranch: existingPr?.targetBranch,
             newBaseBranch: config.baseBranch,
           },
-          'PR base branch has changed'
+          'PR base branch has changed',
         );
         updatePrConfig.targetBranch = config.baseBranch;
       }
@@ -385,14 +385,14 @@ export async function ensurePr(
             oldPrTitle: existingPr.title,
             newPrTitle: prTitle,
           },
-          'PR title changed'
+          'PR title changed',
         );
       } else if (!config.committedFiles && !config.rebaseRequested) {
         logger.debug(
           {
             prTitle,
           },
-          'PR body changed'
+          'PR body changed',
         );
       }
 
@@ -450,7 +450,7 @@ export async function ensurePr(
           err.body?.message === 'Validation failed' &&
           err.body.errors?.length &&
           err.body.errors.some((error: { message?: string }) =>
-            error.message?.startsWith('A pull request already exists')
+            error.message?.startsWith('A pull request already exists'),
           )
         ) {
           logger.warn('A pull requests already exists');
@@ -459,7 +459,7 @@ export async function ensurePr(
         if (err.statusCode === 502) {
           logger.warn(
             { branch: branchName },
-            'Deleting branch due to server error'
+            'Deleting branch due to server error',
           );
           await scm.deleteBranch(branchName);
         }
@@ -497,7 +497,7 @@ export async function ensurePr(
         (await getBranchStatus()) !== 'red'
       ) {
         logger.debug(
-          `Skipping assignees and reviewers as automerge=${config.automerge}`
+          `Skipping assignees and reviewers as automerge=${config.automerge}`,
         );
       } else {
         await addParticipants(config, pr);
diff --git a/lib/workers/repository/update/pr/participants.spec.ts b/lib/workers/repository/update/pr/participants.spec.ts
index 8da83ebb2ca885ebb93cf7e20e8cbcdb517aab45..b74e108a47d6911338176ffedf35028504658d05 100644
--- a/lib/workers/repository/update/pr/participants.spec.ts
+++ b/lib/workers/repository/update/pr/participants.spec.ts
@@ -61,7 +61,7 @@ describe('workers/repository/update/pr/participants', () => {
           assigneesFromCodeOwners: true,
           expandCodeOwnersGroups: true,
         },
-        pr
+        pr,
       );
       expect(platform.expandGroupMembers).toHaveBeenCalledWith([
         'user',
@@ -182,7 +182,7 @@ describe('workers/repository/update/pr/participants', () => {
     it('supports additionalReviewers', async () => {
       await addParticipants(
         { ...config, additionalReviewers: ['foo', 'bar', 'baz'] },
-        pr
+        pr,
       );
       expect(platform.addReviewers).toHaveBeenCalledWith(123, [
         'x',
diff --git a/lib/workers/repository/update/pr/participants.ts b/lib/workers/repository/update/pr/participants.ts
index a8ee37928f58acfa7650db764c4b2fd05f49660c..90b119d684dee20730ec407d0c9bf79c927bae69 100644
--- a/lib/workers/repository/update/pr/participants.ts
+++ b/lib/workers/repository/update/pr/participants.ts
@@ -10,7 +10,7 @@ import { codeOwnersForPr } from './code-owners';
 async function addCodeOwners(
   config: RenovateConfig,
   assigneesOrReviewers: string[],
-  pr: Pr
+  pr: Pr,
 ): Promise<string[]> {
   const codeOwners = await codeOwnersForPr(pr);
 
@@ -24,7 +24,7 @@ async function addCodeOwners(
 
 function filterUnavailableUsers(
   config: RenovateConfig,
-  users: string[]
+  users: string[],
 ): Promise<string[]> {
   return config.filterUnavailableUsers && platform.filterUnavailableUsers
     ? platform.filterUnavailableUsers(users)
@@ -33,7 +33,7 @@ function filterUnavailableUsers(
 
 function prepareParticipants(
   config: RenovateConfig,
-  usernames: string[]
+  usernames: string[],
 ): Promise<string[]> {
   const normalizedUsernames = [...new Set(usernames.map(noLeadingAtSymbol))];
   return filterUnavailableUsers(config, normalizedUsernames);
@@ -41,7 +41,7 @@ function prepareParticipants(
 
 export async function addParticipants(
   config: RenovateConfig,
-  pr: Pr
+  pr: Pr,
 ): Promise<void> {
   let assignees = config.assignees ?? [];
   logger.debug(`addParticipants(pr=${pr?.number})`);
@@ -65,7 +65,7 @@ export async function addParticipants(
     } catch (err) {
       logger.debug(
         { assignees: config.assignees, err },
-        'Failed to add assignees'
+        'Failed to add assignees',
       );
     }
   }
@@ -97,7 +97,7 @@ export async function addParticipants(
     } catch (err) {
       logger.debug(
         { reviewers: config.reviewers, err },
-        'Failed to add reviewers'
+        'Failed to add reviewers',
       );
     }
   }
diff --git a/lib/workers/repository/update/pr/pr-cache.spec.ts b/lib/workers/repository/update/pr/pr-cache.spec.ts
index 51455f04190227210dbd67614aab57008dc093ff..3aaea8cc638d065ab2d60f00b6d5e0ba90389a20 100644
--- a/lib/workers/repository/update/pr/pr-cache.spec.ts
+++ b/lib/workers/repository/update/pr/pr-cache.spec.ts
@@ -53,7 +53,7 @@ describe('workers/repository/update/pr/pr-cache', () => {
       cache.getCache.mockReturnValue(dummyCache);
       setPrCache('branch_1', 'fingerprint_hash', false);
       expect(logger.logger.debug).toHaveBeenCalledWith(
-        'setPrCache(): Branch cache not present'
+        'setPrCache(): Branch cache not present',
       );
     });
 
diff --git a/lib/workers/repository/update/pr/pr-cache.ts b/lib/workers/repository/update/pr/pr-cache.ts
index 83dc66d6d1a3e4f738f77554127c2c25b9dbd321..dfd133ca8bf0ae266eaab6ca569be4eaaa129af6 100644
--- a/lib/workers/repository/update/pr/pr-cache.ts
+++ b/lib/workers/repository/update/pr/pr-cache.ts
@@ -6,7 +6,7 @@ export function getPrCache(branchName: string): PrCache | null {
   logger.debug(`getPrCache()`);
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branchName === branch.branchName
+    (branch) => branchName === branch.branchName,
   );
 
   const prCache = branch?.prCache;
@@ -27,12 +27,12 @@ export function getPrCache(branchName: string): PrCache | null {
 export function setPrCache(
   branchName: string,
   bodyFingerprint: string,
-  prModified: boolean
+  prModified: boolean,
 ): void {
   logger.debug(`setPrCache()`);
   const cache = getCache();
   const branch = cache.branches?.find(
-    (branch) => branchName === branch.branchName
+    (branch) => branchName === branch.branchName,
   );
 
   if (!branch) {
diff --git a/lib/workers/repository/update/pr/pr-fingerprint.ts b/lib/workers/repository/update/pr/pr-fingerprint.ts
index 14889efcde7f065981e1b9906b6f11d0ccf37493..874131538661369b72aec94b73a2d5a710c2c667 100644
--- a/lib/workers/repository/update/pr/pr-fingerprint.ts
+++ b/lib/workers/repository/update/pr/pr-fingerprint.ts
@@ -45,7 +45,7 @@ export interface PrBodyFingerprintConfig {
 }
 
 export function generatePrBodyFingerprintConfig(
-  config: BranchConfig
+  config: BranchConfig,
 ): PrBodyFingerprintConfig {
   const filteredUpgrades = config.upgrades.map((upgrade) => {
     return {
@@ -85,7 +85,7 @@ export function generatePrBodyFingerprintConfig(
 
 export function validatePrCache(
   prCache: PrCache,
-  bodyFingerprint: string
+  bodyFingerprint: string,
 ): boolean {
   if (prCache.bodyFingerprint !== bodyFingerprint) {
     logger.debug('PR fingerprints mismatch, processing PR');
@@ -94,13 +94,13 @@ export function validatePrCache(
 
   if (getElapsedHours(prCache.lastEdited) < 24) {
     logger.debug(
-      'PR cache matches but it has been edited in the past 24hrs, so processing PR'
+      'PR cache matches but it has been edited in the past 24hrs, so processing PR',
     );
     return false;
   }
 
   logger.debug(
-    'PR cache matches and no PR changes in last 24hrs, so skipping PR body check'
+    'PR cache matches and no PR changes in last 24hrs, so skipping PR body check',
   );
   return true;
 }
diff --git a/lib/workers/repository/updates/branch-name.spec.ts b/lib/workers/repository/updates/branch-name.spec.ts
index dbebba802078f5c92d729c469f67ee3bcfa0148c..c78652419cc6c2660fd59f937ffd6ab1e4f20e4d 100644
--- a/lib/workers/repository/updates/branch-name.spec.ts
+++ b/lib/workers/repository/updates/branch-name.spec.ts
@@ -89,7 +89,7 @@ describe('workers/repository/updates/branch-name', () => {
       };
       generateBranchName(upgrade);
       expect(upgrade.branchName).toBe(
-        'update-branch-patch-some-group-slug-update-topic'
+        'update-branch-patch-some-group-slug-update-topic',
       );
     });
 
@@ -334,7 +334,7 @@ describe('workers/repository/updates/branch-name', () => {
       };
       generateBranchName(upgrade);
       expect(upgrade.branchName).toBe(
-        'some-group-name-dollarpercentand-or-lessgreater-version-grouptopic'
+        'some-group-name-dollarpercentand-or-lessgreater-version-grouptopic',
       );
     });
 
@@ -349,7 +349,7 @@ describe('workers/repository/updates/branch-name', () => {
       };
       generateBranchName(upgrade);
       expect(upgrade.branchName).toBe(
-        'some-group-name.dollarpercentversion-grouptopic'
+        'some-group-name.dollarpercentversion-grouptopic',
       );
     });
   });
diff --git a/lib/workers/repository/updates/branch-name.ts b/lib/workers/repository/updates/branch-name.ts
index 6ce472bbd2548bea2455514b6dd138c80a285afb..9036a161c07fe1bcd73f427ed85fb672797e94f9 100644
--- a/lib/workers/repository/updates/branch-name.ts
+++ b/lib/workers/repository/updates/branch-name.ts
@@ -26,7 +26,7 @@ const RE_SPECIAL_CHARS_STRICT = regEx(/[`~!@#$%^&*()_=+[\]\\|{};':",.<>?]/g);
  */
 function cleanBranchName(
   branchName: string,
-  branchNameStrict?: boolean
+  branchNameStrict?: boolean,
 ): string {
   let cleanedBranchName = branchName;
 
@@ -52,7 +52,7 @@ export function generateBranchName(update: RenovateConfig): void {
     logger.trace('Using group branchName template');
     // TODO: types (#22198)
     logger.trace(
-      `Dependency ${update.depName!} is part of group ${update.groupName}`
+      `Dependency ${update.depName!} is part of group ${update.groupName}`,
     );
     update.groupSlug = slugify(update.groupSlug ?? update.groupName, {
       lower: true,
@@ -76,19 +76,19 @@ export function generateBranchName(update: RenovateConfig): void {
     let hashLength = update.hashedBranchLength - update.branchPrefix!.length;
     if (hashLength < MIN_HASH_LENGTH) {
       logger.warn(
-        `\`hashedBranchLength\` must allow for at least ${MIN_HASH_LENGTH} characters hashing in addition to \`branchPrefix\`. Using ${MIN_HASH_LENGTH} character hash instead.`
+        `\`hashedBranchLength\` must allow for at least ${MIN_HASH_LENGTH} characters hashing in addition to \`branchPrefix\`. Using ${MIN_HASH_LENGTH} character hash instead.`,
       );
       hashLength = MIN_HASH_LENGTH;
     }
 
     const additionalBranchPrefix = template.compile(
       String(update.additionalBranchPrefix ?? ''),
-      update
+      update,
     );
 
     const branchTopic = template.compile(
       String(update.branchTopic ?? ''),
-      update
+      update,
     );
 
     let hashInput = additionalBranchPrefix + branchTopic;
@@ -102,7 +102,7 @@ export function generateBranchName(update: RenovateConfig): void {
     // TODO: types (#22198)
     update.branchName = `${update.branchPrefix!}${hashedInput.slice(
       0,
-      hashLength
+      hashLength,
     )}`;
   } else {
     update.branchName = template.compile(update.branchName!, update);
@@ -114,6 +114,6 @@ export function generateBranchName(update: RenovateConfig): void {
 
   update.branchName = cleanBranchName(
     update.branchName,
-    update.branchNameStrict
+    update.branchNameStrict,
   );
 }
diff --git a/lib/workers/repository/updates/branchify.ts b/lib/workers/repository/updates/branchify.ts
index a95c5a242045ff025f1419003f73e33e97383c18..b7e65a089b35b86f7a43062662a043ed965dd78a 100644
--- a/lib/workers/repository/updates/branchify.ts
+++ b/lib/workers/repository/updates/branchify.ts
@@ -15,7 +15,7 @@ export type BranchifiedConfig = Merge<
 >;
 export async function branchifyUpgrades(
   config: RenovateConfig,
-  packageFiles: Record<string, any[]>
+  packageFiles: Record<string, any[]>,
 ): Promise<BranchifiedConfig> {
   logger.debug('branchifyUpgrades');
   const updates = await flattenUpdates(config, packageFiles);
@@ -23,7 +23,7 @@ export async function branchifyUpgrades(
     `${updates.length} flattened updates found: ${updates
       .map((u) => u.depName)
       .filter((txt) => txt?.trim().length)
-      .join(', ')}`
+      .join(', ')}`,
   );
   const errors: ValidationMessage[] = [];
   const warnings: ValidationMessage[] = [];
@@ -33,7 +33,7 @@ export async function branchifyUpgrades(
     const update: BranchUpgradeConfig = { ...u } as any;
     branchUpgrades[update.branchName] = branchUpgrades[update.branchName] || [];
     branchUpgrades[update.branchName] = [update].concat(
-      branchUpgrades[update.branchName]
+      branchUpgrades[update.branchName],
     );
   }
   logger.debug(`Returning ${Object.keys(branchUpgrades).length} branch(es)`);
@@ -62,7 +62,7 @@ export async function branchifyUpgrades(
               previousNewValue,
               thisNewValue: newValue,
             },
-            'Ignoring upgrade collision'
+            'Ignoring upgrade collision',
           );
           return false;
         }
@@ -102,7 +102,7 @@ export async function branchifyUpgrades(
         const [sourceUrl, newVersion] = key.split('|');
         logger.debug(
           { sourceUrl, newVersion, branches: value },
-          'Found sourceUrl with multiple branches that should probably be combined into a group'
+          'Found sourceUrl with multiple branches that should probably be combined into a group',
         );
       }
     }
diff --git a/lib/workers/repository/updates/flatten.spec.ts b/lib/workers/repository/updates/flatten.spec.ts
index 2be37367e4396fd566ecfc8c4ed31f2651417cb1..402efb5d6833b2e64326af96883a6dca210a81b6 100644
--- a/lib/workers/repository/updates/flatten.spec.ts
+++ b/lib/workers/repository/updates/flatten.spec.ts
@@ -150,50 +150,50 @@ describe('workers/repository/updates/flatten', () => {
           (upgrade) =>
             upgrade.isLockFileMaintenance ||
             upgrade.isRemediation ||
-            is.number(upgrade.depIndex)
-        )
+            is.number(upgrade.depIndex),
+        ),
       ).toBeTrue();
       expect(
-        res.filter((update) => update.sourceRepoSlug)[0].sourceRepoSlug
+        res.filter((update) => update.sourceRepoSlug)[0].sourceRepoSlug,
       ).toBe('org-repo');
       expect(res.filter((update) => update.sourceRepo)[0].sourceRepo).toBe(
-        'org/repo'
+        'org/repo',
       );
       expect(
-        res.filter((update) => update.sourceRepoOrg)[0].sourceRepoOrg
+        res.filter((update) => update.sourceRepoOrg)[0].sourceRepoOrg,
       ).toBe('org');
       expect(
-        res.filter((update) => update.sourceRepoName)[0].sourceRepoName
+        res.filter((update) => update.sourceRepoName)[0].sourceRepoName,
       ).toBe('repo');
       expect(
-        res.filter((update) => update.sourceRepoSlug)[1].sourceRepoSlug
+        res.filter((update) => update.sourceRepoSlug)[1].sourceRepoSlug,
       ).toBe('org-repo');
       expect(res.filter((update) => update.sourceRepo)[1].sourceRepo).toBe(
-        'org/repo'
+        'org/repo',
       );
       expect(
-        res.filter((update) => update.sourceRepoOrg)[1].sourceRepoOrg
+        res.filter((update) => update.sourceRepoOrg)[1].sourceRepoOrg,
       ).toBe('org');
       expect(
-        res.filter((update) => update.sourceRepoName)[1].sourceRepoName
+        res.filter((update) => update.sourceRepoName)[1].sourceRepoName,
       ).toBe('repo');
       expect(
-        res.filter((update) => update.sourceRepoSlug)[2].sourceRepoSlug
+        res.filter((update) => update.sourceRepoSlug)[2].sourceRepoSlug,
       ).toBe('nodejs-node');
       expect(res.filter((update) => update.sourceRepo)[2].sourceRepo).toBe(
-        'nodejs/node'
+        'nodejs/node',
       );
       expect(
-        res.filter((update) => update.sourceRepoOrg)[2].sourceRepoOrg
+        res.filter((update) => update.sourceRepoOrg)[2].sourceRepoOrg,
       ).toBe('nodejs');
       expect(
-        res.filter((update) => update.sourceRepoName)[2].sourceRepoName
+        res.filter((update) => update.sourceRepoName)[2].sourceRepoName,
       ).toBe('node');
       expect(
         res.filter(
           (r) =>
-            r.updateType === 'lockFileMaintenance' && r.isLockFileMaintenance
-        )
+            r.updateType === 'lockFileMaintenance' && r.isLockFileMaintenance,
+        ),
       ).toHaveLength(2);
       expect(res.filter((r) => r.isVulnerabilityAlert)).toHaveLength(1);
     });
diff --git a/lib/workers/repository/updates/flatten.ts b/lib/workers/repository/updates/flatten.ts
index 77612b799af668f15c591b6342bb6c8716d99609..3832a2f58554426ecd446115c75253e0b92f0a02 100644
--- a/lib/workers/repository/updates/flatten.ts
+++ b/lib/workers/repository/updates/flatten.ts
@@ -45,15 +45,15 @@ export function applyUpdateConfig(input: BranchUpgradeConfig): any {
         .replace(regEx(/-+/g), '-'); // remove multiple hyphens
       updateConfig.sourceRepo = parsedSourceUrl.pathname.replace(
         regEx(/^\//),
-        ''
+        '',
       ); // remove leading slash
       updateConfig.sourceRepoOrg = updateConfig.sourceRepo.replace(
         regEx(/\/.*/g),
-        ''
+        '',
       ); // remove everything after first slash
       updateConfig.sourceRepoName = updateConfig.sourceRepo.replace(
         regEx(/.*\//g),
-        ''
+        '',
       ); // remove everything up to the last slash
     }
   }
@@ -63,7 +63,7 @@ export function applyUpdateConfig(input: BranchUpgradeConfig): any {
 
 export async function flattenUpdates(
   config: RenovateConfig,
-  packageFiles: Record<string, any[]>
+  packageFiles: Record<string, any[]>,
 ): Promise<RenovateConfig[]> {
   const updates = [];
   const updateTypes = [
@@ -110,14 +110,14 @@ export async function flattenUpdates(
             }
             // apply config from datasource
             const datasourceConfig = await getDefaultConfig(
-              depConfig.datasource
+              depConfig.datasource,
             );
             updateConfig = mergeChildConfig(updateConfig, datasourceConfig);
             updateConfig = applyPackageRules(updateConfig);
             // apply major/minor/patch/pin/digest
             updateConfig = mergeChildConfig(
               updateConfig,
-              updateConfig[updateConfig.updateType]
+              updateConfig[updateConfig.updateType],
             );
             for (const updateType of updateTypes) {
               delete updateConfig[updateType];
@@ -139,7 +139,7 @@ export async function flattenUpdates(
         // Apply lockFileMaintenance config before packageRules
         let lockFileConfig = mergeChildConfig(
           packageFileConfig,
-          packageFileConfig.lockFileMaintenance
+          packageFileConfig.lockFileMaintenance,
         );
         lockFileConfig.updateType = 'lockFileMaintenance';
         lockFileConfig.isLockFileMaintenance = true;
@@ -147,7 +147,7 @@ export async function flattenUpdates(
         // Apply lockFileMaintenance and packageRules again
         lockFileConfig = mergeChildConfig(
           lockFileConfig,
-          lockFileConfig.lockFileMaintenance
+          lockFileConfig.lockFileMaintenance,
         );
         lockFileConfig = applyPackageRules(lockFileConfig);
         // Remove unnecessary objects
@@ -170,11 +170,11 @@ export async function flattenUpdates(
             for (const remediation of remediations) {
               let updateConfig = mergeChildConfig(
                 packageFileConfig,
-                remediation
+                remediation,
               );
               updateConfig = mergeChildConfig(
                 updateConfig,
-                config.vulnerabilityAlerts
+                config.vulnerabilityAlerts,
               );
               delete updateConfig.vulnerabilityAlerts;
               updateConfig.isVulnerabilityAlert = true;
diff --git a/lib/workers/repository/updates/generate.spec.ts b/lib/workers/repository/updates/generate.spec.ts
index d47c2eb6070cd2348c8ddcdbc3b928a95fb7801b..db94dcbf3c75315ffe20cc89b2c042c98c5abbf5 100644
--- a/lib/workers/repository/updates/generate.spec.ts
+++ b/lib/workers/repository/updates/generate.spec.ts
@@ -697,10 +697,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(package): update dependency some-dep to v1.2.0'
+        'chore(package): update dependency some-dep to v1.2.0',
       );
       expect(res.commitMessage).toBe(
-        'chore(package): update dependency some-dep to v1.2.0'
+        'chore(package): update dependency some-dep to v1.2.0',
       );
     });
 
@@ -728,7 +728,7 @@ describe('workers/repository/updates/generate', () => {
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe('chore(): update dependency some-dep to v1.2.0');
       expect(res.commitMessage).toBe(
-        'chore(): update dependency some-dep to v1.2.0'
+        'chore(): update dependency some-dep to v1.2.0',
       );
     });
 
@@ -756,10 +756,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(bar): update dependency some-dep to v1.2.0'
+        'chore(bar): update dependency some-dep to v1.2.0',
       );
       expect(res.commitMessage).toBe(
-        'chore(bar): update dependency some-dep to v1.2.0'
+        'chore(bar): update dependency some-dep to v1.2.0',
       );
     });
 
@@ -786,10 +786,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(foo/bar): update dependency some-dep to v1.2.0'
+        'chore(foo/bar): update dependency some-dep to v1.2.0',
       );
       expect(res.commitMessage).toBe(
-        'chore(foo/bar): update dependency some-dep to v1.2.0'
+        'chore(foo/bar): update dependency some-dep to v1.2.0',
       );
     });
 
@@ -813,10 +813,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(foo/bar): update dependency some-dep v1.2.0'
+        'chore(foo/bar): update dependency some-dep v1.2.0',
       );
       expect(res.commitMessage).toBe(
-        'chore(foo/bar): update dependency some-dep v1.2.0'
+        'chore(foo/bar): update dependency some-dep v1.2.0',
       );
     });
 
@@ -840,10 +840,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(foo/bar): update dependency some-dep v3.2.0'
+        'chore(foo/bar): update dependency some-dep v3.2.0',
       );
       expect(res.commitMessage).toBe(
-        'chore(foo/bar): update dependency some-dep v3.2.0'
+        'chore(foo/bar): update dependency some-dep v3.2.0',
       );
     });
 
@@ -868,7 +868,7 @@ describe('workers/repository/updates/generate', () => {
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe('chore(foo/bar): update dependency some-dep v3');
       expect(res.commitMessage).toBe(
-        'chore(foo/bar): update dependency some-dep v3'
+        'chore(foo/bar): update dependency some-dep v3',
       );
     });
 
@@ -891,10 +891,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(foo/bar): update dependency some-dep to v1.2.0'
+        'chore(foo/bar): update dependency some-dep to v1.2.0',
       );
       expect(res.commitMessage).toBe(
-        'chore(foo/bar): update dependency some-dep to v1.2.0'
+        'chore(foo/bar): update dependency some-dep to v1.2.0',
       );
     });
 
@@ -1165,7 +1165,7 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(
-        res.upgrades.map((upgrade) => upgrade.fileReplacePosition)
+        res.upgrades.map((upgrade) => upgrade.fileReplacePosition),
       ).toStrictEqual([undefined, undefined, 4, 1]);
     });
 
@@ -1279,7 +1279,7 @@ describe('workers/repository/updates/generate', () => {
       const res = generateBranchConfig(branch);
       const excludeCommitPaths = res.excludeCommitPaths ?? [];
       expect(excludeCommitPaths.sort()).toStrictEqual(
-        ['some/path', 'some/other/path', 'some/other-manager/path'].sort()
+        ['some/path', 'some/other/path', 'some/other-manager/path'].sort(),
       );
     });
 
@@ -1300,10 +1300,10 @@ describe('workers/repository/updates/generate', () => {
       ] satisfies BranchUpgradeConfig[];
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe(
-        'chore(package): update dependency some-dep to foo-pkg-v3.2.1'
+        'chore(package): update dependency some-dep to foo-pkg-v3.2.1',
       );
       expect(res.commitMessage).toBe(
-        'chore(package): update dependency some-dep to foo-pkg-v3.2.1'
+        'chore(package): update dependency some-dep to foo-pkg-v3.2.1',
       );
     });
 
@@ -1336,7 +1336,7 @@ describe('workers/repository/updates/generate', () => {
       const res = generateBranchConfig(branch);
       expect(res.prTitle).toBe('PATCH: Update dependency some-dep to 1.2.0');
       expect(res.commitMessage).toBe(
-        'PATCH: Update dependency some-dep to 1.2.0'
+        'PATCH: Update dependency some-dep to 1.2.0',
       );
     });
 
diff --git a/lib/workers/repository/updates/generate.ts b/lib/workers/repository/updates/generate.ts
index 8bf3af544f78438f5c8df8f7c48234894b334683..15bbe564abfa38cfc4102469a6bc544b9a1d6df4 100644
--- a/lib/workers/repository/updates/generate.ts
+++ b/lib/workers/repository/updates/generate.ts
@@ -25,7 +25,7 @@ function isTypesGroup(branchUpgrades: BranchUpgradeConfig[]): boolean {
   return (
     branchUpgrades.some(({ depName }) => depName?.startsWith('@types/')) &&
     new Set(
-      branchUpgrades.map(({ depName }) => depName?.replace(/^@types\//, ''))
+      branchUpgrades.map(({ depName }) => depName?.replace(/^@types\//, '')),
     ).size === 1
   );
 }
@@ -34,7 +34,7 @@ function sortTypesGroup(upgrades: BranchUpgradeConfig[]): void {
   const isTypesUpgrade = ({ depName }: BranchUpgradeConfig): boolean =>
     !!depName?.startsWith('@types/');
   const regularUpgrades = upgrades.filter(
-    (upgrade) => !isTypesUpgrade(upgrade)
+    (upgrade) => !isTypesUpgrade(upgrade),
   );
   const typesUpgrades = upgrades.filter(isTypesUpgrade);
   upgrades.splice(0, upgrades.length);
@@ -59,13 +59,13 @@ function getTableValues(upgrade: BranchUpgradeConfig): string[] | null {
       currentVersion,
       newVersion,
     },
-    'Cannot determine table values'
+    'Cannot determine table values',
   );
   return null;
 }
 
 export function generateBranchConfig(
-  upgrades: BranchUpgradeConfig[]
+  upgrades: BranchUpgradeConfig[],
 ): BranchConfig {
   let branchUpgrades = upgrades;
   if (!branchUpgrades.every((upgrade) => upgrade.pendingChecks)) {
@@ -203,7 +203,7 @@ export function generateBranchConfig(
       if (upgrade.semanticCommitScope) {
         semanticPrefix += `(${template.compile(
           upgrade.semanticCommitScope,
-          upgrade
+          upgrade,
         )})`;
       }
       upgrade.commitMessagePrefix = CommitMessage.formatPrefix(semanticPrefix!);
@@ -215,7 +215,7 @@ export function generateBranchConfig(
     // Compile a few times in case there are nested templates
     upgrade.commitMessage = template.compile(
       upgrade.commitMessage ?? '',
-      upgrade
+      upgrade,
     );
     upgrade.commitMessage = template.compile(upgrade.commitMessage, upgrade);
     upgrade.commitMessage = template.compile(upgrade.commitMessage, upgrade);
@@ -223,7 +223,7 @@ export function generateBranchConfig(
     if (upgrade.commitMessage !== sanitize(upgrade.commitMessage)) {
       logger.debug(
         { branchName: config.branchName },
-        'Secrets exposed in commit message'
+        'Secrets exposed in commit message',
       );
       throw new Error(CONFIG_SECRETS_EXPOSED);
     }
@@ -231,7 +231,7 @@ export function generateBranchConfig(
     upgrade.commitMessage = upgrade.commitMessage.replace(regEx(/\s+/g), ' '); // Trim extra whitespace inside string
     upgrade.commitMessage = upgrade.commitMessage.replace(
       regEx(/to vv(\d)/),
-      'to v$1'
+      'to v$1',
     );
     if (upgrade.toLowerCase && upgrade.commitMessageLowerCase !== 'never') {
       // We only need to lowercase the first line
@@ -252,7 +252,7 @@ export function generateBranchConfig(
       if (upgrade.prTitle !== sanitize(upgrade.prTitle)) {
         logger.debug(
           { branchName: config.branchName },
-          'Secrets were exposed in PR title'
+          'Secrets were exposed in PR title',
         );
         throw new Error(CONFIG_SECRETS_EXPOSED);
       }
@@ -335,26 +335,26 @@ export function generateBranchConfig(
     releaseTimestamp: releaseTimestamp!,
   }; // TODO: fixme (#9666)
   config.reuseLockFiles = config.upgrades.every(
-    (upgrade) => upgrade.updateType !== 'lockFileMaintenance'
+    (upgrade) => upgrade.updateType !== 'lockFileMaintenance',
   );
   config.dependencyDashboardApproval = config.upgrades.some(
-    (upgrade) => upgrade.dependencyDashboardApproval
+    (upgrade) => upgrade.dependencyDashboardApproval,
   );
   config.dependencyDashboardPrApproval = config.upgrades.some(
-    (upgrade) => upgrade.prCreation === 'approval'
+    (upgrade) => upgrade.prCreation === 'approval',
   );
   config.prBodyColumns = [
     ...new Set(
       config.upgrades.reduce(
         (existing: string[], upgrade) =>
           existing.concat(upgrade.prBodyColumns!),
-        []
-      )
+        [],
+      ),
     ),
   ].filter(is.nonEmptyString);
   // combine excludeCommitPaths for multiple manager experience
   const hasExcludeCommitPaths = config.upgrades.some(
-    (u) => u.excludeCommitPaths && u.excludeCommitPaths.length > 0
+    (u) => u.excludeCommitPaths && u.excludeCommitPaths.length > 0,
   );
   if (hasExcludeCommitPaths) {
     config.excludeCommitPaths = Object.keys(
@@ -366,7 +366,7 @@ export function generateBranchConfig(
         }
 
         return acc;
-      }, {})
+      }, {}),
     );
   }
 
@@ -376,14 +376,14 @@ export function generateBranchConfig(
     ...new Set(
       config.upgrades
         .map((upgrade) => upgrade.labels ?? [])
-        .reduce((a, b) => a.concat(b), [])
+        .reduce((a, b) => a.concat(b), []),
     ),
   ];
   config.addLabels = [
     ...new Set(
       config.upgrades
         .map((upgrade) => upgrade.addLabels ?? [])
-        .reduce((a, b) => a.concat(b), [])
+        .reduce((a, b) => a.concat(b), []),
     ),
   ];
   if (config.upgrades.some((upgrade) => upgrade.updateType === 'major')) {
@@ -420,7 +420,7 @@ export function generateBranchConfig(
     config.upgrades
       .map((upgrade) => upgrade.additionalReviewers)
       .flat()
-      .filter(is.nonEmptyString)
+      .filter(is.nonEmptyString),
   );
   if (additionalReviewers.length > 0) {
     config.additionalReviewers = additionalReviewers;
diff --git a/test/documentation.spec.ts b/test/documentation.spec.ts
index da63620d84aad3ad94f42a9afcec08d15c1f690c..139f82408638ffc96b126eb509d41d1a3f2db047 100644
--- a/test/documentation.spec.ts
+++ b/test/documentation.spec.ts
@@ -14,7 +14,7 @@ describe('documentation', () => {
       markdownFiles.map(async (markdownFile) => {
         const markdownText = await fs.readFile(markdownFile, 'utf8');
         expect(markdownText).not.toMatch(regEx(/\.md\/#/));
-      })
+      }),
     );
   });
 
@@ -22,7 +22,7 @@ describe('documentation', () => {
     describe('configuration-options', () => {
       const doc = fs.readFileSync(
         'docs/usage/configuration-options.md',
-        'utf8'
+        'utf8',
       );
 
       const headers = doc
@@ -64,7 +64,7 @@ describe('documentation', () => {
     describe('self-hosted-configuration', () => {
       const doc = fs.readFileSync(
         'docs/usage/self-hosted-configuration.md',
-        'utf8'
+        'utf8',
       );
 
       const headers = doc
diff --git a/test/exec-util.ts b/test/exec-util.ts
index ff9a2269eba81c7b6baf3a4dfdaf67fc9b9d0f9e..9b73c3079faeb3fd25559485a46db409ccf2afa6 100644
--- a/test/exec-util.ts
+++ b/test/exec-util.ts
@@ -40,7 +40,7 @@ function execSnapshot(cmd: string, options?: RawExecOptions): ExecSnapshot {
 const defaultExecResult = { stdout: '', stderr: '' };
 
 export function mockExecAll(
-  execResult: ExecResult = defaultExecResult
+  execResult: ExecResult = defaultExecResult,
 ): ExecSnapshots {
   const snapshots: ExecSnapshots = [];
   exec.mockImplementation((cmd, options) => {
diff --git a/test/fixtures.ts b/test/fixtures.ts
index baa9aefe44b653d3e2474502ce5825a3aaca836c..f629830edf9b81c97c05a39e89ebdf5dd535668d 100644
--- a/test/fixtures.ts
+++ b/test/fixtures.ts
@@ -22,7 +22,7 @@ export class Fixtures {
       upath.resolve(Fixtures.getPathToFixtures(fixturesRoot), name),
       {
         encoding: 'utf-8',
-      }
+      },
     );
   }
 
@@ -34,7 +34,7 @@ export class Fixtures {
    */
   static getBinary(name: string, fixturesRoot = '.'): Buffer {
     return realFs.readFileSync(
-      upath.resolve(Fixtures.getPathToFixtures(fixturesRoot), name)
+      upath.resolve(Fixtures.getPathToFixtures(fixturesRoot), name),
     );
   }
 
@@ -50,8 +50,8 @@ export class Fixtures {
         upath.resolve(Fixtures.getPathToFixtures(fixturesRoot), name),
         {
           encoding: 'utf-8',
-        }
-      )
+        },
+      ),
     ) as T;
   }
 
@@ -74,7 +74,7 @@ export class Fixtures {
   static toJSON(
     paths?: PathLike | PathLike[],
     json?: Record<string, unknown>,
-    isRelative?: boolean
+    isRelative?: boolean,
   ): DirectoryJSON {
     return vol.toJSON(paths, json, isRelative);
   }
@@ -126,7 +126,7 @@ export function readFile(fileName: string, options: any): Promise<TDataOut> {
 
 export async function outputFile(
   file: string,
-  data: string | Buffer | Uint8Array
+  data: string | Buffer | Uint8Array,
 ): Promise<void> {
   const dir = upath.dirname(file);
 
diff --git a/test/graphql-snapshot.ts b/test/graphql-snapshot.ts
index 8f9474e0830582ad3320909375f74159f6d4070d..d0893e824027184440b00278c2bafe28a4dc6006 100644
--- a/test/graphql-snapshot.ts
+++ b/test/graphql-snapshot.ts
@@ -15,7 +15,7 @@ import {
 } from 'graphql/language';
 
 function isOperationDefinitionNode(
-  def: DefinitionNode
+  def: DefinitionNode,
 ): def is OperationDefinitionNode {
   return def.kind === Kind.OPERATION_DEFINITION;
 }
@@ -84,7 +84,7 @@ function getArguments(key: string, val: ValueNode): Arguments {
 }
 
 function simplifyArguments(
-  argNodes?: ReadonlyArray<ArgumentNode>
+  argNodes?: ReadonlyArray<ArgumentNode>,
 ): Arguments | null {
   if (argNodes) {
     let result: Arguments = {};
@@ -105,7 +105,7 @@ function simplifyArguments(
 function simplifySelectionSet(
   selectionSet: SelectionSetNode,
   parentArgs: Arguments | null,
-  parentVars: Variables | null
+  parentVars: Variables | null,
 ): SelectionSet {
   const result: SelectionSet = {};
 
@@ -151,7 +151,7 @@ function getTypeName(typeNode: TypeNode): string {
 }
 
 function simplifyVariableDefinitions(
-  varNodes: ReadonlyArray<VariableDefinitionNode> | null
+  varNodes: ReadonlyArray<VariableDefinitionNode> | null,
 ): Variables {
   const result: Variables = {};
   if (varNodes) {
@@ -186,7 +186,7 @@ export interface GraphqlSnapshotInput {
 }
 
 export function makeGraphqlSnapshot(
-  requestBody: GraphqlSnapshotInput
+  requestBody: GraphqlSnapshotInput,
 ): GraphqlSnapshot | null {
   try {
     const { query: queryStr, variables } = requestBody;
diff --git a/test/http-mock.ts b/test/http-mock.ts
index 4e966dfdb48d2240ef12fb328fea5f8e9daff883..064712fb28e10599125132b49f685363ef963d8d 100644
--- a/test/http-mock.ts
+++ b/test/http-mock.ts
@@ -97,7 +97,7 @@ export function getTrace(): RequestLogItem[] /* istanbul ignore next */ {
         'Completed requests:',
         ...requestLog.map(({ method, url }) => `  ${method} ${url}`),
         ...errorLines,
-      ].join('\n')
+      ].join('\n'),
     );
   }
   return requestLog;
diff --git a/test/to-migrate.ts b/test/to-migrate.ts
index 4b18b0c3f48cf86dd6a174355d5eaa01e16f36fc..fd42a5f126ef4424c09fa073aca20fa19fa08794 100644
--- a/test/to-migrate.ts
+++ b/test/to-migrate.ts
@@ -13,7 +13,7 @@ declare global {
       toMigrate(
         originalConfig: RenovateConfig,
         expectedConfig: RenovateConfig,
-        isMigrated?: boolean
+        isMigrated?: boolean,
       ): R;
     }
   }
@@ -24,12 +24,12 @@ expect.extend({
     CustomMigration: MigrationConstructor,
     originalConfig: RenovateConfig,
     expectedConfig: RenovateConfig,
-    isMigrated: boolean = true
+    isMigrated: boolean = true,
   ) {
     class CustomMigrationsService extends MigrationsService {
       public static override getMigrations(
         original: RenovateConfig,
-        migrated: RenovateConfig
+        migrated: RenovateConfig,
       ): ReadonlyArray<Migration> {
         return [new CustomMigration(original, migrated)];
       }
@@ -51,7 +51,7 @@ expect.extend({
       return {
         message: (): string =>
           `Migration failed\n\nReceived config:\n${JSON.stringify(
-            migratedConfig
+            migratedConfig,
           )}\n\nExpected config:\n${JSON.stringify(expectedConfig)}`,
         pass: false,
       };
diff --git a/test/types/jest.d.ts b/test/types/jest.d.ts
index 9a6df8a04805e707f516a340ee4dea0e7855ead1..4d42ef08868a253e926d1e2ff4d74c6d72e1141d 100644
--- a/test/types/jest.d.ts
+++ b/test/types/jest.d.ts
@@ -46,7 +46,7 @@ declare global {
        * expect(desiredHouse).toMatchObject<House>({...standardHouse, kitchen: {area: 20}}) // wherein standardHouse is some base object of type House
        */
       toMatchObject<E extends object | any[]>(
-        expected: E
+        expected: E,
       ): R extends void | Promise<void> ? R : void;
     };
 }
@@ -89,7 +89,7 @@ interface JestEach extends JestItEach {
   (strings: TemplateStringsArray, ...placeholders: any[]): (
     name: string,
     fn: (arg: any) => ReturnType<Global.TestFn>,
-    timeout?: number
+    timeout?: number,
   ) => void;
 }
 
diff --git a/test/util.ts b/test/util.ts
index bb02d434fdaa39b7855ba10debe5c899d16775f4..ee4add46615be09a71da74e5be8ea9554f13a55f 100644
--- a/test/util.ts
+++ b/test/util.ts
@@ -25,7 +25,7 @@ export function mocked<T extends object>(module: T): jest.Mocked<T> {
  * @param func function which is mocked by `jest.mock`
  */
 export function mockedFunction<T extends (...args: any[]) => any>(
-  func: T
+  func: T,
 ): jest.MockedFunction<T> {
   return func as jest.MockedFunction<T>;
 }
@@ -101,7 +101,7 @@ export function getFixturePath(fixtureFile: string, fixtureRoot = '.'): string {
  */
 export const replacingSerializer = (
   search: string,
-  replacement: string
+  replacement: string,
 ): Plugin => ({
   test: (value) => typeof value === 'string' && value.includes(search),
   serialize: (val, config, indent, depth, refs, printer) => {
diff --git a/tools/check-fenced-code.mjs b/tools/check-fenced-code.mjs
index 1da933b7a1a33c80fa9859c2689a375a2f07bcda..ff9f841f4ca28bcc7149d6b5d4482bdf7d39d190 100644
--- a/tools/check-fenced-code.mjs
+++ b/tools/check-fenced-code.mjs
@@ -27,11 +27,11 @@ function checkValidJson(file, token) {
     issues += 1;
     if (process.env.CI) {
       console.log(
-        `::error file=${file},line=${start},endLine=${end},title=${errorTitle}::${err.message}. ${errorBody}`
+        `::error file=${file},line=${start},endLine=${end},title=${errorTitle}::${err.message}. ${errorBody}`,
       );
     } else {
       console.log(
-        `${errorTitle} (${file} lines ${start}-${end}): ${err.message}`
+        `${errorTitle} (${file} lines ${start}-${end}): ${err.message}`,
       );
     }
   }
@@ -61,7 +61,7 @@ await (async () => {
 
   if (issues) {
     console.error(
-      `${issues} issues found. ${errorBody} See above for lines affected.`
+      `${issues} issues found. ${errorBody} See above for lines affected.`,
     );
     process.exit(1);
   }
diff --git a/tools/check-git-version.mjs b/tools/check-git-version.mjs
index e9470d14fd1be0b4f1eaf4d692bb32c4c7c25034..8eb7e592dadb31bbe36aba879b4c942174cb2c24 100644
--- a/tools/check-git-version.mjs
+++ b/tools/check-git-version.mjs
@@ -11,11 +11,11 @@ await (async () => {
     if (!installed || semver.lt(gitVersion, GIT_MINIMUM_VERSION)) {
       if (process.env.CI) {
         console.log(
-          `::error ::Minimum Git version ${GIT_MINIMUM_VERSION} is required, found version '${gitVersion}'.`
+          `::error ::Minimum Git version ${GIT_MINIMUM_VERSION} is required, found version '${gitVersion}'.`,
         );
       } else {
         throw new Error(
-          `Minimum Git version ${GIT_MINIMUM_VERSION} is required, found version '${gitVersion}'.`
+          `Minimum Git version ${GIT_MINIMUM_VERSION} is required, found version '${gitVersion}'.`,
         );
       }
     }
diff --git a/tools/dispatch-release.mjs b/tools/dispatch-release.mjs
index c8cb6f62c9573a2c83b955794ead3e9319d7624f..e1581c56915d25e287c16b8fa9af03e8ff82f1ce 100644
--- a/tools/dispatch-release.mjs
+++ b/tools/dispatch-release.mjs
@@ -30,7 +30,7 @@ console.log(`Dispatching version: ${version}`);
           tag,
         },
       },
-    }
+    },
   );
 })().catch((e) => {
   // Ignore for now
diff --git a/tools/docs/config.ts b/tools/docs/config.ts
index bdc89254134f931d7648921caa4bfe0c22491f45..ac7bfe1bb4f4fddfc5872fe4371f5c1b32a18619 100644
--- a/tools/docs/config.ts
+++ b/tools/docs/config.ts
@@ -205,14 +205,14 @@ export async function generateConfig(dist: string, bot = false): Promise<void> {
   }
 
   const configOptionsRaw = (await readFile(`docs/usage/${configFile}`)).split(
-    '\n'
+    '\n',
   );
 
   const indexed = indexMarkdown(configOptionsRaw);
 
   options
     .filter(
-      (option) => !!option.globalOnly === bot && !managers.has(option.name)
+      (option) => !!option.globalOnly === bot && !managers.has(option.name),
     )
     .forEach((option) => {
       // TODO: fix types (#22198,#9610)
@@ -220,7 +220,7 @@ export async function generateConfig(dist: string, bot = false): Promise<void> {
 
       if (!indexed[option.name]) {
         throw new Error(
-          `Config option "${option.name}" is missing an entry in ${configFile}`
+          `Config option "${option.name}" is missing an entry in ${configFile}`,
         );
       }
 
diff --git a/tools/docs/datasources.ts b/tools/docs/datasources.ts
index aaec81fa262914ba9e343c1b81788486215e58ff..51c06d55788eb9ec0b9ddc3533e8760011a54864 100644
--- a/tools/docs/datasources.ts
+++ b/tools/docs/datasources.ts
@@ -12,7 +12,7 @@ import {
 
 export async function generateDatasources(
   dist: string,
-  datasourceIssuesMap: OpenItems
+  datasourceIssuesMap: OpenItems,
 ): Promise<void> {
   const dsList = getDatasources();
   let datasourceContent = '\nSupported values for `datasource` are:\n\n';
@@ -28,7 +28,7 @@ export async function generateDatasources(
     const displayName = getDisplayName(datasource, definition);
     datasourceContent += `* ${getModuleLink(
       datasource,
-      `\`${datasource}\``
+      `\`${datasource}\``,
     )}\n`;
     let md = codeBlock`
       ---
diff --git a/tools/docs/github-query-items.ts b/tools/docs/github-query-items.ts
index 2bc9dfd02ee133956bbec528a33b4df85b982078..d920f7bcd648198f4664f7ecd91f33d9762b4e19 100644
--- a/tools/docs/github-query-items.ts
+++ b/tools/docs/github-query-items.ts
@@ -55,7 +55,7 @@ export async function getOpenGitHubItems(): Promise<RenovateOpenItems> {
       {
         paginationField: 'items',
         paginate: true,
-      }
+      },
     );
     const rawItems = res.body?.items ?? [];
 
@@ -119,7 +119,7 @@ function stringifyIssues(items: ItemsEntity[] | undefined): string {
 
 export function generateFeatureAndBugMarkdown(
   issuesMap: OpenItems,
-  key: string
+  key: string,
 ): string {
   let md = '\n\n';
 
diff --git a/tools/docs/manager-asdf-supported-plugins.ts b/tools/docs/manager-asdf-supported-plugins.ts
index 8ad906e847e37405d0a66203eff1eda102609b76..7863ea8b7761bb4a1f1dfe131183ffa68f598a12 100644
--- a/tools/docs/manager-asdf-supported-plugins.ts
+++ b/tools/docs/manager-asdf-supported-plugins.ts
@@ -9,7 +9,7 @@ function generateTooling(): string {
 }
 
 export async function generateManagerAsdfSupportedPlugins(
-  dist: string
+  dist: string,
 ): Promise<void> {
   const indexFileName = `${dist}/modules/manager/asdf/index.md`;
   let indexContent = await readFile(indexFileName);
diff --git a/tools/docs/manager.ts b/tools/docs/manager.ts
index f074096686252bafcd9a90829a0d39d99416283c..df886e7d5b0b61d05d49d8ea4bab0d0c74f5e94c 100644
--- a/tools/docs/manager.ts
+++ b/tools/docs/manager.ts
@@ -57,7 +57,7 @@ export const CategoryNames: Record<Category, string> = {
 
 export async function generateManagers(
   dist: string,
-  managerIssuesMap: OpenItems
+  managerIssuesMap: OpenItems,
 ): Promise<void> {
   const allManagers = [...getManagers(), ...getCustomManagers()];
 
@@ -123,7 +123,7 @@ sidebar_label: ${displayName}
       const escapedDatasources = (supportedDatasources || [])
         .map(
           (datasource) =>
-            `[\`${datasource}\`](../../datasource/#${datasource}-datasource)`
+            `[\`${datasource}\`](../../datasource/#${datasource}-datasource)`,
         )
         .join(', ');
       md += `This manager supports extracting the following datasources: ${escapedDatasources}.\n\n`;
@@ -140,7 +140,7 @@ sidebar_label: ${displayName}
     const managerReadmeContent = await readFile(
       `lib/modules/manager/${
         isCustomManager(manager) ? 'custom/' + manager : manager
-      }/readme.md`
+      }/readme.md`,
     );
     if (!isCustomManager(manager)) {
       md += '\n## Additional Information\n\n';
@@ -154,7 +154,7 @@ sidebar_label: ${displayName}
 
   // add noCategoryDisplayName as last option
   const categories = Object.keys(allCategories).filter(
-    (category) => category !== noCategoryID
+    (category) => category !== noCategoryID,
   );
   categories.sort();
   categories.push(noCategoryID);
diff --git a/tools/docs/platforms.ts b/tools/docs/platforms.ts
index 9ef8963198f84c9e1cf7310b738fb6ae27408c39..2ddd1d88c04224026e0fdc108c2367bdd128b9ce 100644
--- a/tools/docs/platforms.ts
+++ b/tools/docs/platforms.ts
@@ -5,7 +5,7 @@ import { getModuleLink, replaceContent } from './utils';
 
 export async function generatePlatforms(
   dist: string,
-  platformIssuesMap: OpenItems
+  platformIssuesMap: OpenItems,
 ): Promise<void> {
   let platformContent = 'Supported values for `platform` are: ';
   const platforms = getPlatformList();
diff --git a/tools/docs/presets.ts b/tools/docs/presets.ts
index 593c11258f7d5ff2000a020cdd4ab7a4eb5edbd0..9fef703a46a69a1e1dcd8fba1b348120626fb18e 100644
--- a/tools/docs/presets.ts
+++ b/tools/docs/presets.ts
@@ -28,7 +28,7 @@ function getEditUrl(name: string): string {
 function generateFrontMatter(
   presetTitle: string,
   order: number,
-  presetName: string
+  presetName: string,
 ): string {
   return `---
 date: 2017-12-07
diff --git a/tools/docs/schema.ts b/tools/docs/schema.ts
index ed40fbe3c25dcd903891a8dd49700e97c79368fb..456b3b00aa3df7bc50ce54fd174bda3768362aa7 100644
--- a/tools/docs/schema.ts
+++ b/tools/docs/schema.ts
@@ -117,6 +117,6 @@ export async function generateSchema(dist: string): Promise<void> {
   createSchemaForChildConfigs();
   await updateFile(
     `${dist}/renovate-schema.json`,
-    `${JSON.stringify(schema, null, 2)}\n`
+    `${JSON.stringify(schema, null, 2)}\n`,
   );
 }
diff --git a/tools/docs/templates.ts b/tools/docs/templates.ts
index 1867c0f3986a6fbb54a7ac0cd3004bca5d23303e..32df7ac21cd8ab53ccb1186d5e5419976442fce9 100644
--- a/tools/docs/templates.ts
+++ b/tools/docs/templates.ts
@@ -8,7 +8,7 @@ export async function generateTemplates(dist: string): Promise<void> {
   exposedConfigOptionsText +=
     exposedConfigOptions
       .map(
-        (field) => `[${field}](/configuration-options/#${field.toLowerCase()})`
+        (field) => `[${field}](/configuration-options/#${field.toLowerCase()})`,
       )
       .join(', ') + '.';
 
diff --git a/tools/docs/utils.ts b/tools/docs/utils.ts
index 56a6d06b1a6548f83c3a6bebe1f990ccd4edfc3e..38971d020cb4f36c228d8d55182fe09fdc5026ce 100644
--- a/tools/docs/utils.ts
+++ b/tools/docs/utils.ts
@@ -17,14 +17,14 @@ export function formatName(input: string): string {
 
 export function getDisplayName(
   moduleName: string,
-  moduleDefinition: ModuleApi
+  moduleDefinition: ModuleApi,
 ): string {
   return moduleDefinition.displayName ?? formatName(moduleName);
 }
 
 export function getNameWithUrl(
   moduleName: string,
-  moduleDefinition: ModuleApi
+  moduleDefinition: ModuleApi,
 ): string {
   const displayName = getDisplayName(moduleName, moduleDefinition);
   if (moduleDefinition.url) {
@@ -59,7 +59,7 @@ export function formatUrls(urls: string[] | null | undefined): string {
 
 export async function formatDescription(
   type: string,
-  name: string
+  name: string,
 ): Promise<string> {
   const content = await readFile(`lib/modules/${type}/${name}/readme.md`);
   if (!content) {
diff --git a/tools/generate-docs.ts b/tools/generate-docs.ts
index 4ef4a1af47b5970d1e200adeae46e1db912badda..da04147eeefc7a7b8d528018fa89a6554480b28f 100644
--- a/tools/generate-docs.ts
+++ b/tools/generate-docs.ts
@@ -75,7 +75,7 @@ process.on('unhandledRejection', (err) => {
 
     await tar.create(
       { file: './tmp/docs.tgz', cwd: './tmp/docs', gzip: true },
-      ['.']
+      ['.'],
     );
   } catch (err) {
     logger.error({ err }, 'Unexpected error');
diff --git a/tools/generate-imports.mjs b/tools/generate-imports.mjs
index 211c082c50b099a9aacfe2b7cbfc0add57ff64af..a7c0738031128c5053470b677066d2ed9039a1ed 100644
--- a/tools/generate-imports.mjs
+++ b/tools/generate-imports.mjs
@@ -54,7 +54,7 @@ function expandPaths(paths) {
           .readdirSync(pathName, { withFileTypes: true })
           .filter(
             (dirent) =>
-              !(dirent.isFile() && ['.DS_Store'].includes(dirent.name))
+              !(dirent.isFile() && ['.DS_Store'].includes(dirent.name)),
           )
           .map((dirent) => upath.join(pathName, dirent.name));
         return expandPaths(dirPaths);
@@ -93,7 +93,7 @@ export async function getManagerHash(managerName, isCustomManager) {
   }
 
   const files = (await glob(folderPattern)).filter((fileName) =>
-    minimatch(fileName, '*.+(snap|spec.ts)', { matchBase: true })
+    minimatch(fileName, '*.+(snap|spec.ts)', { matchBase: true }),
   );
 
   // sort files in case glob order changes
@@ -137,7 +137,7 @@ async function generateData() {
       contentMapDecl,
       contentMapAssignments.join('\n'),
       `export default data;\n`,
-    ].join('\n\n')
+    ].join('\n\n'),
   );
 }
 
@@ -173,13 +173,13 @@ async function generateHash() {
 
     //add manager hashes to hashMap {key->manager, value->hash}
     const hashStrings = (await Promise.all(hashes)).map(
-      ({ manager, hash }) => `hashMap.set('${manager}','${hash}');`
+      ({ manager, hash }) => `hashMap.set('${manager}','${hash}');`,
     );
 
     //write hashMap to fingerprint.generated.ts
     await updateFile(
       'lib/modules/manager/fingerprint.generated.ts',
-      [hashMap, hashStrings.join('\n')].join('\n\n')
+      [hashMap, hashStrings.join('\n')].join('\n\n'),
     );
   } catch (err) {
     console.log('ERROR:', err.message);
@@ -200,7 +200,7 @@ await (async () => {
         .filter((f) => !newFiles.has(f))
         .map(async (file) => {
           await fs.remove(file);
-        })
+        }),
     );
   } catch (e) {
     console.log(e.toString());
diff --git a/tools/utils/index.ts b/tools/utils/index.ts
index ec30b693adf2ed056d98f7eb44e1d97d85a2f62d..0c8cc5ba7a534e47287e23fe6a0e1d7d6bbdadf9 100644
--- a/tools/utils/index.ts
+++ b/tools/utils/index.ts
@@ -34,7 +34,7 @@ export function findModules(dirname: string): string[] {
 export function camelCase(input: string): string {
   return input
     .replace(/(?:^\w|[A-Z]|\b\w)/g, (char, index) =>
-      index === 0 ? char.toLowerCase() : char.toUpperCase()
+      index === 0 ? char.toLowerCase() : char.toUpperCase(),
     )
     .replace(/-/g, '');
 }