diff --git a/docs/usage/self-hosted-configuration.md b/docs/usage/self-hosted-configuration.md
index 3873769761d68f0f0a56d039e81c6112c42e36df..43acd75998a980c5dfdea454e5634c1858cac522 100644
--- a/docs/usage/self-hosted-configuration.md
+++ b/docs/usage/self-hosted-configuration.md
@@ -669,6 +669,11 @@ Set this to an S3 URI to enable S3 backed repository cache.
     AWS_REGION
 ```
 
+<!-- prettier-ignore -->
+!!! tip
+    If you're storing the repository cache on Amazon S3, then you may set a folder hierarchy as part of `repositoryCacheType`.
+    For example, `repositoryCacheType: 's3://bucket-name/dir1/.../dirN/'`.
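+
+    A minimal `config.js` sketch, assuming you also enable the repository cache (the bucket and folder names below are placeholders):
+
+    ```js
+    module.exports = {
+      // placeholder values, adjust to your own bucket and folder
+      repositoryCache: 'enabled',
+      repositoryCacheType: 's3://my-bucket/renovate/cache/',
+    };
+    ```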
+
 ## requireConfig
 
 By default, Renovate needs a Renovate config file in each repository where it runs before it will propose any dependency updates.
diff --git a/lib/util/cache/repository/impl/s3.spec.ts b/lib/util/cache/repository/impl/s3.spec.ts
index cfb276a6f38b1323a5acfe9d04380b2dfa6e4052..1b8cf5b031f1715181a73c193267e68170e13670 100644
--- a/lib/util/cache/repository/impl/s3.spec.ts
+++ b/lib/util/cache/repository/impl/s3.spec.ts
@@ -18,21 +18,24 @@ import { RepoCacheS3 } from './s3';
 
 function createGetObjectCommandInput(
   repository: string,
-  url: string
+  url: string,
+  folder = ''
 ): GetObjectCommandInput {
+  const platform = GlobalConfig.get('platform')!;
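+  // Expected object key: "<folder><platform>/<repository>/cache.json".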
   return {
     Bucket: parseS3Url(url)?.Bucket,
-    Key: `github/${repository}/cache.json`,
+    Key: `${folder}${platform}/${repository}/cache.json`,
   };
 }
 
 function createPutObjectCommandInput(
   repository: string,
   url: string,
-  data: RepoCacheRecord
+  data: RepoCacheRecord,
+  folder = ''
 ): PutObjectCommandInput {
   return {
-    ...createGetObjectCommandInput(repository, url),
+    ...createGetObjectCommandInput(repository, url, folder),
     Body: JSON.stringify(data),
     ContentType: 'application/json',
   };
@@ -76,6 +79,49 @@ describe('util/cache/repository/impl/s3', () => {
     expect(logger.debug).toHaveBeenCalledWith('RepoCacheS3.read() - success');
   });
 
+  it('successfully reads from s3://bucket/dir1/.../dirN/', async () => {
+    const json = '{}';
+    const folder = 'dir1/dir2/dir3/';
+    s3Cache = new RepoCacheS3(
+      repository,
+      '0123456789abcdef',
+      `${url}/${folder}`
+    );
+    s3Mock
+      .on(
+        GetObjectCommand,
+        createGetObjectCommandInput(repository, url, folder)
+      )
+      .resolvesOnce({ Body: Readable.from([json]) });
+    await expect(s3Cache.read()).resolves.toBe(json);
+    expect(logger.warn).toHaveBeenCalledTimes(0);
+    expect(logger.error).toHaveBeenCalledTimes(0);
+    expect(logger.debug).toHaveBeenCalledWith('RepoCacheS3.read() - success');
+  });
+
+  it('appends a missing trailing slash to pathname when instantiating RepoCacheS3', async () => {
+    const json = '{}';
+    const pathname = 'dir1/dir2/dir3/file.ext';
+    s3Cache = new RepoCacheS3(
+      repository,
+      '0123456789abcdef',
+      `${url}/${pathname}`
+    );
+    s3Mock
+      .on(
+        GetObjectCommand,
+        createGetObjectCommandInput(repository, url, pathname + '/')
+      )
+      .resolvesOnce({ Body: Readable.from([json]) });
+    await expect(s3Cache.read()).resolves.toBe(json);
+    expect(logger.debug).toHaveBeenCalledWith('RepoCacheS3.read() - success');
+    expect(logger.warn).toHaveBeenCalledTimes(1);
+    expect(logger.warn).toHaveBeenCalledWith(
+      { pathname },
+      'RepoCacheS3.getCacheFolder() - appending missing trailing slash to pathname'
+    );
+  });
+
   it('gets an unexpected response from s3', async () => {
     s3Mock.on(GetObjectCommand, getObjectCommandInput).resolvesOnce({});
     await expect(s3Cache.read()).resolves.toBeNull();
@@ -117,6 +163,27 @@ describe('util/cache/repository/impl/s3', () => {
     expect(logger.warn).toHaveBeenCalledTimes(0);
   });
 
+  it('successfully writes to s3://bucket/dir1/.../dirN/', async () => {
+    const putObjectCommandOutput: PutObjectCommandOutput = {
+      $metadata: { attempts: 1, httpStatusCode: 200, totalRetryDelay: 0 },
+    };
+    const folder = 'dir1/dir2/dir3/';
+    s3Cache = new RepoCacheS3(
+      repository,
+      '0123456789abcdef',
+      `${url}/${folder}`
+    );
+    s3Mock
+      .on(
+        PutObjectCommand,
+        createPutObjectCommandInput(repository, url, repoCache, folder)
+      )
+      .resolvesOnce(putObjectCommandOutput);
+    await expect(s3Cache.write(repoCache)).toResolve();
+    expect(logger.warn).toHaveBeenCalledTimes(0);
+    expect(logger.error).toHaveBeenCalledTimes(0);
+  });
+
   it('fails to write to s3', async () => {
     s3Mock.on(PutObjectCommand, putObjectCommandInput).rejectsOnce(err);
     await expect(s3Cache.write(repoCache)).toResolve();
diff --git a/lib/util/cache/repository/impl/s3.ts b/lib/util/cache/repository/impl/s3.ts
index da57ba094a7a168ec74c60631939b7293642db89..35f137c979db8255ba0a4e6ea58f2047ed7d0ce6 100644
--- a/lib/util/cache/repository/impl/s3.ts
+++ b/lib/util/cache/repository/impl/s3.ts
@@ -14,10 +14,13 @@ import { RepoCacheBase } from './base';
 export class RepoCacheS3 extends RepoCacheBase {
   private readonly s3Client;
   private readonly bucket;
+  private readonly dir;
 
   constructor(repository: string, fingerprint: string, url: string) {
     super(repository, fingerprint);
-    this.bucket = parseS3Url(url)?.Bucket;
+    const s3Url = parseS3Url(url);
+    this.dir = this.getCacheFolder(s3Url?.Key);
+    this.bucket = s3Url?.Bucket;
     this.s3Client = getS3Client();
   }
 
@@ -64,7 +67,23 @@ export class RepoCacheS3 extends RepoCacheBase {
     }
   }
 
+  private getCacheFolder(pathname: string | undefined): string {
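+    // Treat a missing path as "no prefix"; otherwise make sure the prefix
+    // ends with a slash so it can be prepended to the cache file name.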
+    if (!pathname) {
+      return '';
+    }
+
+    if (pathname.endsWith('/')) {
+      return pathname;
+    }
+
+    logger.warn(
+      { pathname },
+      'RepoCacheS3.getCacheFolder() - appending missing trailing slash to pathname'
+    );
+    return pathname + '/';
+  }
+
   private getCacheFileName(): string {
-    return `${this.platform}/${this.repository}/cache.json`;
+    return `${this.dir}${this.platform}/${this.repository}/cache.json`;
   }
 }