diff --git a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml b/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml
deleted file mode 100644
index c75ec9374b0ff6a6271531c2e0327905246f7bda..0000000000000000000000000000000000000000
--- a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-jobs.yaml
+++ /dev/null
@@ -1,6 +0,0 @@
-jobs:
-  - job: job_one
-    steps:
-      - task: Bash@3
-        inputs:
-          script: 'echo Hello World'
diff --git a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml b/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml
deleted file mode 100644
index 8dbff5f31f5897d14ee3068104aa5190e12dde94..0000000000000000000000000000000000000000
--- a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-stages.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-stages:
-  - stage: stage_one
-    jobs:
-      - job: job_one
-        steps:
-          - task: Bash@3
-            inputs:
-              script: 'echo Hello World'
diff --git a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml b/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml
deleted file mode 100644
index 28c203d03b993e31803505497abada1ee56adf1d..0000000000000000000000000000000000000000
--- a/lib/modules/manager/azure-pipelines/__fixtures__/azure-pipelines-steps.yaml
+++ /dev/null
@@ -1,4 +0,0 @@
-steps:
-  - task: Bash@3
-    inputs:
-      script: 'echo Hello World'
diff --git a/lib/modules/manager/azure-pipelines/extract.spec.ts b/lib/modules/manager/azure-pipelines/extract.spec.ts
index 7e4f6e40423e03555620d964c2df06e96e8f0f9f..e53b41639e54950ff18cec5a508b9ed67d6b4462 100644
--- a/lib/modules/manager/azure-pipelines/extract.spec.ts
+++ b/lib/modules/manager/azure-pipelines/extract.spec.ts
@@ -1,3 +1,4 @@
+import { codeBlock } from 'common-tags';
 import { Fixtures } from '../../../../test/fixtures';
 import { GlobalConfig } from '../../../config/global';
 import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks';
@@ -15,9 +16,6 @@ const azurePipelines = Fixtures.get('azure-pipelines.yaml');
 const azurePipelinesNoDependency = Fixtures.get(
   'azure-pipelines-no-dependency.yaml',
 );
-const azurePipelinesStages = Fixtures.get('azure-pipelines-stages.yaml');
-const azurePipelinesJobs = Fixtures.get('azure-pipelines-jobs.yaml');
-const azurePipelinesSteps = Fixtures.get('azure-pipelines-steps.yaml');
 
 describe('modules/manager/azure-pipelines/extract', () => {
   afterEach(() => {
@@ -58,12 +56,11 @@ describe('modules/manager/azure-pipelines/extract', () => {
       ).toBeNull();
     });
 
-    it('should return null when reference is not defined', () => {
+    it('should return null when reference is not specified', () => {
       expect(
         extractRepository({
           type: 'github',
           name: 'user/repo',
-          ref: null,
         }),
       ).toBeNull();
     });
@@ -138,10 +135,6 @@ describe('modules/manager/azure-pipelines/extract', () => {
         datasource: 'docker',
       });
     });
-
-    it('should return null if image field is missing', () => {
-      expect(extractContainer({ image: null })).toBeNull();
-    });
   });
 
   describe('extractAzurePipelinesTasks()', () => {
@@ -191,11 +184,196 @@ describe('modules/manager/azure-pipelines/extract', () => {
       ).toBeNull();
     });
 
+    it('should extract deployment jobs runOnce', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            runOnce:
+              deploy:
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs on failure', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            runOnce:
+              on:
+                failure:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs on success', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            runOnce:
+              on:
+                success:
+                  steps:
+                    - task: Bash@3
+                      inputs:
+                        script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs postRouteTraffic', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            runOnce:
+              postRouteTraffic:
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs preDeploy', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            runOnce:
+              preDeploy:
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs routeTraffic', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            runOnce:
+              routeTraffic:
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs rolling', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            rolling:
+              deploy:
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
+    it('should extract deployment jobs canary', () => {
+      const packageFile = codeBlock`
+        jobs:
+        - deployment: deployment_one
+          strategy:
+            canary:
+              deploy:
+                steps:
+                  - task: Bash@3
+                    inputs:
+                      script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
+      expect(res?.deps).toEqual([
+        {
+          depName: 'Bash',
+          currentValue: '3',
+          datasource: AzurePipelinesTasksDatasource.id,
+        },
+      ]);
+    });
+
     it('should extract stages', () => {
-      const res = extractPackageFile(
-        azurePipelinesStages,
-        azurePipelinesFilename,
-      );
+      const packageFile = codeBlock`
+        stages:
+        - stage: stage_one
+          jobs:
+            - job: job_one
+              steps:
+                - task: Bash@3
+                  inputs:
+                    script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res?.deps).toEqual([
         {
           depName: 'Bash',
@@ -206,10 +384,15 @@ describe('modules/manager/azure-pipelines/extract', () => {
     });
 
     it('should extract jobs', () => {
-      const res = extractPackageFile(
-        azurePipelinesJobs,
-        azurePipelinesFilename,
-      );
+      const packageFile = codeBlock`
+        jobs:
+        - job: job_one
+          steps:
+            - task: Bash@3
+              inputs:
+                script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res?.deps).toEqual([
         {
           depName: 'Bash',
@@ -220,10 +403,13 @@ describe('modules/manager/azure-pipelines/extract', () => {
     });
 
     it('should extract steps', () => {
-      const res = extractPackageFile(
-        azurePipelinesSteps,
-        azurePipelinesFilename,
-      );
+      const packageFile = codeBlock`
+        steps:
+        - task: Bash@3
+          inputs:
+            script: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res?.deps).toEqual([
         {
           depName: 'Bash',
@@ -234,10 +420,11 @@ describe('modules/manager/azure-pipelines/extract', () => {
     });
 
     it('should return null when task alias used', () => {
-      const content = `
-      steps:
-      - bash: 'echo Hello World'`;
-      const res = extractPackageFile(content, azurePipelinesFilename);
+      const packageFile = codeBlock`
+        steps:
+        - bash: 'echo Hello World'
+      `;
+      const res = extractPackageFile(packageFile, azurePipelinesFilename);
       expect(res).toBeNull();
     });
   });
diff --git a/lib/modules/manager/azure-pipelines/extract.ts b/lib/modules/manager/azure-pipelines/extract.ts
index 5f2a1c7f2c81a5acc3d809b22e9dd4fe016a2d6c..433d4b7c2f987f71619e9a59f409b7433045f898 100644
--- a/lib/modules/manager/azure-pipelines/extract.ts
+++ b/lib/modules/manager/azure-pipelines/extract.ts
@@ -3,12 +3,21 @@ import { logger } from '../../../logger';
 import { coerceArray } from '../../../util/array';
 import { regEx } from '../../../util/regex';
 import { joinUrlParts } from '../../../util/url';
-import { parseSingleYaml } from '../../../util/yaml';
 import { AzurePipelinesTasksDatasource } from '../../datasource/azure-pipelines-tasks';
 import { GitTagsDatasource } from '../../datasource/git-tags';
 import { getDep } from '../dockerfile/extract';
 import type { PackageDependency, PackageFileContent } from '../types';
-import type { AzurePipelines, Container, Repository } from './types';
+import {
+  AzurePipelines,
+  AzurePipelinesYaml,
+  Container,
+  Deploy,
+  Deployment,
+  Job,
+  Jobs,
+  Repository,
+  Step,
+} from './schema';
 
 const AzurePipelinesTaskRegex = regEx(/^(?<name>[^@]+)@(?<version>.*)$/);
 
@@ -68,10 +77,6 @@ export function extractRepository(
 export function extractContainer(
   container: Container,
 ): PackageDependency | null {
-  if (!container.image) {
-    return null;
-  }
-
   const dep = getDep(container.image);
   logger.debug(
     {
@@ -104,15 +109,60 @@ export function parseAzurePipelines(
   content: string,
   packageFile: string,
 ): AzurePipelines | null {
-  let pkg: AzurePipelines | null = null;
-  try {
-    pkg = parseSingleYaml(content, { json: true }) as AzurePipelines;
-  } catch (err) /* istanbul ignore next */ {
-    logger.debug({ packageFile, err }, 'Error parsing azure-pipelines content');
-    return null;
+  const res = AzurePipelinesYaml.safeParse(content);
+  if (res.success) {
+    return res.data;
+  } else {
+    logger.debug(
+      { err: res.error, packageFile },
+      'Error parsing azure-pipelines content.',
+    );
+  }
+  return null;
+}
+
+function extractSteps(
+  steps: Step[] | undefined,
+): PackageDependency[] {
+  const deps: PackageDependency[] = [];
+  for (const step of coerceArray(steps)) {
+    const task = extractAzurePipelinesTasks(step.task);
+    if (task) {
+      deps.push(task);
+    }
   }
+  return deps;
+}
+
+function extractJob(job: Job | undefined): PackageDependency[] {
+  return extractSteps(job?.steps);
+}
+
+function extractDeploy(deploy: Deploy | undefined): PackageDependency[] {
+  const deps = extractJob(deploy?.deploy);
+  deps.push(...extractJob(deploy?.postRouteTraffic));
+  deps.push(...extractJob(deploy?.preDeploy));
+  deps.push(...extractJob(deploy?.routeTraffic));
+  deps.push(...extractJob(deploy?.on?.failure));
+  deps.push(...extractJob(deploy?.on?.success));
+  return deps;
+}
+
+function extractJobs(jobs: Jobs | undefined): PackageDependency[] {
+  const deps: PackageDependency[] = [];
+  for (const jobOrDeployment of coerceArray(jobs)) {
+    const deployment = jobOrDeployment as Deployment;
+    if (deployment.strategy) {
+      deps.push(...extractDeploy(deployment.strategy.canary));
+      deps.push(...extractDeploy(deployment.strategy.rolling));
+      deps.push(...extractDeploy(deployment.strategy.runOnce));
+      continue;
+    }
 
-  return pkg;
+    const job = jobOrDeployment as Job;
+    deps.push(...extractJob(job));
+  }
+  return deps;
 }
 
 export function extractPackageFile(
@@ -142,31 +192,11 @@ export function extractPackageFile(
   }
 
   for (const { jobs } of coerceArray(pkg.stages)) {
-    for (const { steps } of coerceArray(jobs)) {
-      for (const step of coerceArray(steps)) {
-        const task = extractAzurePipelinesTasks(step.task);
-        if (task) {
-          deps.push(task);
-        }
-      }
-    }
-  }
-
-  for (const { steps } of coerceArray(pkg.jobs)) {
-    for (const step of coerceArray(steps)) {
-      const task = extractAzurePipelinesTasks(step.task);
-      if (task) {
-        deps.push(task);
-      }
-    }
+    deps.push(...extractJobs(jobs));
   }
 
-  for (const step of coerceArray(pkg.steps)) {
-    const task = extractAzurePipelinesTasks(step.task);
-    if (task) {
-      deps.push(task);
-    }
-  }
+  deps.push(...extractJobs(pkg.jobs));
+  deps.push(...extractSteps(pkg.steps));
 
   if (!deps.length) {
     return null;
diff --git a/lib/modules/manager/azure-pipelines/schema.ts b/lib/modules/manager/azure-pipelines/schema.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1a1c3252546b95622e355858d62b37a852c38643
--- /dev/null
+++ b/lib/modules/manager/azure-pipelines/schema.ts
@@ -0,0 +1,81 @@
+import { z } from 'zod';
+import { LooseArray, Yaml } from '../../../util/schema-utils';
+
+export const Step = z.object({
+  task: z.string(),
+});
+export type Step = z.infer<typeof Step>;
+
+export const Job = z.object({
+  steps: LooseArray(Step),
+});
+export type Job = z.infer<typeof Job>;
+
+export const Deploy = z
+  .object({
+    deploy: Job,
+    preDeploy: Job,
+    routeTraffic: Job,
+    postRouteTraffic: Job,
+    on: z
+      .object({
+        failure: Job,
+        success: Job,
+      })
+      .partial(),
+  })
+  .partial();
+export type Deploy = z.infer<typeof Deploy>;
+
+export const Deployment = z
+  .object({
+    strategy: z
+      .object({
+        runOnce: Deploy,
+        rolling: Deploy,
+        canary: Deploy,
+      })
+      .partial(),
+  })
+  .partial();
+export type Deployment = z.infer<typeof Deployment>;
+
+export const Jobs = LooseArray(z.union([Job, Deployment]));
+export type Jobs = z.infer<typeof Jobs>;
+
+export const Stage = z.object({
+  jobs: Jobs,
+});
+export type Stage = z.infer<typeof Stage>;
+
+export const Container = z.object({
+  image: z.string(),
+});
+export type Container = z.infer<typeof Container>;
+
+export const Repository = z.object({
+  type: z.enum(['git', 'github', 'bitbucket']),
+  name: z.string(),
+  ref: z.string().optional(),
+});
+export type Repository = z.infer<typeof Repository>;
+
+export const Resources = z
+  .object({
+    repositories: LooseArray(Repository),
+    containers: LooseArray(Container),
+  })
+  .partial();
+export type Resources = z.infer<typeof Resources>;
+
+export const AzurePipelines = z
+  .object({
+    resources: Resources,
+    stages: LooseArray(Stage),
+    jobs: Jobs,
+    steps: LooseArray(Step),
+  })
+  .partial();
+export type AzurePipelines = z.infer<typeof AzurePipelines>;
+
+export const AzurePipelinesYaml = Yaml.pipe(AzurePipelines);
diff --git a/lib/modules/manager/azure-pipelines/types.ts b/lib/modules/manager/azure-pipelines/types.ts
deleted file mode 100644
index f532a4fb83eb9a5e111e9cbad8d7c34e459e57d0..0000000000000000000000000000000000000000
--- a/lib/modules/manager/azure-pipelines/types.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-export interface Container {
-  image?: string | null;
-}
-export interface Repository {
-  type: 'git' | 'github' | 'bitbucket';
-  name: string;
-  ref?: string | null;
-}
-export interface Resources {
-  repositories?: Repository[];
-  containers?: Container[];
-}
-export interface AzurePipelines {
-  resources?: Resources;
-  stages?: Stage[];
-  jobs?: Job[];
-  steps?: Step[];
-}
-
-export interface Stage {
-  jobs?: Job[];
-}
-
-export interface Job {
-  steps?: Step[];
-}
-
-export interface Step {
-  task: string;
-}