diff --git a/.gitignore b/.gitignore
index fbed1f5ac378b9d8dfe0f1418a022425d44e0773..e398cad1c8a2ff7c54ca1ce9156e86ad4cbd8b1e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -113,3 +113,6 @@ service-definitions.yml
 
 # Rendered API docs
 /api-docs/
+
+# Flamebearer
+flamegraph.html
diff --git a/config/custom-environment-variables.yml b/config/custom-environment-variables.yml
index 468c4bf5de7a17d35ab74727df36925d88c117e7..effec83c27f8e41d441cc5f6947eea2f18c85a6b 100644
--- a/config/custom-environment-variables.yml
+++ b/config/custom-environment-variables.yml
@@ -48,9 +48,6 @@ public:
       authorizedOrigins: 'TEAMCITY_ORIGINS'
     trace: 'TRACE_SERVICES'
 
-  profiling:
-    makeBadge: 'PROFILE_MAKE_BADGE'
-
   cacheHeaders:
     defaultCacheLengthSeconds: 'BADGE_MAX_AGE_SECONDS'
 
diff --git a/config/default.yml b/config/default.yml
index 7adbce874e3ec7da0d07156ef133200db5aee120..405670561c37a7463f59a295d5a90887763ab01c 100644
--- a/config/default.yml
+++ b/config/default.yml
@@ -23,9 +23,6 @@ public:
         intervalSeconds: 200
     trace: false
 
-  profiling:
-    makeBadge: false
-
   cacheHeaders:
     defaultCacheLengthSeconds: 120
 
diff --git a/core/base-service/base-static.js b/core/base-service/base-static.js
index ae1ab70229c771b1a209ec368f8b4b52c91536fb..c932b317bc9a256e22d19248cf037a9c109fc494 100644
--- a/core/base-service/base-static.js
+++ b/core/base-service/base-static.js
@@ -13,9 +13,6 @@ const { prepareRoute, namedParamsForMatch } = require('./route')
 
 module.exports = class BaseStaticService extends BaseService {
   static register({ camp, metricInstance }, serviceConfig) {
-    const {
-      profiling: { makeBadge: shouldProfileMakeBadge },
-    } = serviceConfig
     const { regex, captureNames } = prepareRoute(this.route)
 
     const metricHelper = MetricHelper.create({
@@ -52,16 +49,9 @@ module.exports = class BaseStaticService extends BaseService {
       const format = (match.slice(-1)[0] || '.svg').replace(/^\./, '')
       badgeData.format = format
 
-      if (shouldProfileMakeBadge) {
-        console.time('makeBadge total')
-      }
-      const svg = makeBadge(badgeData)
-      if (shouldProfileMakeBadge) {
-        console.timeEnd('makeBadge total')
-      }
-
       setCacheHeadersForStaticResource(ask.res)
 
+      const svg = makeBadge(badgeData)
       makeSend(format, ask.res, end)(svg)
 
       metricHandle.noteResponseSent()
diff --git a/core/server/server.js b/core/server/server.js
index b4843e420bd2bea0d610903759dbeb14779362f3..3f4f5a4238ad0db17e0f1830c70305874a731146 100644
--- a/core/server/server.js
+++ b/core/server/server.js
@@ -123,9 +123,6 @@ const publicConfigSchema = Joi.object({
     teamcity: defaultService,
     trace: Joi.boolean().required(),
   }).required(),
-  profiling: {
-    makeBadge: Joi.boolean().required(),
-  },
   cacheHeaders: {
     defaultCacheLengthSeconds: Joi.number()
       .integer()
@@ -341,7 +338,6 @@ class Server {
         {
           handleInternalErrors: config.public.handleInternalErrors,
           cacheHeaders: config.public.cacheHeaders,
-          profiling: config.public.profiling,
           fetchLimitBytes: bytes(config.public.fetchLimit),
           rasterUrl: config.public.rasterUrl,
           private: config.private,
diff --git a/doc/flamegraph.png b/doc/flamegraph.png
new file mode 100644
index 0000000000000000000000000000000000000000..e7611aedb5057ff37991487a22804b65ed0b9ec5
Binary files /dev/null and b/doc/flamegraph.png differ
diff --git a/doc/performance-testing.md b/doc/performance-testing.md
new file mode 100644
index 0000000000000000000000000000000000000000..8e48fd0610eda17f7ac56a478f755af1b81c598b
--- /dev/null
+++ b/doc/performance-testing.md
@@ -0,0 +1,45 @@
+# Performance testing
+
+Shields has some basic tooling available to help you get started with
+performance testing.
+
+## Benchmarking the badge generation
+
+Want to micro-benchmark a section of the code responsible for generating the
+static badges? Follow these two simple steps:
+
+1. Surround the code you want to time with `console.time` and `console.timeEnd`
+   statements. For example:
+
+```js
+console.time('makeBadge')
+const svg = makeBadge(badgeData)
+console.timeEnd('makeBadge')
+```
+
+2. Run `npm run benchmark:badge` in your terminal. An average timing will
+   be displayed!
+
+If you want to change the number of iterations in the benchmark, you can modify
+the values specified by the `benchmark:badge` script in _package.json_. If
+you want to benchmark a specific code path not covered by the static badge, you
+can modify the badge URL in _scripts/benchmark-performance.js_.
+
+## Profiling the full code
+
+Want to have an overview of how the entire application is performing? Simply
+run `npm run profile:server` in your terminal. This will start the
+backend server (i.e. without the frontend) in profiling mode and any requests
+you make on `localhost:8080` will generate data in a file with a name
+similar to _isolate-00000244AB6ED3B0-11920-v8.log_.
+
+You can then make use of this profiling data in various tools, for example
+[flamebearer](https://github.com/mapbox/flamebearer):
+
+```sh
+npm install -g flamebearer
+node --prof-process --preprocess -j isolate-00000244AB6ED3B0-11920-v8.log | flamebearer
+```
+
+An example output is the following:
+![Example flamegraph output](https://raw.githubusercontent.com/badges/shields/master/doc/flamegraph.png)
diff --git a/package.json b/package.json
index e1bb13324ae241187645e19af5d121ca9d49f26a..0b0b61b6eb1429747782c1955c0ab75f661497dd 100644
--- a/package.json
+++ b/package.json
@@ -106,6 +106,8 @@
     "start:server:e2e-on-build": "node server 8080",
     "start:server": "cross-env NODE_CONFIG_ENV=development nodemon server 8080",
     "debug:server": "cross-env NODE_CONFIG_ENV=development nodemon --inspect server.js 8080",
+    "profile:server": "cross-env NODE_CONFIG_ENV=development node --prof server 8080",
+    "benchmark:badge": "cross-env NODE_CONFIG_ENV=test node scripts/benchmark-performance.js --iterations 10100 | node scripts/capture-timings.js --warmup-iterations 100",
     "prestart": "run-s --silent depcheck defs features",
     "start": "concurrently --names server,frontend \"npm run start:server\" \"cross-env GATSBY_BASE_URL=http://localhost:8080 gatsby develop --port 3000\"",
     "e2e": "start-server-and-test start http://localhost:3000 test:e2e",
diff --git a/scripts/benchmark-performance.js b/scripts/benchmark-performance.js
new file mode 100644
index 0000000000000000000000000000000000000000..7686138f7ceac4b5eff84a4c68d67c3ac65d65b8
--- /dev/null
+++ b/scripts/benchmark-performance.js
@@ -0,0 +1,26 @@
+'use strict'
+
+const config = require('config').util.toObject()
+const got = require('got')
+const minimist = require('minimist')
+const Server = require('../core/server/server')
+
+async function main() {
+  const server = new Server(config)
+  await server.start()
+  const args = minimist(process.argv)
+  const iterations = parseInt(args.iterations) || 10000
+  for (let i = 0; i < iterations; ++i) {
+    await got(`${server.baseUrl}badge/coverage-${i}-green.svg`)
+  }
+  await server.stop()
+}
+
+;(async () => {
+  try {
+    await main()
+  } catch (e) {
+    console.error(e)
+    process.exit(1)
+  }
+})()
diff --git a/scripts/benchmark-performance.sh b/scripts/benchmark-performance.sh
deleted file mode 100755
index ea77308c623ea3c7b9a47b00720a413fdd34643d..0000000000000000000000000000000000000000
--- a/scripts/benchmark-performance.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/sh
-
-PROFILE_MAKE_BADGE=1 node server 1111 >perftest.log &
-sleep 2
-for ((i=0;i<10000;i++)); do
-  curl -s http://localhost:1111/badge/coverage-"$i"%-green.svg >/dev/null
-done
-kill $(jobs -p)
-<perftest.log grep 'makeBadge total' | \
-  grep -Eo '[0-9\.]+' | \
-  awk '{s+=$1;n++} END {print s/n}'
diff --git a/scripts/capture-timings.js b/scripts/capture-timings.js
new file mode 100644
index 0000000000000000000000000000000000000000..fee8bf24a27a0aa75a196f0c3bb976c566f8eead
--- /dev/null
+++ b/scripts/capture-timings.js
@@ -0,0 +1,61 @@
+'use strict'
+
+const readline = require('readline')
+const minimist = require('minimist')
+
+async function captureTimings(warmupIterations) {
+  const rl = readline.createInterface({
+    input: process.stdin,
+  })
+
+  const times = {}
+  let timingsCount = 0
+  let labelsCount = 0
+  const timing = /^(.+): ([0-9.]+)ms$/i
+
+  for await (const line of rl) {
+    const match = timing.exec(line)
+    if (match) {
+      labelsCount = Object.keys(times).length
+      if (timingsCount > warmupIterations * labelsCount) {
+        const label = match[1]
+        const time = parseFloat(match[2])
+        times[label] = time + (times[label] || 0)
+      }
+      ++timingsCount
+    }
+  }
+  return { times, iterations: timingsCount / labelsCount }
+}
+
+function logResults({ times, iterations, warmupIterations }) {
+  if (isNaN(iterations)) {
+    console.log(
+      `No timings captured. Have you included console.time statements in the badge creation code path?`
+    )
+  } else {
+    const timedIterations = iterations - warmupIterations
+    for (const [label, time] of Object.entries(times)) {
+      const averageTime = time / timedIterations
+      console.log(
+        `Average '${label}' time over ${timedIterations} iterations: ${averageTime}ms`
+      )
+    }
+  }
+}
+
+async function main() {
+  const args = minimist(process.argv)
+  const warmupIterations = parseInt(args['warmup-iterations']) || 100
+  const { times, iterations } = await captureTimings(warmupIterations)
+  logResults({ times, iterations, warmupIterations })
+}
+
+;(async () => {
+  try {
+    await main()
+  } catch (e) {
+    console.error(e)
+    process.exit(1)
+  }
+})()