diff --git a/packages/cli/src/commands/proxy/connect.ts b/packages/cli/src/commands/proxy/connect.ts
index 41cac82a..bf2ca5b9 100644
--- a/packages/cli/src/commands/proxy/connect.ts
+++ b/packages/cli/src/commands/proxy/connect.ts
@@ -91,8 +91,8 @@ export default class Connect extends ProfileCommand<typeof Connect> {
 
     const inspector = commands.proxy.inspectRunningComposeApp(composeProject)
     const networks = await inspector.getComposeNetworks()
-
-    const model = commands.proxy.initProxyComposeModel({
+    const projectDirectory = await inspector.getWorkingDirectory()
+    const model = await commands.proxy.initProxyComposeModel({
       version: this.config.version,
       envId,
       debug: this.flags.debug,
@@ -102,6 +102,7 @@ export default class Connect extends ProfileCommand<typeof Connect> {
       privateMode: flags['private-env'],
       injectLivecycleScript: flags['disable-widget'] ? undefined : flags['livecycle-widget-url'],
       tunnelingKeyThumbprint: await jwkThumbprint(tunnelingKey),
+      projectDirectory,
     })
 
     const composeTmpDir = await model.write({ tunnelingKey, knownServerPublicKey: tunnelServerPublicKey })
diff --git a/packages/cli/src/commands/proxy/disconnect.ts b/packages/cli/src/commands/proxy/disconnect.ts
index c5d17f76..27f36c3e 100644
--- a/packages/cli/src/commands/proxy/disconnect.ts
+++ b/packages/cli/src/commands/proxy/disconnect.ts
@@ -24,11 +24,10 @@ export default class Disconnect extends ProfileCommand<typeof Disconnect> {
   async run(): Promise<unknown> {
     const { args } = await this.parse(Disconnect)
     const inspector = commands.proxy.inspectRunningComposeApp(args['compose-project'])
-    const agentContainerId = await inspector.getPreevyAgentContainer()
-
-    if (agentContainerId) {
-      await execPromiseStdout(`docker rm -f ${agentContainerId}`)
-      this.log(`Removed ${agentContainerId}, disconnected ${args['compose-project']} tunnel`)
+    const agentContainer = await inspector.getPreevyAgentContainer()
+    if (agentContainer) {
+      await execPromiseStdout(`docker rm -f ${agentContainer.id}`)
+      this.log(`Removed ${agentContainer.id}, disconnected ${args['compose-project']} tunnel`)
     }
     return undefined
   }
diff --git a/packages/cli/src/tunnel-server-client.ts b/packages/cli/src/tunnel-server-client.ts
index 4f0aa397..a4bdbdbe 100644
--- a/packages/cli/src/tunnel-server-client.ts
+++ b/packages/cli/src/tunnel-server-client.ts
@@ -33,7 +33,7 @@ export const connectToTunnelServerSsh = async ({ tunnelOpts, log, tunnelingKey,
     log,
     tunnelOpts,
     clientPrivateKey: tunnelingKey,
-    username: process.env.USER || 'preview',
+    username: process.env.USER || 'preevy',
     confirmHostFingerprint: async (...args) => {
       spinner?.stop()
       return await confirmHostFingerprint(...args)
diff --git a/packages/common/src/ssh/base-client.ts b/packages/common/src/ssh/base-client.ts
index 2ed7ab3a..1d9b1027 100644
--- a/packages/common/src/ssh/base-client.ts
+++ b/packages/common/src/ssh/base-client.ts
@@ -140,6 +140,7 @@ export const baseSshClient = async (
     ssh.on('ready', () => resolve(result))
     ssh.on('error', err => {
       reject(err)
+      ssh.end()
     })
     ssh.connect({
       debug: msg => log.debug(msg),
diff --git a/packages/compose-tunnel-agent/index.ts b/packages/compose-tunnel-agent/index.ts
index 23e09fce..e152405e 100644
--- a/packages/compose-tunnel-agent/index.ts
+++ b/packages/compose-tunnel-agent/index.ts
@@ -95,11 +95,6 @@ const main = async () => {
     log: sshLog,
   })
 
-  sshClient.ssh.on('error', async err => {
-    log.error('ssh client error: %j', inspect(err))
-    await sshClient.end()
-  })
-
   sshClient.ssh.on('close', () => {
     if (!endRequested) {
       log.error('ssh client closed unexpectedly')
@@ -147,9 +142,10 @@ const SHUTDOWN_TIMEOUT = 5000
 
 void main().then(
   ({ end }) => {
-    ['SIGTERM', 'SIGINT'].forEach(signal => {
-      process.once(signal, async () => {
-        log.info(`shutting down on ${signal}`)
+    ['SIGTERM', 'SIGINT', 'uncaughtException'].forEach(signal => {
+      process.once(signal, async (...args) => {
+        const argsStr = args.length ? args.map(arg => inspect(arg)).join(', ') : undefined
+        log.warn(`shutting down on ${[signal, argsStr].filter(Boolean).join(': ')}`)
         const endResult = await Promise.race([
           end().then(() => true),
           new Promise<void>(resolve => { setTimeout(resolve, SHUTDOWN_TIMEOUT) }),
@@ -157,7 +153,7 @@ void main().then(
         if (!endResult) {
           log.error(`timed out while waiting ${SHUTDOWN_TIMEOUT}ms for server to close, exiting`)
         }
-        process.exit(0)
+        process.exit(1)
       })
     })
   },
diff --git a/packages/compose-tunnel-agent/src/docker/services.test.ts b/packages/compose-tunnel-agent/src/docker/services.test.ts
deleted file mode 100644
index e1df42bc..00000000
--- a/packages/compose-tunnel-agent/src/docker/services.test.ts
+++ /dev/null
@@ -1,68 +0,0 @@
-/* eslint-disable @typescript-eslint/no-non-null-assertion */
-import { describe, test, expect } from '@jest/globals'
-import { scriptInjectionFromLabels } from './services'
-
-describe('parse script injection labels', () => {
-  test('should parse correctly', () => {
-    const labels = {
-      'preevy.inject_script.widget.src': 'https://my-script',
-      'preevy.inject_script.widget.defer': 'true',
-      'preevy.inject_script.widget.async': 'false',
-      'preevy.inject_script.widget.path_regex': 't.*t',
-    }
-    const scriptInjections = scriptInjectionFromLabels(labels)
-    expect(scriptInjections).toHaveLength(1)
-    const [script] = scriptInjections
-    expect(script).toMatchObject({
-      src: 'https://my-script',
-      defer: true,
-      async: false,
-      pathRegex: expect.any(RegExp),
-    })
-  })
-  test('should revive regex correctly', () => {
-    const labels = {
-      'preevy.inject_script.widget.src': 'https://my-script',
-      'preevy.inject_script.widget.path_regex': 't.*t',
-    }
-    const [script] = scriptInjectionFromLabels(labels)
-    expect('test').toMatch(script.pathRegex!)
-    expect('best').not.toMatch(script.pathRegex!)
-  })
-
-  test('should ignore scripts with invalid regex', () => {
-    const labels = {
-      'preevy.inject_script.widget.src': 'https://my-script',
-      'preevy.inject_script.widget.path_regex': '[',
-    }
-    expect(scriptInjectionFromLabels(labels)).toHaveLength(0)
-  })
-
-  test('should drop scripts without src', () => {
-    const labels = {
-      'preevy.inject_script.widget.defer': 'true',
-    }
-    expect(scriptInjectionFromLabels(labels)).toHaveLength(0)
-  })
-
-  test('should support multiple scripts', () => {
-    const labels = {
-      'preevy.inject_script.widget.src': 'https://my-script',
-      'preevy.inject_script.widget2.src': 'https://my-script2',
-      'preevy.inject_script.widget3.src': 'https://my-script3',
-    }
-    const scripts = scriptInjectionFromLabels(labels)
-    expect(scripts).toHaveLength(3)
-    expect(scripts).toMatchObject([
-      {
-        src: 'https://my-script',
-      },
-      {
-        src: 'https://my-script2',
-      },
-      {
-        src: 'https://my-script3',
-      },
-    ])
-  })
-})
diff --git a/packages/compose-tunnel-agent/src/ssh/tunnel-client.ts b/packages/compose-tunnel-agent/src/ssh/tunnel-client.ts
index 582cca09..dc0f34d3 100644
--- a/packages/compose-tunnel-agent/src/ssh/tunnel-client.ts
+++ b/packages/compose-tunnel-agent/src/ssh/tunnel-client.ts
@@ -42,6 +42,11 @@ export const sshClient = async ({
     connectionConfig,
   })
 
+  ssh.on('error', err => {
+    log.error('ssh client error: %j', inspect(err))
+    // no need to call end here: baseSshClient ends the connection on error
+  })
+
   const currentForwards = new Map<string, Forward>()
 
   ssh.on('unix connection', ({ socketPath: forwardRequestId }, accept, reject) => {
diff --git a/packages/core/src/child-process.ts b/packages/core/src/child-process.ts
index b0fa7aac..6818957d 100644
--- a/packages/core/src/child-process.ts
+++ b/packages/core/src/child-process.ts
@@ -1,5 +1,5 @@
 import { ProcessOutputBuffers, orderedOutput } from '@preevy/common'
-import childProcess, { ChildProcess } from 'child_process'
+import childProcess, { ChildProcess, ExecOptions } from 'child_process'
 import { Readable, Writable } from 'stream'
 import { promisify } from 'util'
 
@@ -69,7 +69,7 @@ export const spawnPromise = async (
 
 export const execPromise = promisify(childProcess.exec)
 
-export const execPromiseStdout = async (command: string) => (await execPromise(command)).stdout.trim()
+export const execPromiseStdout = async (command: string, opts?: Pick<ExecOptions, 'cwd'>) => (await execPromise(command, { cwd: opts?.cwd })).stdout.trim()
 
 export type PartialStdioStringOption = 'inherit' | 'ignore'
 export type PartialStdioOptions = PartialStdioStringOption
diff --git a/packages/core/src/commands/proxy.ts b/packages/core/src/commands/proxy.ts
index 8d11b9ef..528636c0 100644
--- a/packages/core/src/commands/proxy.ts
+++ b/packages/core/src/commands/proxy.ts
@@ -9,6 +9,7 @@ import { widgetScriptInjector } from '../compose/script-injection'
 import { ComposeModel } from '../compose'
 import { TunnelOpts } from '../ssh'
 import { EnvId } from '../env-id'
+import { EnvMetadata, detectGitMetadata } from '../env-metadata'
 
 export const agentServiceName = COMPOSE_TUNNEL_AGENT_SERVICE_NAME
 
@@ -31,29 +32,41 @@ export function inspectRunningComposeApp(projectName: string) {
     const getNetworkName = (labels: string) => labels.split(',').map(l => l.split('=')).find(l => l[0] === 'com.docker.compose.network')?.[1]
     return Object.fromEntries(composeNetworks.map(x => ([getNetworkName(x.Labels), { name: x.Name }])))
   }
+
+  function parseJSONContainer(s: string) {
+    const ctr = JSON.parse(s) as { Names: string; ID: string; Labels: string }
+    return { names: ctr.Names, id: ctr.ID, labels: Object.fromEntries(ctr.Labels.split(',').map(l => l.split('='))) as Record<string, string> }
+  }
+
   const getPreevyAgentContainer = async () => {
-    const agentContainerId = await dockerCmd(`ps --filter ${projectFilter} --filter label=com.docker.compose.service=${agentServiceName} -q`)
-    if (!agentContainerId) {
+    const agentContainer = await dockerCmd(`ps --filter ${projectFilter} --filter label=com.docker.compose.service=${agentServiceName} --format json`)
+    if (!agentContainer) {
       return null
     }
-    return agentContainerId
+    return parseJSONContainer(agentContainer)
   }
 
   const getEnvId = async () => {
-    const agentContainerId = await getPreevyAgentContainer()
-    if (agentContainerId) {
-      return await dockerCmd(`inspect ${agentContainerId}  --format '{{ index .Config.Labels "preevy.env_id"}}'`)
-    }
-    return null
+    const agentContainer = await getPreevyAgentContainer()
+    return agentContainer?.labels['preevy.env_id']
+  }
+
+  const listAllContainers = async () => (await dockerCmd(`ps -a --filter ${projectFilter} --format json`)).split('\n')
+    .filter(Boolean)
+    .map(parseJSONContainer)
+
+  const getWorkingDirectory = async () => {
+    const containers = await listAllContainers()
+    return containers.find(x => x.labels['com.docker.compose.service'] !== agentServiceName)?.labels['com.docker.compose.project.working_dir']
   }
   return {
     getComposeNetworks,
     getPreevyAgentContainer,
     getEnvId,
+    getWorkingDirectory,
   }
 }
 
-export function initProxyComposeModel(opts: {
+export async function initProxyComposeModel(opts: {
   envId: EnvId
   projectName: string
   tunnelOpts: TunnelOpts
@@ -72,11 +85,12 @@ export function initProxyComposeModel(opts: {
   }
 
   const privateMode = Boolean(opts.privateMode)
-  const envMetadata = {
+  const envMetadata: EnvMetadata = {
     id: opts.envId,
     lastDeployTime: new Date(),
     version: opts.version,
     profileThumbprint: opts.tunnelingKeyThumbprint,
+    git: opts.projectDirectory ? await detectGitMetadata(opts.projectDirectory) : undefined,
   }
 
   let newComposeModel = addComposeTunnelAgentService({
diff --git a/packages/core/src/env-id.ts b/packages/core/src/env-id.ts
index 110e1644..d9aba845 100644
--- a/packages/core/src/env-id.ts
+++ b/packages/core/src/env-id.ts
@@ -1,5 +1,5 @@
 import { detectCiProvider } from './ci-providers'
-import { gitBranchName } from './git'
+import { gitContext } from './git'
 import { ComposeModel } from './compose'
 import { Logger } from './log'
 
@@ -45,7 +45,7 @@ const findAmbientEnvIdSuffix = async () => {
       return { value: envIdFromBranch(branch), basedOn: 'CI branch' }
     }
   }
-  const branch = await gitBranchName()
+  const branch = await gitContext().branchName()
   if (branch) {
     return { value: envIdFromBranch(branch), basedOn: 'local git branch' }
   }
diff --git a/packages/core/src/env-metadata.ts b/packages/core/src/env-metadata.ts
index f528f826..bf662124 100644
--- a/packages/core/src/env-metadata.ts
+++ b/packages/core/src/env-metadata.ts
@@ -1,4 +1,4 @@
-import * as git from './git'
+import { gitContext } from './git'
 import { detectCiProvider } from './ci-providers'
 
 export type GitAuthor = { name: string; email: string }
@@ -31,21 +31,22 @@ export type EnvMetadata = {
   profileThumbprint?: string
 }
 
-const detectGitMetadata = async (): Promise<EnvGitMetadata | undefined> => {
+export const detectGitMetadata = async (workingDir: string): Promise<EnvGitMetadata | undefined> => {
+  const git = gitContext(workingDir)
   const ciProvider = detectCiProvider()
-  const branch = await git.gitBranchName()
+  const branch = await git.branchName()
   if (!branch) {
     return undefined
   }
-  const commit = ciProvider?.gitCommit() ?? await git.gitCommit() as string
+  const commit = ciProvider?.gitCommit() ?? await git.commit() as string
 
   return {
     ciProvider: ciProvider?.id,
     branch: ciProvider?.branchName() ?? branch,
     commit,
-    author: await git.gitAuthor(commit) as GitAuthor,
+    author: await git.author(commit) as GitAuthor,
     pullRequestNumber: ciProvider?.pullRequestNumber(),
-    repoUrl: ciProvider?.repoUrl() || await git.gitRemoteTrackingBranchUrl(),
+    repoUrl: ciProvider?.repoUrl() || await git.remoteTrackingBranchUrl(),
   }
 }
 
@@ -53,9 +54,10 @@ export const envMetadata = async ({
   envId,
   version,
   profileThumbprint,
-}: { envId: string; version: string; profileThumbprint?: string }): Promise<Omit<EnvMetadata, 'driver'>> => ({
+  workingDir = process.cwd(),
+}: { envId: string; version: string; profileThumbprint?: string; workingDir?: string }): Promise<Omit<EnvMetadata, 'driver'>> => ({
   id: envId,
-  git: await detectGitMetadata(),
+  git: await detectGitMetadata(workingDir),
   lastDeployTime: new Date(),
   version,
   profileThumbprint,
diff --git a/packages/core/src/git.ts b/packages/core/src/git.ts
index 9ad7c59e..3fe923d4 100644
--- a/packages/core/src/git.ts
+++ b/packages/core/src/git.ts
@@ -1,24 +1,34 @@
 import { execPromiseStdout } from './child-process'
 
-export const gitBranchName = async () => await execPromiseStdout('git rev-parse --abbrev-ref HEAD')
-  .catch(() => undefined)
+export function gitContext(cwd: string = process.cwd()) {
+  const execGit = async (command: string) => await execPromiseStdout(`git ${command}`, { cwd })
+  const branchName = async () => await execGit('rev-parse --abbrev-ref HEAD')
+    .catch(() => undefined)
 
-export const gitCommit = async () => await execPromiseStdout('git rev-parse HEAD')
-  .catch(() => undefined)
+  const head = async () => await execGit('rev-parse HEAD')
+    .catch(() => undefined)
 
-export const gitAuthor = async (commit?: string) => {
-  const [email, name] = await Promise.all([
-    `git log -1 ${commit} --pretty=format:'%ae'`,
-    `git log -1 ${commit} --pretty=format:'%an'`,
-  ].map(cmd => execPromiseStdout(cmd).catch(() => undefined)))
-  return email === undefined || name === undefined ? undefined : { name, email }
-}
+  const author = async (commit?: string) => {
+    const [email, name] = await Promise.all([
+      `log -1 ${commit} --pretty=format:'%ae'`,
+      `log -1 ${commit} --pretty=format:'%an'`,
+    ].map(cmd => execGit(cmd).catch(() => undefined)))
+    return email === undefined || name === undefined ? undefined : { name, email }
+  }
+
+  const remoteTrackingBranchUrl = async (localBranch?: string) => {
+    const b = localBranch ?? (await execGit('rev-parse --abbrev-ref HEAD'))
+    const trackingRemote = await execGit(`config branch.${b}.remote || true`)
+    if (!trackingRemote) {
+      return undefined
+    }
+    return await execGit(`config remote.${trackingRemote}.url`)
+  }
 
-export const gitRemoteTrackingBranchUrl = async (localBranch?: string) => {
-  const b = localBranch ?? (await execPromiseStdout('git rev-parse --abbrev-ref HEAD'))
-  const trackingRemote = await execPromiseStdout(`git config branch.${b}.remote || true`)
-  if (!trackingRemote) {
-    return undefined
+  return {
+    branchName,
+    commit: head,
+    author,
+    remoteTrackingBranchUrl,
   }
-  return await execPromiseStdout(`git config remote.${trackingRemote}.url`)
 }
diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts
index bb180b45..76f8e2a4 100644
--- a/packages/core/src/index.ts
+++ b/packages/core/src/index.ts
@@ -63,6 +63,6 @@ export { ciProviders, detectCiProvider } from './ci-providers'
 export { paginationIterator } from './pagination'
 export { ensureDefined, extractDefined, HasRequired } from './nulls'
 export { pSeries } from './p-series'
-export * as git from './git'
+export { gitContext } from './git'
 export * as config from './config'
 export { login, getTokensFromLocalFs as getLivecycleTokensFromLocalFs, TokenExpiredError } from './login'
diff --git a/packages/plugin-github-pr-link/src/config.ts b/packages/plugin-github-pr-link/src/config.ts
index f9a6578c..2f4f7e6f 100644
--- a/packages/plugin-github-pr-link/src/config.ts
+++ b/packages/plugin-github-pr-link/src/config.ts
@@ -1,7 +1,7 @@
 import { defaults } from 'lodash'
 import fs from 'fs'
 import { Config as OclifConfig } from '@oclif/core'
-import { Logger, detectCiProvider, git } from '@preevy/core'
+import { Logger, detectCiProvider, gitContext } from '@preevy/core'
 import { tryParseRepo, tryParseUrlToRepo } from './repo'
 import { ParsedFlags, flagsDef, prefixedFlagsDef } from './flags'
 import { defaultCommentTemplate } from './lib/github-comment'
@@ -47,7 +47,7 @@ const ambientGithubConfig = async ({ log }: { log: Logger }): Promise<Partial<Gi
 
   log.debug('ambientGithubConfig, ciProvider: %j', ciProvider?.name)
 
-  const repoUrlStr = ciProvider?.repoUrl() ?? await git.gitRemoteTrackingBranchUrl().catch(() => undefined)
+  const repoUrlStr = ciProvider?.repoUrl() ?? await gitContext().remoteTrackingBranchUrl().catch(() => undefined)
 
   log.debug('ambientGithubConfig, repoUrlStr: %j', repoUrlStr)
 
diff --git a/site/docs/drivers/azure.md b/site/docs/drivers/azure.md
index f1b48a29..b48ab74d 100644
--- a/site/docs/drivers/azure.md
+++ b/site/docs/drivers/azure.md
@@ -18,10 +18,10 @@ Microsoft Azure also offers free 12 months for new users which is suited for try
 Preevy uses the Microsoft Azure SDK which can obtain the application [default credentials](https://github.com/Azure/azure-sdk-for-js/tree/main/sdk/identity/identity#defaultazurecredential).
 The simplest way is to use `az login` command.
 
-### Required Azure permissions
-
-TBD
-
+See the video below for a step-by-step guide on how to configure Azure credentials and use them with Preevy.
 
+<p align="center"><iframe width="816" height="480" src="https://www.youtube.com/embed/AdoAzHuyzb0?si=0Yz5qSs-vpDDmz1k" title="YouTube video player" frameborder="0" allow="accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share" allowfullscreen></iframe></p>
 
+### Required Azure permissions
 
+TBD
diff --git a/tunnel-server/index.ts b/tunnel-server/index.ts
index f2b4cc21..e5aa2a78 100644
--- a/tunnel-server/index.ts
+++ b/tunnel-server/index.ts
@@ -45,17 +45,17 @@ const saasPublicKey = createPublicKey(SAAS_PUBLIC_KEY)
 const SAAS_JWT_ISSUER = process.env.SAAS_JWT_ISSUER ?? 'app.livecycle.run'
 
 const activeTunnelStore = inMemoryActiveTunnelStore({ log })
-const appSessionStore = cookieSessionStore({ domain: BASE_URL.hostname, schema: claimsSchema, keys: process.env.COOKIE_SECRETS?.split(' ') })
+const sessionStore = cookieSessionStore({ domain: BASE_URL.hostname, schema: claimsSchema, keys: process.env.COOKIE_SECRETS?.split(' ') })
 const loginUrl = new URL('/login', editUrl(BASE_URL, { hostname: `auth.${BASE_URL.hostname}` })).toString()
 const app = createApp({
-  sessionStore: appSessionStore,
+  sessionStore,
   activeTunnelStore,
   baseUrl: BASE_URL,
   proxy: proxy({
     activeTunnelStore,
     log,
     loginUrl,
-    sessionStore: appSessionStore,
+    sessionStore,
     saasPublicKey,
     jwtSaasIssuer: SAAS_JWT_ISSUER,
     baseHostname: BASE_URL.hostname,
@@ -65,7 +65,6 @@ const app = createApp({
   jwtSaasIssuer: SAAS_JWT_ISSUER,
   saasPublicKey,
 })
-const sshLogger = log.child({ name: 'ssh_server' })
 
 const tunnelUrl = (
   rootUrl: URL,
@@ -74,7 +73,7 @@ const tunnelUrl = (
 ) => editUrl(rootUrl, { hostname: `${activeTunnelStoreKey(clientId, tunnel)}.${rootUrl.hostname}` }).toString()
 
 const sshServer = createSshServer({
-  log: sshLogger,
+  log: log.child({ name: 'ssh_server' }),
   sshPrivateKey,
   socketDir: '/tmp', // TODO
   activeTunnelStore,
diff --git a/tunnel-server/jest.config.cjs b/tunnel-server/jest.config.cjs
index 1fd541c2..f64e6b81 100644
--- a/tunnel-server/jest.config.cjs
+++ b/tunnel-server/jest.config.cjs
@@ -1,6 +1,17 @@
 /** @type {import('ts-jest').JestConfigWithTsJest} */
 module.exports = {
-  preset: 'ts-jest',
+  preset: 'ts-jest/presets/default-esm',
   testEnvironment: 'node',
   testMatch: ['!dist/', '**/*.test.ts'],
-};
\ No newline at end of file
+  extensionsToTreatAsEsm: ['.ts'],
+  transform: {
+    // '^.+\\.[tj]sx?$' to process js/ts with `ts-jest`
+    // '^.+\\.m?[tj]sx?$' to process js/ts/mjs/mts with `ts-jest`
+    '^.+\\.tsx?$': [
+      'ts-jest',
+      {
+        useESM: true,
+      },
+    ],
+  },
+}
diff --git a/tunnel-server/package.json b/tunnel-server/package.json
index 0dbe3e63..981e96a4 100644
--- a/tunnel-server/package.json
+++ b/tunnel-server/package.json
@@ -14,14 +14,20 @@
     "iconv-lite": "^0.6.3",
     "jose": "^4.14.4",
     "lodash": "^4.17.21",
+    "node-fetch": "2.6.9",
+    "p-timeout": "^6.1.2",
     "pino": "^8.11.0",
     "pino-pretty": "^9.4.0",
     "prom-client": "^14.2.0",
     "ssh2": "^1.12.0",
     "ts-node": "^10.9.1",
     "ts-pattern": "^5.0.4",
+    "tseep": "^1.1.1",
     "zod": "^3.21.4"
   },
+  "engines": {
+    "node": ">=18.0.0"
+  },
   "devDependencies": {
     "@jest/globals": "^29.5.0",
     "@types/content-type": "^1.1.5",
@@ -29,6 +35,7 @@
     "@types/http-proxy": "^1.17.9",
     "@types/lodash": "^4.14.192",
     "@types/node": "18",
+    "@types/node-fetch": "^2.6.4",
     "@types/ssh2": "^1.11.8",
     "@typescript-eslint/eslint-plugin": "^5.55.0",
     "@typescript-eslint/parser": "^5.55.0",
@@ -40,7 +47,7 @@
     "wait-for-expect": "^3.0.2"
   },
   "scripts": {
-    "test": "yarn jest",
+    "test": "yarn node --experimental-vm-modules $(yarn bin jest)",
     "start": "ts-node ./index.ts",
     "build": "tsc --noEmit",
     "dev": "DEBUG=1 yarn nodemon ./index.ts",
diff --git a/tunnel-server/src/app.ts b/tunnel-server/src/app.ts
index e969f50f..cb58d644 100644
--- a/tunnel-server/src/app.ts
+++ b/tunnel-server/src/app.ts
@@ -74,11 +74,12 @@ export const app = ({ proxy, sessionStore, baseUrl, activeTunnelStore, log, logi
         res.statusCode = 400
         return { error: 'returnPath must be a relative path' }
       }
-      const activeTunnel = await activeTunnelStore.get(envId)
-      if (!activeTunnel) {
+      const activeTunnelEntry = await activeTunnelStore.get(envId)
+      if (!activeTunnelEntry) {
         res.statusCode = 404
         return { error: 'unknown envId' }
       }
+      const { value: activeTunnel } = activeTunnelEntry
       const session = sessionStore(req.raw, res.raw, activeTunnel.publicKeyThumbprint)
       if (!session.user) {
         const auth = jwtAuthenticator(
diff --git a/tunnel-server/src/events.test.ts b/tunnel-server/src/events.test.ts
new file mode 100644
index 00000000..2e0d5624
--- /dev/null
+++ b/tunnel-server/src/events.test.ts
@@ -0,0 +1,68 @@
+import { EventEmitter } from 'tseep'
+import { afterAll, beforeAll, beforeEach, describe, expect, it, jest } from '@jest/globals'
+import { TimeoutError } from 'p-timeout'
+import { onceWithTimeout } from './events'
+
+describe('onceWithTimeout', () => {
+  beforeAll(() => {
+    jest.useFakeTimers()
+  })
+  afterAll(() => {
+    jest.useRealTimers()
+  })
+
+  let emitter: EventEmitter<{ foo: () => void; error: (err: Error) => void }>
+  beforeEach(() => {
+    emitter = new EventEmitter()
+  })
+
+  describe('when no timeout occurs', () => {
+    let p: Promise<void>
+    beforeEach(() => {
+      p = onceWithTimeout(emitter, 'foo', { milliseconds: 10 })
+      emitter.emit('foo')
+    })
+    it('resolves to undefined', async () => {
+      await expect(p).resolves.toBeUndefined()
+    })
+  })
+
+  describe('when an error is emitted', () => {
+    let p: Promise<void>
+    const e = new Error('boom')
+    beforeEach(() => {
+      p = onceWithTimeout(emitter, 'foo', { milliseconds: 10 })
+      emitter.emit('error', e)
+    })
+    it('rejects with the error', async () => {
+      await expect(p).rejects.toThrow(e)
+    })
+  })
+
+  describe('when a timeout occurs', () => {
+    describe('when no fallback is specified', () => {
+      let p: Promise<void>
+      beforeEach(() => {
+        p = onceWithTimeout(emitter, 'foo', { milliseconds: 10 })
+        jest.advanceTimersByTime(10)
+      })
+
+      it('rejects with a TimeoutError', async () => {
+        await expect(p).rejects.toThrow(TimeoutError)
+        await expect(p).rejects.toThrow('timed out after 10ms')
+      })
+    })
+
+    describe('when a fallback is specified', () => {
+      let p: Promise<12 | void>
+      beforeEach(() => {
+        p = onceWithTimeout(emitter, 'foo', { milliseconds: 10, fallback: async () => 12 as const })
+        jest.advanceTimersByTime(10)
+      })
+
+      it('resolves with the fallback', async () => {
+        await expect(p).resolves.toBe(12)
+      })
+    })
+  })
+})
diff --git a/tunnel-server/src/events.ts b/tunnel-server/src/events.ts
new file mode 100644
index 00000000..61b2cc12
--- /dev/null
+++ b/tunnel-server/src/events.ts
@@ -0,0 +1,36 @@
+import events from 'events'
+import { TimeoutError } from 'p-timeout'
+
+interface NodeEventTarget {
+  once(eventName: string | symbol, listener: (...args: unknown[]) => void): this
+}
+
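+// Waits for `event` on `target`. Resolves to undefined when the event fires,
+// rejects if an 'error' event fires first, and on timeout either resolves with
+// the fallback's result (when given) or rejects with TimeoutError.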
+export async function onceWithTimeout(
+  target: NodeEventTarget,
+  event: string | symbol,
+  opts: { milliseconds: number },
+): Promise<void>
+export async function onceWithTimeout <T = unknown>(
+  target: NodeEventTarget,
+  event: string | symbol,
+  opts: { milliseconds: number; fallback: () => T | Promise<T> },
+): Promise<T | void>
+export async function onceWithTimeout <T = unknown>(
+  target: NodeEventTarget,
+  event: string | symbol,
+  { milliseconds, fallback }: { milliseconds: number; fallback?: () => T | Promise<T> },
+): Promise<T | void> {
+  const signal = AbortSignal.timeout(milliseconds)
+  return await events.once(target, event, { signal }).then(
+    () => undefined,
+    async e => {
+      if (!signal.aborted || (e as Error).name !== 'AbortError') {
+        throw e
+      }
+      if (fallback) {
+        return await fallback()
+      }
+      throw new TimeoutError(`timed out after ${milliseconds}ms`)
+    },
+  )
+}
diff --git a/tunnel-server/src/id-generator.ts b/tunnel-server/src/id-generator.ts
new file mode 100644
index 00000000..15a66c22
--- /dev/null
+++ b/tunnel-server/src/id-generator.ts
@@ -0,0 +1,10 @@
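+// Returns sequential numeric ids, starting from 0 (used by the in-memory store to tag transactions).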
+export const idGenerator = () => {
+  let nextId = 0
+  return {
+    next: () => {
+      const result = nextId
+      nextId += 1
+      return result
+    },
+  }
+}
diff --git a/tunnel-server/src/memoize.test.ts b/tunnel-server/src/memoize.test.ts
new file mode 100644
index 00000000..d476694f
--- /dev/null
+++ b/tunnel-server/src/memoize.test.ts
@@ -0,0 +1,64 @@
+import { afterAll, beforeAll, beforeEach, describe, it, expect, jest } from '@jest/globals'
+import { memoizeForDuration } from './memoize'
+
+describe('memoizeForDuration', () => {
+  beforeAll(() => {
+    jest.useFakeTimers()
+  })
+  afterAll(() => {
+    jest.useRealTimers()
+  })
+
+  let fn: jest.Mock<() => number>
+  let memoized: () => number
+
+  beforeEach(() => {
+    fn = jest.fn(() => 12)
+    memoized = memoizeForDuration(fn, 1000)
+  })
+
+  describe('before the first call', () => {
+    it('does not call the specified function', () => {
+      expect(fn).not.toHaveBeenCalled()
+    })
+  })
+
+  describe('on the first call', () => {
+    let v: number
+    beforeEach(() => {
+      v = memoized()
+    })
+    it('calls the specified function', () => {
+      expect(fn).toHaveBeenCalledTimes(1)
+    })
+    it('returns the memoized value', () => {
+      expect(v).toBe(12)
+    })
+
+    describe('on the second call, when the expiry duration has not passed', () => {
+      beforeEach(() => {
+        jest.advanceTimersByTime(999)
+        v = memoized()
+      })
+      it('does not call the specified function again', () => {
+        expect(fn).toHaveBeenCalledTimes(1)
+      })
+      it('returns the memoized value', () => {
+        expect(v).toBe(12)
+      })
+    })
+
+    describe('on the second call, when the expiry duration has passed', () => {
+      beforeEach(() => {
+        jest.advanceTimersByTime(1000)
+        v = memoized()
+      })
+      it('calls the specified function again', () => {
+        expect(fn).toHaveBeenCalledTimes(2)
+      })
+      it('returns the memoized value', () => {
+        expect(v).toBe(12)
+      })
+    })
+  })
+})
diff --git a/tunnel-server/src/memoize.ts b/tunnel-server/src/memoize.ts
new file mode 100644
index 00000000..65652279
--- /dev/null
+++ b/tunnel-server/src/memoize.ts
@@ -0,0 +1,9 @@
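+// Wraps `f` so its result is cached and recomputed only after `milliseconds` have elapsed.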
+export const memoizeForDuration = <T>(f: () => T, milliseconds: number) => {
+  let cache: { value: T; expiry: number } | undefined
+  return () => {
+    if (!cache || cache.expiry <= Date.now()) {
+      cache = { value: f(), expiry: Date.now() + milliseconds }
+    }
+    return cache.value
+  }
+}
diff --git a/tunnel-server/src/memory-store.ts b/tunnel-server/src/memory-store.ts
new file mode 100644
index 00000000..3d2fdb99
--- /dev/null
+++ b/tunnel-server/src/memory-store.ts
@@ -0,0 +1,53 @@
+import { Logger } from 'pino'
+import { IEventEmitter, EventEmitter } from 'tseep'
+import { nextTick } from 'process'
+import { idGenerator } from './id-generator'
+
+export class KeyAlreadyExistsError<V> extends Error {
+  constructor(readonly key: string, readonly value: V) {
+    super(`key already exists: "${key}"`)
+  }
+}
+
+export type TransactionDescriptor = { readonly txId: number | string }
+
+type StoreEvents = {
+  delete: () => void
+}
+
+export type EntryWatcher = {
+  once: (event: 'delete', listener: () => void) => void
+}
+
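+// Minimal in-memory key/value store: `set` rejects duplicate keys with KeyAlreadyExistsError and
+// returns a transaction descriptor plus a watcher that emits 'delete' when the entry is removed.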
+export const inMemoryStore = <V extends {}>({ log }: { log: Logger }) => {
+  type MapValue = { value: V; watcher: IEventEmitter<StoreEvents>; setTx: TransactionDescriptor }
+  const map = new Map<string, MapValue>()
+  const txIdGen = idGenerator()
+  return {
+    get: async (key: string) => {
+      const entry = map.get(key)
+      return entry === undefined ? undefined : { value: entry.value, watcher: entry.watcher }
+    },
+    set: async (key: string, value: V) => {
+      const existing = map.get(key)
+      if (existing !== undefined) {
+        throw new KeyAlreadyExistsError<V>(key, existing.value)
+      }
+      const tx: TransactionDescriptor = { txId: txIdGen.next() }
+      log.debug('setting key %s id %s: %j', key, tx.txId, value)
+      const watcher = new EventEmitter<StoreEvents>()
+      map.set(key, { value, watcher, setTx: tx })
+      return { tx, watcher: watcher as EntryWatcher }
+    },
+    delete: async (key: string, setTx?: TransactionDescriptor) => {
+      const value = map.get(key)
+      if (value && (setTx === undefined || value.setTx.txId === setTx.txId) && map.delete(key)) {
+        nextTick(() => { value.watcher.emit('delete') })
+        return true
+      }
+      return false
+    },
+  }
+}
+
+export type Store<V extends {}> = ReturnType<typeof inMemoryStore<V>>
diff --git a/tunnel-server/src/tunnel-store/array-map.test.ts b/tunnel-server/src/multimap.test.ts
similarity index 65%
rename from tunnel-server/src/tunnel-store/array-map.test.ts
rename to tunnel-server/src/multimap.test.ts
index 18ad284c..8e1a2f84 100644
--- a/tunnel-server/src/tunnel-store/array-map.test.ts
+++ b/tunnel-server/src/multimap.test.ts
@@ -1,8 +1,9 @@
 import { describe, expect, it, beforeEach } from '@jest/globals'
-import { MultiMap, multimap } from './array-map'
+import { MultiMap, multimap } from './multimap'
 
 describe('multimap', () => {
-  let a: MultiMap<string, { x: number }>
+  type ObjType = { x: number }
+  let a: MultiMap<string, ObjType>
   const expectedValues = [{ x: 12 }, { x: 13 }] as const
   beforeEach(() => {
     a = multimap()
@@ -17,7 +18,7 @@ describe('multimap', () => {
   })
 
   describe('when the key exists', () => {
-    let values: readonly { x: number }[] | undefined
+    let values: readonly ObjType[] | undefined
     beforeEach(() => {
       values = a.get('foo')
     })
@@ -28,11 +29,26 @@ describe('multimap', () => {
       expect(values).toContain(expectedValues[1])
     })
 
+    describe('when the returned array is mutated', () => {
+      beforeEach(() => {
+        (values as ObjType[]).push({ x: 14 })
+      })
+      it('does not affect the multimap', () => {
+        expect(a.get('foo')).toHaveLength(2)
+      })
+    })
+
     describe('when delete is called with a predicate that returns false for everything', () => {
+      let deleteReturn: boolean
       beforeEach(() => {
-        a.delete('foo', () => false)
+        deleteReturn = a.delete('foo', () => false)
         values = a.get('foo')
       })
+
+      it('returns false', () => {
+        expect(deleteReturn).toBe(false)
+      })
+
       it('does not delete the values', () => {
         expect(values).toBeDefined()
         expect(values).toHaveLength(2)
@@ -42,21 +58,32 @@ describe('multimap', () => {
     })
 
     describe('when delete is called with a predicate that returns true for everything', () => {
+      let deleteReturn: boolean
       beforeEach(() => {
-        a.delete('foo', () => true)
+        deleteReturn = a.delete('foo', () => true)
         values = a.get('foo')
       })
+
+      it('returns true', () => {
+        expect(deleteReturn).toBe(true)
+      })
+
       it('deletes the values', () => {
         expect(values).toBeUndefined()
       })
     })
 
     describe('when delete is called with a predicate that returns true for a specific value', () => {
+      let deleteReturn: boolean
       beforeEach(() => {
-        a.delete('foo', ({ x }) => x === expectedValues[0].x)
+        deleteReturn = a.delete('foo', ({ x }) => x === expectedValues[0].x)
         values = a.get('foo')
       })
 
+      it('returns true', () => {
+        expect(deleteReturn).toBe(true)
+      })
+
       it('deletes the specific value', () => {
         expect(values).toBeDefined()
         expect(values).toHaveLength(1)
diff --git a/tunnel-server/src/tunnel-store/array-map.ts b/tunnel-server/src/multimap.ts
similarity index 72%
rename from tunnel-server/src/tunnel-store/array-map.ts
rename to tunnel-server/src/multimap.ts
index 40f57e6d..3c01d68a 100644
--- a/tunnel-server/src/tunnel-store/array-map.ts
+++ b/tunnel-server/src/multimap.ts
@@ -1,13 +1,13 @@
 export type MultiMap<K, V> = {
   get: (key: K) => readonly V[] | undefined
   add: (key: K, value: V) => void
-  delete: (key: K, pred: (value: V) => boolean) => void
+  delete: (key: K, pred: (value: V) => boolean) => boolean
 }
 
 export const multimap = <K, V>(): MultiMap<K, V> => {
   const map = new Map<K, V[]>()
   return {
-    get: (key: K) => map.get(key),
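+    // return a shallow copy so callers cannot mutate the stored array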
+    get: (key: K) => map.get(key)?.slice(),
     add: (key: K, value: V) => {
       let ar = map.get(key)
       if (ar === undefined) {
@@ -19,15 +19,19 @@ export const multimap = <K, V>(): MultiMap<K, V> => {
     delete: (key: K, pred: (value: V) => boolean) => {
       let ar = map.get(key)
       if (ar === undefined) {
-        return undefined
+        return false
       }
+      const prevLength = ar.length
       ar = ar.filter(value => !pred(value))
+      if (prevLength === ar.length) {
+        return false
+      }
       if (ar.length === 0) {
         map.delete(key)
       } else {
         map.set(key, ar)
       }
-      return undefined
+      return true
     },
   }
 }
diff --git a/tunnel-server/src/proxy/common.ts b/tunnel-server/src/proxy/common.ts
deleted file mode 100644
index 440c7111..00000000
--- a/tunnel-server/src/proxy/common.ts
+++ /dev/null
@@ -1 +0,0 @@
-export const INJECT_SCRIPTS_HEADER = 'x-preevy-inject-scripts'
diff --git a/tunnel-server/src/proxy/html-manipulation/index.ts b/tunnel-server/src/proxy/html-manipulation/index.ts
index 6409a44d..eb206ad4 100644
--- a/tunnel-server/src/proxy/html-manipulation/index.ts
+++ b/tunnel-server/src/proxy/html-manipulation/index.ts
@@ -3,65 +3,98 @@ import zlib from 'zlib'
 import stream from 'stream'
 import { parse as parseContentType } from 'content-type'
 import iconv from 'iconv-lite'
-import { INJECT_SCRIPTS_HEADER } from '../common'
+import { inspect } from 'node:util'
+import { Logger } from 'pino'
 import { InjectHtmlScriptTransform } from './inject-transform'
-import { ScriptInjection } from '../../tunnel-store'
+import { ScriptInjectionBase, ScriptInjection } from '../../tunnel-store'
 
-const compressionsForContentEncoding = (contentEncoding: string) => {
+const compressionsForContentEncoding = (
+  contentEncoding: string | undefined,
+): [stream.Transform, stream.Transform] | undefined => {
+  if (!contentEncoding || contentEncoding === 'identity') {
+    return undefined
+  }
   if (contentEncoding === 'gzip') {
-    return [zlib.createGunzip(), zlib.createGzip()] as const
+    return [zlib.createGunzip(), zlib.createGzip()]
   }
   if (contentEncoding === 'deflate') {
-    return [zlib.createInflate(), zlib.createDeflate()] as const
+    return [zlib.createInflate(), zlib.createDeflate()]
   }
   if (contentEncoding === 'br') {
-    return [zlib.createBrotliDecompress(), zlib.createBrotliCompress()] as const
+    return [zlib.createBrotliDecompress(), zlib.createBrotliCompress()]
   }
-  if (contentEncoding === 'identity') {
-    return undefined
+  throw new Error(`unsupported content encoding: ${inspect(contentEncoding)}`)
+}
+
+const streamsForContentEncoding = (
+  contentEncoding: string | undefined,
+  input: stream.Readable,
+  output: stream.Writable,
+): [stream.Readable, stream.Writable] => {
+  const compress = compressionsForContentEncoding(contentEncoding)
+  if (!compress) {
+    return [input, output]
   }
-  throw new Error(`unsupported content encoding: "${contentEncoding}"`)
+  compress[1].pipe(output)
+  return [input.pipe(compress[0]), compress[1]]
 }
 
-export const injectScripts = (
+const proxyWithInjection = (
   proxyRes: stream.Readable & Pick<IncomingMessage, 'headers' | 'statusCode'>,
-  req: Pick<IncomingMessage, 'headers'>,
   res: stream.Writable & Pick<ServerResponse<IncomingMessage>, 'writeHead'>,
+  injects: Omit<ScriptInjection, 'pathRegex'>[],
+  charset = 'utf-8',
 ) => {
-  res.writeHead(proxyRes.statusCode as number, proxyRes.headers)
+  res.writeHead(proxyRes.statusCode as number, { ...proxyRes.headers, 'transfer-encoding': '' })
 
-  const injectsStr = req.headers[INJECT_SCRIPTS_HEADER] as string | undefined
-  const contentTypeHeader = proxyRes.headers['content-type']
+  const [input, output] = streamsForContentEncoding(proxyRes.headers['content-encoding'], proxyRes, res)
 
-  if (!injectsStr || !contentTypeHeader) {
-    proxyRes.pipe(res)
-    return undefined
-  }
+  const transform = new InjectHtmlScriptTransform(injects)
 
-  const {
-    type: contentType,
-    parameters: { charset: reqCharset },
-  } = parseContentType(contentTypeHeader)
+  input
+    .pipe(iconv.decodeStream(charset))
+    .pipe(transform)
+    .pipe(iconv.encodeStream(charset))
+    .pipe(output)
+}
 
-  if (contentType !== 'text/html') {
-    proxyRes.pipe(res)
-    return undefined
+const proxyWithoutInjection = (
+  proxyRes: stream.Readable & Pick<IncomingMessage, 'headers' | 'statusCode'>,
+  res: stream.Writable & Pick<ServerResponse<IncomingMessage>, 'writeHead'>,
+) => {
+  res.writeHead(proxyRes.statusCode as number, proxyRes.headers)
+  proxyRes.pipe(res)
+}
+
+export const proxyResHandler = (
+  { log, injectsMap }: { log: Logger; injectsMap: Pick<WeakMap<object, ScriptInjectionBase[]>, 'get' | 'delete'> },
+) => (
+  proxyRes: stream.Readable & Pick<IncomingMessage, 'headers' | 'statusCode'>,
+  req: Pick<IncomingMessage, never>,
+  res: stream.Writable & Pick<ServerResponse<IncomingMessage>, 'writeHead'>,
+) => {
+  const injects = injectsMap.get(req)
+  if (!injects) {
+    return proxyWithoutInjection(proxyRes, res)
   }
+  injectsMap.delete(req)
 
-  const compress = compressionsForContentEncoding(proxyRes.headers['content-encoding'] || 'identity')
+  const contentTypeHeader = proxyRes.headers['content-type']
 
-  const [input, output] = compress
-    ? [proxyRes.pipe(compress[0]), res.pipe(compress[1])]
-    : [proxyRes, res]
+  if (!contentTypeHeader) {
+    return proxyWithoutInjection(proxyRes, res)
+  }
 
-  const injects = JSON.parse(injectsStr) as Omit<ScriptInjection, 'pathRegex'>[]
-  const transform = new InjectHtmlScriptTransform(injects)
+  const { type: contentType, parameters: { charset } } = parseContentType(contentTypeHeader)
+  if (contentType !== 'text/html') {
+    return proxyWithoutInjection(proxyRes, res)
+  }
 
-  input
-    .pipe(iconv.decodeStream(reqCharset || 'utf-8'))
-    .pipe(transform)
-    .pipe(iconv.encodeStream(reqCharset || 'utf-8'))
-    .pipe(output)
+  try {
+    return proxyWithInjection(proxyRes, res, injects, charset)
+  } catch (e) {
+    log.warn(`error trying to inject scripts: ${inspect(e)}`)
+  }
 
-  return undefined
+  return proxyWithoutInjection(proxyRes, res)
 }
diff --git a/tunnel-server/src/proxy/html-manipulation/inject-transform.ts b/tunnel-server/src/proxy/html-manipulation/inject-transform.ts
index e12af5ff..59ff60db 100644
--- a/tunnel-server/src/proxy/html-manipulation/inject-transform.ts
+++ b/tunnel-server/src/proxy/html-manipulation/inject-transform.ts
@@ -17,23 +17,17 @@ const htmlDetector = (): HtmlDetector => {
   const parser = new Parser({
     onopentag: name => {
       if (name === 'head') {
-        detected = { position: 'head-content-start', offset: parser.endIndex + 1 }
+        detected ||= { position: 'head-content-start', offset: parser.endIndex + 1 }
       }
     },
     onopentagname: name => {
-      if (
-        name === 'body'
-        && !detected // do not set if head already detected
-      ) {
-        detected = { position: 'before-body-tag', offset: parser.startIndex }
+      if (name === 'body') {
+        detected ||= { position: 'before-body-tag', offset: parser.startIndex }
       }
     },
     onclosetag: name => {
-      if (
-        name === 'html'
-        && !detected // do not set if head already detected
-      ) {
-        detected = { position: 'html-content-end', offset: parser.startIndex }
+      if (name === 'html') {
+        detected ||= { position: 'html-content-end', offset: parser.startIndex }
       }
     },
   }, { decodeEntities: false, lowerCaseTags: true })
@@ -85,11 +79,11 @@ export class InjectHtmlScriptTransform extends stream.Transform {
   override _transform(chunk: string, _encoding: BufferEncoding | 'buffer', callback: stream.TransformCallback): void {
     if (typeof chunk !== 'string') {
       // chunk must be string rather than Buffer so htmlDetector offsets would be in character units, not bytes
-      throw new Error(`Invalid chunk, expected string, received ${typeof chunk}: ${chunk}`)
+      throw new Error(`Invalid chunk, expected string, received ${Buffer.isBuffer(chunk) ? 'Buffer' : typeof chunk}: ${chunk}`)
     }
 
     if (this.injected) {
-      // pass chunks through as-is after the injection
+      // after the injection has happened, pass chunks through as-is
       this.pushNonEmpty(chunk)
       callback(null)
       return undefined
diff --git a/tunnel-server/src/proxy/index.ts b/tunnel-server/src/proxy/index.ts
index 16f12d33..833580db 100644
--- a/tunnel-server/src/proxy/index.ts
+++ b/tunnel-server/src/proxy/index.ts
@@ -5,14 +5,13 @@ import type { Logger } from 'pino'
 import { inspect } from 'util'
 import { KeyObject } from 'crypto'
 import stream from 'stream'
-import { ActiveTunnel, ActiveTunnelStore } from '../tunnel-store'
+import { ActiveTunnel, ActiveTunnelStore, ScriptInjectionBase } from '../tunnel-store'
 import { requestsCounter } from '../metrics'
 import { Claims, jwtAuthenticator, AuthenticationResult, AuthError, saasIdentityProvider, cliIdentityProvider } from '../auth'
 import { SessionStore } from '../session'
 import { BadGatewayError, BadRequestError, BasicAuthUnauthorizedError, RedirectError, UnauthorizedError, errorHandler, errorUpgradeHandler, tryHandler, tryUpgradeHandler } from '../http-server-helpers'
 import { TunnelFinder, proxyRouter } from './router'
-import { injectScripts } from './html-manipulation'
-import { INJECT_SCRIPTS_HEADER } from './common'
+import { proxyResHandler } from './html-manipulation'
 
 const loginRedirectUrl = (loginUrl: string) => ({ env, returnPath }: { env: string; returnPath?: string }) => {
   const url = new URL(loginUrl)
@@ -43,8 +42,9 @@ export const proxy = ({
   saasPublicKey: KeyObject
   jwtSaasIssuer: string
 }) => {
-  const theProxy = httpProxy.createProxy({})
-  theProxy.on('proxyRes', injectScripts)
+  const theProxy = httpProxy.createProxyServer({ xfwd: true })
+  const injectsMap = new WeakMap<IncomingMessage, ScriptInjectionBase[]>()
+  theProxy.on('proxyRes', proxyResHandler({ log, injectsMap }))
 
   const loginRedirectUrlForRequest = loginRedirectUrl(loginUrl)
   const saasIdp = saasIdentityProvider(jwtSaasIssuer, saasPublicKey)
@@ -137,11 +137,11 @@ export const proxy = ({
     requestsCounter.inc({ clientId: activeTunnel.clientId })
 
     const injects = activeTunnel.inject
-      ?.filter(({ pathRegex }) => !pathRegex || pathRegex.test(mutatedReq.url || ''))
+      ?.filter(({ pathRegex }) => !pathRegex || (mutatedReq.url && pathRegex.test(mutatedReq.url)))
       ?.map(({ src, defer, async }) => ({ src, defer, async }))
 
     if (injects?.length) {
-      mutatedReq.headers[INJECT_SCRIPTS_HEADER] = JSON.stringify(injects)
+      injectsMap.set(mutatedReq, injects)
     }
 
     return theProxy.web(
@@ -154,7 +154,7 @@ export const proxy = ({
         target: {
           socketPath: activeTunnel.target,
         },
-        selfHandleResponse: true, // handled by the injectScripts onProxyRes hook
+        selfHandleResponse: true, // handled by the onProxyRes hook
       },
       err => errorHandler(log, err, req, res)
     )
diff --git a/tunnel-server/src/proxy/router.ts b/tunnel-server/src/proxy/router.ts
index 3587f42d..fa2031af 100644
--- a/tunnel-server/src/proxy/router.ts
+++ b/tunnel-server/src/proxy/router.ts
@@ -44,7 +44,7 @@ export const proxyRouter = (
     return async activeTunnelStore => {
       const activeTunnel = await activeTunnelStore.get(parsed.firstLabel)
       return activeTunnel
-        ? { path: url as string, activeTunnel }
+        ? { path: url as string, activeTunnel: activeTunnel.value }
         : undefined
     }
   }
@@ -66,7 +66,7 @@ export const proxyRouter = (
     return async activeTunnelStore => {
       const activeTunnel = await activeTunnelStore.get(tunnel)
       return activeTunnel
-        ? { path: path as string, activeTunnel }
+        ? { path: path as string, activeTunnel: activeTunnel.value }
         : undefined
     }
   }
diff --git a/tunnel-server/src/ssh/base-server.ts b/tunnel-server/src/ssh/base-server.ts
index 4703cc25..03400b07 100644
--- a/tunnel-server/src/ssh/base-server.ts
+++ b/tunnel-server/src/ssh/base-server.ts
@@ -1,12 +1,15 @@
-import crypto, { randomBytes } from 'crypto'
+import crypto, { createPublicKey, randomBytes } from 'crypto'
 import { FastifyBaseLogger } from 'fastify/types/logger'
 import net from 'net'
 import path from 'path'
-import ssh2, { ParsedKey, SocketBindInfo } from 'ssh2'
+import ssh2, { SocketBindInfo } from 'ssh2'
 import { inspect } from 'util'
-import EventEmitter from 'node:events'
+import { EventEmitter, IEventEmitter } from 'tseep'
+import { calculateJwkThumbprintUri, exportJWK } from 'jose'
 import { ForwardRequest, parseForwardRequest } from '../forward-request'
 import { createDestroy } from '../destroy-server'
+import { onceWithTimeout } from '../events'
+import { memoizeForDuration } from '../memoize'
 
 const clientIdFromPublicSsh = (key: Buffer) =>
   crypto.createHash('sha1').update(key).digest('base64url').replace(/[_-]/g, '')
@@ -28,65 +31,52 @@ const parseForwardRequestFromSocketBindInfo = (
   }
 }
 
-export interface ClientForward extends EventEmitter {
-  on: (
-    (event: 'close', listener: () => void) => this
-  ) & (
-    (event: 'error', listener: (err: Error) => void) => this
-  )
+export type ClientForward = IEventEmitter<{
+  close: () => void
+  error: (err: Error) => void
+}>
+
+export type BaseSshClientEvents = {
+  forward: (
+    requestId: string,
+    request: ForwardRequest,
+    localSocketPath: string,
+    accept: () => Promise<ClientForward>,
+    reject: (reason: Error) => void,
+  ) => void
+  exec: (
+    command: string,
+    respondWithJson: (content: unknown) => void,
+    reject: () => void,
+  ) => void
+  error: (err: Error) => void
+  end: () => void
 }
 
-export interface BaseSshClient extends EventEmitter {
+export interface BaseSshClient extends IEventEmitter<BaseSshClientEvents> {
   envId: string
   clientId: string
-  publicKey: ParsedKey
-  on: (
-    (
-      event: 'forward',
-      listener: (
-        requestId: string,
-        request: ForwardRequest,
-        localSocketPath: string,
-        accept: () => Promise<ClientForward>,
-        reject: (reason: Error) => void,
-      ) => void
-    ) => this
-  ) & (
-    (
-      event: 'exec',
-      listener: (
-        command: string,
-        respondWithJson: (content: unknown) => void,
-        reject: () => void,
-      ) => void
-    ) => this
-  ) & (
-    (
-      event: 'error',
-      listener: (err: Error) => void,
-    ) => this
-  )
+  publicKey: crypto.KeyObject
+  publicKeyThumbprint: string
+  end: () => Promise<void>
+  ping: () => Promise<boolean>
+  connectionId: string
+  log: FastifyBaseLogger
+}
+
+type BaseSshServerEvents = {
+  client: (client: BaseSshClient) => void
+  error: (err: Error) => void
 }
 
-export interface BaseSshServer extends EventEmitter {
+export interface BaseSshServer extends IEventEmitter<BaseSshServerEvents> {
   close: ssh2.Server['close']
   listen: ssh2.Server['listen']
-  on: (
-    (
-      event: 'client',
-      listener: (client: BaseSshClient) => void,
-    ) => this
-  ) & (
-    (
-      event: 'error',
-      listener: (err: Error) => void,
-    ) => this
-  )
 }
 
 export const baseSshServer = (
   {
-    log,
+    log: serverLog,
     sshPrivateKey,
     socketDir,
   }: {
@@ -95,7 +85,7 @@ export const baseSshServer = (
     socketDir: string
   }
 ): BaseSshServer => {
-  const serverEmitter = new EventEmitter({ captureRejections: true }) as Omit<BaseSshServer, 'close' | 'listen'>
+  const serverEmitter = new EventEmitter<BaseSshServerEvents>()
   const server = new ssh2.Server(
     {
       // debug: x => log.debug(x),
@@ -105,10 +95,32 @@ export const baseSshServer = (
     },
     client => {
       let preevySshClient: BaseSshClient
+      const connectionId = `ssh-client-${Math.random().toString(36).substring(2, 9)}`
+      let log = serverLog.child({ connectionId })
       const socketServers = new Map<string, net.Server>()
+      let ended = false
+      const end = async () => {
+        if (!ended) {
+          await new Promise(resolve => {
+            client.once('end', resolve)
+            client.end()
+          })
+        }
+      }
+
+      let authContext: ssh2.AuthContext
+      let key: ssh2.ParsedKey
+
+      const REKEY_TIMEOUT = 5000
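+      // liveness check: trigger an SSH rekey and wait for it to complete;
+      // memoized so concurrent checks within REKEY_TIMEOUT share one rekey round-trip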
+      const ping = memoizeForDuration(async () => {
+        const result = onceWithTimeout(client, 'rekey', { milliseconds: REKEY_TIMEOUT })
+          .then(() => 'pong' as const, () => 'error' as const)
+        client.rekey()
+        return (await result) === 'pong'
+      }, REKEY_TIMEOUT)
 
       client
-        .on('authentication', ctx => {
+        .on('authentication', async ctx => {
           log.debug('authentication: %j', ctx)
           if (ctx.method !== 'publickey') {
             ctx.reject(['publickey'])
@@ -130,13 +142,28 @@ export const baseSshServer = (
             return
           }
 
-          preevySshClient = Object.assign(new EventEmitter({ captureRejections: true }), {
-            publicKey: keyOrError,
-            clientId: clientIdFromPublicSsh(keyOrError.getPublicSSH()),
-            envId: ctx.username,
-          })
-          log.debug('accepting clientId %j envId %j', preevySshClient.clientId, preevySshClient.envId)
+          authContext = ctx
+          key = keyOrError
+          log.debug('accepting connection')
           ctx.accept()
+        })
+        .on('ready', async () => {
+          const publicKey = createPublicKey(key.getPublicPEM())
+          const envId = authContext.username
+          const clientId = clientIdFromPublicSsh(key.getPublicSSH())
+          log = serverLog.child({ clientId, envId, connectionId })
+
+          preevySshClient = Object.assign(new EventEmitter<BaseSshClientEvents>(), {
+            connectionId,
+            clientId,
+            envId,
+            publicKey,
+            publicKeyThumbprint: await calculateJwkThumbprintUri(await exportJWK(publicKey)),
+            end,
+            ping,
+            log,
+          })
+
           serverEmitter.emit('client', preevySshClient)
         })
         .on('request', async (accept, reject, name, info) => {
@@ -214,7 +241,7 @@ export const baseSshServer = (
                   log.debug('streamlocal-forward@openssh.com: request %j calling accept: %j', request, accept)
                   accept?.()
                   socketServers.set(request, socketServer)
-                  resolveForward(socketServer)
+                  resolveForward(socketServer as ClientForward)
                 })
                 .on('error', (err: unknown) => {
                   log.error('socketServer request %j error: %j', request, err)
@@ -241,6 +268,11 @@ export const baseSshServer = (
           preevySshClient?.emit('error', err)
           client.end()
         })
+        .once('end', () => {
+          log.debug('client end')
+          ended = true
+          preevySshClient?.emit('end')
+        })
         .on('session', accept => {
           log.debug('session')
           const session = accept()
@@ -266,7 +298,7 @@ export const baseSshServer = (
     }
   )
     .on('error', (err: unknown) => {
-      log.error('ssh server error: %j', err)
+      serverLog.error('ssh server error: %j', err)
     })
 
   return Object.assign(serverEmitter, {
diff --git a/tunnel-server/src/ssh/index.ts b/tunnel-server/src/ssh/index.ts
index 727d1c51..f89f2356 100644
--- a/tunnel-server/src/ssh/index.ts
+++ b/tunnel-server/src/ssh/index.ts
@@ -1,13 +1,13 @@
 import { Logger } from 'pino'
-import { createPublicKey } from 'crypto'
-import { calculateJwkThumbprintUri, exportJWK } from 'jose'
 import { inspect } from 'util'
 import { Gauge } from 'prom-client'
-import { baseSshServer } from './base-server'
-import { ActiveTunnelStore, KeyAlreadyExistsError, activeTunnelStoreKey } from '../tunnel-store'
+import { BaseSshClient, baseSshServer } from './base-server'
+import { ActiveTunnelStore, activeTunnelStoreKey } from '../tunnel-store'
+import { KeyAlreadyExistsError } from '../memory-store'
+import { onceWithTimeout } from '../events'
 
 export const createSshServer = ({
-  log,
+  log: serverLog,
   sshPrivateKey,
   socketDir,
   activeTunnelStore,
@@ -22,41 +22,54 @@ export const createSshServer = ({
   tunnelUrl: (clientId: string, remotePath: string) => string
   helloBaseResponse: Record<string, unknown>
   tunnelsGauge: Pick<Gauge, 'inc' | 'dec'>
-}) => baseSshServer({
-  log,
-  sshPrivateKey,
-  socketDir,
-})
-  .on('client', client => {
-    const { clientId, publicKey, envId } = client
-    const pk = createPublicKey(publicKey.getPublicPEM())
+}) => {
+  const storeKeyToClient = new Map<string, BaseSshClient>()
+  const onClient = (client: BaseSshClient) => {
+    const { clientId, publicKey, envId, connectionId, publicKeyThumbprint, log } = client
     const tunnels = new Map<string, string>()
-    const jwkThumbprint = (async () => await calculateJwkThumbprintUri(await exportJWK(pk)))()
     client
       .on('forward', async (requestId, { path: tunnelPath, access, meta, inject }, localSocketPath, accept, reject) => {
         const key = activeTunnelStoreKey(clientId, tunnelPath)
+
         log.info('creating tunnel %s for localSocket %s', key, localSocketPath)
-        const setTx = await activeTunnelStore.set(key, {
+        const set = async (): ReturnType<typeof activeTunnelStore['set']> => await activeTunnelStore.set(key, {
           tunnelPath,
           envId,
           target: localSocketPath,
           clientId,
-          publicKey: pk,
+          publicKey,
           access,
           hostname: key,
-          publicKeyThumbprint: await jwkThumbprint,
+          publicKeyThumbprint,
           meta,
           inject,
-        }).catch(e => {
-          reject(
-            e instanceof KeyAlreadyExistsError
-              ? new Error(`duplicate path: ${key}, client map contains path: ${tunnels.has(key)}`)
-              : new Error(`error setting tunnel ${key}: ${e}`, { cause: e }),
-          )
+          client,
+        }).catch(async e => {
+          if (!(e instanceof KeyAlreadyExistsError)) {
+            throw e
+          }
+          const existingEntry = await activeTunnelStore.get(key)
+          if (!existingEntry) {
+            return await set() // retry
+          }
+          const otherClient = existingEntry.value.client as BaseSshClient
+          if (otherClient.connectionId === connectionId) {
+            throw new Error(`duplicate path: ${key}, from same connection ${connectionId}`)
+          }
+          if (!await otherClient.ping()) {
+            const existingDelete = onceWithTimeout(existingEntry.watcher, 'delete', { milliseconds: 2000 })
+            void otherClient.end()
+            await existingDelete
+            return await set() // retry
+          }
+          throw new Error(`duplicate path: ${key}, from different connection ${connectionId}`)
         })
-        if (!setTx) {
+
+        const setResult = await set().catch(err => { reject(err) })
+        if (!setResult) {
           return undefined
         }
+        const { tx: setTx } = setResult
         const forward = await accept().catch(async e => {
           log.warn('error accepting forward %j: %j', requestId, inspect(e))
           await activeTunnelStore.delete(key, setTx)
@@ -64,6 +77,7 @@ export const createSshServer = ({
         if (!forward) {
           return undefined
         }
+        storeKeyToClient.set(key, client)
         tunnels.set(requestId, tunnelUrl(clientId, tunnelPath))
         const onForwardClose = (event: 'close' | 'error') => (err?: Error) => {
           if (err) {
@@ -72,6 +86,7 @@ export const createSshServer = ({
             log.info('%s: deleting tunnel %s', event, key)
           }
           tunnels.delete(requestId)
+          storeKeyToClient.delete(key)
           void activeTunnelStore.delete(key, setTx)
           tunnelsGauge.dec({ clientId })
         }
@@ -106,4 +121,11 @@ export const createSshServer = ({
         reject()
         return undefined
       })
-  })
+  }
+
+  return baseSshServer({
+    log: serverLog,
+    sshPrivateKey,
+    socketDir,
+  }).on('client', onClient)
+}
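
The duplicate-path branch above pings the connection that currently owns the key, ends it if it is unresponsive, waits (bounded by 2 seconds) for the store entry's 'delete' event via onceWithTimeout, and then retries the set. onceWithTimeout is imported from ../events, which this diff does not include; a minimal sketch of such a helper, assuming it is built on the p-timeout dependency added to the lockfile below, could look like this:

import pTimeout from 'p-timeout'

// Anything exposing once(event, listener), e.g. the EntryWatcher returned by the store.
type OnceEmitter = { once: (event: string, listener: () => void) => unknown }

// Resolve when `event` fires once; give up after `milliseconds`.
// Hypothetical sketch — the real implementation in tunnel-server/src/events.ts may differ.
export const onceWithTimeout = (
  emitter: OnceEmitter,
  event: string,
  { milliseconds }: { milliseconds: number },
): Promise<void> => pTimeout(
  new Promise<void>(resolve => { emitter.once(event, () => resolve()) }),
  { milliseconds },
)

If the real helper rejects on timeout, that rejection propagates through set() to the forward's reject callback above; if it resolves with a fallback instead, the retry proceeds.
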
diff --git a/tunnel-server/src/tunnel-store/index.test.ts b/tunnel-server/src/tunnel-store/index.test.ts
index 77f2d004..8c113c5e 100644
--- a/tunnel-server/src/tunnel-store/index.test.ts
+++ b/tunnel-server/src/tunnel-store/index.test.ts
@@ -1,27 +1,36 @@
-import { describe, it, expect, beforeEach } from '@jest/globals'
+import { describe, it, expect, beforeEach, jest } from '@jest/globals'
 import pinoPretty from 'pino-pretty'
 import { Logger, pino } from 'pino'
-import { ActiveTunnel, ActiveTunnelStore, TransactionDescriptor, inMemoryActiveTunnelStore } from '.'
+import { nextTick } from 'process'
+import { ActiveTunnel, ActiveTunnelStore, inMemoryActiveTunnelStore } from '.'
+import { EntryWatcher, TransactionDescriptor } from '../memory-store'
 
 describe('inMemoryActiveTunnelStore', () => {
   let store: ActiveTunnelStore
   let log: Logger
 
   beforeEach(() => {
-    log = pino({ level: 'debug' }, pinoPretty())
+    log = pino({ level: 'silent' }, pinoPretty())
     store = inMemoryActiveTunnelStore({ log })
   })
 
   describe('when setting a new key', () => {
-    let desc: TransactionDescriptor
+    let tx: TransactionDescriptor
     let val: ActiveTunnel
+    let watcher: EntryWatcher
     beforeEach(async () => {
       val = { publicKeyThumbprint: 'pk1' } as ActiveTunnel
-      desc = await store.set('foo', val)
+      const setResult = await store.set('foo', val)
+      tx = setResult.tx
+      watcher = setResult.watcher
     })
 
     it('returns a descriptor', async () => {
-      expect(desc).toBeDefined()
+      expect(tx).toBeDefined()
+    })
+
+    it('returns a watcher', async () => {
+      expect(watcher).toBeDefined()
     })
 
     describe('when getting a non-existant key', () => {
@@ -43,13 +52,20 @@ describe('inMemoryActiveTunnelStore', () => {
 
     describe('when getting the key', () => {
       let gotVal: ActiveTunnel | undefined
+      let gotWatcher: EntryWatcher | undefined
       beforeEach(async () => {
-        gotVal = await store.get('foo')
+        const getResult = await store.get('foo')
+        gotVal = getResult?.value
+        gotWatcher = getResult?.watcher
       })
 
       it('returns the value', () => {
         expect(gotVal).toBe(val)
       })
+
+      it('returns a watcher', () => {
+        expect(gotWatcher).toBeDefined()
+      })
     })
 
     describe('when getting an existing value by thumbprint', () => {
@@ -65,8 +81,13 @@ describe('inMemoryActiveTunnelStore', () => {
     })
 
     describe('when deleting a non-existant value', () => {
+      let deleteResult: boolean
       beforeEach(async () => {
-        await store.delete('bar')
+        deleteResult = await store.delete('bar')
+      })
+
+      it('returns false', () => {
+        expect(deleteResult).toBe(false)
       })
 
       describe('when getting a non-existing value by thumbprint', () => {
@@ -94,8 +115,13 @@ describe('inMemoryActiveTunnelStore', () => {
     })
 
     describe('when deleting an existing value without a tx arg', () => {
+      let deleteResult: boolean
       beforeEach(async () => {
-        await store.delete('foo')
+        deleteResult = await store.delete('foo')
+      })
+
+      it('returns true', () => {
+        expect(deleteResult).toBe(true)
       })
 
       describe('when getting the deleted key', () => {
@@ -117,8 +143,13 @@ describe('inMemoryActiveTunnelStore', () => {
     })
 
     describe('when deleting an existing value with a correct tx arg', () => {
+      let deleteResult: boolean
       beforeEach(async () => {
-        await store.delete('foo', desc)
+        deleteResult = await store.delete('foo', tx)
+      })
+
+      it('returns true', () => {
+        expect(deleteResult).toBe(true)
       })
 
       describe('when getting the deleted key', () => {
@@ -140,14 +171,19 @@ describe('inMemoryActiveTunnelStore', () => {
     })
 
     describe('when deleting an existing value with an incorrect tx arg', () => {
+      let deleteResult: boolean
       beforeEach(async () => {
-        await store.delete('foo', { txId: -1 })
+        deleteResult = await store.delete('foo', { txId: -1 })
+      })
+
+      it('returns false', () => {
+        expect(deleteResult).toBe(false)
       })
 
       describe('when getting the key', () => {
         let gotVal: ActiveTunnel | undefined
         beforeEach(async () => {
-          gotVal = await store.get('foo')
+          gotVal = (await store.get('foo'))?.value
         })
 
         it('returns the value', () => {
@@ -167,5 +203,41 @@ describe('inMemoryActiveTunnelStore', () => {
         })
       })
     })
+
+    describe('when watching an item', () => {
+      let deleteListener: jest.Mock<() => void>
+      beforeEach(() => {
+        deleteListener = jest.fn<() => void>()
+        watcher.once('delete', deleteListener)
+      })
+
+      describe('when deleting the item', () => {
+        beforeEach(async () => {
+          await store.delete('foo')
+          await new Promise(nextTick)
+        })
+
+        it('calls the delete listener on the next tick', () => {
+          expect(deleteListener).toHaveBeenCalled()
+        })
+      })
+
+      describe('when another item is in the store', () => {
+        beforeEach(async () => {
+          await store.set('bar', { publicKeyThumbprint: 'pk2' } as ActiveTunnel)
+        })
+
+        describe('when deleting the other item', () => {
+          beforeEach(async () => {
+            await store.delete('bar')
+            await new Promise(nextTick)
+          })
+
+          it("does not call the first item's delete listener on the next tick", () => {
+            expect(deleteListener).not.toHaveBeenCalled()
+          })
+        })
+      })
+    })
   })
 })
diff --git a/tunnel-server/src/tunnel-store/index.ts b/tunnel-server/src/tunnel-store/index.ts
index a7f7c652..6df0a7ac 100644
--- a/tunnel-server/src/tunnel-store/index.ts
+++ b/tunnel-server/src/tunnel-store/index.ts
@@ -1,16 +1,20 @@
 import { KeyObject } from 'crypto'
 import { Logger } from 'pino'
-import { multimap } from './array-map'
+import { multimap } from '../multimap'
+import { Store, TransactionDescriptor, inMemoryStore } from '../memory-store'
 
 export { activeTunnelStoreKey } from './key'
 
-export type ScriptInjection = {
-  pathRegex?: RegExp
+export type ScriptInjectionBase = {
   src: string
   async?: boolean
   defer?: boolean
 }
 
+export type ScriptInjection = ScriptInjectionBase & {
+  pathRegex?: RegExp
+}
+
 export type ActiveTunnel = {
   envId: string
   clientId: string
@@ -22,58 +26,35 @@ export type ActiveTunnel = {
   access: 'private' | 'public'
   meta: Record<string, unknown>
   inject?: ScriptInjection[]
+  client: unknown
 }
 
-export class KeyAlreadyExistsError extends Error {
-  constructor(readonly key: string) {
-    super(`key already exists: "${key}"`)
-  }
-}
-
-export type TransactionDescriptor = { txId: number }
-
-export type ActiveTunnelStore = {
-  get: (key: string) => Promise<ActiveTunnel | undefined>
+export type ActiveTunnelStore = Store<ActiveTunnel> & {
   getByPkThumbprint: (pkThumbprint: string) => Promise<readonly ActiveTunnel[] | undefined>
-  set: (key: string, value: ActiveTunnel) => Promise<TransactionDescriptor>
-  delete: (key: string, tx?: TransactionDescriptor) => Promise<void>
-}
-
-const idGenerator = () => {
-  let nextId = 0
-  return {
-    next: () => {
-      const result = nextId
-      nextId += 1
-      return result
-    },
-  }
 }
 
 export const inMemoryActiveTunnelStore = ({ log }: { log: Logger }): ActiveTunnelStore => {
-  const keyToTunnel = new Map<string, ActiveTunnel & { txId: number }>()
-  const pkThumbprintToTunnel = multimap<string, string>()
-  const txIdGen = idGenerator()
-  return {
-    get: async key => keyToTunnel.get(key),
-    getByPkThumbprint: async pkThumbprint => pkThumbprintToTunnel.get(pkThumbprint)
-      ?.map(key => keyToTunnel.get(key) as ActiveTunnel),
-    set: async (key, value) => {
-      if (keyToTunnel.has(key)) {
-        throw new KeyAlreadyExistsError(key)
-      }
-      const txId = txIdGen.next()
-      log.debug('setting tunnel key %s id %s: %j', key, txId, value)
-      keyToTunnel.set(key, Object.assign(value, { txId }))
-      pkThumbprintToTunnel.add(value.publicKeyThumbprint, key)
-      return { txId }
+  const keyToTunnel = inMemoryStore<ActiveTunnel>({ log })
+  const pkThumbprintToTunnel = multimap<string, { key: string; tx: TransactionDescriptor }>()
+  const { set: storeSet } = keyToTunnel
+  return Object.assign(keyToTunnel, {
+    getByPkThumbprint: async (pkThumbprint: string) => {
+      const entries = pkThumbprintToTunnel.get(pkThumbprint) ?? []
+      const result = (
+        await Promise.all(entries.map(async ({ key }) => (await keyToTunnel.get(key))?.value))
+      ).filter(Boolean) as ActiveTunnel[]
+      return result.length ? result : undefined
     },
-    delete: async (key, tx) => {
-      const tunnel = keyToTunnel.get(key)
-      if (tunnel && (tx === undefined || tunnel.txId === tx.txId)) {
-        pkThumbprintToTunnel.delete(tunnel.publicKeyThumbprint, k => k === key)
-        keyToTunnel.delete(key)
-      }
+    set: async (key: string, value: ActiveTunnel) => {
+      const result = await storeSet(key, value)
+      pkThumbprintToTunnel.add(value.publicKeyThumbprint, { key, tx: result.tx })
+      result.watcher.once('delete', () => {
+        pkThumbprintToTunnel.delete(
+          value.publicKeyThumbprint,
+          entry => entry.key === key && entry.tx.txId === result.tx.txId,
+        )
+      })
+      return result
     },
-  }
+  })
 }
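
Both the reworked store above and the updated tests rely on the new ../memory-store module, which this diff references but does not contain. The sketch below captures only the contract they appear to assume: set returns { tx, watcher } and throws KeyAlreadyExistsError on duplicates, get returns { value, watcher }, delete returns a boolean and notifies the entry's watcher. It is an illustration of that contract, not the actual implementation:

import { EventEmitter } from 'tseep'
import { Logger } from 'pino'

export class KeyAlreadyExistsError extends Error {
  constructor(readonly key: string) {
    super(`key already exists: "${key}"`)
  }
}

export type TransactionDescriptor = { readonly txId: number }
export type EntryWatcher = EventEmitter<{ delete: () => void }>

export type Store<V> = {
  get: (key: string) => Promise<{ value: V; watcher: EntryWatcher } | undefined>
  set: (key: string, value: V) => Promise<{ tx: TransactionDescriptor; watcher: EntryWatcher }>
  delete: (key: string, tx?: TransactionDescriptor) => Promise<boolean>
}

export const inMemoryStore = <V>({ log }: { log: Logger }): Store<V> => {
  let nextTxId = 0
  const entries = new Map<string, { value: V; txId: number; watcher: EntryWatcher }>()
  return {
    get: async key => {
      const entry = entries.get(key)
      return entry && { value: entry.value, watcher: entry.watcher }
    },
    set: async (key, value) => {
      if (entries.has(key)) {
        throw new KeyAlreadyExistsError(key)
      }
      const txId = nextTxId
      nextTxId += 1
      const watcher = new EventEmitter<{ delete: () => void }>()
      log.debug('setting key %s tx %d', key, txId)
      entries.set(key, { value, txId, watcher })
      return { tx: { txId }, watcher }
    },
    delete: async (key, tx) => {
      const entry = entries.get(key)
      if (!entry || (tx !== undefined && tx.txId !== entry.txId)) {
        return false
      }
      entries.delete(key)
      // the test in index.test.ts above expects the listener to fire on the next tick
      process.nextTick(() => entry.watcher.emit('delete'))
      return true
    },
  }
}

Under this contract, the thumbprint index above only has to subscribe to each entry's 'delete' event to stay consistent with the primary key-to-tunnel map.
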
diff --git a/tunnel-server/src/url.test.ts b/tunnel-server/src/url.test.ts
index 442deebe..8d2851d8 100644
--- a/tunnel-server/src/url.test.ts
+++ b/tunnel-server/src/url.test.ts
@@ -5,24 +5,30 @@ describe('url', () => {
   describe('editUrl', () => {
     let baseUrl: URL
     beforeEach(() => {
-      baseUrl = new URL('http://example.com/?x=12&y=13')
+      baseUrl = new URL('http://example.com/mypath?x=12&y=13')
     })
 
     describe('when given a hostname', () => {
       it('should override the hostname', () => {
-        expect(editUrl(baseUrl, { hostname: 'other.org' }).toJSON()).toEqual(new URL('http://other.org/?x=12&y=13').toJSON())
+        expect(editUrl(baseUrl, { hostname: 'other.org' }).toJSON()).toEqual(new URL('http://other.org/mypath?x=12&y=13').toJSON())
       })
     })
 
     describe('when given query params', () => {
       it('should override the given query params', () => {
-        expect(editUrl(baseUrl, { queryParams: { x: '15', z: '16' } }).toJSON()).toEqual(new URL('http://example.com/?x=15&z=16&y=13').toJSON())
+        expect(editUrl(baseUrl, { queryParams: { x: '15', z: '16' } }).toJSON()).toEqual(new URL('http://example.com/mypath?x=15&z=16&y=13').toJSON())
       })
     })
 
     describe('when given username and password', () => {
       it('should override the username and password', () => {
-        expect(editUrl(baseUrl, { username: 'user1', password: 'hunter2' }).toJSON()).toEqual(new URL('http://user1:hunter2@example.com/?x=12&y=13').toJSON())
+        expect(editUrl(baseUrl, { username: 'user1', password: 'hunter2' }).toJSON()).toEqual(new URL('http://user1:hunter2@example.com/mypath?x=12&y=13').toJSON())
+      })
+    })
+
+    describe('when given a path', () => {
+      it('should override the path', () => {
+        expect(editUrl(baseUrl, { path: 'otherpath' }).toJSON()).toEqual(new URL('http://example.com/otherpath?x=12&y=13').toJSON())
       })
     })
   })
diff --git a/tunnel-server/src/url.ts b/tunnel-server/src/url.ts
index e59ca393..b2c65a3f 100644
--- a/tunnel-server/src/url.ts
+++ b/tunnel-server/src/url.ts
@@ -2,13 +2,14 @@ import lodash from 'lodash'
 
 export const editUrl = (
   url: URL | string,
-  { hostname, queryParams, username, password }: Partial<{
+  { hostname, queryParams, username, password, path }: Partial<{
     hostname: string
     queryParams: Record<string, string>
     username: string
     password: string
+    path: string
   }>,
-) => {
+): URL => {
   const u = new URL(url.toString())
   return Object.assign(u, {
     ...hostname ? { hostname } : {},
@@ -17,5 +18,6 @@ export const editUrl = (
     } : {},
     ...username ? { username } : {},
     ...password ? { password } : {},
+    ...path ? { pathname: path } : {},
   })
 }
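
Going by the test added in url.test.ts above, the new path option composes with the existing overrides; usage looks roughly like this:

import { editUrl } from './url'

const base = new URL('http://example.com/mypath?x=12&y=13')

// Overriding only the path keeps host, query string and credentials intact.
const rewritten = editUrl(base, { path: 'otherpath' })
// rewritten.toString() === 'http://example.com/otherpath?x=12&y=13'
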
diff --git a/tunnel-server/yarn.lock b/tunnel-server/yarn.lock
index f6428baf..1fa6a9d3 100644
--- a/tunnel-server/yarn.lock
+++ b/tunnel-server/yarn.lock
@@ -852,6 +852,14 @@
   resolved "https://registry.yarnpkg.com/@types/mime/-/mime-1.3.2.tgz#93e25bf9ee75fe0fd80b594bc4feb0e862111b5a"
   integrity sha512-YATxVxgRqNH6nHEIsvg6k2Boc1JHI9ZbH5iWFFv/MTkchz3b1ieGDa5T0a9RznNdI0KhVbdbWSN+KWWrQZRxTw==
 
+"@types/node-fetch@^2.6.4":
+  version "2.6.4"
+  resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.4.tgz#1bc3a26de814f6bf466b25aeb1473fa1afe6a660"
+  integrity sha512-1ZX9fcN4Rvkvgv4E6PAY5WXUFWFcRWxZa3EW83UjycOB9ljJCedb2CupIP4RZMEwF/M3eTcCihbBRgwtGbg5Rg==
+  dependencies:
+    "@types/node" "*"
+    form-data "^3.0.0"
+
 "@types/node@*", "@types/node@^18.11.18":
   version "18.13.0"
   resolved "https://registry.yarnpkg.com/@types/node/-/node-18.13.0.tgz#0400d1e6ce87e9d3032c19eb6c58205b0d3f7850"
@@ -1134,6 +1142,11 @@ asn1@^0.2.6:
   dependencies:
     safer-buffer "~2.1.0"
 
+asynckit@^0.4.0:
+  version "0.4.0"
+  resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
+  integrity sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==
+
 atomic-sleep@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b"
@@ -1414,6 +1427,13 @@ colorette@^2.0.7:
   resolved "https://registry.yarnpkg.com/colorette/-/colorette-2.0.19.tgz#cdf044f47ad41a0f4b56b3a0d5b4e6e1a2d5a798"
   integrity sha512-3tlv/dIP7FWvj3BsbHrGLJ6l/oKh1O3TcgBqMn+yyCagOxc23fyzDS6HypQbgxWbkpDnf52p1LuR4eWDQ/K9WQ==
 
+combined-stream@^1.0.8:
+  version "1.0.8"
+  resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
+  integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==
+  dependencies:
+    delayed-stream "~1.0.0"
+
 concat-map@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b"
@@ -1503,6 +1523,11 @@ deepmerge@^4.2.2:
   resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.3.1.tgz#44b5f2147cd3b00d4b56137685966f26fd25dd4a"
   integrity sha512-3sUqbMEc77XqpdNO7FRyRog+eW3ph+GYCbj+rK+uYyRMuwsVy0rMiVtPn+QJlKFvWP/1PYpapqYn0Me2knFn+A==
 
+delayed-stream@~1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
+  integrity sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==
+
 depd@~2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/depd/-/depd-2.0.0.tgz#b696163cc757560d09cf22cc8fad1571b79e76df"
@@ -1956,6 +1981,15 @@ follow-redirects@^1.0.0:
   resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13"
   integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==
 
+form-data@^3.0.0:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-3.0.1.tgz#ebd53791b78356a99af9a300d4282c4d5eb9755f"
+  integrity sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==
+  dependencies:
+    asynckit "^0.4.0"
+    combined-stream "^1.0.8"
+    mime-types "^2.1.12"
+
 forwarded@0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.2.0.tgz#2269936428aad4c15c7ebe9779a84bf0b2a81811"
@@ -2827,6 +2861,18 @@ micromatch@^4.0.4:
     braces "^3.0.2"
     picomatch "^2.3.1"
 
+mime-db@1.52.0:
+  version "1.52.0"
+  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
+  integrity sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==
+
+mime-types@^2.1.12:
+  version "2.1.35"
+  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.35.tgz#381a871b62a734450660ae3deee44813f70d959a"
+  integrity sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==
+  dependencies:
+    mime-db "1.52.0"
+
 mimic-fn@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b"
@@ -2876,6 +2922,13 @@ natural-compare@^1.4.0:
   resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
   integrity sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw==
 
+node-fetch@2.6.9:
+  version "2.6.9"
+  resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.9.tgz#7c7f744b5cc6eb5fd404e0c7a9fec630a55657e6"
+  integrity sha512-DJm/CJkZkRjKKj4Zi4BsKVZh3ValV5IR5s7LVZnW+6YMh0W1BfNA8XSs6DLMGYlId5F3KnA70uu2qepcR08Qqg==
+  dependencies:
+    whatwg-url "^5.0.0"
+
 node-int64@^0.4.0:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b"
@@ -2980,6 +3033,11 @@ p-locate@^5.0.0:
   dependencies:
     p-limit "^3.0.2"
 
+p-timeout@^6.1.2:
+  version "6.1.2"
+  resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-6.1.2.tgz#22b8d8a78abf5e103030211c5fc6dee1166a6aa5"
+  integrity sha512-UbD77BuZ9Bc9aABo74gfXhNvzC9Tx7SxtHSh1fxvx3jTLLYvmVhiQZZrJzqqU0jKbN32kb5VOKiLEQI/3bIjgQ==
+
 p-try@^2.0.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6"
@@ -3577,6 +3635,11 @@ touch@^3.1.0:
   dependencies:
     nopt "~1.0.10"
 
+tr46@~0.0.3:
+  version "0.0.3"
+  resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
+  integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==
+
 ts-jest@^29.1.0:
   version "29.1.1"
   resolved "https://registry.yarnpkg.com/ts-jest/-/ts-jest-29.1.1.tgz#f58fe62c63caf7bfcc5cc6472082f79180f0815b"
@@ -3615,6 +3678,11 @@ ts-pattern@^5.0.4:
   resolved "https://registry.yarnpkg.com/ts-pattern/-/ts-pattern-5.0.4.tgz#11508e1fb09c4a65b3fa85fd297941792c0ab7d1"
   integrity sha512-D5iVliqugv2C9541W2CNXFYNEZxr4TiHuLPuf49tKEdQFp/8y8fR0v1RExUvXkiWozKCwE7zv07C6EKxf0lKuQ==
 
+tseep@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/tseep/-/tseep-1.1.1.tgz#76d333d4a354cbfc627e957e49903cc53f46e17e"
+  integrity sha512-w2MjaqNWGDeliT5/W+/lhhnR0URiVwXXsXbkAZjQVywrOpdKhQAOL1ycyHfNOV1QBjouC7FawNmJePet1sGesw==
+
 tslib@^1.8.1:
   version "1.14.1"
   resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00"
@@ -3715,6 +3783,19 @@ walker@^1.0.8:
   dependencies:
     makeerror "1.0.12"
 
+webidl-conversions@^3.0.0:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871"
+  integrity sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==
+
+whatwg-url@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d"
+  integrity sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==
+  dependencies:
+    tr46 "~0.0.3"
+    webidl-conversions "^3.0.0"
+
 which@^2.0.1:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1"